diff --git a/Externals_smoke_dust.cfg b/Externals_smoke_dust.cfg
new file mode 100644
index 0000000000..7245ce8967
--- /dev/null
+++ b/Externals_smoke_dust.cfg
@@ -0,0 +1,29 @@
+[ufs_utils]
+protocol = git
+repo_url = https://github.com/ufs-community/UFS_UTILS
+# Specify either a branch name or a hash but not both.
+#branch = develop
+hash = 33cc663
+local_path = sorc/UFS_UTILS
+required = True
+
+[ufs-weather-model]
+protocol = git
+repo_url = https://github.com/ufs-community/ufs-weather-model
+# Specify either a branch name or a hash but not both.
+#branch = production/RRFS.v1
+hash = ce43a6f
+local_path = sorc/ufs-weather-model
+required = True
+
+[UPP]
+protocol = git
+repo_url = https://github.com/NOAA-EMC/UPP
+# Specify either a branch name or a hash but not both.
+#branch = develop
+hash = fc85241
+local_path = sorc/UPP
+required = True
+
+[externals_description]
+schema_version = 1.0.0
diff --git a/devbuild.sh b/devbuild.sh
index 014fbdb3b7..e52951653f 100755
--- a/devbuild.sh
+++ b/devbuild.sh
@@ -15,7 +15,7 @@ OPTIONS
compiler to use; default depends on platform
(e.g. intel | gnu | cray | gccgfortran)
-a, --app=APPLICATION
- weather model application to build; for example, ATMAQ for Online-CMAQ
+ weather model application to build; for example, ATMAQ for SRW-AQM
(e.g. ATM | ATMAQ | ATMW | S2S | S2SW)
--ccpp="CCPP_SUITE1,CCPP_SUITE2..."
CCPP suites (CCPP_SUITES) to include in build; delimited with ','
@@ -50,6 +50,8 @@ OPTIONS
number of build jobs; defaults to 4
--use-sub-modules
Use sub-component modules instead of top-level level SRW modules
+ --smoke
+ Build the app for Smoke and Dust (with production branch)
-v, --verbose
build with verbose output
@@ -82,6 +84,7 @@ Settings:
DISABLE_OPTIONS=${DISABLE_OPTIONS}
REMOVE=${REMOVE}
CONTINUE=${CONTINUE}
+ SMOKE=${SMOKE}
BUILD_TYPE=${BUILD_TYPE}
BUILD_JOBS=${BUILD_JOBS}
VERBOSE=${VERBOSE}
@@ -114,6 +117,7 @@ BUILD_TYPE="RELEASE"
BUILD_JOBS=4
REMOVE=false
CONTINUE=false
+SMOKE=false
VERBOSE=false
# Turn off all apps to build and choose default later
@@ -160,6 +164,7 @@ while :; do
--clean) CLEAN=true ;;
--build) BUILD=true ;;
--move) MOVE=true ;;
+ --smoke) SMOKE=true ;;
--build-dir=?*) BUILD_DIR=${1#*=} ;;
--build-dir|--build-dir=) usage_error "$1 requires argument." ;;
--install-dir=?*) INSTALL_DIR=${1#*=} ;;
@@ -314,7 +319,11 @@ if [ -f ${RUN_VERSION_FILE} ]; then
fi
# set MODULE_FILE for this platform/compiler combination
-MODULE_FILE="build_${PLATFORM}_${COMPILER}"
+if [ "${SMOKE}" = true ]; then
+ MODULE_FILE="build_${PLATFORM}_${COMPILER}_prod"
+else
+ MODULE_FILE="build_${PLATFORM}_${COMPILER}"
+fi
if [ ! -f "${SRW_DIR}/modulefiles/${MODULE_FILE}.lua" ]; then
printf "ERROR: module file does not exist for platform/compiler\n" >&2
printf " MODULE_FILE=${MODULE_FILE}\n" >&2
diff --git a/environment.yml b/environment.yml
index a735213198..e05a1e7cee 100644
--- a/environment.yml
+++ b/environment.yml
@@ -6,3 +6,10 @@ dependencies:
- pylint=2.17*
- pytest=7.2*
- uwtools=2.3*
+ - esmpy=8.6.*
+ - netcdf4=1.6.*
+ - numpy=1.23.*
+ - pandas=1.5.*
+ - scipy=1.10.*
+ - xarray=2022.11.*
+
diff --git a/jobs/JREGIONAL_MAKE_ICS b/jobs/JREGIONAL_MAKE_ICS
index 10a3b36fb7..fa8f6a5d44 100755
--- a/jobs/JREGIONAL_MAKE_ICS
+++ b/jobs/JREGIONAL_MAKE_ICS
@@ -34,7 +34,7 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-for sect in user nco workflow ; do
+for sect in user nco workflow cpl_aqm_parm smoke_dust_parm ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
done
. $USHdir/job_preamble.sh
@@ -103,6 +103,9 @@ if [ $RUN_ENVIR = "community" ]; then
mkdir -p $DATA
cd $DATA
fi
+if [ $(boolify "${CPL_AQM}") = "TRUE" ] || [ $(boolify "${DO_SMOKE_DUST}") = "TRUE" ]; then
+ mkdir -p ${DATA_SHARE}
+fi
#
#-----------------------------------------------------------------------
#
diff --git a/jobs/JREGIONAL_MAKE_LBCS b/jobs/JREGIONAL_MAKE_LBCS
index 91d9d3edbe..b05051ae33 100755
--- a/jobs/JREGIONAL_MAKE_LBCS
+++ b/jobs/JREGIONAL_MAKE_LBCS
@@ -32,7 +32,7 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-for sect in user nco workflow ; do
+for sect in user nco workflow cpl_aqm_parm smoke_dust_parm ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
done
. $USHdir/job_preamble.sh
@@ -100,6 +100,9 @@ if [ "${RUN_ENVIR}" = "community" ]; then
mkdir -p $DATA
cd $DATA
fi
+if [ $(boolify "${CPL_AQM}") = "TRUE" ] || [ $(boolify "${DO_SMOKE_DUST}") = "TRUE" ]; then
+ mkdir -p ${DATA_SHARE}
+fi
#
#-----------------------------------------------------------------------
#
diff --git a/jobs/JSRW_AQM_ICS b/jobs/JSRW_AQM_ICS
index 5d5f6d970e..d9ffd6c5c1 100755
--- a/jobs/JSRW_AQM_ICS
+++ b/jobs/JSRW_AQM_ICS
@@ -105,10 +105,10 @@ export RUN="${RUN:-${RUN_default}}"
[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
if [ "${MACHINE}" = "WCOSS2" ]; then
- export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver})}"
export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
else
- export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}}"
export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
fi
@@ -140,20 +140,6 @@ fi
#
#-----------------------------------------------------------------------
#
-# Set the name of and create the directory in which the output from this
-# script will be placed (if it doesn't already exist).
-#
-#-----------------------------------------------------------------------
-#
-if [ $RUN_ENVIR = "nco" ]; then
- export INPUT_DATA="${COMIN}"
-else
- export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
-fi
-mkdir -p "${INPUT_DATA}"
-#
-#-----------------------------------------------------------------------
-#
# Call the ex-script for this J-job.
#
#-----------------------------------------------------------------------
diff --git a/jobs/JSRW_AQM_LBCS b/jobs/JSRW_AQM_LBCS
index 9279dbe190..3e7332d865 100755
--- a/jobs/JSRW_AQM_LBCS
+++ b/jobs/JSRW_AQM_LBCS
@@ -142,20 +142,6 @@ fi
#
#-----------------------------------------------------------------------
#
-# Set the name of and create the directory in which the output from this
-# script will be placed (if it doesn't already exist).
-#
-#-----------------------------------------------------------------------
-#
-if [ $RUN_ENVIR = "nco" ]; then
- export INPUT_DATA="${COMIN}"
-else
- export INPUT_DATA="${EXPTDIR}/${PDY}${cyc}${SLASH_ENSMEM_SUBDIR}/INPUT"
-fi
-mkdir -p "${INPUT_DATA}"
-#
-#-----------------------------------------------------------------------
-#
# Call the ex-script for this J-job.
#
#-----------------------------------------------------------------------
diff --git a/jobs/JSRW_FORECAST b/jobs/JSRW_FORECAST
new file mode 100755
index 0000000000..ef213fbca8
--- /dev/null
+++ b/jobs/JSRW_FORECAST
@@ -0,0 +1,184 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# The J-Job that runs the forecast
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, pp.4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+#
+#-----------------------------------------------------------------------
+#
+# Run-time environment variables:
+#
+# CDATE
+# COMIN
+# DATA
+# GLOBAL_VAR_DEFNS_FP
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+#-----------------------------------------------------------------------
+#
+for sect in user nco platform workflow global ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that runs a forecast with FV3 for
+the specified cycle.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ if [ "${SCHED}" = "slurm" ]; then
+ job=${SLURM_JOB_NAME}
+ pid=${SLURM_JOB_ID}
+ elif [ "${SCHED}" = "pbspro" ]; then
+ job=${PBS_JOBNAME}
+ pid=${PBS_JOBID}
+ else
+ job="task"
+ pid=$$
+ fi
+ jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory for RAVE interpolated data files
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
+fi
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+ export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+ export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_forecast.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+ cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+ rm -rf ${DATA}
+fi
+date
+
diff --git a/jobs/JSRW_PREPSTART b/jobs/JSRW_PREPSTART
new file mode 100755
index 0000000000..50476d6f45
--- /dev/null
+++ b/jobs/JSRW_PREPSTART
@@ -0,0 +1,165 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script runs prepstart to update IC files for Smoke/Dust
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, pp.4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+for sect in user nco platform workflow global smoke_dust_parm ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that updates initial condition
+files for Smoke and Dust (prepstart).
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ if [ "${SCHED}" = "slurm" ]; then
+ job=${SLURM_JOB_NAME}
+ pid=${SLURM_JOB_ID}
+ elif [ "${SCHED}" = "pbspro" ]; then
+ job=${PBS_JOBNAME}
+ pid=${PBS_JOBID}
+ else
+ job="task"
+ pid=$$
+ fi
+ jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver})}"
+ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
+fi
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+ export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+ export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_prepstart.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+ cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+ rm -rf ${DATA}
+fi
+date
+
diff --git a/jobs/JSRW_SMOKE_DUST b/jobs/JSRW_SMOKE_DUST
new file mode 100755
index 0000000000..4341fda03c
--- /dev/null
+++ b/jobs/JSRW_SMOKE_DUST
@@ -0,0 +1,168 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script processes smoke and dust
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, pp.4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+for sect in user nco platform workflow global smoke_dust_parm ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that copies or fetches RAVE fire
+emission data files from disk, or HPSS.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ if [ "${SCHED}" = "slurm" ]; then
+ job=${SLURM_JOB_NAME}
+ pid=${SLURM_JOB_ID}
+ elif [ "${SCHED}" = "pbspro" ]; then
+ job=${PBS_JOBNAME}
+ pid=${PBS_JOBID}
+ else
+ job="task"
+ pid=$$
+ fi
+ jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+export COMINsmoke="${COMINsmoke:-${COMINsmoke_default}}"
+export COMINfire="${COMINfire:-${COMINfire_default}}"
+
+# Create a temporary share directory for RAVE interpolated data files
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/RAVE_fire_intp}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
+fi
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+ export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+ export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_smoke_dust.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+ cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+ rm -rf ${DATA}
+fi
+date
+
diff --git a/jobs/JSRW_UPP_POST b/jobs/JSRW_UPP_POST
new file mode 100755
index 0000000000..0522695c2d
--- /dev/null
+++ b/jobs/JSRW_UPP_POST
@@ -0,0 +1,205 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script processes Post
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, pp.4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+for sect in user nco platform workflow global task_run_post smoke_dust_parm ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for Post-processing.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ if [ "${SCHED}" = "slurm" ]; then
+ job=${SLURM_JOB_NAME}
+ pid=${SLURM_JOB_ID}
+ elif [ "${SCHED}" = "pbspro" ]; then
+ job=${PBS_JOBNAME}
+ pid=${PBS_JOBID}
+ else
+ job="task"
+ pid=$$
+ fi
+ jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+export COMINsmoke="${COMINsmoke:-${COMINsmoke_default}}"
+export COMINfire="${COMINfire:-${COMINfire_default}}"
+
+# Create a temporary share directory for RAVE interpolated data files
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
+fi
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+ export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+ export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Make sure that fhr is a non-empty string consisting of only digits.
+#
+#-----------------------------------------------------------------------
+#
+export fhr=$( printf "%s" "${fhr}" | $SED -n -r -e "s/^([0-9]+)$/\1/p" )
+if [ -z "$fhr" ]; then
+ print_err_msg_exit "\
+The forecast hour (fhr) must be a non-empty string consisting of only
+digits:
+ fhr = \"${fhr}\""
+fi
+if [ $(boolify "${SUB_HOURLY_POST}") != "TRUE" ]; then
+ export fmn="00"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_upp_post.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+ cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a flag file to make rocoto aware that the run_post task has
+# successfully completed. This flag is necessary for varying forecast
+# hours (FCST_LEN_HRS: -1)
+#
+#-----------------------------------------------------------------------
+#
+if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then
+ cyc_mod=$(( ${cyc} - ${DATE_FIRST_CYCL:8:2} ))
+ CYCLE_IDX=$(( ${cyc_mod} / ${INCR_CYCL_FREQ} ))
+ FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]}
+
+ if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ fcst_len_hrs=$( printf "%03d" "${FCST_LEN_HRS}" )
+ if [ "${fhr}" = "${fcst_len_hrs}" ]; then
+ touch "${DATA_SHARE}/post_${PDY}${cyc}_task_complete.txt"
+ fi
+ fi
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+ rm -rf ${DATA}
+fi
+date
+
diff --git a/modulefiles/build_hera_intel_prod.lua b/modulefiles/build_hera_intel_prod.lua
new file mode 100644
index 0000000000..7280a735fc
--- /dev/null
+++ b/modulefiles/build_hera_intel_prod.lua
@@ -0,0 +1,57 @@
+help([[
+This module loads libraries for building the production branch on
+the NOAA RDHPC machine Hera using Intel-2022.1.2
+]])
+
+whatis([===[Loads libraries needed for building SRW-smoke/dust on Hera ]===])
+
+prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.1/envs/gsi-addon-env-rocky8/install/modulefiles/Core")
+load(pathJoin("stack-intel", os.getenv("stack_intel_ver") or "2021.5.0"))
+load(pathJoin("stack-intel-oneapi-mpi", os.getenv("stack_impi_ver") or "2021.5.1"))
+load(pathJoin("cmake", os.getenv("cmake_ver") or "3.23.1"))
+
+load(pathJoin("jasper", os.getenv("jasper_ver") or "2.0.32"))
+load(pathJoin("zlib", os.getenv("zlib_ver") or "1.2.13"))
+load(pathJoin("libpng", os.getenv("libpng_ver") or "1.6.37"))
+load(pathJoin("parallelio", os.getenv("pio_ver") or "2.5.10"))
+--loading parallelio will load netcdf_c, netcdf_fortran, hdf5, zlib, etc
+load(pathJoin("esmf", os.getenv("esmf_ver") or "8.5.0"))
+load(pathJoin("fms", os.getenv("fms_ver") or "2023.02.01"))
+
+load(pathJoin("bacio", os.getenv("bacio_ver") or "2.4.1"))
+load(pathJoin("crtm", os.getenv("crtm_ver") or "2.4.0"))
+load(pathJoin("g2", os.getenv("g2_ver") or "3.4.5"))
+load(pathJoin("g2tmpl", os.getenv("g2tmpl_ver") or "1.10.2"))
+load(pathJoin("ip", os.getenv("ip_ver") or "4.3.0"))
+load(pathJoin("sp", os.getenv("sp_ver") or "2.3.3"))
+
+load(pathJoin("gftl-shared", os.getenv("gftl-shared_ver") or "1.6.1"))
+load(pathJoin("mapl", os.getenv("mapl_ver") or "2.40.3-esmf-8.5.0"))
+load(pathJoin("scotch", os.getenv("scotch_ver") or "7.0.4"))
+
+load(pathJoin("bufr", os.getenv("bufr_ver") or "11.7.0"))
+load(pathJoin("sigio", os.getenv("sigio_ver") or "2.3.2"))
+load(pathJoin("sfcio", os.getenv("sfcio_ver") or "1.4.1"))
+load(pathJoin("nemsio", os.getenv("nemsio_ver") or "2.5.4"))
+load(pathJoin("wrf-io", os.getenv("wrf_io_ver") or "1.2.0"))
+load(pathJoin("ncio", os.getenv("ncio_ver") or "1.1.2"))
+load(pathJoin("gsi-ncdiag", os.getenv("gsi-ncdiag_ver") or "1.1.2"))
+load(pathJoin("w3emc", os.getenv("w3emc_ver") or "2.10.0"))
+load(pathJoin("w3nco", os.getenv("w3nco_ver") or "2.4.1"))
+
+load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6"))
+
+load(pathJoin("wgrib2", os.getenv("wgrib2_ver") or "2.0.8"))
+
+prepend_path("MODULEPATH", "/scratch2/BMC/rtrr/gge/lua")
+load("prod_util/2.0.15")
+
+unload("fms/2023.02.01")
+unload("g2tmpl/1.10.2")
+setenv("g2tmpl_ROOT","/scratch1/BMC/wrfruc/mhu/rrfs/lib/g2tmpl/install")
+setenv("FMS_ROOT","/scratch1/BMC/wrfruc/mhu/rrfs/lib/fms.2024.01/build")
+
+setenv("CMAKE_C_COMPILER","mpiicc")
+setenv("CMAKE_CXX_COMPILER","mpiicpc")
+setenv("CMAKE_Fortran_COMPILER","mpiifort")
+setenv("CMAKE_Platform","hera.intel")
diff --git a/modulefiles/build_orion_intel_prod.lua b/modulefiles/build_orion_intel_prod.lua
new file mode 100644
index 0000000000..a4c656a2be
--- /dev/null
+++ b/modulefiles/build_orion_intel_prod.lua
@@ -0,0 +1,57 @@
+help([[
+This module loads libraries for building the production branch on
+the MSU machine Orion using the Intel compiler
+]])
+
+whatis([===[Loads libraries needed for building SRW-smoke/dust on Orion ]===])
+
+load("contrib")
+load("noaatools")
+
+prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.5.1/envs/gsi-addon-rocky9/install/modulefiles/Core")
+load(pathJoin("stack-intel", os.getenv("stack_intel_ver") or "2021.9.0"))
+load(pathJoin("stack-intel-oneapi-mpi", os.getenv("stack_impi_ver") or "2021.9.0"))
+load(pathJoin("cmake", os.getenv("cmake_ver") or "3.23.1"))
+
+load(pathJoin("jasper", os.getenv("jasper_ver") or "2.0.32"))
+load(pathJoin("zlib", os.getenv("zlib_ver") or "1.2.13"))
+load(pathJoin("libpng", os.getenv("libpng_ver") or "1.6.37"))
+load(pathJoin("parallelio", os.getenv("pio_ver") or "2.5.10"))
+--loading parallelio will load netcdf_c, netcdf_fortran, hdf5, zlib, etc
+load(pathJoin("esmf", os.getenv("esmf_ver") or "8.5.0"))
+--load(pathJoin("fms", os.getenv("fms_ver") or "2023.02.01"))
+
+load(pathJoin("bacio", os.getenv("bacio_ver") or "2.4.1"))
+load(pathJoin("crtm", os.getenv("crtm_ver") or "2.4.0"))
+load(pathJoin("g2", os.getenv("g2_ver") or "3.4.5"))
+--load(pathJoin("g2tmpl", os.getenv("g2tmpl_ver") or "1.10.2"))
+load(pathJoin("ip", os.getenv("ip_ver") or "4.3.0"))
+load(pathJoin("sp", os.getenv("sp_ver") or "2.3.3"))
+
+load(pathJoin("gftl-shared", os.getenv("gftl-shared_ver") or "1.6.1"))
+load(pathJoin("mapl", os.getenv("mapl_ver") or "2.40.3-esmf-8.5.0"))
+load(pathJoin("scotch", os.getenv("scotch_ver") or "7.0.4"))
+
+load(pathJoin("bufr", os.getenv("bufr_ver") or "11.7.0"))
+load(pathJoin("sigio", os.getenv("sigio_ver") or "2.3.2"))
+load(pathJoin("sfcio", os.getenv("sfcio_ver") or "1.4.1"))
+load(pathJoin("nemsio", os.getenv("nemsio_ver") or "2.5.4"))
+load(pathJoin("wrf-io", os.getenv("wrf_io_ver") or "1.2.0"))
+load(pathJoin("ncio", os.getenv("ncio_ver") or "1.1.2"))
+load(pathJoin("gsi-ncdiag", os.getenv("gsi-ncdiag_ver") or "1.1.2"))
+load(pathJoin("w3emc", os.getenv("w3emc_ver") or "2.10.0"))
+load(pathJoin("w3nco", os.getenv("w3nco_ver") or "2.4.1"))
+
+load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6"))
+load(pathJoin("wgrib2", os.getenv("wgrib2_ver") or "3.1.1"))
+
+setenv("FMS_ROOT","/work/noaa/epic/chjeon/FMS_2.4.1_orion/FMS/build")
+setenv("g2tmpl_ROOT","/work/noaa/epic/chjeon/LIB_g2tmpl_1.13.0_orion/NCEPLIBS-g2tmpl")
+
+prepend_path("MODULEPATH", "/work/noaa/rtrr/gge/lua")
+load("prod_util/2.0.15")
+
+setenv("CMAKE_C_COMPILER","mpiicc")
+setenv("CMAKE_CXX_COMPILER","mpiicpc")
+setenv("CMAKE_Fortran_COMPILER","mpiifort")
+setenv("CMAKE_Platform","orion.intel")
diff --git a/modulefiles/tasks/hera/prepstart.local.lua b/modulefiles/tasks/hera/prepstart.local.lua
new file mode 100644
index 0000000000..63063d4fab
--- /dev/null
+++ b/modulefiles/tasks/hera/prepstart.local.lua
@@ -0,0 +1,2 @@
+load("python_srw")
+load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6"))
diff --git a/modulefiles/tasks/hera/smoke_dust.local.lua b/modulefiles/tasks/hera/smoke_dust.local.lua
new file mode 100644
index 0000000000..63063d4fab
--- /dev/null
+++ b/modulefiles/tasks/hera/smoke_dust.local.lua
@@ -0,0 +1,2 @@
+load("python_srw")
+load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6"))
diff --git a/parm/FV3.input.yml b/parm/FV3.input.yml
index efb6c85f5b..b157b19009 100644
--- a/parm/FV3.input.yml
+++ b/parm/FV3.input.yml
@@ -100,6 +100,164 @@ FV3_HRRR:
nst_anl: null
nstf_name: null
+FV3_HRRR_gf:
+ atmos_model_nml:
+ avg_max_length: 3600.
+ ignore_rst_cksum: true
+ external_ic_nml:
+ levp: 66
+ fv_core_nml:
+ agrid_vel_rst: true
+ d_con: 0.5
+ d2_bg_k2: 0.04
+ dz_min: 2.0
+ fv_sg_adj: 7200
+ hord_dp: 6
+ hord_mt: 6
+ hord_tm: 6
+ hord_tr: 8
+ hord_vt: 6
+ k_split: 2
+ kord_mt: 9
+ kord_tm: -9
+ kord_tr: 9
+ kord_wz: 9
+ n_split: 5
+ n_sponge: 65
+ nord_tr: 0
+ npz: 65
+ psm_bc: 1
+ range_warn: False
+ regional_bcs_from_gsi: false
+ rf_cutoff: 2000.0
+ sg_cutoff: -1
+ vtdm4: 0.02
+ write_restart_with_bcs: false
+ gfs_physics_nml:
+ addsmoke_flag: 1
+ aero_dir_fdb: true
+ aero_ind_fdb: false
+ bl_mynn_edmf: 1
+ bl_mynn_edmf_mom: 1
+ bl_mynn_tkeadvect: true
+ cal_pre: false
+ cdmbgwd: [3.5, 1.0]
+ clm_debug_print: false
+ clm_lake_debug: false
+ cnvcld: false
+ cnvgwd: false
+ coarsepm_settling: 1
+ debug: false
+ diag_log: true
+ do_deep: true
+ do_gsl_drag_ls_bl: true
+ do_gsl_drag_ss: true
+ do_gsl_drag_tofd: true
+ do_mynnedmf: true
+ do_mynnsfclay: true
+ do_plumerise: true
+ do_sfcperts: null
+ do_smoke_transport: true
+ do_tofd: false
+ do_ugwp: false
+ do_ugwp_v0: false
+ do_ugwp_v0_nst_only: false
+ do_ugwp_v0_orog_only: false
+ drydep_opt: 1
+ dspheat: true
+ dt_inner: 36
+ dust_alpha: 10.0
+ dust_drylimit_factor: 0.5
+ dust_gamma: 1.3
+ dust_moist_correction: 2.0
+ dust_opt: 1
+ effr_in: true
+ enh_mix : false
+ fhcyc: 0.0
+ fhlwr: 900.0
+ fhswr: 900.0
+ fhzero: 1.0
+ frac_ice: true
+ gwd_opt: 3
+ h2o_phys: true
+ hwp_method : 1
+ hybedmf: false
+ iaer: 1011
+ ialb: 2
+ iau_delthrs: 6
+ iaufhrs: 30
+ iccn: 2
+ icliq_sw: 2
+ iems: 2
+ imfdeepcnv: 3
+ imfshalcnv: -1
+ imp_physics: 8
+ iopt_alb: 2
+ iopt_btr: 1
+ iopt_crs: 1
+ iopt_dveg: 2
+ iopt_frz: 1
+ iopt_inf: 1
+ iopt_rad: 1
+ iopt_run: 1
+ iopt_sfc: 1
+ iopt_snf: 4
+ iopt_stc: 1
+ iopt_tbot: 2
+ iovr: 3
+ isncond_opt: 2
+ isncovr_opt: 3
+ isol: 2
+ isot: 1
+ isubc_lw: 2
+ isubc_sw: 2
+ ivegsrc: 1
+ ldiag3d: false
+ ldiag_ugwp: false
+ lgfdlmprad: false
+ lheatstrg: false
+ lightning_threat: true
+ lkm: 1
+ lradar: true
+ lrefres: true
+ lsm: 3
+ lsoil_lsm: 9
+ ltaerosol: true
+ lwhtr: true
+ min_lakeice: 0.15
+ min_seaice: 0.15
+ mix_chem : true
+ mosaic_lu: 0
+ mosaic_soil: 0
+ nsfullradar_diag: 3600
+ oz_phys: false
+ oz_phys_2015: true
+ pdfcld: false
+ plume_wind_eff : 1
+ plumerisefire_frq : 30
+ pre_rad: false
+ print_diff_pgr: true
+ prslrd0: 0.0
+ random_clds: false
+ redrag: true
+ rrfs_sd : false
+ rrfs_smoke_debug : false
+ satmedmf: false
+ sc_factor : 1.0
+ seas_opt : 0
+ sfclay_compute_flux: true
+ shal_cnv: false
+ smoke_conv_wet_coef : [0.5, 0.5, 0.5]
+ swhtr: true
+ thsfc_loc: false
+ trans_trac: true
+ ttendlim: -999
+ use_ufo: true
+ wetdep_ls_alpha : 0.5
+ wetdep_ls_opt : 1
+ fv_diagnostics_nml:
+ do_hailcast: true
+
FV3_RAP:
fv_core_nml:
<<: *HRRR_fv_core
diff --git a/parm/diag_table_smoke_dust.FV3_HRRR_gf b/parm/diag_table_smoke_dust.FV3_HRRR_gf
new file mode 100644
index 0000000000..547ac3aab0
--- /dev/null
+++ b/parm/diag_table_smoke_dust.FV3_HRRR_gf
@@ -0,0 +1,430 @@
+{{ starttime.strftime("%Y%m%d.%H") }}Z.{{ cres }}.32bit.non-hydro.regional
+{{ starttime.strftime("%Y %m %d %H %M %S") }}
+
+"grid_spec", -1, "months", 1, "days", "time"
+"atmos_static", -1, "hours", 1, "hours", "time"
+#"atmos_4xdaily", 1, "hours", 1, "days", "time"
+"fv3_history", 1, "years", 1, "hours", "time"
+"fv3_history2d", 1, "years", 1, "hours", "time"
+
+#
+#=======================
+# ATMOSPHERE DIAGNOSTICS
+#=======================
+###
+# grid_spec
+###
+ "dynamics", "grid_lon", "grid_lon", "grid_spec", "all", .false., "none", 2,
+ "dynamics", "grid_lat", "grid_lat", "grid_spec", "all", .false., "none", 2,
+ "dynamics", "grid_lont", "grid_lont", "grid_spec", "all", .false., "none", 2,
+ "dynamics", "grid_latt", "grid_latt", "grid_spec", "all", .false., "none", 2,
+ "dynamics", "area", "area", "grid_spec", "all", .false., "none", 2,
+###
+# 4x daily output
+###
+# "dynamics", "slp", "slp", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "vort850", "vort850", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "vort200", "vort200", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "us", "us", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "u1000", "u1000", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "u850", "u850", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "u700", "u700", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "u500", "u500", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "u200", "u200", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "u100", "u100", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "u50", "u50", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "u10", "u10", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "vs", "vs", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "v1000", "v1000", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "v850", "v850", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "v700", "v700", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "v500", "v500", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "v200", "v200", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "v100", "v100", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "v50", "v50", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "v10", "v10", "atmos_4xdaily", "all", .false., "none", 2
+####
+# "dynamics", "tm", "tm", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "t1000", "t1000", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "t850", "t850", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "t700", "t700", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "t500", "t500", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "t200", "t200", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "t100", "t100", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "t50", "t50", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "t10", "t10", "atmos_4xdaily", "all", .false., "none", 2
+####
+# "dynamics", "z1000", "z1000", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "z850", "z850", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "z700", "z700", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "z500", "z500", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "z200", "z200", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "z100", "z100", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "z50", "z50", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "z10", "z10", "atmos_4xdaily", "all", .false., "none", 2
+####
+# "dynamics", "w1000", "w1000", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "w850", "w850", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "w700", "w700", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "w500", "w500", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "w200", "w200", "atmos_4xdaily", "all", .false., "none", 2
+####
+# "dynamics", "q1000", "q1000", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "q850", "q850", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "q700", "q700", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "q500", "q500", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "q200", "q200", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "q100", "q100", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "q50", "q50", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "q10", "q10", "atmos_4xdaily", "all", .false., "none", 2
+####
+# "dynamics", "rh1000", "rh1000", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "rh850", "rh850", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "rh700", "rh700", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "rh500", "rh500", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "rh200", "rh200", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "omg1000", "omg1000", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "omg850", "omg850", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "omg700", "omg700", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "omg500", "omg500", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "omg200", "omg200", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "omg100", "omg100", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "omg50", "omg50", "atmos_4xdaily", "all", .false., "none", 2
+# "dynamics", "omg10", "omg10", "atmos_4xdaily", "all", .false., "none", 2
+###
+# gfs static data
+###
+ "dynamics", "pk", "pk", "atmos_static", "all", .false., "none", 2
+ "dynamics", "bk", "bk", "atmos_static", "all", .false., "none", 2
+ "dynamics", "hyam", "hyam", "atmos_static", "all", .false., "none", 2
+ "dynamics", "hybm", "hybm", "atmos_static", "all", .false., "none", 2
+ "dynamics", "zsurf", "zsurf", "atmos_static", "all", .false., "none", 2
+###
+# FV3 variables needed for NGGPS evaluation
+###
+"gfs_dyn", "ucomp", "ugrd", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "vcomp", "vgrd", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "sphum", "spfh", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "temp", "tmp", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "liq_wat", "clwmr", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "o3mr", "o3mr", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "delp", "dpres", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "delz", "delz", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "ice_wat", "icmr", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "rainwat", "rwmr", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "snowwat", "snmr", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "graupel", "grle", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2
+"gfs_phys", "refl_10cm" "refl_10cm" "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "ice_nc", "nicp", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "rain_nc", "rain_nc", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "water_nc", "water_nc", "fv3_history", "all", .false., "none", 2
+
+"gfs_dyn", "wmaxup", "upvvelmax", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "wmaxdn", "dnvvelmax", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "uhmax03", "uhmax03", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "uhmax25", "uhmax25", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "uhmin03", "uhmin03", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "uhmin25", "uhmin25", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "maxvort01", "maxvort01", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "maxvort02", "maxvort02", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "maxvorthy1", "maxvorthy1", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "hailcast_dhail_max", "hailcast_dhail", "fv3_history2d", "all", .false., "none", 2
+
+"gfs_phys", "ALBDO_ave", "albdo_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "cnvprcp_ave", "cprat_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "cnvprcpb_ave", "cpratb_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "totprcp_ave", "prate_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "totprcpb_ave", "prateb_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "DLWRF", "dlwrf_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "DLWRFI", "dlwrf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "ULWRF", "ulwrf_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "ULWRFI", "ulwrf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "DSWRF", "dswrf_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "DSWRFI", "dswrf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "DSWRFCI", "dswrf_clr", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "USWRF", "uswrf_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "USWRFI", "uswrf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "DSWRFtoa", "dswrf_avetoa","fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "USWRFtoa", "uswrf_avetoa","fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "ULWRFtoa", "ulwrf_avetoa","fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "gflux_ave", "gflux_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "hpbl", "hpbl", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "lhtfl_ave", "lhtfl_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "shtfl_ave", "shtfl_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "pwat", "pwatclm", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "soilm", "soilm", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TCDC_aveclm", "tcdc_aveclm", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TCDC_avebndcl", "tcdc_avebndcl", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TCDC_avehcl", "tcdc_avehcl", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TCDC_avelcl", "tcdc_avelcl", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TCDC_avemcl", "tcdc_avemcl", "fv3_history2d", "all", .false., "none", 2
+#"gfs_phys", "TCDCcnvcl", "tcdccnvcl", "fv3_history2d", "all", .false., "none", 2
+#"gfs_phys", "PREScnvclt", "prescnvclt", "fv3_history2d", "all", .false., "none", 2
+#"gfs_phys", "PREScnvclb", "prescnvclb", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "PRES_avehct", "pres_avehct", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "PRES_avehcb", "pres_avehcb", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TEMP_avehct", "tmp_avehct", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "PRES_avemct", "pres_avemct", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "PRES_avemcb", "pres_avemcb", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TEMP_avemct", "tmp_avemct", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "PRES_avelct", "pres_avelct", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "PRES_avelcb", "pres_avelcb", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TEMP_avelct", "tmp_avelct", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "u-gwd_ave", "u-gwd_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "v-gwd_ave", "v-gwd_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "dusfc", "uflx_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "dvsfc", "vflx_ave", "fv3_history2d", "all", .false., "none", 2
+#"gfs_phys", "cnvw", "cnvcldwat", "fv3_history2d", "all", .false., "none", 2
+
+"gfs_phys", "u10max", "u10max", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "v10max", "v10max", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "spd10max", "spd10max", "fv3_history2d", "all", .false., "none", 2
+"gfs_dyn", "ustm", "ustm", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "vstm", "vstm", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "srh01", "srh01", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "srh03", "srh03", "fv3_history", "all", .false., "none", 2
+"gfs_phys", "pratemax", "pratemax", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "refdmax", "refdmax", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "refdmax263k","refdmax263k","fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "t02max", "t02max", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "t02min", "t02min", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "rh02max", "rh02max", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "rh02min", "rh02min", "fv3_history2d", "all", .false., "none", 2
+
+"gfs_phys", "psurf", "pressfc", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "u10m", "ugrd10m", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "v10m", "vgrd10m", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "crain", "crain", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "tprcp", "tprcp", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "hgtsfc", "orog", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "weasd", "weasd", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "f10m", "f10m", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "q2m", "spfh2m", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "t2m", "tmp2m", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "dpt2m", "dpt2m", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "tsfc", "tmpsfc", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "vtype", "vtype", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "stype", "sotyp", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slmsksfc", "land", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "vfracsfc", "veg", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "wetness", "wetness", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "zorlsfc", "sfcr", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "uustar", "fricv", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilt1", "soilt1" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilt2", "soilt2" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilt3", "soilt3" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilt4", "soilt4" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilt5", "soilt5" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilt6", "soilt6" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilt7", "soilt7" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilt8", "soilt8" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilt9", "soilt9" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilw1", "soilw1" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilw2", "soilw2" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilw3", "soilw3" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilw4", "soilw4" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilw5", "soilw5" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilw6", "soilw6" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilw7", "soilw7" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilw8", "soilw8" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilw9", "soilw9" "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slc_1", "soill1", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slc_2", "soill2", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slc_3", "soill3", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slc_4", "soill4", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slc_5", "soill5", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slc_6", "soill6", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slc_7", "soill7", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slc_8", "soill8", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slc_9", "soill9", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slope", "sltyp", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "alnsf", "alnsf", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "alnwf", "alnwf", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "alvsf", "alvsf", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "alvwf", "alvwf", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "canopy", "cnwat", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "facsf", "facsf", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "facwf", "facwf", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "ffhh", "ffhh", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "ffmm", "ffmm", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "fice", "icec", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "hice", "icetk", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "snoalb", "snoalb", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "shdmax", "shdmax", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "shdmin", "shdmin", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "snowd", "snod", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "tg3", "tg3", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "tisfc", "tisfc", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "tref", "tref", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "z_c", "zc", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "c_0", "c0", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "c_d", "cd", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "w_0", "w0", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "w_d", "wd", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "xt", "xt", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "xz", "xz", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "dt_cool", "dtcool", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "xs", "xs", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "xu", "xu", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "xv", "xv", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "xtts", "xtts", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "xzts", "xzts", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "d_conv", "dconv", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "qrain", "qrain", "fv3_history2d", "all", .false., "none", 2
+
+"gfs_phys", "acond", "acond", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "cduvb_ave", "cduvb_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "cpofp", "cpofp", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "duvb_ave", "duvb_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "csdlf_ave", "csdlf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "csusf_ave", "csusf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "csusf_avetoa", "csusftoa", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "csdsf_ave", "csdsf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "csulf_ave", "csulf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "csulf_avetoa", "csulftoa", "fv3_history2d", "all", .false., "none", 2
+#"gfs_phys", "cwork_ave", "cwork_aveclm", "fv3_history2d", "all", .false., "none", 2
+#"gfs_phys", "fldcp", "fldcp", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "hgt_hyblev1", "hgt_hyblev1", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "spfh_hyblev1", "spfh_hyblev1", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "ugrd_hyblev1", "ugrd_hyblev1", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "vgrd_hyblev1", "vgrd_hyblev1", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "tmp_hyblev1", "tmp_hyblev1", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "gfluxi", "gflux", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "lhtfl", "lhtfl", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "shtfl", "shtfl", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "pevpr", "pevpr", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "pevpr_ave", "pevpr_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "sbsno_ave", "sbsno_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "sfexc", "sfexc", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "snohf", "snohf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "snowc", "snowc", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "spfhmax2m", "spfhmax_max2m", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "spfhmin2m", "spfhmin_min2m", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "tmpmax2m", "tmax_max2m", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "tmpmin2m", "tmin_min2m", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "ssrun_acc", "ssrun_acc", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "sunsd_acc", "sunsd_acc", "fv3_history2d", "all", .false., "none", 2
+# "gfs_phys", "watr_acc", "watr_acc", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "wilt", "wilt", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "nirbmdi", "nirbmdi", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "nirdfdi", "nirdfdi", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "visbmdi", "visbmdi", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "visdfdi", "visdfdi", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "vbdsf_ave", "vbdsf_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "vddsf_ave", "vddsf_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "nbdsf_ave", "nbdsf_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "nddsf_ave", "nddsf_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "trans_ave", "transp_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "evbs_ave", "direvap_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "evcw_ave", "canevap_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "sbsno", "sublim", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "evbs", "direvap", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "evcw", "canevap", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "trans", "transp", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "snowmt_land", "snom_land", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "snowmt_ice", "snom_ice", "fv3_history2d", "all", .false., "none", 2
+# Aerosols (CCN, IN) from Thompson microphysics
+"gfs_phys", "nwfa", "nwfa", "fv3_history", "all", .false., "none", 2
+"gfs_phys", "nifa", "nifa", "fv3_history", "all", .false., "none", 2
+"gfs_sfc", "nwfa2d", "nwfa2d", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "nifa2d", "nifa2d", "fv3_history2d", "all", .false., "none", 2
+# Cloud effective radii from Thompson and WSM6 microphysics
+"gfs_phys", "cleffr", "cleffr", "fv3_history", "all", .false., "none", 2
+"gfs_phys", "cieffr", "cieffr", "fv3_history", "all", .false., "none", 2
+"gfs_phys", "cseffr", "cseffr", "fv3_history", "all", .false., "none", 2
+# Prognostic/diagnostic variables from MYNN
+"gfs_phys", "QC_BL", "qc_bl", "fv3_history", "all", .false., "none", 2
+"gfs_phys", "CLDFRA", "cldfra", "fv3_history", "all", .false., "none", 2
+"gfs_phys", "EL_PBL", "el_pbl", "fv3_history", "all", .false., "none", 2
+"gfs_phys", "QKE", "qke", "fv3_history", "all", .false., "none", 2
+"gfs_sfc", "maxmf", "maxmf", "fv3_history2d", "all", .false., "none", 2
+#"gfs_sfc", "nupdraft", "nupdrafts", "fv3_history2d", "all", .false., "none", 2
+#"gfs_sfc", "ktop_shallow", "ktop_shallow", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "zol", "zol", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "flhc", "flhc", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "flqc", "flqc", "fv3_history2d", "all", .false., "none", 2
+# Prognostic/diagnostic variables from RUC LSM
+"gfs_sfc", "rhofr", "rhofr", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "snowfall_acc_land", "snacc_land", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "acsnow_land", "accswe_land", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "snowfall_acc_ice", "snacc_ice", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "acsnow_ice", "accswe_ice", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "xlaixy", "xlaixy", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "sfalb", "sfalb", "fv3_history2d", "all", .false., "none", 2
+# Stochastic physics
+#"gfs_phys", "sppt_wts", "sppt_wts", "fv3_history", "all", .false., "none", 2
+#"gfs_phys", "skebu_wts", "skebu_wts", "fv3_history", "all", .false., "none", 2
+#"gfs_phys", "skebv_wts", "skebv_wts", "fv3_history", "all", .false., "none", 2
+#"dynamics", "diss_est", "diss_est", "fv3_history", "all", .false., "none", 2
+#"gfs_phys", "shum_wts", "shum_wts", "fv3_history", "all", .false., "none", 2
+#
+"gfs_phys", "frzr", "frzr", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "frzrb", "frzrb", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "frozr", "frozr", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "frozrb", "frozrb", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "tsnowp", "tsnowp", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "tsnowpb", "tsnowpb", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "rhonewsn", "rhonewsn", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "snow", "snow", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "graupel", "graupel", "fv3_history2d", "all", .false., "none", 2
+# lightning threat indices
+"gfs_sfc", "ltg1_max", "ltg1_max", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "ltg2_max", "ltg2_max", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "ltg3_max", "ltg3_max", "fv3_history2d", "all", .false., "none", 2
+#=============================================================================================
+#
+#====> This file can be used with diag_manager/v2.0a (or higher) <====
+#
+#
+# FORMATS FOR FILE ENTRIES (not all input values are used)
+# ------------------------
+#
+#"file_name", output_freq, "output_units", format, "time_units", "long_name",
+#
+#
+#output_freq: > 0 output frequency in "output_units"
+# = 0 output frequency every time step
+# =-1 output frequency at end of run
+#
+#output_units = units used for output frequency
+# (years, months, days, minutes, hours, seconds)
+#
+#time_units = units used to label the time axis
+# (days, minutes, hours, seconds)
+#
+#
+# FORMAT FOR FIELD ENTRIES (not all input values are used)
+# ------------------------
+#
+#"module_name", "field_name", "output_name", "file_name" "time_sampling", time_avg, "other_opts", packing
+#
+#time_avg = .true. or .false.
+#
+#packing = 1 double precision
+# = 2 float
+# = 4 packed 16-bit integers
+# = 8 packed 1-byte (not tested?)
+# This file contains diag_table entries for the RRFS-SD.
+# It should be appended to the end of the diag_table before execution of the test.
+
+# Tracers
+"gfs_dyn", "smoke", "smoke", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "dust", "dust", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "coarsepm", "coarsepm", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "smoke_ave", "smoke_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_dyn", "dust_ave", "dust_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_dyn", "coarsepm_ave", "coarsepm_ave", "fv3_history2d", "all", .false., "none", 2
+
+# Aerosols emission for smoke
+"gfs_sfc", "emdust", "emdust", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "coef_bb_dc", "coef_bb_dc", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "min_fplume", "min_fplume", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "max_fplume", "max_fplume", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "hwp", "hwp", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "hwp_ave", "hwp_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "frp_output", "frp_output", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "ebu_smoke", "ebu_smoke", "fv3_history", "all", .false., "none", 2
+"gfs_phys", "ext550", "ext550", "fv3_history", "all", .false., "none", 2
diff --git a/parm/field_table_smoke_dust.FV3_HRRR_gf b/parm/field_table_smoke_dust.FV3_HRRR_gf
new file mode 100644
index 0000000000..3d4e0afcd4
--- /dev/null
+++ b/parm/field_table_smoke_dust.FV3_HRRR_gf
@@ -0,0 +1,80 @@
+# added by FRE: sphum must be present in atmos
+# specific humidity for moist runs
+ "TRACER", "atmos_mod", "sphum"
+ "longname", "specific humidity"
+ "units", "kg/kg"
+ "profile_type", "fixed", "surface_value=1.e-6" /
+# prognostic cloud water mixing ratio
+ "TRACER", "atmos_mod", "liq_wat"
+ "longname", "cloud water mixing ratio"
+ "units", "kg/kg"
+ "profile_type", "fixed", "surface_value=1.e30" /
+# prognostic ice water mixing ratio
+ "TRACER", "atmos_mod", "ice_wat"
+ "longname", "cloud ice mixing ratio"
+ "units", "kg/kg"
+ "profile_type", "fixed", "surface_value=1.e30" /
+# prognostic rain water mixing ratio
+ "TRACER", "atmos_mod", "rainwat"
+ "longname", "rain water mixing ratio"
+ "units", "kg/kg"
+ "profile_type", "fixed", "surface_value=1.e30" /
+# prognostic snow water mixing ratio
+ "TRACER", "atmos_mod", "snowwat"
+ "longname", "snow water mixing ratio"
+ "units", "kg/kg"
+ "profile_type", "fixed", "surface_value=1.e30" /
+# prognostic graupel mixing ratio
+ "TRACER", "atmos_mod", "graupel"
+ "longname", "graupel mixing ratio"
+ "units", "kg/kg"
+ "profile_type", "fixed", "surface_value=1.e30" /
+# prognostic cloud water number concentration
+ "TRACER", "atmos_mod", "water_nc"
+ "longname", "cloud liquid water number concentration"
+ "units", "/kg"
+ "profile_type", "fixed", "surface_value=0.0" /
+# prognostic cloud ice number concentration
+ "TRACER", "atmos_mod", "ice_nc"
+ "longname", "cloud ice water number concentration"
+ "units", "/kg"
+ "profile_type", "fixed", "surface_value=0.0" /
+# prognostic rain number concentration
+ "TRACER", "atmos_mod", "rain_nc"
+ "longname", "rain number concentration"
+ "units", "/kg"
+ "profile_type", "fixed", "surface_value=0.0" /
+# prognostic ozone mixing ratio tracer
+ "TRACER", "atmos_mod", "o3mr"
+ "longname", "ozone mixing ratio"
+ "units", "kg/kg"
+ "profile_type", "fixed", "surface_value=1.e30" /
+# water- and ice-friendly aerosols (Thompson)
+ "TRACER", "atmos_mod", "liq_aero"
+ "longname", "water-friendly aerosol number concentration"
+ "units", "/kg"
+ "profile_type", "fixed", "surface_value=0.0" /
+ "TRACER", "atmos_mod", "ice_aero"
+ "longname", "ice-friendly aerosol number concentration"
+ "units", "/kg"
+ "profile_type", "fixed", "surface_value=0.0" /
+# prognostic subgrid scale turbulent kinetic energy
+ "TRACER", "atmos_mod", "sgs_tke"
+ "longname", "subgrid scale turbulent kinetic energy"
+ "units", "m2/s2"
+ "profile_type", "fixed", "surface_value=0.0" /
+# prognostic smoke mixing ratio tracer
+ "TRACER", "atmos_mod", "smoke"
+ "longname", "smoke mixing ratio"
+ "units", "ug/kg"
+ "profile_type", "fixed", "surface_value=1.e-12" /
+# prognostic dust mixing ratio tracer
+ "TRACER", "atmos_mod", "dust"
+ "longname", "dust mixing ratio"
+ "units", "ug/kg"
+ "profile_type", "fixed", "surface_value=1.e-12" /
+# prognostic coarsepm mixing ratio tracer
+ "TRACER", "atmos_mod", "coarsepm"
+ "longname", "coarsepm mixing ratio"
+ "units", "ug/kg"
+ "profile_type", "fixed", "surface_value=1.e-12" /
diff --git a/parm/input.nml.FV3 b/parm/input.nml.FV3
index bd6244d2d9..53cd6e5a1d 100644
--- a/parm/input.nml.FV3
+++ b/parm/input.nml.FV3
@@ -28,7 +28,7 @@
/
&diag_manager_nml
- max_output_fields = 450
+ max_output_fields = 500
prepend_date = .false.
/
diff --git a/parm/model_configure b/parm/model_configure
index aeb45f4719..2f4c1ec603 100644
--- a/parm/model_configure
+++ b/parm/model_configure
@@ -17,8 +17,10 @@ atmos_nthreads: {{ atmos_nthreads }}
restart_interval: {{ restart_interval }}
output_1st_tstep_rst: .false.
write_dopost: {{ write_dopost }}
+zstandard_level: 0
ideflate: 0
-nbits: 0
+quantize_mode: quantize_bitround
+quantize_nsd: 0
ichunk2d: -1
jchunk2d: -1
ichunk3d: -1
@@ -26,6 +28,7 @@ jchunk3d: -1
kchunk3d: -1
itasks: {{ itasks }}
quilting: {{ quilting }}
+quilting_restart: {{ quilting }}
{% if quilting %}
#
# Write-component (quilting) computational parameters.
@@ -34,7 +37,7 @@ write_groups: {{ write_groups }}
write_tasks_per_group: {{ write_tasks_per_group }}
num_files: 2
filename_base: 'dyn' 'phy'
-output_file: 'netcdf' 'netcdf'
+output_file: 'netcdf_parallel'
#
# Write-component output frequency parameter definitions:
#
diff --git a/parm/wflow/aqm_prep.yaml b/parm/wflow/aqm_prep.yaml
index d90bbde60f..384da120ba 100644
--- a/parm/wflow/aqm_prep.yaml
+++ b/parm/wflow/aqm_prep.yaml
@@ -96,15 +96,14 @@ task_point_source:
left: staged_grid
right: '{% if not rocoto.get("tasks", {}).get("task_make_grid") %}staged_grid{% endif %}'
-task_aqm_ics_ext:
+task_aqm_ics:
<<: *default_aqm
attrs:
- cycledefs: at_start
+ cycledefs: forecast
maxtries: '2'
command: '&LOAD_MODULES_RUN_TASK; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"'
envars:
<<: *default_vars
- PREV_CYCLE_DIR: '&WARMSTART_CYCLE_DIR;'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
memory: 64G
dependency:
@@ -113,40 +112,22 @@ task_aqm_ics_ext:
attrs:
task: make_ics_mem000
or:
- datadep_date_tag_tracer:
+ datadep_coldstart:
attrs:
age: 00:00:00:05
- text: !cycstr '&WARMSTART_CYCLE_DIR;/RESTART/@Y@m@d.@H@M@S.fv_tracer.res.tile1.nc'
- datadep_tracer:
+ text: !cycstr '{{ workflow.EXPTDIR }}/task_skip_coldstart_@Y@m@d@H00.txt'
+ datadep_warmstart_tracer:
attrs:
age: 00:00:00:05
- text: !cycstr '&WARMSTART_CYCLE_DIR;/RESTART/fv_tracer.res.tile1.nc'
-
-task_aqm_ics:
- <<: *default_aqm
- attrs:
- cycledefs: cycled_from_second
- maxtries: '2'
- command: '&LOAD_MODULES_RUN_TASK; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"'
- envars:
- <<: *default_vars
- PREV_CYCLE_DIR: '&COMIN_DIR;'
- join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
- memory: 64G
- dependency:
- and:
- taskdep:
- attrs:
- task: make_ics_mem000
- or:
- datadep_date_tag_tracer:
+ text: !cycstr '&WARMSTART_CYCLE_DIR;/RESTART/@Y@m@d.@H@M@S.fv_tracer.res.tile1.nc'
+ datadep_comin_tracer:
attrs:
- age: 00:00:00:05
+ age: 00:00:03:00
text: '&COMIN_DIR;/RESTART/@Y@m@d.@H@M@S.fv_tracer.res.tile1.nc'
- datadep_tracer:
+ datadep_datashare_tracer:
attrs:
- age: 00:00:00:05
- text: '&COMIN_DIR;/RESTART/fv_tracer.res.tile1.nc'
+ age: 00:00:03:00
+ text: '&DATASHARE_DIR;/RESTART/@Y@m@d.@H@M@S.fv_tracer.res.tile1.nc'
task_aqm_lbcs:
<<: *default_aqm
diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml
index 6fad0b8d83..493de95bf8 100644
--- a/parm/wflow/coldstart.yaml
+++ b/parm/wflow/coldstart.yaml
@@ -10,6 +10,14 @@ default_task: &default_task
cyc: !cycstr "@H"
nprocs: '{{ parent.nnodes * parent.ppn }}'
subcyc: !cycstr "@M"
+ HOMEdir: '&HOMEdir;'
+ envir: '&envir;'
+ model_ver: '&model_ver;'
+ KEEPDATA: '&KEEPDATA;'
+ SENDCOM: '&SENDCOM;'
+ COMROOT: '&COMROOT;'
+ DATAROOT: '&DATAROOT;'
+ DCOMROOT: '&DCOMROOT;'
LOGDIR: !cycstr "&LOGDIR;"
ENSMEM_INDX: '#mem#'
native: '{{ platform.SCHED_NATIVE_CMD }}'
@@ -142,7 +150,7 @@ metatask_run_ensemble:
task_run_fcst_mem#mem#:
<<: *default_task
- command: '&LOAD_MODULES_RUN_TASK; "run_fcst" "&JOBSdir;/JREGIONAL_RUN_FCST"'
+ command: '{% if smoke_dust_parm.DO_SMOKE_DUST or cpl_aqm_parm.CPL_AQM %}&LOAD_MODULES_RUN_TASK; "forecast" "&JOBSdir;/JSRW_FORECAST" {% else %}&LOAD_MODULES_RUN_TASK; "run_fcst" "&JOBSdir;/JREGIONAL_RUN_FCST" {% endif %}'
envars:
<<: *default_vars
SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;'
@@ -209,3 +217,21 @@ metatask_run_ensemble:
taskdep:
attrs:
task: aqm_lbcs
+ or_smoke_dust:
+ not:
+ taskvalid:
+ attrs:
+ task: smoke_dust
+ taskdep:
+ attrs:
+ task: smoke_dust
+ or_prepstart:
+ not:
+ taskvalid:
+ attrs:
+ task: prepstart
+ taskdep:
+ attrs:
+ task: prepstart
+
+
diff --git a/parm/wflow/default_workflow.yaml b/parm/wflow/default_workflow.yaml
index e37fdae1ea..efc945f51a 100644
--- a/parm/wflow/default_workflow.yaml
+++ b/parm/wflow/default_workflow.yaml
@@ -37,9 +37,10 @@ rocoto:
COMROOT: '{{ nco.PTMP }}/&envir;/com'
DATAROOT: '{{ nco.PTMP }}/&envir;/tmp'
DCOMROOT: '{{ nco.PTMP }}/&envir;/dcom'
- COMIN_DIR: '{% if user.RUN_ENVIR == "nco" %}&COMROOT;/&NET;/&model_ver;/&RUN;.@Y@m@d/@H{% else %}{{ workflow.EXPTDIR }}/@Y@m@d@H{% endif %}'
+ COMIN_DIR: '{% if smoke_dust_parm.get("DO_SMOKE_DUST") or cpl_aqm_parm.get("CPL_AQM") %}&COMROOT;/&NET;/&model_ver;/&RUN;.@Y@m@d/@H{% else %}{{ workflow.EXPTDIR }}/@Y@m@d@H{% endif %}'
+ DATASHARE_DIR: '&DATAROOT;/DATA_SHARE/@Y@m@d@H'
FCST_DIR: '{% if user.RUN_ENVIR == "nco" %}&DATAROOT;/run_fcst_mem#mem#_@Y@m@d@H{% else %}{{ workflow.EXPTDIR }}/@Y@m@d@H{% endif %}'
- LOGDIR: '{% if user.RUN_ENVIR == "nco" %}&COMROOT;/output/logs/@Y@m@d{% else %}{{ workflow.EXPTDIR }}/log{% endif %}'
+ LOGDIR: '{% if smoke_dust_parm.get("DO_SMOKE_DUST") or cpl_aqm_parm.get("CPL_AQM") %}&COMROOT;/output/logs/@Y@m@d{% else %}{{ workflow.EXPTDIR }}/log{% endif %}'
attrs:
cyclethrottle: "200"
realtime: "F"
diff --git a/parm/wflow/smoke_dust.yaml b/parm/wflow/smoke_dust.yaml
new file mode 100644
index 0000000000..18bab50787
--- /dev/null
+++ b/parm/wflow/smoke_dust.yaml
@@ -0,0 +1,90 @@
+default_smoke_dust_task: &default_smoke_dust
+ account: '&ACCOUNT;'
+ attrs:
+ cycledefs: forecast
+ maxtries: '2'
+ envars: &default_vars
+ GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;'
+ HOMEdir: '&HOMEdir;'
+ envir: '&envir;'
+ model_ver: '&model_ver;'
+ KEEPDATA: '&KEEPDATA;'
+ SENDCOM: '&SENDCOM;'
+ COMROOT: '&COMROOT;'
+ DATAROOT: '&DATAROOT;'
+ DCOMROOT: '&DCOMROOT;'
+ LOGDIR: !cycstr "&LOGDIR;"
+ PDY: !cycstr "@Y@m@d"
+ cyc: !cycstr "@H"
+ nprocs: '{{ parent.nnodes * parent.ppn // 1 }}'
+ subcyc: !cycstr "@M"
+ SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;'
+ ENSMEM_INDX: '#mem#'
+ native: '{{ platform.SCHED_NATIVE_CMD }}'
+ nnodes: 1
+ nodes: '{{ nnodes }}:ppn={{ ppn }}'
+ partition: '{% if platform.get("PARTITION_DEFAULT") %}&PARTITION_DEFAULT;{% else %}None{% endif %}'
+ ppn: 1
+ queue: '&QUEUE_DEFAULT;'
+ walltime: 00:30:00
+
+task_smoke_dust:
+ <<: *default_smoke_dust
+ command: '&LOAD_MODULES_RUN_TASK; "smoke_dust" "&HOMEdir;/jobs/JSRW_SMOKE_DUST"'
+ join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
+ memory: 120G
+ dependency:
+ and:
+ or_grid:
+ datadep_grid:
+ attrs:
+ age: 00:00:00:05
+ text: '{{ task_make_grid.GRID_DIR }}/make_grid_task_complete.txt'
+ streq:
+ left: staged_grid
+ right: '{% if not rocoto.get("tasks", {}).get("task_make_grid") %}staged_grid{% endif %}'
+ or_orog:
+ datadep_orog:
+ attrs:
+ age: 00:00:00:05
+ text: '{{ task_make_orog.OROG_DIR }}/make_orog_task_complete.txt'
+ streq:
+ left: staged_orog
+ right: '{% if not rocoto.get("tasks", {}).get("task_make_orog") %}staged_orog{% endif %}'
+ or_sfc_climo:
+ datadep_sfc_climo:
+ attrs:
+ age: 00:00:00:05
+ text: '{{ task_make_sfc_climo.SFC_CLIMO_DIR }}/make_sfc_climo_task_complete.txt'
+ streq:
+ left: staged_sfc_climo
+ right: '{% if not rocoto.get("tasks", {}).get("task_make_sfc_climo") %}staged_sfc_climo{% endif %}'
+ or:
+ datadep_coldstart:
+ attrs:
+ age: 00:00:00:05
+ text: !cycstr '{{ workflow.EXPTDIR }}/task_skip_coldstart_@Y@m@d@H00.txt'
+ datadep_warmstart_tracer:
+ attrs:
+ age: 00:00:00:05
+ text: !cycstr '&WARMSTART_CYCLE_DIR;/RESTART/@Y@m@d.@H@M@S.fv_tracer.res.tile1.nc'
+ datadep_comin_tracer:
+ attrs:
+ age: 00:00:00:05
+ text: '&COMIN_DIR;/RESTART/@Y@m@d.@H@M@S.fv_tracer.res.tile1.nc'
+
+task_prepstart:
+ <<: *default_smoke_dust
+ command: '&LOAD_MODULES_RUN_TASK; "prepstart" "&HOMEdir;/jobs/JSRW_PREPSTART"'
+ join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
+ memory: 120G
+ dependency:
+ or_smoke_dust:
+ not:
+ taskvalid:
+ attrs:
+ task: smoke_dust
+ taskdep:
+ attrs:
+ task: smoke_dust
+
diff --git a/parm/wflow/upp_post.yaml b/parm/wflow/upp_post.yaml
new file mode 100644
index 0000000000..5b791118c6
--- /dev/null
+++ b/parm/wflow/upp_post.yaml
@@ -0,0 +1,55 @@
+default_upp_post_task: &default_upp_post
+ account: '&ACCOUNT;'
+ attrs:
+ cycledefs: forecast
+ maxtries: '2'
+ envars: &default_vars
+ GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;'
+ HOMEdir: '&HOMEdir;'
+ envir: '&envir;'
+ model_ver: '&model_ver;'
+ KEEPDATA: '&KEEPDATA;'
+ SENDCOM: '&SENDCOM;'
+ COMROOT: '&COMROOT;'
+ DATAROOT: '&DATAROOT;'
+ DCOMROOT: '&DCOMROOT;'
+ LOGDIR: !cycstr "&LOGDIR;"
+ PDY: !cycstr "@Y@m@d"
+ cyc: !cycstr "@H"
+ nprocs: '{{ parent.nnodes * parent.ppn // 1 }}'
+ subcyc: !cycstr "@M"
+ SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;'
+ ENSMEM_INDX: '#mem#'
+ native: '{{ platform.SCHED_NATIVE_CMD }}'
+ nnodes: 1
+ nodes: '{{ nnodes }}:ppn={{ ppn }}'
+ partition: '{% if platform.get("PARTITION_DEFAULT") %}&PARTITION_DEFAULT;{% else %}None{% endif %}'
+ ppn: 1
+ queue: '&QUEUE_DEFAULT;'
+ walltime: 00:30:00
+
+metatask_post:
+ var:
+ mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}'
+ metatask_post_mem#mem#_all_fhrs:
+ var:
+ fhr: '{% for h in range(0, workflow.LONG_FCST_LEN+1) %}{{ " %03d" % h }}{% endfor %}'
+ cycledef: '{% for h in range(0, workflow.LONG_FCST_LEN+1) %}{% if h <= workflow.FCST_LEN_CYCL|min %}forecast {% else %}long_forecast {% endif %}{% endfor %}'
+ task_post_mem#mem#_f#fhr#:
+ <<: *default_upp_post
+ attrs:
+ cycledefs: '#cycledef#'
+ maxtries: '2'
+ envars:
+ <<: *default_vars
+ fhr: '#fhr#'
+ command: '&LOAD_MODULES_RUN_TASK; "upp_post" "&HOMEdir;/jobs/JSRW_UPP_POST"'
+ join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
+ nnodes: 2
+ ppn: 24
+ memory: 64G
+ dependency:
+ taskdep:
+ attrs:
+ task: run_fcst_mem#mem#
+
diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh
index debf526798..90bf69ea65 100755
--- a/scripts/exregional_make_ics.sh
+++ b/scripts/exregional_make_ics.sh
@@ -86,7 +86,7 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-for sect in user nco platform workflow global cpl_aqm_parm constants task_get_extrn_ics task_make_ics ; do
+for sect in user nco platform workflow global cpl_aqm_parm smoke_dust_parm constants task_get_extrn_ics task_make_ics ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
done
#
@@ -195,6 +195,7 @@ case "${CCPP_PHYS_SUITE}" in
"FV3_GFS_v17_p8" | \
"FV3_WoFS_v0" | \
"FV3_HRRR" | \
+ "FV3_HRRR_gf" | \
"FV3_RAP" )
if [ "${EXTRN_MDL_NAME_ICS}" = "RAP" ] || \
[ "${EXTRN_MDL_NAME_ICS}" = "RRFS" ] || \
@@ -727,8 +728,9 @@ POST_STEP
#
#-----------------------------------------------------------------------
#
-if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
+if [ $(boolify "${CPL_AQM}") = "TRUE" ] || [ $(boolify "${DO_SMOKE_DUST}") = "TRUE" ]; then
COMOUT="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later
+ mkdir -p ${COMOUT}
if [ $(boolify "${COLDSTART}") = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then
data_trans_path="${COMOUT}"
else
@@ -737,7 +739,11 @@ if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
cp -p out.atm.tile${TILE_RGNL}.nc "${data_trans_path}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc"
cp -p out.sfc.tile${TILE_RGNL}.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc"
cp -p gfs_ctrl.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc"
- cp -p gfs.bndy.nc "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc"
+ if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
+ cp -p gfs.bndy.nc "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc"
+ else
+ cp -p gfs.bndy.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc"
+ fi
else
mv out.atm.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc
mv out.sfc.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc
diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh
index acbe97a56b..7be6a0f3e6 100755
--- a/scripts/exregional_make_lbcs.sh
+++ b/scripts/exregional_make_lbcs.sh
@@ -87,7 +87,7 @@
#
. $USHdir/source_util_funcs.sh
set -x
-for sect in user nco platform workflow global cpl_aqm_parm constants task_get_extrn_lbcs task_make_lbcs ; do
+for sect in user nco platform workflow global cpl_aqm_parm smoke_dust_parm constants task_get_extrn_lbcs task_make_lbcs ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
done
#
@@ -194,6 +194,7 @@ case "${CCPP_PHYS_SUITE}" in
"FV3_GFS_v17_p8" | \
"FV3_WoFS_v0" | \
"FV3_HRRR" | \
+ "FV3_HRRR_gf" | \
"FV3_RAP")
if [ "${EXTRN_MDL_NAME_LBCS}" = "RAP" ] || \
[ "${EXTRN_MDL_NAME_LBCS}" = "RRFS" ] || \
@@ -648,6 +649,10 @@ located in the following directory:
fcst_hhh_FV3LAM=$( printf "%03d" "$fcst_hhh" )
if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
cp -p gfs.bndy.nc ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc
+ elif [ $(boolify "${DO_SMOKE_DUST}") = "TRUE" ]; then
+ COMOUT="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later
+ mkdir -p ${COMOUT}
+ cp -p gfs.bndy.nc ${COMOUT}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc
else
mv gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc
fi
diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh
index 34b1675d8c..f3d2e8508b 100755
--- a/scripts/exregional_make_orog.sh
+++ b/scripts/exregional_make_orog.sh
@@ -290,7 +290,7 @@ mv "${raw_orog_fp_orig}" "${raw_orog_fp}"
#
#-----------------------------------------------------------------------
#
-suites=( "FV3_RAP" "FV3_HRRR" "FV3_GFS_v15_thompson_mynn_lam3km" "FV3_GFS_v17_p8" )
+suites=( "FV3_RAP" "FV3_HRRR" "FV3_HRRR_gf" "FV3_GFS_v15_thompson_mynn_lam3km" "FV3_GFS_v17_p8" )
if [[ ${suites[@]} =~ "${CCPP_PHYS_SUITE}" ]] ; then
DATA="${DATA:-${OROG_DIR}/temp_orog_data}"
mkdir -p ${DATA}
diff --git a/scripts/exsrw_aqm_ics.sh b/scripts/exsrw_aqm_ics.sh
index 4fd040e597..030983a0fa 100755
--- a/scripts/exsrw_aqm_ics.sh
+++ b/scripts/exsrw_aqm_ics.sh
@@ -8,7 +8,7 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-for sect in user nco platform workflow nco global verification cpl_aqm_parm \
+for sect in user nco platform workflow global verification cpl_aqm_parm \
constants fixed_files grid_params ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
done
@@ -50,108 +50,119 @@ tory (either from disk or HPSS) the external model files from which ini-
tial or boundary condition files for the FV3 will be generated.
========================================================================"
#
-#-----------------------------------------------------------------------
-#
-# Check if restart file exists
-#
-#-----------------------------------------------------------------------
-#
-rst_dir="${PREV_CYCLE_DIR}/RESTART"
-rst_file="fv_tracer.res.tile1.nc"
-rst_file_with_date="${PDY}.${cyc}0000.${rst_file}"
-if [ -e "${rst_dir}/${rst_file_with_date}" ]; then
- fv_tracer_file="${rst_dir}/${rst_file_with_date}"
-elif [ -e "${rst_dir}/${rst_file}" ]; then
- fv_tracer_file="${rst_dir}/${rst_file}"
-else
- message_txt="Tracer restart file: \"${fv_tracer_file}\" is NOT found"
- err_exit "${message_txt}"
- print_err_msg_exit "${message_txt}"
-fi
-print_info_msg "Tracer restart file: \"${fv_tracer_file}\""
-
-cplr_file="coupler.res"
-cplr_file_with_date="${PDY}.${cyc}0000.${cplr_file}"
-if [ -e "${rst_dir}/${cplr_file_with_date}" ]; then
- coupler_file="${rst_dir}/${cplr_file_with_date}"
-elif [ -e "${rst_dir}/${cplr_file}" ]; then
- coupler_file="${rst_dir}/${cplr_file}"
+if [ $(boolify "${COLDSTART}") = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then
+ echo "This step is skipped for the first cycle of COLDSTART."
else
- message_txt="Coupler file: \"${coupler_file}\" is NOT found"
- err_exit "${message_txt}"
- print_err_msg_exit "${message_txt}"
-fi
-print_info_msg "Coupler file: \"${coupler_file}\""
+ # Check if restart file exists
+ rst_file="fv_tracer.res.tile1.nc"
+ rst_file_with_date="${PDY}.${cyc}0000.${rst_file}"
-if [ -r ${coupler_file} ]; then
- rst_info=( $( tail -n 1 ${coupler_file} ) )
- # Remove leading zeros from ${rst_info[1]}
- month="${rst_info[1]#"${rst_info[1]%%[!0]*}"}"
- # Remove leading zeros from ${rst_info[2]}
- day="${rst_info[2]#"${rst_info[2]%%[!0]*}"}"
- # Format the date without leading zeros
- rst_date=$(printf "%04d%02d%02d%02d" ${rst_info[0]} $((10#$month)) $((10#$day)) ${rst_info[3]})
- if [ "${rst_date}" = "${PDY}${cyc}" ]; then
- if [ -r ${fv_tracer_file} ]; then
- print_info_msg "Tracer restart file is for ${PDY}${cyc}"
- else
- message_txt="Tracer restart file \"${fv_tracer_file}\" is NOT readable."
- err_exit "${message_txt}"
- print_err_msg_exit "${message_txt}"
+ # Warm start
+ if [ $(boolify "${COLDSTART}") = "FALSE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then
+ rst_dir="${WARMSTART_CYCLE_DIR}/RESTART"
+ else
+ # after the first cycle
+ CDATEprev=$($NDATE -${INCR_CYCL_FREQ} ${PDY}${cyc})
+ PDYprev=${CDATEprev:0:8}
+ cycprev=${CDATEprev:8:2}
+ COMINprev=${COMIN}/${RUN}.${PDYprev}/${cycprev}${SLASH_ENSMEM_SUBDIR}
+ if [ -e "${COMINprev}/RESTART/${rst_file_with_date}" ]; then
+ rst_dir="${COMINprev}/RESTART"
+ elif [ -e "${DATA_SHARE}/RESTART/${rst_file_with_date}" ]; then
+ rst_dir="${DATA_SHARE}/RESTART"
fi
+ fi
+ if [ -e "${rst_dir}/${rst_file_with_date}" ]; then
+ fv_tracer_file="${rst_dir}/${rst_file_with_date}"
+ elif [ -e "${rst_dir}/${rst_file}" ]; then
+ fv_tracer_file="${rst_dir}/${rst_file}"
+ else
+ message_txt="WARNING: Tracer restart file: \"${fv_tracer_file}\" is NOT found"
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+ fi
+ print_info_msg "Tracer restart file: \"${fv_tracer_file}\""
+
+ cplr_file="coupler.res"
+ cplr_file_with_date="${PDY}.${cyc}0000.${cplr_file}"
+ if [ -e "${rst_dir}/${cplr_file_with_date}" ]; then
+ coupler_file="${rst_dir}/${cplr_file_with_date}"
+ elif [ -e "${rst_dir}/${cplr_file}" ]; then
+ coupler_file="${rst_dir}/${cplr_file}"
else
- message_txt="Tracer restart file is NOT for ${PDY}${cyc}.
+ message_txt="WARNING: Coupler file: \"${coupler_file}\" is NOT found"
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+ fi
+ print_info_msg "Coupler file: \"${coupler_file}\""
+
+ if [ -r ${coupler_file} ]; then
+ rst_info=( $( tail -n 1 ${coupler_file} ) )
+ # Remove leading zeros from ${rst_info[1]}
+ month="${rst_info[1]#"${rst_info[1]%%[!0]*}"}"
+ # Remove leading zeros from ${rst_info[2]}
+ day="${rst_info[2]#"${rst_info[2]%%[!0]*}"}"
+ # Format the date without leading zeros
+ rst_date=$(printf "%04d%02d%02d%02d" ${rst_info[0]} $((10#$month)) $((10#$day)) ${rst_info[3]})
+ if [ "${rst_date}" = "${PDY}${cyc}" ]; then
+ if [ -r ${fv_tracer_file} ]; then
+ print_info_msg "Tracer restart file is for ${PDY}${cyc}"
+ else
+ message_txt="Tracer restart file \"${fv_tracer_file}\" is NOT readable."
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+ fi
+ else
+ message_txt="Tracer restart file is NOT for ${PDY}${cyc}.
Checking available restart date:
requested date: \"${PDY}${cyc}\"
available date: \"${rst_date}\""
- err_exit "${message_txt}"
- print_err_msg_exit "${message_txt}"
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+ fi
fi
-fi
-#
-#-----------------------------------------------------------------------
-#
-# Add air quality tracer variables from previous cycle's restart output
-# to atmosphere's initial condition file according to the steps below:
-#
-# a. Python script to manipulate the files (see comments inside for details)
-# b. Remove checksum attribute to prevent overflow
-# c. Rename reulting file as the expected atmospheric IC file
-#
-#-----------------------------------------------------------------------
-#
-gfs_ic_fn="${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc"
-gfs_ic_fp="${DATA_SHARE}/${gfs_ic_fn}"
-wrk_ic_fp="${DATA}/gfs.nc"
+ #
+ #-----------------------------------------------------------------------
+ #
+ # Add air quality tracer variables from previous cycle's restart output
+ # to atmosphere's initial condition file according to the steps below:
+ #
+ # a. Python script to manipulate the files (see comments inside for details)
+ # b. Remove checksum attribute to prevent overflow
+  # c. Rename resulting file as the expected atmospheric IC file
+ #
+ #-----------------------------------------------------------------------
+ #
+ gfs_ic_fn="${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc"
+ gfs_ic_fp="${DATA_SHARE}/${gfs_ic_fn}"
+ wrk_ic_fp="${DATA}/gfs.nc"
-print_info_msg "
+ print_info_msg "
Adding air quality tracers to atmospheric initial condition file:
tracer file: \"${fv_tracer_file}\"
FV3 IC file: \"${gfs_ic_fp}\""
-cp -p ${gfs_ic_fp} ${wrk_ic_fp}
-${USHsrw}/aqm_utils_python/add_aqm_ics.py --fv_tracer_file "${fv_tracer_file}" --wrk_ic_file "${wrk_ic_fp}"
-export err=$?
-if [ $err -ne 0 ]; then
- message_txt="Call to python script \"add_aqm_ics.py\" failed."
- err_exit "${message_txt}"
- print_err_msg_exit "${message_txt}"
-fi
-
-ncatted -a checksum,,d,s, tmp1.nc
-export err=$?
-if [ $err -ne 0 ]; then
- message_txt="Call to NCATTED returned with nonzero exit code."
- err_exit "${message_txt}"
- print_err_msg_exit "${message_txt}"
-fi
+ cp -p ${gfs_ic_fp} ${wrk_ic_fp}
+ ${USHsrw}/aqm_utils_python/add_aqm_ics.py --fv_tracer_file "${fv_tracer_file}" --wrk_ic_file "${wrk_ic_fp}"
+ export err=$?
+ if [ $err -ne 0 ]; then
+ message_txt="Call to python script \"add_aqm_ics.py\" failed."
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+ fi
-mv tmp1.nc ${gfs_ic_fn}
+ ncatted -a checksum,,d,s, tmp1.nc
+ export err=$?
+ if [ $err -ne 0 ]; then
+ message_txt="Call to NCATTED returned with nonzero exit code."
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+ fi
-cp -p ${gfs_ic_fn} ${COMOUT}
+ mv tmp1.nc ${gfs_ic_fn}
-unset fv_tracer_file
-unset wrk_ic_file
+ cp -p ${gfs_ic_fn} ${COMOUT}
+fi
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exsrw_aqm_lbcs.sh b/scripts/exsrw_aqm_lbcs.sh
index 7b3058ef34..416413abcc 100755
--- a/scripts/exsrw_aqm_lbcs.sh
+++ b/scripts/exsrw_aqm_lbcs.sh
@@ -8,7 +8,7 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-for sect in user nco platform workflow nco global verification cpl_aqm_parm \
+for sect in user nco platform workflow global verification cpl_aqm_parm \
constants fixed_files grid_params \
task_get_extrn_lbcs task_make_lbcs task_make_orog ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
@@ -221,7 +221,7 @@ EOF
export pgm="gefs2lbc_para"
. prep_step
- eval ${RUN_CMD_AQMLBC} ${EXECdir}/$pgm >>$pgmout 2>errfile
+ eval ${RUN_CMD_AQMLBC} ${EXECsrw}/$pgm >>$pgmout 2>errfile
export err=$?; err_chk
print_info_msg "
diff --git a/scripts/exsrw_bias_correction_o3.sh b/scripts/exsrw_bias_correction_o3.sh
index 343e7e6f2b..f09626dfbb 100755
--- a/scripts/exsrw_bias_correction_o3.sh
+++ b/scripts/exsrw_bias_correction_o3.sh
@@ -8,7 +8,7 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-for sect in user nco platform workflow nco global verification cpl_aqm_parm \
+for sect in user nco platform workflow global verification cpl_aqm_parm \
constants fixed_files grid_params \
task_bias_correction_o3 ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
diff --git a/scripts/exsrw_bias_correction_pm25.sh b/scripts/exsrw_bias_correction_pm25.sh
index 70cf512589..fbde40ecde 100755
--- a/scripts/exsrw_bias_correction_pm25.sh
+++ b/scripts/exsrw_bias_correction_pm25.sh
@@ -8,7 +8,7 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-for sect in user nco platform workflow nco global verification cpl_aqm_parm \
+for sect in user nco platform workflow global verification cpl_aqm_parm \
constants fixed_files grid_params \
task_bias_correction_pm25 ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
diff --git a/scripts/exsrw_fire_emission.sh b/scripts/exsrw_fire_emission.sh
index 3ae78422f5..f92d8a43e8 100755
--- a/scripts/exsrw_fire_emission.sh
+++ b/scripts/exsrw_fire_emission.sh
@@ -8,7 +8,7 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-for sect in user nco platform workflow nco global verification cpl_aqm_parm \
+for sect in user nco platform workflow global verification cpl_aqm_parm \
constants fixed_files grid_params ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
done
diff --git a/scripts/exsrw_forecast.sh b/scripts/exsrw_forecast.sh
new file mode 100755
index 0000000000..ffd889257a
--- /dev/null
+++ b/scripts/exsrw_forecast.sh
@@ -0,0 +1,789 @@
+#!/usr/bin/env bash
+
+
+#
+#-----------------------------------------------------------------------
+#
+# This ex-script is responsible for running the FV3 regional forecast.
+#
+# Run-time environment variables:
+#
+# CDATE
+# COMIN
+# COMOUT
+# COMROOT
+# DATA
+# DBNROOT
+# GLOBAL_VAR_DEFNS_FP
+# INPUT_DATA
+# NET
+# PDY
+# REDIRECT_OUT_ERR
+# RUN
+# SENDDBN
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# MACHINE
+# PARMdir
+# RUN_ENVIR
+# USHdir
+#
+# platform:
+# PRE_TASK_CMDS
+# RUN_CMD_FCST
+#
+# workflow:
+# CCPP_PHYS_DIR
+# CCPP_PHYS_SUITE
+# COLDSTART
+# CRES
+# DATA_TABLE_FN
+# DATA_TABLE_FP
+# DATE_FIRST_CYCL
+# DOT_OR_USCORE
+# EXPTDIR
+# FCST_LEN_CYCL
+# FCST_LEN_HRS
+# FIELD_DICT_FP
+# FIELD_DICT_FN
+# FIELD_TABLE_FN
+# FIELD_TABLE_FP
+# FIXam
+# FIXclim
+# FIXlam
+# FV3_NML_FN
+# FV3_NML_FP
+# FV3_NML_STOCH_FP
+# INCR_CYCL_FREQ
+# PREDEF_GRID_NAME
+# SYMLINK_FIX_FILES
+# VERBOSE
+#
+# task_get_extrn_lbcs:
+# LBC_SPEC_INTVL_HRS
+#
+# task_run_fcst:
+# DO_FCST_RESTART
+# DT_ATMOS
+# FV3_EXEC_FP
+# KMP_AFFINITY_RUN_FCST
+# OMP_NUM_THREADS_RUN_FCST
+# OMP_STACKSIZE_RUN_FCST
+# PRINT_ESMF
+# RESTART_INTERVAL
+# USE_MERRA_CLIMO
+# WRITE_DOPOST
+#
+# task_run_post:
+# CUSTOM_POST_CONFIG_FP
+# DT_SUBHOURLY_POST_MNTS
+# POST_OUTPUT_DOMAIN_NAME
+# SUB_HOURLY_POST
+# USE_CUSTOM_POST_CONFIG_FILE
+#
+# global:
+# DO_ENSEMBLE
+# DO_LSM_SPP
+# DO_SHUM
+# DO_SKEB
+# DO_SPP
+# DO_SPPT
+#
+# cpl_aqm_parm:
+# AQM_RC_PRODUCT_FN
+# CPL_AQM
+#
+# constants:
+# NH0
+# NH3
+# NH4
+# TILE_RGNL
+#
+# fixed_files:
+# CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING
+#
+#-----------------------------------------------------------------------
+#
+
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+. ${USHsrw}/source_util_funcs.sh
+for sect in user nco platform workflow global cpl_aqm_parm smoke_dust_parm constants fixed_files \
+ task_get_extrn_lbcs task_run_fcst task_run_post ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+#
+#-----------------------------------------------------------------------
+#
+# Save current shell options (in a global array). Then set new options
+# for this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the ex-script for the task that runs a forecast with FV3 for the
+specified cycle.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Set environment variables.
+#
+#-----------------------------------------------------------------------
+#
+export KMP_AFFINITY=${KMP_AFFINITY_RUN_FCST}
+export OMP_NUM_THREADS=${OMP_NUM_THREADS_RUN_FCST}
+export OMP_STACKSIZE=${OMP_STACKSIZE_RUN_FCST}
+export MPI_TYPE_DEPTH=20
+export ESMF_RUNTIME_COMPLIANCECHECK=OFF:depth=4
+if [ $(boolify "${PRINT_ESMF}") = "TRUE" ]; then
+ export ESMF_RUNTIME_PROFILE=ON
+ export ESMF_RUNTIME_PROFILE_OUTPUT="SUMMARY"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Load modules.
+#
+#-----------------------------------------------------------------------
+#
+eval ${PRE_TASK_CMDS}
+
+if [ -z "${RUN_CMD_FCST:-}" ] ; then
+ print_err_msg_exit "\
+ Run command was not set in machine file. \
+ Please set RUN_CMD_FCST for your platform"
+else
+ print_info_msg "$VERBOSE" "
+ All executables will be submitted with command \'${RUN_CMD_FCST}\'."
+fi
+
+if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then
+ cyc_mod=$(( ${cyc} - ${DATE_FIRST_CYCL:8:2} ))
+ CYCLE_IDX=$(( ${cyc_mod} / ${INCR_CYCL_FREQ} ))
+ FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]}
+fi
+# Set CDATE
+export CDATE="${PDY}${cyc}"
+# Create RESTART directory in the working directory DATA
+mkdir -p ${DATA}/RESTART
+#
+#-----------------------------------------------------------------------
+#
+# Create and set up INPUT subdirectory of the current working directory
+# to run the ufs weather model
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "$VERBOSE" "
+Creating links with names that FV3 looks for in the INPUT subdirectory
+of the current working directory (DATA), where
+ DATA = \"${DATA}\"
+..."
+
+# Create and set up INPUT directory for ufs-weather-model.
+mkdir -p ${DATA}/INPUT
+cd ${DATA}/INPUT
+
+# Symlink to mosaic file with a completely different name.
+target="${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo${NH3}.nc"
+symlink="grid_spec.nc"
+ln -nsf $target $symlink
+
+# Symlink to halo-3 grid file with "halo3" stripped from name.
+mosaic_fn="grid_spec.nc"
+grid_fn=$( get_charvar_from_netcdf "${mosaic_fn}" "gridfiles" )
+target="${FIXlam}/${grid_fn}"
+symlink="${grid_fn}"
+ln -nsf $target $symlink
+
+# Symlink to halo-4 grid file with "${CRES}_" stripped from name.
+target="${FIXlam}/${CRES}${DOT_OR_USCORE}grid.tile${TILE_RGNL}.halo${NH4}.nc"
+symlink="grid.tile${TILE_RGNL}.halo${NH4}.nc"
+ln -nsf $target $symlink
+
+# Symlink to halo-0 orography file with "${CRES}_" and "halo0" stripped from name.
+target="${FIXlam}/${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo${NH0}.nc"
+symlink="oro_data.nc"
+ln -nsf $target $symlink
+
+# Symlink to halo-4 orography file with "${CRES}_" stripped from name.
+target="${FIXlam}/${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo${NH4}.nc"
+symlink="oro_data.tile${TILE_RGNL}.halo${NH4}.nc"
+ln -nsf $target $symlink
+#
+# When using some specific physics suite, there are two files (that contain
+# statistics of the orography) that are needed by the gravity wave drag
+# parameterization in that suite.
+#
+suites=( "FV3_RAP" "FV3_HRRR" "FV3_HRRR_gf" "FV3_GFS_v15_thompson_mynn_lam3km" "FV3_GFS_v17_p8" )
+if [[ ${suites[@]} =~ "${CCPP_PHYS_SUITE}" ]] ; then
+ file_ids=( "ss" "ls" )
+ for file_id in "${file_ids[@]}"; do
+ target="${FIXlam}/${CRES}${DOT_OR_USCORE}oro_data_${file_id}.tile${TILE_RGNL}.halo${NH0}.nc"
+ symlink="oro_data_${file_id}.nc"
+ ln -nsf $target $symlink
+ done
+fi
+
+target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc"
+symlink="gfs_data.nc"
+ln -nsf $target $symlink
+
+target="${COMIN}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc"
+symlink="sfc_data.nc"
+ln -nsf $target $symlink
+
+target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc"
+symlink="gfs_ctrl.nc"
+ln -nsf $target $symlink
+
+for fhr in $(seq -f "%03g" 0 ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS}); do
+ target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f${fhr}.nc"
+ symlink="gfs_bndy.tile${TILE_RGNL}.${fhr}.nc"
+ ln -nsf $target $symlink
+done
+
+if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
+ target="${COMIN}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc"
+ symlink="NEXUS_Expt.nc"
+ ln -nsf $target $symlink
+
+ # create symlink to PT for point source in SRW-AQM
+ target="${COMIN}/${NET}.${cycle}${dot_ensmem}.PT.nc"
+ if [ -f ${target} ]; then
+ symlink="PT.nc"
+ ln -nsf $target $symlink
+ fi
+fi
+if [ $(boolify "${DO_SMOKE_DUST}") = "TRUE" ]; then
+ ln -nsf ${FIXsmoke}/${PREDEF_GRID_NAME}/dust12m_data.nc .
+ ln -nsf ${FIXsmoke}/${PREDEF_GRID_NAME}/emi_data.nc .
+
+ smokefile="${COMIN}/${SMOKE_DUST_FILE_PREFIX}_${PDY}${cyc}00.nc"
+ if [ -f ${smokefile} ]; then
+ ln -nsf ${smokefile} ${SMOKE_DUST_FILE_PREFIX}.nc
+ else
+ if [ "${EBB_DCYCLE}" = "1" ]; then
+ ln -nsf ${FIXsmoke}/${PREDEF_GRID_NAME}/dummy_24hr_smoke_ebbdc1.nc ${SMOKE_DUST_FILE_PREFIX}.nc
+      echo "WARNING: Smoke file is not available, using dummy_24hr_smoke_ebbdc1.nc instead"
+ else
+ ln -nsf ${FIXsmoke}/${PREDEF_GRID_NAME}/dummy_24hr_smoke.nc ${SMOKE_DUST_FILE_PREFIX}.nc
+      echo "WARNING: Smoke file is not available, using dummy_24hr_smoke.nc instead"
+ fi
+ fi
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create links in the current working directory to fixed (i.e. static) files
+# in the FIXam directory. These links have names that are set to the
+# names of files that the forecast model expects to exist in the current
+# working directory when the forecast model executable is called.
+#
+#-----------------------------------------------------------------------
+#
+cd ${DATA}
+
+print_info_msg "$VERBOSE" "
+Creating links in the current run directory (DATA) to fixed (i.e.
+static) files in the FIXam directory:
+ FIXam = \"${FIXam}\"
+ DATA = \"${DATA}\""
+
+regex_search="^[ ]*([^| ]+)[ ]*[|][ ]*([^| ]+)[ ]*$"
+num_symlinks=${#CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING[@]}
+for (( i=0; i<${num_symlinks}; i++ )); do
+
+ mapping="${CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING[$i]}"
+ symlink=$( printf "%s\n" "$mapping" | \
+ $SED -n -r -e "s/${regex_search}/\1/p" )
+ target=$( printf "%s\n" "$mapping" | \
+ $SED -n -r -e "s/${regex_search}/\2/p" )
+
+ symlink="${DATA}/$symlink"
+ target="$FIXam/$target"
+ ln -nsf $target $symlink
+
+done
+#
+#-----------------------------------------------------------------------
+#
+# Create links in the current working directory to the MERRA2 aerosol
+# climatology data files and lookup table for optics properties.
+#
+#-----------------------------------------------------------------------
+#
+if [ $(boolify "${USE_MERRA_CLIMO}") = "TRUE" ]; then
+ for f_nm_path in ${FIXclim}/*; do
+ f_nm=$( basename "${f_nm_path}" )
+ pre_f="${f_nm%%.*}"
+
+ if [ "${pre_f}" = "merra2" ]; then
+ mnth=$( printf "%s\n" "${f_nm}" | grep -o -P '(?<=2014.m).*(?=.nc)' )
+ symlink="${DATA}/aeroclim.m${mnth}.nc"
+ else
+ symlink="${DATA}/${pre_f}.dat"
+ fi
+ target="${f_nm_path}"
+ ln -nsf $target $symlink
+ done
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create links in the current working directory to cycle-independent (and
+# ensemble-member-independent) model input files in the main experiment
+# directory.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "$VERBOSE" "
+Creating links in the current run directory to cycle-independent model
+input files in the main experiment directory..."
+
+ln -nsf ${DATA_TABLE_FP} ${DATA}/${DATA_TABLE_FN}
+
+ln -nsf ${FIELD_TABLE_FP} ${DATA}/${FIELD_TABLE_FN}
+
+ln -nsf ${FIELD_DICT_FP} ${DATA}/${FIELD_DICT_FN}
+
+if [ $(boolify ${WRITE_DOPOST}) = "TRUE" ]; then
+ cp -p ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat
+ if [ $(boolify ${USE_CUSTOM_POST_CONFIG_FILE}) = "TRUE" ]; then
+ post_config_fp="${CUSTOM_POST_CONFIG_FP}"
+ print_info_msg "
+====================================================================
+ CUSTOM_POST_CONFIG_FP = \"${CUSTOM_POST_CONFIG_FP}\"
+===================================================================="
+ else
+ if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
+ post_config_fp="${PARMdir}/upp/postxconfig-NT-AQM.txt"
+ else
+ post_config_fp="${PARMdir}/upp/postxconfig-NT-fv3lam.txt"
+ fi
+ print_info_msg "
+====================================================================
+ post_config_fp = \"${post_config_fp}\"
+===================================================================="
+ fi
+ cp -p ${post_config_fp} ./postxconfig-NT_FH00.txt
+ cp -p ${post_config_fp} ./postxconfig-NT.txt
+ cp -p ${PARMdir}/upp/params_grib2_tbl_new .
+ # Set itag for inline-post:
+ if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
+ post_itag_add="aqf_on=.true.,"
+ else
+ post_itag_add=""
+ fi
+cat > itag <<EOF
+EOF
+for (( ih_rst=${num_restart_hrs}-1; ih_rst>=0; ih_rst-- )); do
+ cdate_restart_hr=$( $DATE_UTIL --utc --date "${PDY} ${cyc} UTC + ${restart_hrs[ih_rst]} hours" "+%Y%m%d%H" )
+ rst_yyyymmdd="${cdate_restart_hr:0:8}"
+ rst_hh="${cdate_restart_hr:8:2}"
+
+ num_rst_files=0
+ for file_id in "${file_ids[@]}"; do
+ if [ -e "${DATA}/RESTART/${rst_yyyymmdd}.${rst_hh}0000.${file_id}" ]; then
+ (( num_rst_files=num_rst_files+1 ))
+ fi
+ done
+ if [ "${num_rst_files}" = "${num_file_ids}" ]; then
+ FHROT="${restart_hrs[ih_rst]}"
+ break
+ fi
+ done
+
+ # Create soft-link of restart files in INPUT directory
+ cd ${DATA}/INPUT
+ for file_id in "${file_ids[@]}"; do
+ rm "${file_id}"
+ target="${DATA}/RESTART/${rst_yyyymmdd}.${rst_hh}0000.${file_id}"
+ symlink="${file_id}"
+ ln -nsf $target $symlink
+ done
+ cd ${DATA}
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Setup air quality model cold/warm start
+#
+#-----------------------------------------------------------------------
+#
+if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
+ if [ $(boolify "${COLDSTART}") = "TRUE" ] && \
+ [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ] && \
+ [ $(boolify "${flag_fcst_restart}") = "FALSE" ]; then
+ init_concentrations="true"
+ else
+ init_concentrations="false"
+ fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the function that creates the aqm.rc file within each
+# cycle directory.
+#
+#-----------------------------------------------------------------------
+#
+ ${USHsrw}/create_aqm_rc_file.py \
+ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \
+ --cdate "$CDATE" \
+ --run-dir "${DATA}" \
+ --init_concentrations "${init_concentrations}"
+ export err=$?
+ if [ $err -ne 0 ]; then
+ message_txt="Call to function to create an aqm.rc file for the current
+cycle's (cdate) run directory (DATA) failed:
+ cdate = \"${CDATE}\"
+ DATA = \"${DATA}\""
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+ fi
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the function that creates the model configuration file within each
+# cycle directory.
+#
+#-----------------------------------------------------------------------
+#
+${USHsrw}/create_model_configure_file.py \
+ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \
+ --cdate "$CDATE" \
+ --fcst_len_hrs "${FCST_LEN_HRS}" \
+ --fhrot "${FHROT}" \
+ --run-dir "${DATA}" \
+ --sub-hourly-post "${SUB_HOURLY_POST}" \
+ --dt-subhourly-post-mnts "${DT_SUBHOURLY_POST_MNTS}" \
+ --dt-atmos "${DT_ATMOS}"
+export err=$?
+if [ $err -ne 0 ]; then
+ message_txt="Call to function to create a model configuration file
+for the current cycle's (cdate) run directory (DATA) failed:
+ cdate = \"${CDATE}\"
+ DATA = \"${DATA}\""
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the function that creates the diag_table file within each cycle
+# directory.
+#
+#-----------------------------------------------------------------------
+#
+${USHsrw}/create_diag_table_file.py \
+ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \
+ --run-dir "${DATA}"
+export err=$?
+if [ $err -ne 0 ]; then
+ message_txt="Call to function to create a diag table file for the current
+cycle's (cdate) run directory (DATA) failed:
+ DATA = \"${DATA}\""
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Pre-generate symlink to forecast RESTART in DATA for early start of
+# the next cycle in SRW-AQM
+#
+#-----------------------------------------------------------------------
+#
+if [ $(boolify "${CPL_AQM}") = "TRUE" ] || [ $(boolify "${DO_SMOKE_DUST}") = "TRUE" ]; then
+ ln -nsf "${DATA}/RESTART" "${DATA_SHARE}/RESTART"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the function that creates the UFS configuration file within each
+# cycle directory.
+#
+#-----------------------------------------------------------------------
+#
+${USHsrw}/create_ufs_configure_file.py \
+ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \
+ --run-dir "${DATA}"
+export err=$?
+if [ $err -ne 0 ]; then
+  message_txt="Call to function to create a UFS configuration file for
+the current cycle's (cdate) run directory (DATA) failed:
+ DATA = \"${DATA}\""
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Run the FV3-LAM model.
+#
+#-----------------------------------------------------------------------
+#
+export pgm="${FV3_EXEC_FN}"
+
+. prep_step
+eval ${RUN_CMD_FCST} ${EXECsrw}/$pgm >>$pgmout 2>errfile
+export err=$?; err_chk
+#
+#-----------------------------------------------------------------------
+#
+# Copy RESTART directory to COMOUT.
+# Move dyn and phy files to COMOUT.
+# Copy AQM output product file to COMOUT only for AQM.
+#
+#-----------------------------------------------------------------------
+#
+if [ $(boolify "${CPL_AQM}") = "TRUE" ] || [ $(boolify "${DO_SMOKE_DUST}") = "TRUE" ]; then
+ cp -rp RESTART ${COMOUT}
+
+ fhr=0
+ while [ $fhr -le ${FCST_LEN_HRS} ]; do
+ fhr_ct=$(printf "%03d" $fhr)
+ source_dyn="${DATA}/dynf${fhr_ct}.nc"
+ source_phy="${DATA}/phyf${fhr_ct}.nc"
+ target_dyn="${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr_ct}.${POST_OUTPUT_DOMAIN_NAME}.nc"
+ target_phy="${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr_ct}.${POST_OUTPUT_DOMAIN_NAME}.nc"
+ [ -f ${source_dyn} ] && mv ${source_dyn} ${target_dyn}
+ [ -f ${source_phy} ] && mv ${source_phy} ${target_phy}
+ (( fhr=fhr+1 ))
+ done
+
+ if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
+ cp -p ${DATA}/${AQM_RC_PRODUCT_FN} ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${AQM_RC_PRODUCT_FN}
+ fi
+fi
+#
+#-----------------------------------------------------------------------
+#
+# If doing inline post, create the directory in which the post-processing
+# output will be stored (postprd_dir).
+#
+#-----------------------------------------------------------------------
+#
+if [ $(boolify ${WRITE_DOPOST}) = "TRUE" ]; then
+
+ yyyymmdd=${PDY}
+ hh=${cyc}
+ fmn="00"
+
+ if [ "${RUN_ENVIR}" != "nco" ]; then
+ export COMOUT="${DATA}/postprd"
+ fi
+ mkdir -p "${COMOUT}"
+
+ cd ${COMOUT}
+
+ for fhr in $(seq -f "%03g" 0 ${FCST_LEN_HRS}); do
+
+ if [ ${fhr:0:1} = "0" ]; then
+ fhr_d=${fhr:1:2}
+ else
+ fhr_d=${fhr}
+ fi
+
+ post_time=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${fhr_d} hours + ${fmn} minutes" "+%Y%m%d%H%M" )
+ post_mn=${post_time:10:2}
+ post_mn_or_null=""
+ post_fn_suffix="GrbF${fhr_d}"
+ post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.grib2"
+
+ if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
+ fids=( "cmaq" )
+ else
+ fids=( "prslev" "natlev" )
+ fi
+
+ for fid in "${fids[@]}"; do
+ FID=$(echo_uppercase $fid)
+ post_orig_fn="${FID}.${post_fn_suffix}"
+ post_renamed_fn="${NET}.${cycle}${dot_ensmem}.${fid}.${post_renamed_fn_suffix}"
+
+ mv ${DATA}/${post_orig_fn} ${post_renamed_fn}
+ if [ $RUN_ENVIR != "nco" ]; then
+ basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M )
+ symlink_suffix="_${basetime}f${fhr}${post_mn}"
+ ln -nsf ${post_renamed_fn} ${FID}${symlink_suffix}
+ fi
+ # DBN alert
+ if [ "$SENDDBN" = "YES" ]; then
+ $DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn}
+ fi
+ done
+
+ if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
+ mv ${DATA}/dynf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.nc
+ mv ${DATA}/phyf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.nc
+ fi
+ done
+
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating successful completion of script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+FV3 forecast completed successfully!!!
+
+Exiting script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Restore the shell options saved at the beginning of this script/func-
+# tion.
+#
+#-----------------------------------------------------------------------
+#
+{ restore_shell_opts; } > /dev/null 2>&1
+
diff --git a/scripts/exsrw_nexus_emission.sh b/scripts/exsrw_nexus_emission.sh
index 0fa8c48754..92891ca163 100755
--- a/scripts/exsrw_nexus_emission.sh
+++ b/scripts/exsrw_nexus_emission.sh
@@ -8,7 +8,7 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-for sect in user nco platform workflow nco global verification cpl_aqm_parm \
+for sect in user nco platform workflow global verification cpl_aqm_parm \
constants fixed_files grid_params \
task_nexus_emission ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
@@ -315,7 +315,7 @@ fi
export pgm="nexus"
. prep_step
-eval ${RUN_CMD_NEXUS} ${EXECdir}/$pgm -c NEXUS_Config.rc -r grid_spec.nc -o NEXUS_Expt_split.nc >>$pgmout 2>${DATA}/errfile
+eval ${RUN_CMD_NEXUS} ${EXECsrw}/$pgm -c NEXUS_Config.rc -r grid_spec.nc -o NEXUS_Expt_split.nc >>$pgmout 2>${DATA}/errfile
export err=$?; err_chk
if [ $err -ne 0 ]; then
print_err_msg_exit "Call to execute nexus failed."
diff --git a/scripts/exsrw_nexus_gfs_sfc.sh b/scripts/exsrw_nexus_gfs_sfc.sh
index cadc27b89c..77e6a072e1 100755
--- a/scripts/exsrw_nexus_gfs_sfc.sh
+++ b/scripts/exsrw_nexus_gfs_sfc.sh
@@ -8,7 +8,7 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-for sect in user nco platform workflow nco global verification cpl_aqm_parm \
+for sect in user nco platform workflow global verification cpl_aqm_parm \
constants fixed_files grid_params ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
done
diff --git a/scripts/exsrw_nexus_post_split.sh b/scripts/exsrw_nexus_post_split.sh
index 151e0a2ea5..a49ed4399e 100755
--- a/scripts/exsrw_nexus_post_split.sh
+++ b/scripts/exsrw_nexus_post_split.sh
@@ -8,7 +8,7 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-for sect in user nco platform workflow nco global verification cpl_aqm_parm \
+for sect in user nco platform workflow global verification cpl_aqm_parm \
constants fixed_files grid_params ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
done
diff --git a/scripts/exsrw_point_source.sh b/scripts/exsrw_point_source.sh
index 4cd693506c..caafbec5ef 100755
--- a/scripts/exsrw_point_source.sh
+++ b/scripts/exsrw_point_source.sh
@@ -8,7 +8,7 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-for sect in user nco platform workflow nco global verification cpl_aqm_parm \
+for sect in user nco platform workflow global verification cpl_aqm_parm \
constants fixed_files grid_params \
task_point_source task_run_fcst ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
diff --git a/scripts/exsrw_post_stat_o3.sh b/scripts/exsrw_post_stat_o3.sh
index dfcdd24ffa..edf44e15c9 100755
--- a/scripts/exsrw_post_stat_o3.sh
+++ b/scripts/exsrw_post_stat_o3.sh
@@ -8,7 +8,7 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-for sect in user nco platform workflow nco global verification cpl_aqm_parm \
+for sect in user nco platform workflow global verification cpl_aqm_parm \
constants fixed_files grid_params \
task_run_post ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
diff --git a/scripts/exsrw_post_stat_pm25.sh b/scripts/exsrw_post_stat_pm25.sh
index bdbf1fcbc5..7160f9caea 100755
--- a/scripts/exsrw_post_stat_pm25.sh
+++ b/scripts/exsrw_post_stat_pm25.sh
@@ -8,7 +8,7 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-for sect in user nco platform workflow nco global verification cpl_aqm_parm \
+for sect in user nco platform workflow global verification cpl_aqm_parm \
constants fixed_files grid_params \
task_run_post ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
diff --git a/scripts/exsrw_pre_post_stat.sh b/scripts/exsrw_pre_post_stat.sh
index f6ec6a9a7d..1290c30bcd 100755
--- a/scripts/exsrw_pre_post_stat.sh
+++ b/scripts/exsrw_pre_post_stat.sh
@@ -8,7 +8,7 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-for sect in user nco platform workflow nco global verification cpl_aqm_parm \
+for sect in user nco platform workflow global verification cpl_aqm_parm \
constants fixed_files grid_params \
task_run_post ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
diff --git a/scripts/exsrw_prepstart.sh b/scripts/exsrw_prepstart.sh
new file mode 100755
index 0000000000..c454989628
--- /dev/null
+++ b/scripts/exsrw_prepstart.sh
@@ -0,0 +1,147 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+. ${USHsrw}/source_util_funcs.sh
+for sect in user nco platform workflow global smoke_dust_parm \
+ constants fixed_files grid_params task_run_fcst ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+#
+#-----------------------------------------------------------------------
+#
+# Save current shell options (in a global array). Then set new options
+# for this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the ex-script for the task that runs prepstart.
+========================================================================"
+
+#-----------------------------------------------------------------------
+#
+# update IC files
+#
+#-----------------------------------------------------------------------
+if [ $(boolify "${COLDSTART}") = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then
+ echo "This step is skipped for the first cycle of COLDSTART."
+else
+ eval ${PRE_TASK_CMDS}
+ if [ $(boolify "${DO_SMOKE_DUST}") = "TRUE" ]; then
+ # IC gfs data file: gfs_data.tile7.halo0.nc
+ gfs_ic_fn="${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc"
+ gfs_ic_fp="${DATA_SHARE}/${gfs_ic_fn}"
+ gfs_ic_mod_fn="gfs_data.tile7.halo0.nc"
+ cp -p ${gfs_ic_fp} ${gfs_ic_mod_fn}
+
+ # restart tracer file: fv_tracer.res.tile1.nc
+ bkpath_find="missing"
+ if [ "${bkpath_find}" = "missing" ]; then
+ restart_prefix="${PDY}.${cyc}0000."
+ CDATEprev=$($NDATE -${INCR_CYCL_FREQ} ${PDY}${cyc})
+ PDYprev=${CDATEprev:0:8}
+ cycprev=${CDATEprev:8:2}
+ path_restart=${COMIN}/${RUN}.${PDYprev}/${cycprev}${SLASH_ENSMEM_SUBDIR}/RESTART
+
+ n=${INCR_CYCL_FREQ}
+ while [[ $n -le 25 ]] ; do
+ if [ "${IO_LAYOUT_Y}" = "1" ]; then
+ checkfile=${path_restart}/${restart_prefix}fv_tracer.res.tile1.nc
+ else
+ checkfile=${path_restart}/${restart_prefix}fv_tracer.res.tile1.nc.0000
+ fi
+ if [ -r "${checkfile}" ] && [ "${bkpath_find}" = "missing" ]; then
+ bkpath_find=${path_restart}
+ print_info_msg "$VERBOSE" "Found ${checkfile}; Use it for smoke/dust cycle "
+ break
+ fi
+ n=$((n + ${INCR_CYCL_FREQ}))
+ CDATEprev=$($NDATE -$n ${PDY}${cyc})
+ PDYprev=${CDATEprev:0:8}
+ cycprev=${CDATEprev:8:2}
+ path_restart=${COMIN}/${RUN}.${PDYprev}/${cycprev}${SLASH_ENSMEM_SUBDIR}/RESTART
+ print_info_msg "$VERBOSE" "Trying this path: ${path_restart}"
+ done
+ fi
+
+ # cycle smoke/dust
+ if [ "${bkpath_find}" = "missing" ]; then
+ print_info_msg "WARNING: cannot find smoke/dust files from previous cycle"
+ else
+ if [ "${IO_LAYOUT_Y}" = "1" ]; then
+ checkfile=${bkpath_find}/${restart_prefix}fv_tracer.res.tile1.nc
+ if [ -r "${checkfile}" ]; then
+ ncks -A -v smoke,dust,coarsepm ${checkfile} fv_tracer.res.tile1.nc
+ fi
+ else
+ for ii in ${list_iolayout}
+ do
+ iii=$(printf %4.4i $ii)
+ checkfile=${bkpath_find}/${restart_prefix}fv_tracer.res.tile1.nc.${iii}
+ if [ -r "${checkfile}" ]; then
+ ncks -A -v smoke,dust,coarsepm ${checkfile} fv_tracer.res.tile1.nc.${iii}
+ fi
+ done
+ fi
+ echo "${PDY}${cyc}: cycle smoke/dust from ${checkfile} "
+ fi
+
+ ${USHsrw}/add_smoke.py
+ export err=$?
+ if [ $err -ne 0 ]; then
+ message_txt="add_smoke.py failed with return code $err"
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+ fi
+ # copy output to COMOUT
+ cp -p ${gfs_ic_mod_fn} ${COMOUT}/${gfs_ic_fn}
+ fi
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating successful completion of script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+PREPSTART has been completed successfully!!!
+
+Exiting script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+{ restore_shell_opts; } > /dev/null 2>&1
diff --git a/scripts/exsrw_smoke_dust.sh b/scripts/exsrw_smoke_dust.sh
new file mode 100755
index 0000000000..9fff8fb743
--- /dev/null
+++ b/scripts/exsrw_smoke_dust.sh
@@ -0,0 +1,144 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+. ${USHsrw}/source_util_funcs.sh
+for sect in user nco platform workflow global smoke_dust_parm \
+ constants fixed_files grid_params task_run_fcst ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+#
+#-----------------------------------------------------------------------
+#
+# Save current shell options (in a global array). Then set new options
+# for this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the ex-script for the task that runs Smoke and Dust.
+========================================================================"
+#
+# Set CDATE used in the fire emission generation python script
+#
+export CDATE="${PDY}${cyc}"
+#
+# Check if the fire file exists in the designated directory
+#
+smokeFile="${SMOKE_DUST_FILE_PREFIX}_${CDATE}00.nc"
+if [ -e "${COMINsmoke}/${smokeFile}" ]; then
+ cp -p "${COMINsmoke}/${smokeFile}" ${COMOUT}
+else
+ eval ${PRE_TASK_CMDS}
+ #
+ # Link restart directory of the previous cycle in COMIN/COMOUT
+ #
+ CDATEprev=$($NDATE -${INCR_CYCL_FREQ} ${PDY}${cyc})
+ PDYprev=${CDATEprev:0:8}
+ cycprev=${CDATEprev:8:2}
+ path_restart=${COMIN}/${RUN}.${PDYprev}/${cycprev}${SLASH_ENSMEM_SUBDIR}/RESTART
+ ln -nsf ${path_restart} .
+
+ # Check whether the RAVE files need to be split into hourly files
+ if [ "${EBB_DCYCLE}" -eq 1 ]; then
+ ddhh_to_use="${PDY}${cyc}"
+ else
+ ddhh_to_use="${PDYm1}${cyc}"
+ fi
+ for hour in {00..23}; do
+ fire_hr_cdate=$($NDATE +${hour} ${ddhh_to_use})
+ fire_hr_pdy="${fire_hr_cdate:0:8}"
+ fire_hr_fn="Hourly_Emissions_3km_${fire_hr_cdate}00_${fire_hr_cdate}00.nc"
+ if [ -f "${COMINfire}/${fire_hr_fn}" ]; then
+ echo "Hourly emission file for $hour was found: ${fire_hr_fn}"
+ ln -nsf ${COMINfire}/${fire_hr_fn} .
+ else
+      # Check various versions of RAVE raw data files (new and old)
+ rave_raw_fn1="RAVE-HrlyEmiss-3km_v2r0_blend_s${fire_hr_cdate}00000_e${fire_hr_pdy}23*"
+ rave_raw_fn2="Hourly_Emissions_3km_${fire_hr_cdate}00_${fire_hr_pdy}23*"
+ # Find files matching the specified patterns
+ files_found=$(find "${COMINfire}" -type f \( -name "${rave_raw_fn1##*/}" -o -name "${rave_raw_fn2##*/}" \))
+      # Splitting 24-hour RAVE raw data into hourly data
+ for file_to_use in $files_found; do
+ echo "Using file: $file_to_use"
+ echo "Splitting data for hour $hour..."
+ ncks -d time,$hour,$hour "${COMINfire}/${file_to_use}" "${DATA}/${fire_hr_fn}"
+ if [ -f "${DATA}/${fire_hr_fn}" ]; then
+ break
+ else
+ echo "WARNING: Hourly emission file for $hour was NOT created from ${file_to_use}."
+ fi
+ done
+ fi
+ done
+ #
+ #-----------------------------------------------------------------------
+ #
+ # Call python script to generate fire emission files.
+ #
+ #-----------------------------------------------------------------------
+ #
+ ${USHsrw}/generate_fire_emissions.py \
+ "${FIXsmoke}/${PREDEF_GRID_NAME}" \
+ "${DATA}" \
+ "${DATA_SHARE}" \
+ "${PREDEF_GRID_NAME}" \
+ "${EBB_DCYCLE}" \
+ "${RESTART_INTERVAL}"
+ export err=$?
+ if [ $err -ne 0 ]; then
+ message_txt="generate_fire_emissions.py failed with return code $err"
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+ fi
+
+ # Copy Smoke file to COMOUT
+ cp -p ${DATA}/${smokeFile} ${COMOUT}
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating successful completion of script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Smoke and Dust has successfully generated output files !!!!
+
+Exiting script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+{ restore_shell_opts; } > /dev/null 2>&1
diff --git a/scripts/exsrw_upp_post.sh b/scripts/exsrw_upp_post.sh
new file mode 100755
index 0000000000..902516bc52
--- /dev/null
+++ b/scripts/exsrw_upp_post.sh
@@ -0,0 +1,357 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+. ${USHsrw}/source_util_funcs.sh
+for sect in user nco platform workflow global cpl_aqm_parm smoke_dust_parm \
+ task_run_fcst task_run_post ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+#
+#-----------------------------------------------------------------------
+#
+# Save current shell options (in a global array). Then set new options
+# for this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the ex-script for the task that runs UPP post-processing.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Set OpenMP variables.
+#
+#-----------------------------------------------------------------------
+#
+export KMP_AFFINITY=${KMP_AFFINITY_RUN_POST}
+export OMP_NUM_THREADS=${OMP_NUM_THREADS_RUN_POST}
+export OMP_STACKSIZE=${OMP_STACKSIZE_RUN_POST}
+#
+#-----------------------------------------------------------------------
+#
+# Load modules.
+#
+#-----------------------------------------------------------------------
+#
+eval ${PRE_TASK_CMDS}
+
+if [ -z "${RUN_CMD_POST:-}" ] ; then
+ print_err_msg_exit "\
+ Run command was not set in machine file. \
+ Please set RUN_CMD_POST for your platform"
+else
+ print_info_msg "$VERBOSE" "
+ All executables will be submitted with command \'${RUN_CMD_POST}\'."
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Stage necessary files in the working directory.
+#
+#-----------------------------------------------------------------------
+#
+cp ${PARMsrw}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat
+if [ $(boolify ${USE_CUSTOM_POST_CONFIG_FILE}) = "TRUE" ]; then
+ post_config_fp="${CUSTOM_POST_CONFIG_FP}"
+ print_info_msg "
+====================================================================
+Copying the user-defined file specified by CUSTOM_POST_CONFIG_FP:
+ CUSTOM_POST_CONFIG_FP = \"${CUSTOM_POST_CONFIG_FP}\"
+===================================================================="
+else
+ if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
+ post_config_fp="${PARMsrw}/upp/postxconfig-NT-AQM.txt"
+ else
+ post_config_fp="${PARMsrw}/upp/postxconfig-NT-rrfs.txt"
+ fi
+ print_info_msg "
+====================================================================
+Copying the default post flat file specified by post_config_fp:
+ post_config_fp = \"${post_config_fp}\"
+===================================================================="
+fi
+cp ${post_config_fp} ./postxconfig-NT.txt
+cp ${PARMsrw}/upp/params_grib2_tbl_new .
+
+if [ $(boolify ${DO_SMOKE_DUST}) = "TRUE" ] || [ $(boolify ${USE_CRTM}) = "TRUE" ]; then
+ ln -nsf ${FIXcrtm}/Nalli.IRwater.EmisCoeff.bin .
+ ln -nsf ${FIXcrtm}/FAST*.bin .
+ ln -nsf ${FIXcrtm}/NPOESS.IRland.EmisCoeff.bin .
+ ln -nsf ${FIXcrtm}/NPOESS.IRsnow.EmisCoeff.bin .
+ ln -nsf ${FIXcrtm}/NPOESS.IRice.EmisCoeff.bin .
+ ln -nsf ${FIXcrtm}/AerosolCoeff.bin .
+ ln -nsf ${FIXcrtm}/CloudCoeff.bin .
+ ln -nsf ${FIXcrtm}/*.SpcCoeff.bin .
+ ln -nsf ${FIXcrtm}/*.TauCoeff.bin .
+ print_info_msg "
+====================================================================
+Copying the CRTM fix files from FIXcrtm:
+ FIXcrtm = \"${FIXcrtm}\"
+===================================================================="
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Get the cycle date and hour (in formats of yyyymmdd and hh, respectively)
+# from CDATE.
+#
+#-----------------------------------------------------------------------
+#
+yyyymmdd=${PDY}
+hh=${cyc}
+#
+#-----------------------------------------------------------------------
+#
+# Create the namelist file (itag) containing arguments to pass to the post-
+# processor's executable.
+#
+#-----------------------------------------------------------------------
+#
+# Set the variable (mnts_secs_str) that determines the suffix in the names
+# of the forecast model's write-component output files that specifies the
+# minutes and seconds of the corresponding output forecast time.
+#
+# Note that if the forecast model is instructed to output at some hourly
+# interval (via the output_fh parameter in the MODEL_CONFIG_FN file,
+# with nsout set to a non-positive value), then the write-component
+# output file names will not contain any suffix for the minutes and seconds.
+# For this reason, when SUB_HOURLY_POST is not set to "TRUE", mnts_sec_str
+# must be set to a null string.
+#
+mnts_secs_str=""
+if [ $(boolify "${SUB_HOURLY_POST}") = "TRUE" ]; then
+ if [ ${fhr}${fmn} = "00000" ]; then
+ mnts_secs_str=":"$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${DT_ATMOS} seconds" "+%M:%S" )
+ else
+ mnts_secs_str=":${fmn}:00"
+ fi
+fi
+#
+# Set namelist of upp.
+#
+if [ $(boolify "${CPL_AQM}") = "TRUE" ] || [ $(boolify "${DO_SMOKE_DUST}") = "TRUE" ]; then
+ dyn_file="${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.nc"
+ phy_file="${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.nc"
+else
+ dyn_file="${COMIN}/dynf${fhr}${mnts_secs_str}.nc"
+ phy_file="${COMIN}/phyf${fhr}${mnts_secs_str}.nc"
+fi
+
+post_time=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${fhr} hours + ${fmn} minutes" "+%Y%m%d%H%M" )
+post_yyyy=${post_time:0:4}
+post_mm=${post_time:4:2}
+post_dd=${post_time:6:2}
+post_hh=${post_time:8:2}
+post_mn=${post_time:10:2}
+
+if [ $(boolify "${CPL_AQM}") = "TRUE" ] && [ $(boolify "${DO_SMOKE_DUST}") = "FALSE" ]; then
+ post_itag_add="aqf_on=.true.,"
+elif [ $(boolify "${DO_SMOKE_DUST}") = "TRUE" ]; then
+ post_itag_add="slrutah_on=.true.,gtg_on=.true."
+else
+ post_itag_add=""
+fi
+# NOTE(review): the heredoc below and the upp.x run command were collapsed into a
+# single garbled line in this copy; reconstructed from the standard SRW UPP itag
+# template -- verify field names against the upstream exregional_run_post.sh.
+cat > itag <<EOF
+&model_inputs
+fileName='${dyn_file}'
+IOFORM='netcdf'
+grib='grib2'
+DateStr='${post_yyyy}-${post_mm}-${post_dd}_${post_hh}:${post_mn}:00'
+MODELNAME='FV3R'
+fileNameFlux='${phy_file}'
+fileNameFlat='postxconfig-NT.txt'
+/
+
+&NAMPGB
+${post_itag_add}
+/
+EOF
+
+${RUN_CMD_POST} ${EXECsrw}/upp.x < itag >>$pgmout 2>errfile
+export err=$?; err_chk
+if [ $err -ne 0 ]; then
+ message_txt="upp.x failed with return code $err"
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# A separate ${post_fhr} forecast hour variable is required for the post
+# files, since they may or may not be three digits long, depending on the
+# length of the forecast.
+#
+# A separate ${subh_fhr} is needed for subhour post.
+#-----------------------------------------------------------------------
+#
+# get the length of the fhr string to decide format of forecast time stamp.
+# 9 is sub-hourly forecast and 3 is full-hour forecast only.
+len_fhr=${#fhr}
+if [ ${len_fhr} -eq 9 ]; then
+ post_min=${fhr:4:2}
+ if [ ${post_min} -lt ${nsout_min} ]; then
+ post_min=00
+ fi
+else
+ post_min=00
+fi
+
+subh_fhr=${fhr}
+if [ ${len_fhr} -eq 2 ]; then
+ post_fhr=${fhr}
+elif [ ${len_fhr} -eq 3 ]; then
+ if [ "${fhr:0:1}" = "0" ]; then
+ post_fhr="${fhr:1}"
+ else
+ post_fhr=${fhr}
+ fi
+elif [ ${len_fhr} -eq 9 ]; then
+ if [ "${fhr:0:1}" = "0" ]; then
+ if [ ${post_min} -eq 00 ]; then
+ post_fhr="${fhr:1:2}"
+ subh_fhr="${fhr:0:3}"
+ else
+ post_fhr="${fhr:1:2}.${fhr:4:2}"
+ fi
+ else
+ if [ ${post_min} -eq 00 ]; then
+ post_fhr="${fhr:0:3}"
+ subh_fhr="${fhr:0:3}"
+ else
+ post_fhr="${fhr:0:3}.${fhr:4:2}"
+ fi
+ fi
+else
+ err_exit "\
+The \${fhr} variable contains too few or too many characters:
+ fhr = \"$fhr\""
+fi
+
+# replace fhr with subh_fhr
+echo "fhr=${fhr} and subh_fhr=${subh_fhr}"
+fhr=${subh_fhr}
+
+if [ $(boolify "${DO_SMOKE_DUST}") = "TRUE" ]; then
+ bgdawp=${NET}.${cycle}.prslev.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2
+ bgrd3d=${NET}.${cycle}.natlev.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2
+ bgifi=${NET}.${cycle}.ififip.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2
+ bgavi=${NET}.${cycle}.aviati.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2
+
+ if [ -f "PRSLEV.GrbF${post_fhr}" ]; then
+ wgrib2 PRSLEV.GrbF${post_fhr} -set center 7 -grib ${bgdawp} >>$pgmout 2>>errfile
+ fi
+ if [ -f "NATLEV.GrbF${post_fhr}" ]; then
+ wgrib2 NATLEV.GrbF${post_fhr} -set center 7 -grib ${bgrd3d} >>$pgmout 2>>errfile
+ fi
+ if [ -f "IFIFIP.GrbF${post_fhr}" ]; then
+ wgrib2 IFIFIP.GrbF${post_fhr} -set center 7 -grib ${bgifi} >>$pgmout 2>>errfile
+ fi
+ if [ -f "AVIATI.GrbF${post_fhr}" ]; then
+ wgrib2 AVIATI.GrbF${post_fhr} -set center 7 -grib ${bgavi} >>$pgmout 2>>errfile
+ fi
+
+ cp -p ${bgdawp} ${COMOUT}
+ cp -p ${bgrd3d} ${COMOUT}
+ cp -p ${bgifi} ${COMOUT}
+ cp -p ${bgavi} ${COMOUT}
+
+else
+
+ post_mn_or_null=""
+ dot_post_mn_or_null=""
+ if [ "${post_mn}" != "00" ]; then
+ post_mn_or_null="${post_mn}"
+ dot_post_mn_or_null=".${post_mn}"
+ fi
+ post_fn_suffix="GrbF${post_fhr}${dot_post_mn_or_null}"
+ post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.grib2"
+ basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M )
+ symlink_suffix="${dot_ensmem}.${basetime}f${fhr}${post_mn}"
+ if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
+ fids=( "cmaq" )
+ else
+ fids=( "prslev" "natlev" )
+ fi
+ for fid in "${fids[@]}"; do
+ FID=$(echo_uppercase $fid)
+ post_orig_fn="${FID}.${post_fn_suffix}"
+ post_renamed_fn="${NET}.${cycle}${dot_ensmem}.${fid}.${post_renamed_fn_suffix}"
+ mv ${post_orig_fn} ${post_renamed_fn}
+ cp -p ${post_renamed_fn} ${COMOUT}
+ done
+
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating successful completion of script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+UPP post-processing has successfully generated output files !!!!
+
+Exiting script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+{ restore_shell_opts; } > /dev/null 2>&1
diff --git a/sorc/CMakeLists.txt b/sorc/CMakeLists.txt
index e84319ad6e..2e69635d59 100644
--- a/sorc/CMakeLists.txt
+++ b/sorc/CMakeLists.txt
@@ -30,7 +30,7 @@ if (BUILD_UFS)
if(CPL_AQM)
set(CCPP_SUITES "FV3_GFS_v15p2,FV3_GFS_v16,FV3_GFS_v17_p8")
else()
- set(CCPP_SUITES "FV3_GFS_v15p2,FV3_GFS_v16,FV3_GFS_v17_p8,FV3_RRFS_v1beta,FV3_HRRR,FV3_RAP,FV3_GFS_v15_thompson_mynn_lam3km,FV3_WoFS_v0")
+ set(CCPP_SUITES "FV3_GFS_v15p2,FV3_GFS_v16,FV3_GFS_v17_p8,FV3_RRFS_v1beta,FV3_HRRR,FV3_HRRR_gf,FV3_RAP,FV3_GFS_v15_thompson_mynn_lam3km,FV3_WoFS_v0")
endif()
endif()
diff --git a/tests/WE2E/run_WE2E_tests.py b/tests/WE2E/run_WE2E_tests.py
index 5d4bd81105..663aa04497 100755
--- a/tests/WE2E/run_WE2E_tests.py
+++ b/tests/WE2E/run_WE2E_tests.py
@@ -172,7 +172,6 @@ def run_we2e_tests(homedir, args) -> None:
# if platform section was not in input config, initialize as empty dict
if 'platform' not in test_cfg:
test_cfg['platform'] = dict()
- test_cfg['platform'].update({"BUILD_MOD_FN": args.modulefile})
test_cfg['workflow'].update({"COMPILER": args.compiler})
if args.expt_basedir:
test_cfg['workflow'].update({"EXPT_BASEDIR": args.expt_basedir})
@@ -502,8 +501,6 @@ def setup_logging(logfile: str = "log.run_WE2E_tests", debug: bool = False) -> N
' none: Do not launch experiments; only create experiment directories',
default="python")
-
- ap.add_argument('--modulefile', type=str, help='Modulefile used for building the app')
ap.add_argument('--run_envir', type=str,
help='Overrides RUN_ENVIR variable to a new value ("nco" or "community") '\
'for all experiments', default='')
@@ -533,8 +530,6 @@ def setup_logging(logfile: str = "log.run_WE2E_tests", debug: bool = False) -> N
"--launch=cron\nPlease update your workflow accordingly")
#Set defaults that need other argument values
- if args.modulefile is None:
- args.modulefile = f'build_{args.machine.lower()}_{args.compiler}'
if args.procs < 1:
raise argparse.ArgumentTypeError('You can not have less than one parallel process; select a valid value '\
'for --procs')
diff --git a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml
index 2901d1ebf1..8e742486a6 100644
--- a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml
+++ b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml
@@ -13,12 +13,14 @@ workflow:
DIAG_TABLE_TMPL_FN: diag_table_aqm.FV3_GFS_v16
FIELD_TABLE_TMPL_FN: field_table_aqm.FV3_GFS_v16
DO_REAL_TIME: false
+ COLDSTART: true
nco:
- NET_default: aqm
+ envir_default: we2e_aqm
+ NET_default: we2e_aqm
+ RUN_default: we2e_aqm
rocoto:
tasks:
- taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}'
- task_aqm_ics_ext:
+ taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/upp_post.yaml", "parm/wflow/test.yaml"]|include }}'
metatask_run_ensemble:
task_run_fcst_mem#mem#:
walltime: 01:20:00
diff --git a/tests/WE2E/test_configs/smoke_dust/config.smoke_dust_grid_RRFS_CONUS_3km_suite_HRRR_gf.yaml b/tests/WE2E/test_configs/smoke_dust/config.smoke_dust_grid_RRFS_CONUS_3km_suite_HRRR_gf.yaml
new file mode 100644
index 0000000000..a6ae67df6f
--- /dev/null
+++ b/tests/WE2E/test_configs/smoke_dust/config.smoke_dust_grid_RRFS_CONUS_3km_suite_HRRR_gf.yaml
@@ -0,0 +1,66 @@
+metadata:
+ description: config for Smoke and Dust, RRFS_CONUS_3km
+user:
+ RUN_ENVIR: community
+platform:
+ BUILD_MOD_FN: 'build_{{ user.MACHINE|lower() }}_intel_prod'
+workflow:
+ PREDEF_GRID_NAME: RRFS_CONUS_3km
+ CCPP_PHYS_SUITE: FV3_HRRR_gf
+ DATE_FIRST_CYCL: '2019072200'
+ DATE_LAST_CYCL: '2019072206'
+ INCR_CYCL_FREQ: 6
+ FCST_LEN_HRS: 6
+ PREEXISTING_DIR_METHOD: rename
+ VERBOSE: true
+ DEBUG: false
+ COMPILER: intel
+ DIAG_TABLE_TMPL_FN: diag_table_smoke_dust.FV3_HRRR_gf
+ FIELD_TABLE_TMPL_FN: field_table_smoke_dust.FV3_HRRR_gf
+ DO_REAL_TIME: false
+ COLDSTART: true
+nco:
+ envir_default: we2e_smoke_dust
+ NET_default: we2e_smoke_dust
+ RUN_default: we2e_smoke_dust
+rocoto:
+ tasks:
+ taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/smoke_dust.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/upp_post.yaml"]|include }}'
+ metatask_run_ensemble:
+ task_run_fcst_mem#mem#:
+ walltime: 02:00:00
+task_get_extrn_ics:
+ EXTRN_MDL_NAME_ICS: RAP
+ EXTRN_MDL_ICS_OFFSET_HRS: 0
+ USE_USER_STAGED_EXTRN_FILES: true
+task_get_extrn_lbcs:
+ EXTRN_MDL_NAME_LBCS: RAP
+ LBC_SPEC_INTVL_HRS: 6
+ EXTRN_MDL_LBCS_OFFSET_HRS: 0
+ USE_USER_STAGED_EXTRN_FILES: true
+task_make_ics:
+ VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev_fcst_rrfsL65.txt"
+task_make_lbcs:
+ VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev_fcst_rrfsL65.txt"
+task_run_fcst:
+ DT_ATMOS: 36
+ LAYOUT_X: 15
+ LAYOUT_Y: 20
+ BLOCKSIZE: 32
+ WRTCMP_write_tasks_per_group: 40
+ RESTART_INTERVAL: 6 12 18 24
+ QUILTING: true
+ PRINT_ESMF: false
+ DO_FCST_RESTART: false
+task_run_post:
+ POST_OUTPUT_DOMAIN_NAME: conus3km
+ USE_CUSTOM_POST_CONFIG_FILE: false
+global:
+ DO_ENSEMBLE: false
+ NUM_ENS_MEMBERS: 2
+ HALO_BLEND: 20
+smoke_dust_parm:
+ DO_SMOKE_DUST: true
+ EBB_DCYCLE: 1
+ SMOKE_DUST_FILE_PREFIX: "SMOKE_RRFS_data"
+
diff --git a/tests/WE2E/test_configs/wflow_features/config.different_vcoord.yaml b/tests/WE2E/test_configs/wflow_features/config.different_vcoord.yaml
deleted file mode 100644
index 16f2d10e74..0000000000
--- a/tests/WE2E/test_configs/wflow_features/config.different_vcoord.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
-metadata:
- description: |-
- This test is to ensure that the workflow running in community mode
- completes successfully when the RRFS vertical coordinates are chosen.
-user:
- RUN_ENVIR: community
-workflow:
- CCPP_PHYS_SUITE: FV3_HRRR
- PREDEF_GRID_NAME: RRFS_CONUS_25km
- DATE_FIRST_CYCL: '2019070100'
- DATE_LAST_CYCL: '2019070100'
- FCST_LEN_HRS: 3
- PREEXISTING_DIR_METHOD: rename
-task_get_extrn_ics:
- EXTRN_MDL_NAME_ICS: FV3GFS
- USE_USER_STAGED_EXTRN_FILES: true
-task_get_extrn_lbcs:
- EXTRN_MDL_NAME_LBCS: FV3GFS
- LBC_SPEC_INTVL_HRS: 3
- USE_USER_STAGED_EXTRN_FILES: true
-task_make_ics:
- VCOORD_FILE: "{{ user.PARMdir }}/global_hyblev_fcst_rrfsL65.txt"
-task_make_lbcs:
- VCOORD_FILE: "{{ user.PARMdir }}/global_hyblev_fcst_rrfsL65.txt"
diff --git a/ush/HWP_tools.py b/ush/HWP_tools.py
new file mode 100755
index 0000000000..7b175241e1
--- /dev/null
+++ b/ush/HWP_tools.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python3
+
+import numpy as np
+import os
+import datetime as dt
+import shutil
+from datetime import timedelta
+import xarray as xr
+import fnmatch
+
+def check_restart_files(hourly_hwpdir, fcst_dates):
+ hwp_avail_hours = []
+ hwp_non_avail_hours = []
+
+ for cycle in fcst_dates:
+ restart_file = f"{cycle[:8]}.{cycle[8:10]}0000.phy_data.nc"
+ file_path = os.path.join(hourly_hwpdir, restart_file)
+
+ if os.path.exists(file_path):
+ print(f'Restart file available for: {restart_file}')
+ hwp_avail_hours.append(cycle)
+ else:
+ print(f'Copy restart file for: {restart_file}')
+ hwp_non_avail_hours.append(cycle)
+
+ print(f'Available restart at: {hwp_avail_hours}, Non-available restart files at: {hwp_non_avail_hours}')
+ return(hwp_avail_hours, hwp_non_avail_hours)
+
+def copy_missing_restart(nwges_dir, hwp_non_avail_hours, hourly_hwpdir, len_restart_interval):
+ restart_avail_hours = []
+ restart_nonavail_hours_test = []
+
+ for cycle in hwp_non_avail_hours:
+ try:
+ YYYYMMDDHH = dt.datetime.strptime(cycle, "%Y%m%d%H")
+ HH = cycle[8:10]
+ prev_hr = YYYYMMDDHH - timedelta(hours=1)
+ prev_hr_str = prev_hr.strftime("%Y%m%d%H")
+
+ source_restart_dir = os.path.join(nwges_dir, prev_hr_str, 'fcst_fv3lam', 'RESTART')
+ wildcard_name = '*.phy_data.nc'
+
+ if len_restart_interval > 1:
+ print('ENTERING LOOP for len_restart_interval > 1')
+ if os.path.exists(source_restart_dir):
+ matching_files_found = False
+ print('PATH EXISTS')
+ for file in sorted(os.listdir(source_restart_dir)):
+ if fnmatch.fnmatch(file, wildcard_name):
+ matching_files_found = True
+ print('MATCHING FILES FOUND')
+ source_file_path = os.path.join(source_restart_dir, file)
+ target_file_path = os.path.join(hourly_hwpdir, file)
+ var1, var2 = 'rrfs_hwp_ave', 'totprcp_ave'
+ if os.path.exists(source_file_path):
+ with xr.open_dataset(source_file_path) as ds:
+ try:
+ if var1 in ds.variables and var2 in ds.variables:
+ ds = ds[[var1, var2]]
+ ds.to_netcdf(target_file_path)
+ restart_avail_hours.append(cycle)
+ print(f'Restart file copied: {file}')
+ else:
+ print(f'Missing variables {var1} or {var2} in {file}. Skipping file.')
+ except AttributeError as e:
+ print(f"AttributeError processing NetCDF file {source_file_path}: {e}")
+ else:
+ print(f"Source file not found: {source_file_path}")
+ if not matching_files_found:
+ print('No matching files found')
+ restart_nonavail_hours_test.append(cycle)
+ else:
+ print(f"Source directory not found: {source_restart_dir}")
+ restart_nonavail_hours_test.append(cycle)
+ else:
+ if os.path.exists(source_restart_dir):
+ try:
+ matching_files = [f for f in os.listdir(source_restart_dir) if fnmatch.fnmatch(f, wildcard_name)]
+ if not matching_files:
+ print(f"No matching files for cycle {cycle} in {source_restart_dir}")
+ restart_nonavail_hours_test.append(cycle)
+ continue
+
+ for matching_file in matching_files:
+ source_file_path = os.path.join(source_restart_dir, matching_file)
+ target_file_path = os.path.join(hourly_hwpdir, matching_file)
+ var1, var2 = 'rrfs_hwp_ave', 'totprcp_ave'
+
+ if os.path.exists(source_file_path):
+ try:
+ with xr.open_dataset(source_file_path) as ds:
+ if var1 in ds.variables and var2 in ds.variables:
+ ds = ds[[var1, var2]]
+ ds.to_netcdf(target_file_path)
+ restart_avail_hours.append(cycle)
+ print(f'Restart file copied: {matching_file}')
+ else:
+ print(f'Missing variables {var1} or {var2} in {matching_file}. Skipping file.')
+ except (FileNotFoundError, IOError, OSError, RuntimeError, ValueError, TypeError, KeyError, IndexError, MemoryError) as e:
+ print(f"Error processing NetCDF file {source_file_path}: {e}")
+ restart_nonavail_hours_test.append(cycle)
+ else:
+ print(f"Source file not found: {source_file_path}")
+ restart_nonavail_hours_test.append(cycle)
+ except (FileNotFoundError, IOError, OSError, RuntimeError) as e:
+ print(f"Error accessing directory {source_restart_dir}: {e}")
+ restart_nonavail_hours_test.append(cycle)
+ else:
+ print(f"Source directory not found: {source_restart_dir}")
+ restart_nonavail_hours_test.append(cycle)
+
+ except (ValueError, TypeError) as e:
+ print(f"Error processing cycle {cycle}: {e}")
+ restart_nonavail_hours_test.append(cycle)
+
+ return(restart_avail_hours, restart_nonavail_hours_test)
+
+def process_hwp(fcst_dates, hourly_hwpdir, cols, rows, intp_dir, rave_to_intp):
+ hwp_ave = []
+ totprcp = np.zeros((cols*rows))
+ var1, var2 = 'rrfs_hwp_ave', 'totprcp_ave'
+
+ for cycle in fcst_dates:
+ try:
+ print(f'Processing restart file for date: {cycle}')
+ file_path = os.path.join(hourly_hwpdir, f"{cycle[:8]}.{cycle[8:10]}0000.phy_data.nc")
+ rave_path = os.path.join(intp_dir, f"{rave_to_intp}{cycle}00_{cycle}59.nc")
+
+ if os.path.exists(file_path) and os.path.exists(rave_path):
+ try:
+ with xr.open_dataset(file_path) as nc:
+ if var1 in nc.variables and var2 in nc.variables:
+ hwp_values = nc.rrfs_hwp_ave.values.ravel()
+ tprcp_values = nc.totprcp_ave.values.ravel()
+ totprcp += np.where(tprcp_values > 0, tprcp_values, 0)
+ hwp_ave.append(hwp_values)
+ print(f'Restart file processed for: {cycle}')
+ else:
+ print(f'Missing variables {var1} or {var2} in file: {file_path}')
+ except (FileNotFoundError, IOError, OSError, RuntimeError, ValueError, TypeError, KeyError, IndexError, MemoryError) as e:
+ print(f"Error processing NetCDF file {file_path}: {e}")
+ else:
+ print(f'One or more files non-available for this cycle: {file_path}, {rave_path}')
+ except (ValueError, TypeError) as e:
+ print(f"Error processing cycle {cycle}: {e}")
+
+ # Calculate the mean HWP values if available
+ if hwp_ave:
+ hwp_ave_arr = np.nanmean(hwp_ave, axis=0).reshape(cols, rows)
+ totprcp_ave_arr = totprcp.reshape(cols, rows)
+ else:
+ hwp_ave_arr = np.zeros((cols, rows))
+ totprcp_ave_arr = np.zeros((cols, rows))
+
+ xarr_hwp = xr.DataArray(hwp_ave_arr)
+ xarr_totprcp = xr.DataArray(totprcp_ave_arr)
+
+ return(hwp_ave_arr, xarr_hwp, totprcp_ave_arr, xarr_totprcp)
+
diff --git a/ush/add_smoke.py b/ush/add_smoke.py
new file mode 100755
index 0000000000..b49f672e6d
--- /dev/null
+++ b/ush/add_smoke.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python3
+
+import xarray as xr
+import numpy as np
+import os
+
+def populate_data(data, target_shape):
+ """
+    Extracted variables need to match the target shape, so we first populate them into a zero array.
+
+ Parameters:
+ data (np.ndarray): The extracted data to be adjusted.
+ target_shape (tuple): The shape of the target data array.
+
+ Returns:
+ np.ndarray: The adjusted data array.
+ """
+ target_lev, target_lat, target_lon = target_shape
+ populated_data = np.zeros(target_shape)
+ populated_data[:data.shape[0], :, :] = data
+ return populated_data
+
+def main():
+ # File paths
+ source_file = "fv_tracer.res.tile1.nc"
+ target_file = 'gfs_data.tile7.halo0.nc'
+
+ # Check if the source file exists
+ if not os.path.exists(source_file):
+ print(f"Source file '{source_file}' does not exist. Exiting...")
+ return
+
+ # Open the source file and extract data
+ data_to_extract = xr.open_dataset(source_file)
+ print("DATA FILE:",data_to_extract)
+
+ smoke_2_add = data_to_extract['smoke'][0,:,:, :]
+ dust_2_add = data_to_extract['dust'][0,:,:, :]
+ coarsepm_2_add = data_to_extract['coarsepm'][0,:, :, :]
+
+ print('Max values in source file:', smoke_2_add.max())
+
+ # Open the target file and load it into memory
+ file_input = xr.open_dataset(target_file).load()
+ file_input.close() # to remove permission error below
+ print("TARGET FILE:",file_input)
+ # Drop the 'smoke' variable if it exists in both the source and target files
+ if 'smoke' in file_input.variables and 'smoke' in data_to_extract.variables:
+ file_input = file_input.drop('smoke')
+
+ # Determine the shape of the new variables based on the target file dimensions
+ lev_dim = file_input.dims['lev']
+ lat_dim = file_input.dims['lat']
+ lon_dim = file_input.dims['lon']
+
+ # Populate the extracted data to match the target shape
+ #smoke_2_add_populated = populate_data(smoke_2_add, (lev_dim, lat_dim, lon_dim))
+ #dust_2_add_populated = populate_data(dust_2_add, (lev_dim, lat_dim, lon_dim))
+ #coarsepm_2_add_populated = populate_data(coarsepm_2_add, (lev_dim, lat_dim, lon_dim))
+
+ #print('Max values in populated data:', smoke_2_add_populated.max(), dust_2_add_populated.max(), coarsepm_2_add_populated.max())
+
+ # Create new data arrays filled with zeros
+ smoke_zero = xr.DataArray(np.zeros((lev_dim, lat_dim, lon_dim)), dims=['lev', 'lat', 'lon'], attrs={'units': 'ug/kg'})
+ dust_zero = xr.DataArray(np.zeros((lev_dim, lat_dim, lon_dim)), dims=['lev', 'lat', 'lon'], attrs={'units': 'ug/kg'})
+ coarsepm_zero = xr.DataArray(np.zeros((lev_dim, lat_dim, lon_dim)), dims=['lev', 'lat', 'lon'], attrs={'units': 'ug/kg'})
+
+ # Assign the data arrays to the dataset, initially with zeros
+ file_input['smoke'] = smoke_zero
+ file_input['dust'] = dust_zero
+ file_input['coarsepm']= coarsepm_zero
+
+ # Populate the variables with the adjusted data
+ file_input['smoke'][1:66,:,:] = smoke_2_add
+ file_input['dust'][1:66,:,:] = dust_2_add
+ file_input['coarsepm'][1:66,:,:] = coarsepm_2_add
+
+ print("FINAL FILE:", file_input)
+ # Save the modified dataset back to the file
+ file_input.to_netcdf(target_file, mode='w')
+
+ # Reopen the target file to check the variables
+ with xr.open_dataset(target_file) as file_input:
+ print('Max values in target file after update:')
+ print('smoke:', file_input['smoke'].max().item())
+ print('dust:', file_input['dust'].max().item())
+ print('coarsepm:', file_input['coarsepm'].max().item())
+
+if __name__ == "__main__":
+ main()
diff --git a/ush/config.aqm.yaml b/ush/config.aqm.yaml
index 21a73591ee..e3e5707bc5 100644
--- a/ush/config.aqm.yaml
+++ b/ush/config.aqm.yaml
@@ -31,8 +31,7 @@ nco:
RUN_default: aqm
rocoto:
tasks:
- taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}'
-# task_aqm_ics_ext: # uncomment this in case of COLDSTART: true
+ taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/upp_post.yaml"]|include }}'
metatask_run_ensemble:
task_run_fcst_mem#mem#:
walltime: 01:20:00
diff --git a/ush/config.smoke_dust.yaml b/ush/config.smoke_dust.yaml
new file mode 100644
index 0000000000..97d908e28f
--- /dev/null
+++ b/ush/config.smoke_dust.yaml
@@ -0,0 +1,75 @@
+metadata:
+ description: config for Smoke and Dust, RRFS_CONUS_3km
+user:
+ RUN_ENVIR: community
+ MACHINE: hera
+ ACCOUNT: [account name]
+platform:
+ BUILD_MOD_FN: 'build_{{ user.MACHINE|lower() }}_intel_prod'
+# EXTRN_MDL_DATA_STORES: disk
+workflow:
+ USE_CRON_TO_RELAUNCH: true
+ CRON_RELAUNCH_INTVL_MNTS: 3
+ EXPT_SUBDIR: smoke_dust_conus3km
+ PREDEF_GRID_NAME: RRFS_CONUS_3km
+ CCPP_PHYS_SUITE: FV3_HRRR_gf
+ DATE_FIRST_CYCL: '2019072200'
+ DATE_LAST_CYCL: '2019072206'
+ INCR_CYCL_FREQ: 6
+ FCST_LEN_HRS: 6
+ PREEXISTING_DIR_METHOD: rename
+ VERBOSE: true
+ DEBUG: false
+ COMPILER: intel
+ DIAG_TABLE_TMPL_FN: diag_table_smoke_dust.FV3_HRRR_gf
+ FIELD_TABLE_TMPL_FN: field_table_smoke_dust.FV3_HRRR_gf
+ DO_REAL_TIME: false
+ COLDSTART: true # set to true for cold start
+# WARMSTART_CYCLE_DIR: '/path/to/warm/start/files'
+nco:
+ envir_default: test_smoke_dust
+ NET_default: smoke_dust
+ RUN_default: smoke_dust
+rocoto:
+ tasks:
+ taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/smoke_dust.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/upp_post.yaml"]|include }}'
+ metatask_run_ensemble:
+ task_run_fcst_mem#mem#:
+ walltime: 02:00:00
+task_get_extrn_ics:
+ EXTRN_MDL_NAME_ICS: RAP
+ EXTRN_MDL_ICS_OFFSET_HRS: 0
+# USE_USER_STAGED_EXTRN_FILES: true
+# EXTRN_MDL_SOURCE_BASEDIR_ICS: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmddhh}
+task_get_extrn_lbcs:
+ EXTRN_MDL_NAME_LBCS: RAP
+ LBC_SPEC_INTVL_HRS: 6
+ EXTRN_MDL_LBCS_OFFSET_HRS: 0
+# USE_USER_STAGED_EXTRN_FILES: true
+# EXTRN_MDL_SOURCE_BASEDIR_LBCS: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmddhh}
+task_make_ics:
+ VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev_fcst_rrfsL65.txt"
+task_make_lbcs:
+ VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev_fcst_rrfsL65.txt"
+task_run_fcst:
+ DT_ATMOS: 36
+ LAYOUT_X: 15
+ LAYOUT_Y: 20
+ BLOCKSIZE: 32
+ WRTCMP_write_tasks_per_group: 40
+ RESTART_INTERVAL: 6 12 18 24
+ QUILTING: true
+ PRINT_ESMF: false
+ DO_FCST_RESTART: false
+task_run_post:
+ POST_OUTPUT_DOMAIN_NAME: conus3km
+ USE_CUSTOM_POST_CONFIG_FILE: false
+global:
+ DO_ENSEMBLE: false
+ NUM_ENS_MEMBERS: 2
+ HALO_BLEND: 20
+smoke_dust_parm:
+ DO_SMOKE_DUST: true
+ EBB_DCYCLE: 1
+ SMOKE_DUST_FILE_PREFIX: SMOKE_RRFS_data
+
diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml
index 90651c1b7f..d1924ae896 100644
--- a/ush/config_defaults.yaml
+++ b/ush/config_defaults.yaml
@@ -426,11 +426,14 @@ platform:
# FIXemis:
# System directory where AQM emission data files are located.
#
+ # FIXsmoke:
+ # System directory where Smoke and Dust data files are located.
+ #
# FIXcrtm:
# System directory where CRTM fixed files are located
#
- # FIXcrtmupp:
- # System directory where CRTM fixed files specifically for UPP are located
+ # FIXupp:
+ # System directory where UPP fixed files are located
#
#-----------------------------------------------------------------------
#
@@ -442,8 +445,9 @@ platform:
FIXshp: ""
FIXaqm: ""
FIXemis: ""
+ FIXsmoke: ""
+ FIXupp: ""
FIXcrtm: ""
- FIXcrtmupp: ""
#
#-----------------------------------------------------------------------
#
@@ -1965,7 +1969,7 @@ task_run_fcst:
#
#-----------------------------------------------------------------------
#
- USE_MERRA_CLIMO: '{{ workflow.CCPP_PHYS_SUITE == "FV3_GFS_v15_thompson_mynn_lam3km" or workflow.CCPP_PHYS_SUITE == "FV3_GFS_v17_p8" }}'
+ USE_MERRA_CLIMO: '{{ workflow.CCPP_PHYS_SUITE == "FV3_GFS_v15_thompson_mynn_lam3km" or workflow.CCPP_PHYS_SUITE == "FV3_HRRR_gf" or workflow.CCPP_PHYS_SUITE == "FV3_GFS_v17_p8" }}'
#
#-----------------------------------------------------------------------
#
@@ -2677,6 +2681,36 @@ cpl_aqm_parm:
NEXUS_GFS_SFC_DIR: ""
NEXUS_GFS_SFC_ARCHV_DIR: "/NCEPPROD/hpssprod/runhistory"
+#------------------------------
+# Smoke/Dust config parameters
+#------------------------------
+smoke_dust_parm:
+ #
+ #-----------------------------------------------------------------------
+ #
+ # DO_SMOKE_DUST:
+ # Flag turning on/off Smoke and Dust
+ #
+ # EBB_DCYCLE:
+ # 1: for retro, 2: for forecast
+ #
+ # COMINsmoke_default:
+ # Path to the directory containing smoke and dust files
+ #
+ # COMINfire_default:
+ # Path to the directory containing RAVE fire files
+ #
+ # SMOKE_DUST_FILE_PREFIX:
+ # Prefix of Smoke and Dust file name
+ #
+ #-----------------------------------------------------------------------
+ #
+ DO_SMOKE_DUST: false
+ EBB_DCYCLE: 1
+ COMINsmoke_default: ""
+ COMINfire_default: ""
+ SMOKE_DUST_FILE_PREFIX: "SMOKE_RRFS_data"
+
rocoto:
attrs: ""
cycledefs: ""
diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py
old mode 100644
new mode 100755
diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py
old mode 100644
new mode 100755
diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py
old mode 100644
new mode 100755
diff --git a/ush/create_ufs_configure_file.py b/ush/create_ufs_configure_file.py
old mode 100644
new mode 100755
diff --git a/ush/fire_emiss_tools.py b/ush/fire_emiss_tools.py
new file mode 100755
index 0000000000..fa68628e10
--- /dev/null
+++ b/ush/fire_emiss_tools.py
@@ -0,0 +1,194 @@
+#!/usr/bin/env python3
+
+import os
+import numpy as np
+import xarray as xr
+from datetime import datetime
+from netCDF4 import Dataset
+import interp_tools as i_tools
+
+#Compute average FRP from raw RAVE for the previous 24 hours
+def averaging_FRP(ebb_dcycle, fcst_dates, cols, rows, intp_dir, rave_to_intp, veg_map, tgt_area, beta, fg_to_ug, to_s):
+ base_array = np.zeros((cols*rows))
+ frp_daily = base_array
+ ebb_smoke_total = []
+ ebb_smoke_hr = []
+ frp_avg_hr = []
+
+ try:
+ ef_map = xr.open_dataset(veg_map)
+ emiss_factor = ef_map.emiss_factor.values
+ target_area = tgt_area.values
+ except (FileNotFoundError, IOError, OSError, RuntimeError, ValueError, TypeError, KeyError, IndexError, MemoryError) as e:
+ print(f"Error loading vegetation map: {e}")
+ return np.zeros((cols, rows)), np.zeros((cols, rows))
+
+ num_files = 0
+ for cycle in fcst_dates:
+ try:
+ file_path = os.path.join(intp_dir, f'{rave_to_intp}{cycle}00_{cycle}59.nc')
+ if os.path.exists(file_path):
+ try:
+ with xr.open_dataset(file_path) as nc:
+ open_fre = nc.FRE[0, :, :].values
+ open_frp = nc.frp_avg_hr[0, :, :].values
+ num_files += 1
+ if ebb_dcycle == 1:
+ print('Processing emissions for ebb_dcyc 1')
+ print(file_path)
+ frp_avg_hr.append(open_frp)
+ ebb_hourly = (open_fre * emiss_factor * beta * fg_to_ug) / (target_area * to_s)
+ ebb_smoke_total.append(np.where(open_frp > 0, ebb_hourly, 0))
+ else:
+ print('Processing emissions for ebb_dcyc 2')
+ ebb_hourly = open_fre * emiss_factor * beta * fg_to_ug / target_area
+ ebb_smoke_total.append(np.where(open_frp > 0, ebb_hourly, 0).ravel())
+ frp_daily += np.where(open_frp > 0, open_frp, 0).ravel()
+ except (FileNotFoundError, IOError, OSError, RuntimeError, ValueError, TypeError, KeyError, IndexError, MemoryError) as e:
+ print(f"Error processing NetCDF file {file_path}: {e}")
+ if ebb_dcycle == 1:
+ frp_avg_hr.append(np.zeros((cols, rows)))
+ ebb_smoke_total.append(np.zeros((cols, rows)))
+ else:
+ if ebb_dcycle == 1:
+ frp_avg_hr.append(np.zeros((cols, rows)))
+ ebb_smoke_total.append(np.zeros((cols, rows)))
+ except Exception as e:
+ print(f"Error processing cycle {cycle}: {e}")
+ if ebb_dcycle == 1:
+ frp_avg_hr.append(np.zeros((cols, rows)))
+ ebb_smoke_total.append(np.zeros((cols, rows)))
+
+ if num_files > 0:
+ if ebb_dcycle == 1:
+ frp_avg_reshaped = np.stack(frp_avg_hr, axis=0)
+ ebb_total_reshaped = np.stack(ebb_smoke_total, axis=0)
+ else:
+ summed_array = np.sum(np.array(ebb_smoke_total), axis=0)
+ num_zeros = len(ebb_smoke_total) - np.sum([arr == 0 for arr in ebb_smoke_total], axis=0)
+ safe_zero_count = np.where(num_zeros == 0, 1, num_zeros)
+ result_array = [summed_array[i] / 2 if safe_zero_count[i] == 1 else summed_array[i] / safe_zero_count[i] for i in range(len(safe_zero_count))]
+ result_array = np.array(result_array)
+ result_array[num_zeros == 0] = summed_array[num_zeros == 0]
+ ebb_total = result_array.reshape(cols, rows)
+ ebb_total_reshaped = ebb_total / 3600
+ temp_frp = [frp_daily[i] / 2 if safe_zero_count[i] == 1 else frp_daily[i] / safe_zero_count[i] for i in range(len(safe_zero_count))]
+ temp_frp = np.array(temp_frp)
+ temp_frp[num_zeros == 0] = frp_daily[num_zeros == 0]
+ frp_avg_reshaped = temp_frp.reshape(cols, rows)
+ else:
+ if ebb_dcycle == 1:
+ frp_avg_reshaped = np.zeros((24, cols, rows))
+ ebb_total_reshaped = np.zeros((24, cols, rows))
+ else:
+ frp_avg_reshaped = np.zeros((cols, rows))
+ ebb_total_reshaped = np.zeros((cols, rows))
+
+ return(frp_avg_reshaped, ebb_total_reshaped)
+
+def estimate_fire_duration(intp_avail_hours, intp_dir, fcst_dates, current_day, cols, rows, rave_to_intp):
+ # There are two steps here.
+ # 1) First day simulation no RAVE from previous 24 hours available (fire age is set to zero)
+ # 2) previous files are present (estimate fire age as the difference between the date of the current cycle and the date when the fire was last observed within 24 hours)
+ t_fire = np.zeros((cols, rows))
+
+ for date_str in fcst_dates:
+ try:
+ date_file = int(date_str[:10])
+ print('Date processing for fire duration', date_file)
+ file_path = os.path.join(intp_dir, f'{rave_to_intp}{date_str}00_{date_str}59.nc')
+
+ if os.path.exists(file_path):
+ try:
+ with xr.open_dataset(file_path) as open_intp:
+ FRP = open_intp.frp_avg_hr[0, :, :].values
+ dates_filtered = np.where(FRP > 0, date_file, 0)
+ t_fire = np.maximum(t_fire, dates_filtered)
+ except (FileNotFoundError, IOError, OSError,RuntimeError,ValueError, TypeError, KeyError, IndexError, MemoryError) as e:
+ print(f"Error processing NetCDF file {file_path}: {e}")
+ except Exception as e:
+ print(f"Error processing date {date_str}: {e}")
+
+ t_fire_flattened = t_fire.flatten()
+ t_fire_flattened = [int(i) if i != 0 else 0 for i in t_fire_flattened]
+
+ try:
+ fcst_t = datetime.strptime(current_day, '%Y%m%d%H')
+ hr_ends = [datetime.strptime(str(hr), '%Y%m%d%H') if hr != 0 else 0 for hr in t_fire_flattened]
+ te = [(fcst_t - i).total_seconds() / 3600 if i != 0 else 0 for i in hr_ends]
+ except ValueError as e:
+ print(f"Error processing forecast time {current_day}: {e}")
+ te = np.zeros((rows, cols))
+
+ return(te)
+
+def save_fire_dur(cols, rows, te):
+ fire_dur = np.array(te).reshape(cols, rows)
+ return(fire_dur)
+
+def produce_emiss_24hr_file(ebb_dcycle, frp_reshaped, intp_dir, current_day, tgt_latt, tgt_lont, ebb_smoke_reshaped, cols, rows):
+ file_path = os.path.join(intp_dir, f'SMOKE_RRFS_data_{current_day}00.nc')
+ with Dataset(file_path, 'w') as fout:
+ i_tools.create_emiss_file(fout, cols, rows)
+ i_tools.Store_latlon_by_Level(fout, 'geolat', tgt_latt, 'cell center latitude', 'degrees_north', '2D', '-9999.f', '1.f')
+ i_tools.Store_latlon_by_Level(fout, 'geolon', tgt_lont, 'cell center longitude', 'degrees_east', '2D', '-9999.f', '1.f')
+
+ i_tools.Store_by_Level(fout,'frp_avg_hr','mean Fire Radiative Power','MW','3D','0.f','1.f')
+ fout.variables['frp_avg_hr'][:, :, :] = frp_reshaped
+ i_tools.Store_by_Level(fout,'ebb_smoke_hr','EBB emissions','ug m-2 s-1','3D','0.f','1.f')
+ fout.variables['ebb_smoke_hr'][:, :, :] = ebb_smoke_reshaped
+
+def produce_emiss_file(xarr_hwp, frp_avg_reshaped, totprcp_ave_arr, xarr_totprcp, intp_dir, current_day, tgt_latt, tgt_lont, ebb_tot_reshaped, fire_age, cols, rows):
+ # Ensure arrays are not negative or NaN
+ frp_avg_reshaped = np.clip(frp_avg_reshaped, 0, None)
+ frp_avg_reshaped = np.nan_to_num(frp_avg_reshaped)
+
+ ebb_tot_reshaped = np.clip(ebb_tot_reshaped, 0, None)
+ ebb_tot_reshaped = np.nan_to_num(ebb_tot_reshaped)
+
+ fire_age = np.clip(fire_age, 0, None)
+ fire_age = np.nan_to_num(fire_age)
+
+ # Filter HWP Prcp arrays to be non-negative and replace NaNs
+ filtered_hwp = xarr_hwp.where(frp_avg_reshaped > 0, 0).fillna(0)
+ filtered_prcp = xarr_totprcp.where(frp_avg_reshaped > 0, 0).fillna(0)
+
+ # Filter based on ebb_rate
+ ebb_rate_threshold = 0 # Define an appropriate threshold if needed
+ mask = (ebb_tot_reshaped > ebb_rate_threshold)
+
+ filtered_hwp = filtered_hwp.where(mask, 0).fillna(0)
+ filtered_prcp = filtered_prcp.where(mask, 0).fillna(0)
+ frp_avg_reshaped = frp_avg_reshaped * mask
+ ebb_tot_reshaped = ebb_tot_reshaped * mask
+ fire_age = fire_age * mask
+
+ # Produce emiss file
+ file_path = os.path.join(intp_dir, f'SMOKE_RRFS_data_{current_day}00.nc')
+
+ try:
+ with Dataset(file_path, 'w') as fout:
+ i_tools.create_emiss_file(fout, cols, rows)
+ i_tools.Store_latlon_by_Level(fout, 'geolat', tgt_latt, 'cell center latitude', 'degrees_north', '2D', '-9999.f', '1.f')
+ i_tools.Store_latlon_by_Level(fout, 'geolon', tgt_lont, 'cell center longitude', 'degrees_east', '2D', '-9999.f', '1.f')
+
+ print('Storing different variables')
+ i_tools.Store_by_Level(fout, 'frp_davg', 'Daily mean Fire Radiative Power', 'MW', '3D', '0.f', '1.f')
+ fout.variables['frp_davg'][0, :, :] = frp_avg_reshaped
+ i_tools.Store_by_Level(fout, 'ebb_rate', 'Total EBB emission', 'ug m-2 s-1', '3D', '0.f', '1.f')
+ fout.variables['ebb_rate'][0, :, :] = ebb_tot_reshaped
+ i_tools.Store_by_Level(fout, 'fire_end_hr', 'Hours since fire was last detected', 'hrs', '3D', '0.f', '1.f')
+ fout.variables['fire_end_hr'][0, :, :] = fire_age
+ i_tools.Store_by_Level(fout, 'hwp_davg', 'Daily mean Hourly Wildfire Potential', 'none', '3D', '0.f', '1.f')
+ fout.variables['hwp_davg'][0, :, :] = filtered_hwp
+ i_tools.Store_by_Level(fout, 'totprcp_24hrs', 'Sum of precipitation', 'm', '3D', '0.f', '1.f')
+ fout.variables['totprcp_24hrs'][0, :, :] = filtered_prcp
+
+ print("Emissions file created successfully")
+ return "Emissions file created successfully"
+
+ except (OSError, IOError) as e:
+ print(f"Error creating or writing to NetCDF file {file_path}: {e}")
+ return f"Error creating or writing to NetCDF file {file_path}: {e}"
+
+ return "Emissions file created successfully"
diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py
index c671a69da8..fa214f93b4 100755
--- a/ush/generate_FV3LAM_wflow.py
+++ b/ush/generate_FV3LAM_wflow.py
@@ -418,6 +418,11 @@ def generate_FV3LAM_wflow(
"print_diff_pgr": PRINT_DIFF_PGR,
})
+ if DO_SMOKE_DUST:
+ gfs_physics_nml_dict.update({
+ "ebb_dcycle": EBB_DCYCLE,
+ })
+
if CPL_AQM:
gfs_physics_nml_dict.update({
"cplaqm": True,
diff --git a/ush/generate_fire_emissions.py b/ush/generate_fire_emissions.py
new file mode 100755
index 0000000000..d5634c671c
--- /dev/null
+++ b/ush/generate_fire_emissions.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python3
+
+#########################################################################
+# #
+# Python script for fire emissions preprocessing from RAVE FRP and FRE #
+# (Li et al.,2022). #
+# johana.romero-alvarez@noaa.gov #
+# #
+#########################################################################
+
+import sys
+import os
+import time
+import numpy as np
+import fire_emiss_tools as femmi_tools
+import HWP_tools
+import interp_tools as i_tools
+
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Workflow
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+def generate_emiss_workflow(staticdir, ravedir, intp_dir, predef_grid, ebb_dcycle, restart_interval):
+
+ # staticdir: path to FIX files
+ # ravedir: path to RAVE fire data files (hourly), typically working directory (DATA)
+ # intp_dir: path to interpolated RAVE data file for previous cycles (DATA_SHARE)
+ # nwges_dir: path to restart files, working directory (DATA)
+ # ----------------------------------------------------------------------
+ # Import envs from workflow and get the predefined grid
+ # Set variable names, constants and unit conversions
+ # Set predefined grid
+ # Set directories
+ # ----------------------------------------------------------------------
+ beta = 0.3
+ fg_to_ug = 1e6
+ to_s = 3600
+ current_day = os.environ.get("CDATE")
+# nwges_dir = os.environ.get("NWGES_DIR")
+ nwges_dir = os.environ.get("DATA")
+ vars_emis = ["FRP_MEAN","FRE"]
+ cols, rows = (2700, 3950) if predef_grid == 'RRFS_NA_3km' else (1092, 1820)
+ print('PREDEF GRID',predef_grid,'cols,rows',cols,rows)
+ print('WARNING, EBB_DCYCLE set to', ebb_dcycle, 'emissions are comes from same day satellite obs')
+ #used later when working with ebb_dcycle 1 or 2
+ ebb_dcycle = float(ebb_dcycle)
+
+ print("CDATE:",current_day)
+ print("DATA:", nwges_dir)
+
+ #This is used later when copying the rrfs restart file
+ restart_interval = restart_interval.split()
+ restart_interval_list = [float(num) for num in restart_interval]
+ len_restart_interval = len(restart_interval_list)
+
+ #Setting the directories
+ veg_map = staticdir+'/veg_map.nc'
+ RAVE= ravedir
+ rave_to_intp = predef_grid+"_intp_"
+ grid_in = staticdir+'/grid_in.nc'
+ weightfile = staticdir+'/weight_file.nc'
+ grid_out = staticdir+'/ds_out_base.nc'
+ hourly_hwpdir = os.path.join(nwges_dir,'RESTART')
+
+ # ----------------------------------------------------------------------
+ # Workflow
+ # ----------------------------------------------------------------------
+
+ # ----------------------------------------------------------------------
+ # Sort raw RAVE, create source and target fields, and compute emissions
+ # ----------------------------------------------------------------------
+ fcst_dates = i_tools.date_range(current_day, ebb_dcycle)
+ intp_avail_hours, intp_non_avail_hours, inp_files_2use = i_tools.check_for_intp_rave(intp_dir, fcst_dates, rave_to_intp)
+ rave_avail, rave_avail_hours, rave_nonavail_hours_test, first_day = i_tools.check_for_raw_rave(RAVE, intp_non_avail_hours, intp_avail_hours)
+ srcfield, tgtfield, tgt_latt, tgt_lont, srcgrid, tgtgrid, src_latt, tgt_area = i_tools.creates_st_fields(grid_in, grid_out, intp_dir, rave_avail_hours)
+
+ if not first_day:
+ regridder, use_dummy_emiss = i_tools.generate_regrider(rave_avail_hours, srcfield, tgtfield, weightfile, inp_files_2use, intp_avail_hours)
+ if use_dummy_emiss:
+ print('RAVE files corrupted, no data to process')
+ i_tools.create_dummy(intp_dir, current_day, tgt_latt, tgt_lont, cols, rows)
+ else:
+ i_tools.interpolate_rave(RAVE, rave_avail, rave_avail_hours,
+ use_dummy_emiss, vars_emis, regridder, srcgrid, tgtgrid, rave_to_intp,
+ intp_dir, src_latt, tgt_latt, tgt_lont, cols, rows)
+
+ if ebb_dcycle == 1:
+ print('Processing emissions forebb_dcyc 1')
+ frp_avg_reshaped, ebb_total_reshaped = femmi_tools.averaging_FRP(ebb_dcycle, fcst_dates, cols, rows, intp_dir, rave_to_intp, veg_map, tgt_area, beta, fg_to_ug, to_s)
+ femmi_tools.produce_emiss_24hr_file(ebb_dcycle, frp_avg_reshaped, nwges_dir, current_day, tgt_latt, tgt_lont, ebb_total_reshaped, cols, rows)
+ elif ebb_dcycle == 2:
+ print('Restart dates to process',fcst_dates)
+ hwp_avail_hours, hwp_non_avail_hours = HWP_tools.check_restart_files(hourly_hwpdir, fcst_dates)
+ restart_avail, restart_nonavail_hours_test = HWP_tools.copy_missing_restart(nwges_dir, hwp_non_avail_hours, hourly_hwpdir, len_restart_interval)
+ hwp_ave_arr, xarr_hwp, totprcp_ave_arr, xarr_totprcp = HWP_tools.process_hwp(fcst_dates, hourly_hwpdir, cols, rows, intp_dir, rave_to_intp)
+ frp_avg_reshaped, ebb_total_reshaped = femmi_tools.averaging_FRP(ebb_dcycle, fcst_dates, cols, rows, intp_dir, rave_to_intp, veg_map, tgt_area, beta, fg_to_ug, to_s)
+ #Fire end hours processing
+ te = femmi_tools.estimate_fire_duration(intp_avail_hours, intp_dir, fcst_dates, current_day, cols, rows, rave_to_intp)
+ fire_age = femmi_tools.save_fire_dur(cols, rows, te)
+ #produce emiss file
+ femmi_tools.produce_emiss_file(xarr_hwp, frp_avg_reshaped, totprcp_ave_arr, xarr_totprcp, nwges_dir, current_day, tgt_latt, tgt_lont, ebb_total_reshaped, fire_age, cols, rows)
+ else:
+ print('First day true, no RAVE files available. Use dummy emissions file')
+ i_tools.create_dummy(intp_dir, current_day, tgt_latt, tgt_lont, cols, rows)
+
+if __name__ == '__main__':
+
+ print('')
+ print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
+ print('Welcome to interpolating RAVE and processing fire emissions!')
+ print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
+ print('')
+ generate_emiss_workflow(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5], sys.argv[6])
+ print('')
+ print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
+ print('Successful Completion. Bye!')
+ print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
+ print('')
+
diff --git a/ush/get_crontab_contents.py b/ush/get_crontab_contents.py
old mode 100644
new mode 100755
index 6b0548141c..fbdf80dae9
--- a/ush/get_crontab_contents.py
+++ b/ush/get_crontab_contents.py
@@ -162,7 +162,7 @@ def delete_crontab_line(called_from_cron, machine, crontab_line, debug):
crontab_contents = crontab_contents.replace(crontab_line + "\n", "")
crontab_contents = crontab_contents.replace(crontab_line, "")
else:
- print(f"\nWARNING: line not found in crontab, nothing to remove:\n {crontab_line}\n")
+ print(f"\nWARNING: line not found in crontab, nothing to remove:\n{crontab_line}\n")
run_command(f"""echo '{crontab_contents}' | {crontab_cmd}""")
diff --git a/ush/interp_tools.py b/ush/interp_tools.py
new file mode 100755
index 0000000000..df2f495bbb
--- /dev/null
+++ b/ush/interp_tools.py
@@ -0,0 +1,233 @@
+#!/usr/bin/env python3
+
+import datetime as dt
+import pandas as pd
+import os
+import fnmatch
+#import ESMF
+import esmpy as ESMF
+import xarray as xr
+import numpy as np
+from netCDF4 import Dataset
+
+#Create date range, this is later used to search for RAVE and HWP from previous 24 hours
+def date_range(current_day, ebb_dcycle):
+ print(f'Searching for interpolated RAVE for {current_day}')
+ print('EBB CYCLE:',ebb_dcycle)
+
+ fcst_datetime = dt.datetime.strptime(current_day, "%Y%m%d%H")
+
+ if ebb_dcycle == 1:
+ print('Find RAVE for ebb_dcyc 1')
+ fcst_dates = pd.date_range(start=fcst_datetime, periods=24, freq='H').strftime("%Y%m%d%H")
+ else:
+ start_datetime = fcst_datetime - dt.timedelta(days=1, hours=1)
+
+ fcst_dates = pd.date_range(start=start_datetime, periods=24, freq='H').strftime("%Y%m%d%H")
+
+ print(f'Current cycle: {fcst_datetime}')
+ return(fcst_dates)
+
+# Check if interpolated RAVE is available for the previous 24 hours
+def check_for_intp_rave(intp_dir, fcst_dates, rave_to_intp):
+ intp_avail_hours = []
+ intp_non_avail_hours = []
+ # There are four situations here.
+ # 1) the file is missing (interpolate a new file)
+ # 2) the file is present (use it)
+ # 3) there is a link, but it's broken (interpolate a new file)
+ # 4) there is a valid link (use it)
+ for date in fcst_dates:
+ file_name = f'{rave_to_intp}{date}00_{date}59.nc'
+ file_path = os.path.join(intp_dir, file_name)
+ file_exists = os.path.isfile(file_path)
+ is_link = os.path.islink(file_path)
+ is_valid_link = is_link and os.path.exists(file_path)
+
+ if file_exists or is_valid_link:
+ print(f'RAVE interpolated file available for {file_name}')
+ intp_avail_hours.append(date)
+ else:
+ print(f'Interpolated file non available, interpolate RAVE for {file_name}')
+ intp_non_avail_hours.append(date)
+
+ print(f'Available interpolated files for hours: {intp_avail_hours}, Non available interpolated files for hours: {intp_non_avail_hours}')
+
+ inp_files_2use = len(intp_avail_hours) > 0
+
+ return(intp_avail_hours, intp_non_avail_hours, inp_files_2use)
+
+#Check if raw RAVE in intp_non_avail_hours list is available for interpolation
+def check_for_raw_rave(RAVE, intp_non_avail_hours, intp_avail_hours):
+ rave_avail = []
+ rave_avail_hours = []
+ rave_nonavail_hours_test = []
+ for date in intp_non_avail_hours:
+ wildcard_name = f'*-3km*{date}*{date}59590*.nc'
+ name_retro = f'*3km*{date}*{date}*.nc'
+ matching_files = [f for f in os.listdir(RAVE) if fnmatch.fnmatch(f, wildcard_name) or fnmatch.fnmatch(f, name_retro)]
+ print(f'Find raw RAVE: {matching_files}')
+ if not matching_files:
+ print(f'Raw RAVE non_available for interpolation {date}')
+ rave_nonavail_hours_test.append(date)
+ else:
+ print(f'Raw RAVE available for interpolation {matching_files}')
+ rave_avail.append(matching_files)
+ rave_avail_hours.append(date)
+
+ print(f"Raw RAVE available: {rave_avail_hours}, rave_nonavail_hours: {rave_nonavail_hours_test}")
+ first_day = not rave_avail_hours and not intp_avail_hours
+
+ print(f'FIRST DAY?: {first_day}')
+ return(rave_avail, rave_avail_hours, rave_nonavail_hours_test, first_day)
+
+#Create source and target fields
+def creates_st_fields(grid_in, grid_out, intp_dir, rave_avail_hours):
+
+ # Open datasets with context managers
+ with xr.open_dataset(grid_in) as ds_in, xr.open_dataset(grid_out) as ds_out:
+ tgt_area = ds_out['area']
+ tgt_latt = ds_out['grid_latt']
+ tgt_lont = ds_out['grid_lont']
+ src_latt = ds_in['grid_latt']
+
+ srcgrid = ESMF.Grid(np.array(src_latt.shape), staggerloc=[ESMF.StaggerLoc.CENTER, ESMF.StaggerLoc.CORNER], coord_sys=ESMF.CoordSys.SPH_DEG)
+ tgtgrid = ESMF.Grid(np.array(tgt_latt.shape), staggerloc=[ESMF.StaggerLoc.CENTER, ESMF.StaggerLoc.CORNER], coord_sys=ESMF.CoordSys.SPH_DEG)
+
+ srcfield = ESMF.Field(srcgrid, name='test', staggerloc=ESMF.StaggerLoc.CENTER)
+ tgtfield = ESMF.Field(tgtgrid, name='test', staggerloc=ESMF.StaggerLoc.CENTER)
+
+ print('Grid in and out files available. Generating target and source fields')
+ return(srcfield, tgtfield, tgt_latt, tgt_lont, srcgrid, tgtgrid, src_latt, tgt_area)
+
+#Define output and variable meta data
+def create_emiss_file(fout, cols, rows):
+ """Create necessary dimensions for the emission file."""
+ fout.createDimension('t', None)
+ fout.createDimension('lat', cols)
+ fout.createDimension('lon', rows)
+ setattr(fout, 'PRODUCT_ALGORITHM_VERSION', 'Beta')
+ setattr(fout, 'TIME_RANGE', '1 hour')
+
+def Store_latlon_by_Level(fout, varname, var, long_name, units, dim, fval, sfactor):
+ """Store a 2D variable (latitude/longitude) in the file."""
+ var_out = fout.createVariable(varname, 'f4', ('lat','lon'))
+ var_out.units=units
+ var_out.long_name=long_name
+ var_out.standard_name=varname
+ fout.variables[varname][:]=var
+ var_out.FillValue=fval
+ var_out.coordinates='geolat geolon'
+
+def Store_by_Level(fout, varname, long_name, units, dim, fval, sfactor):
+ """Store a 3D variable (time, latitude/longitude) in the file."""
+ var_out = fout.createVariable(varname, 'f4', ('t','lat','lon'))
+ var_out.units=units
+ var_out.long_name = long_name
+ var_out.standard_name=long_name
+ var_out.FillValue=fval
+ var_out.coordinates='t geolat geolon'
+
+#create a dummy rave interpolated file if first day or regridder fails
+def create_dummy(intp_dir, current_day, tgt_latt, tgt_lont, cols, rows):
+ file_path = os.path.join(intp_dir, f'SMOKE_RRFS_data_{current_day}00.nc')
+ dummy_file = np.zeros((cols, rows)) # 2D zero field; written into each time-indexed ('3D') variable at time index 0
+ with Dataset(file_path, 'w') as fout:
+ create_emiss_file(fout, cols, rows)
+ # Store latitude and longitude
+ Store_latlon_by_Level(fout, 'geolat', tgt_latt, 'cell center latitude', 'degrees_north', '2D','-9999.f','1.f')
+ Store_latlon_by_Level(fout, 'geolon', tgt_lont, 'cell center longitude', 'degrees_east', '2D','-9999.f','1.f')
+
+ # Initialize and store each variable
+ Store_by_Level(fout,'frp_davg','Daily mean Fire Radiative Power','MW','3D','0.f','1.f')
+ fout.variables['frp_davg'][0, :, :] = dummy_file
+ Store_by_Level(fout,'ebb_rate','Total EBB emission','ug m-2 s-1','3D','0.f','1.f')
+ fout.variables['ebb_rate'][0, :, :] = dummy_file
+ Store_by_Level(fout,'fire_end_hr','Hours since fire was last detected','hrs','3D','0.f','1.f')
+ fout.variables['fire_end_hr'][0, :, :] = dummy_file
+ Store_by_Level(fout,'hwp_davg','Daily mean Hourly Wildfire Potential', 'none','3D','0.f','1.f')
+ fout.variables['hwp_davg'][0, :, :] = dummy_file
+ Store_by_Level(fout,'totprcp_24hrs','Sum of precipitation', 'm', '3D', '0.f','1.f')
+ fout.variables['totprcp_24hrs'][0, :, :] = dummy_file
+
+ return "Emissions dummy file created successfully"
+
+#generate regridder
+def generate_regrider(rave_avail_hours, srcfield, tgtfield, weightfile, inp_files_2use, intp_avail_hours):
+ print('Checking conditions for generating regridder.')
+ use_dummy_emiss = len(rave_avail_hours) == 0 and len(intp_avail_hours) == 0
+ regridder = None
+
+ if not use_dummy_emiss:
+ try:
+ print('Generating regridder.')
+ regridder = ESMF.RegridFromFile(srcfield, tgtfield, weightfile)
+ print('Regridder generated successfully.')
+ except ValueError as e:
+ print(f'Regridder failed due to a ValueError: {e}.')
+ except OSError as e:
+ print(f'Regridder failed due to an OSError: {e}. Check if the weight file exists and is accessible.')
+ except (FileNotFoundError, IOError, RuntimeError, TypeError, KeyError, IndexError, MemoryError) as e:
+ print(f'Regridder failed due to corrupted file: {e}. Check if RAVE file has a different grid or format. ')
+ except Exception as e:
+ print(f'An unexpected error occurred while generating regridder: {e}.')
+ else:
+ use_dummy_emiss = True
+
+ return(regridder, use_dummy_emiss)
+
+#process RAVE available for interpolation
+def interpolate_rave(RAVE, rave_avail, rave_avail_hours, use_dummy_emiss, vars_emis, regridder,
+ srcgrid, tgtgrid, rave_to_intp, intp_dir, src_latt, tgt_latt, tgt_lont, cols, rows):
+ for index, current_hour in enumerate(rave_avail_hours):
+ file_name = rave_avail[index]
+ rave_file_path = os.path.join(RAVE, file_name[0])
+
+ print(f"Processing file: {rave_file_path} for hour: {current_hour}")
+
+ if not use_dummy_emiss and os.path.exists(rave_file_path):
+ try:
+ with xr.open_dataset(rave_file_path, decode_times=False) as ds_togrid:
+ try:
+ ds_togrid = ds_togrid[['FRP_MEAN', 'FRE']]
+ except KeyError as e:
+ print(f"Missing required variables in {rave_file_path}: {e}")
+ continue
+
+ output_file_path = os.path.join(intp_dir, f'{rave_to_intp}{current_hour}00_{current_hour}59.nc')
+ print('=============before regridding===========', 'FRP_MEAN')
+ print(np.sum(ds_togrid['FRP_MEAN'], axis=(1, 2)))
+
+ try:
+ with Dataset(output_file_path, 'w') as fout:
+ create_emiss_file(fout, cols, rows)
+ Store_latlon_by_Level(fout, 'geolat', tgt_latt, 'cell center latitude', 'degrees_north', '2D', '-9999.f', '1.f')
+ Store_latlon_by_Level(fout, 'geolon', tgt_lont, 'cell center longitude', 'degrees_east', '2D', '-9999.f', '1.f')
+
+ for svar in vars_emis:
+ try:
+ srcfield = ESMF.Field(srcgrid, name=svar, staggerloc=ESMF.StaggerLoc.CENTER)
+ tgtfield = ESMF.Field(tgtgrid, name=svar, staggerloc=ESMF.StaggerLoc.CENTER)
+ src_rate = ds_togrid[svar].fillna(0)
+ src_QA = xr.where(ds_togrid['FRE'] > 1000, src_rate, 0.0)
+ srcfield.data[...] = src_QA[0, :, :]
+ tgtfield = regridder(srcfield, tgtfield)
+
+ if svar == 'FRP_MEAN':
+ Store_by_Level(fout, 'frp_avg_hr', 'Mean Fire Radiative Power', 'MW', '3D', '0.f', '1.f')
+ tgt_rate = tgtfield.data
+ fout.variables['frp_avg_hr'][0, :, :] = tgt_rate
+ print('=============after regridding===========' + svar)
+ print(np.sum(tgt_rate))
+ elif svar == 'FRE':
+ Store_by_Level(fout, 'FRE', 'FRE', 'MJ', '3D', '0.f', '1.f')
+ tgt_rate = tgtfield.data
+ fout.variables['FRE'][0, :, :] = tgt_rate
+ except (ValueError, KeyError) as e:
+ print(f"Error processing variable {svar} in {rave_file_path}: {e}")
+ except (OSError, IOError, RuntimeError, FileNotFoundError, TypeError, IndexError, MemoryError) as e:
+ print(f"Error creating or writing to NetCDF file {output_file_path}: {e}")
+ except (OSError, IOError, RuntimeError, FileNotFoundError, TypeError, IndexError, MemoryError) as e:
+ print(f"Error reading NetCDF file {rave_file_path}: {e}")
+ else:
+ print(f"File not found or dummy emissions required: {rave_file_path}")
diff --git a/ush/job_preamble.sh b/ush/job_preamble.sh
index ecfb94fb50..4c4fff2478 100644
--- a/ush/job_preamble.sh
+++ b/ush/job_preamble.sh
@@ -32,6 +32,8 @@ export DATAROOT="${DATAROOT:-${PTMP}/${envir}/tmp}"
export DCOMROOT="${DCOMROOT:-${PTMP}/${envir}/dcom}"
export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p "${DATA_SHARE}"
+
export DBNROOT="${DBNROOT:-${DBNROOT_default}}"
export SENDECF="${SENDECF:-${SENDECF_default}}"
export SENDDBN="${SENDDBN:-${SENDDBN_default}}"
diff --git a/ush/launch_FV3LAM_wflow.sh b/ush/launch_FV3LAM_wflow.sh
index 7c26511f4f..a733d0a6c2 100644
--- a/ush/launch_FV3LAM_wflow.sh
+++ b/ush/launch_FV3LAM_wflow.sh
@@ -222,7 +222,7 @@ Output of rocotostat_cmd is:
~~~~~~~~~~~~~~~~~~~~~~~~~~~
${rocotostat_output}
-" >> "${WFLOW_LAUNCH_LOG_FN}" 2>&1
+" > "${WFLOW_LAUNCH_LOG_FN}" 2>&1
#
#-----------------------------------------------------------------------
#
@@ -353,9 +353,9 @@ script for this experiment:
# Remove CRONTAB_LINE from cron table
#
if [ "${called_from_cron}" = "TRUE" ]; then
- python3 $USHdir/get_crontab_contents.py --remove -m=${machine} -l="${CRONTAB_LINE}" -c -d
+ $USHdir/get_crontab_contents.py --remove -m=${machine} -l="${CRONTAB_LINE}" -c -d
else
- python3 $USHdir/get_crontab_contents.py --remove -m=${machine} -l="${CRONTAB_LINE}" -d
+ $USHdir/get_crontab_contents.py --remove -m=${machine} -l="${CRONTAB_LINE}" -d
fi
fi
#
diff --git a/ush/link_fix.py b/ush/link_fix.py
index f0d103d8ea..b106499424 100755
--- a/ush/link_fix.py
+++ b/ush/link_fix.py
@@ -207,7 +207,7 @@ def link_fix(
f"C*{dot_or_uscore}oro_data.tile{tile_rgnl}.halo{nh0}.nc",
f"C*{dot_or_uscore}oro_data.tile{tile_rgnl}.halo{nh4}.nc",
]
- if ccpp_phys_suite == "FV3_RAP" or ccpp_phys_suite == "FV3_HRRR" or ccpp_phys_suite == "FV3_GFS_v15_thompson_mynn_lam3km" or ccpp_phys_suite == "FV3_GFS_v17_p8":
+ if ccpp_phys_suite == "FV3_RAP" or ccpp_phys_suite == "FV3_HRRR" or ccpp_phys_suite == "FV3_HRRR_gf" or ccpp_phys_suite == "FV3_GFS_v15_thompson_mynn_lam3km" or ccpp_phys_suite == "FV3_GFS_v17_p8":
fns += [
f"C*{dot_or_uscore}oro_data_ss.tile{tile_rgnl}.halo{nh0}.nc",
f"C*{dot_or_uscore}oro_data_ls.tile{tile_rgnl}.halo{nh0}.nc",
diff --git a/ush/machine/derecho.yaml b/ush/machine/derecho.yaml
index 8bc768732f..f82e57cfd3 100644
--- a/ush/machine/derecho.yaml
+++ b/ush/machine/derecho.yaml
@@ -33,6 +33,9 @@ platform:
FIXshp: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/NaturalEarth
FIXaqm: /glade/work/chanhooj/SRW-AQM_DATA/fix_aqm
FIXemis: /glade/work/chanhooj/SRW-AQM_DATA/fix_emis
+ FIXsmoke: /glade/work/chanhooj/SRW-AQM_DATA/fix_smoke
+ FIXupp: /glade/work/chanhooj/SRW-AQM_DATA/fix_upp
+ FIXcrtm: /glade/work/chanhooj/SRW-AQM_DATA/fix_crtm
EXTRN_MDL_DATA_STORES: aws
data:
ics_lbcs:
@@ -44,7 +47,13 @@ data:
HRRR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh}
RAP: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh}
GSMGFS: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh}
+
cpl_aqm_parm:
COMINfire_default: /glade/work/chanhooj/SRW-AQM_DATA/aqm_data/RAVE_fire
COMINgefs_default: /glade/work/chanhooj/SRW-AQM_DATA/aqm_data/GEFS_DATA
NEXUS_GFS_SFC_DIR: /glade/work/chanhooj/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA
+
+smoke_dust_parm:
+ COMINsmoke_default: /glade/work/chanhooj/SRW-AQM_DATA/data_smoke_dust/RAVE_smoke_dust
+ COMINfire_default: /glade/work/chanhooj/SRW-AQM_DATA/data_smoke_dust/RAVE_fire
+
diff --git a/ush/machine/hera.yaml b/ush/machine/hera.yaml
index 80fbb8fc98..ca031f02f8 100644
--- a/ush/machine/hera.yaml
+++ b/ush/machine/hera.yaml
@@ -37,6 +37,9 @@ platform:
FIXshp: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/NaturalEarth
FIXaqm: /scratch2/NAGAPE/epic/SRW-AQM_DATA/fix_aqm
FIXemis: /scratch1/RDARCH/rda-arl-gpu/Barry.Baker/emissions/nexus
+ FIXsmoke: /scratch2/NAGAPE/epic/SRW-AQM_DATA/fix_smoke
+ FIXupp: /scratch2/NAGAPE/epic/SRW-AQM_DATA/fix_upp
+ FIXcrtm: /scratch2/NAGAPE/epic/SRW-AQM_DATA/fix_crtm
EXTRN_MDL_DATA_STORES: hpss aws nomads
cpl_aqm_parm:
@@ -44,6 +47,10 @@ cpl_aqm_parm:
COMINgefs_default: /scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/GEFS_DATA
NEXUS_GFS_SFC_DIR: /scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA
+smoke_dust_parm:
+ COMINsmoke_default: /scratch2/NAGAPE/epic/SRW-AQM_DATA/data_smoke_dust/RAVE_smoke_dust
+ COMINfire_default: /scratch2/NAGAPE/epic/SRW-AQM_DATA/data_smoke_dust/RAVE_fire
+
rocoto:
tasks:
metatask_run_ensemble:
diff --git a/ush/machine/hercules.yaml b/ush/machine/hercules.yaml
index e29801dd49..c995229db0 100644
--- a/ush/machine/hercules.yaml
+++ b/ush/machine/hercules.yaml
@@ -35,6 +35,9 @@ platform:
FIXshp: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/NaturalEarth
FIXaqm: /work/noaa/epic/SRW-AQM_DATA/fix_aqm
FIXemis: /work/noaa/epic/SRW-AQM_DATA/fix_emis
+ FIXsmoke: /work/noaa/epic/SRW-AQM_DATA/fix_smoke
+ FIXupp: /work/noaa/epic/SRW-AQM_DATA/fix_upp
+ FIXcrtm: /work/noaa/epic/SRW-AQM_DATA/fix_crtm
EXTRN_MDL_DATA_STORES: aws
data:
ics_lbcs:
@@ -51,3 +54,8 @@ cpl_aqm_parm:
COMINfire_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/RAVE_fire
COMINgefs_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GEFS_DATA
NEXUS_GFS_SFC_DIR: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA
+
+smoke_dust_parm:
+ COMINsmoke_default: /work/noaa/epic/SRW-AQM_DATA/data_smoke_dust/RAVE_smoke_dust
+ COMINfire_default: /work/noaa/epic/SRW-AQM_DATA/data_smoke_dust/RAVE_fire
+
diff --git a/ush/machine/orion.yaml b/ush/machine/orion.yaml
index 3f756e2836..619a879b7e 100644
--- a/ush/machine/orion.yaml
+++ b/ush/machine/orion.yaml
@@ -34,6 +34,9 @@ platform:
FIXshp: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/NaturalEarth
FIXaqm: /work/noaa/epic/SRW-AQM_DATA/fix_aqm
FIXemis: /work/noaa/epic/SRW-AQM_DATA/fix_emis
+ FIXsmoke: /work/noaa/epic/SRW-AQM_DATA/fix_smoke
+ FIXupp: /work/noaa/epic/SRW-AQM_DATA/fix_upp
+ FIXcrtm: /work/noaa/epic/SRW-AQM_DATA/fix_crtm
EXTRN_MDL_DATA_STORES: aws nomads
data:
ics_lbcs:
@@ -50,3 +53,8 @@ cpl_aqm_parm:
COMINfire_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/RAVE_fire
COMINgefs_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GEFS_DATA
NEXUS_GFS_SFC_DIR: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA
+
+smoke_dust_parm:
+ COMINsmoke_default: /work/noaa/epic/SRW-AQM_DATA/data_smoke_dust/RAVE_smoke_dust
+ COMINfire_default: /work/noaa/epic/SRW-AQM_DATA/data_smoke_dust/RAVE_fire
+
diff --git a/ush/mrms_pull_topofhour.py b/ush/mrms_pull_topofhour.py
old mode 100644
new mode 100755
diff --git a/ush/set_cycle_dates.py b/ush/set_cycle_dates.py
old mode 100644
new mode 100755
diff --git a/ush/set_fv3nml_ens_stoch_seeds.py b/ush/set_fv3nml_ens_stoch_seeds.py
old mode 100644
new mode 100755
diff --git a/ush/set_fv3nml_sfc_climo_filenames.py b/ush/set_fv3nml_sfc_climo_filenames.py
old mode 100644
new mode 100755
diff --git a/ush/set_gridparams_ESGgrid.py b/ush/set_gridparams_ESGgrid.py
old mode 100644
new mode 100755
diff --git a/ush/set_gridparams_GFDLgrid.py b/ush/set_gridparams_GFDLgrid.py
old mode 100644
new mode 100755
diff --git a/ush/set_predef_grid_params.py b/ush/set_predef_grid_params.py
old mode 100644
new mode 100755
diff --git a/ush/setup.py b/ush/setup.py
old mode 100644
new mode 100755
index 335ce229e1..2c1e772690
--- a/ush/setup.py
+++ b/ush/setup.py
@@ -1462,7 +1462,6 @@ def dict_find(user_dict, substring):
logging.debug(f'New fix file list:\n{fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"]=}')
logging.debug(f'New fix file mapping:\n{fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"]=}')
-
#
# -----------------------------------------------------------------------
#
@@ -1509,7 +1508,10 @@ def dict_find(user_dict, substring):
var_defns_cfg["workflow"][dates] = date_to_str(var_defns_cfg["workflow"][dates])
var_defns_cfg.dump(global_var_defns_fp)
-
+ if expt_config["workflow"].get("COLDSTART"):
+ coldstart_date=workflow_config["DATE_FIRST_CYCL"]
+ fn_pass=f"task_skip_coldstart_{coldstart_date}.txt"
+ open(os.path.join(exptdir,fn_pass), 'a').close()
#
# -----------------------------------------------------------------------
#
diff --git a/ush/update_input_nml.py b/ush/update_input_nml.py
old mode 100644
new mode 100755
diff --git a/ush/valid_param_vals.yaml b/ush/valid_param_vals.yaml
index fd21b3e1cf..b28429a897 100644
--- a/ush/valid_param_vals.yaml
+++ b/ush/valid_param_vals.yaml
@@ -34,6 +34,7 @@ valid_vals_CCPP_PHYS_SUITE: [
"FV3_RRFS_v1beta",
"FV3_WoFS_v0",
"FV3_HRRR",
+"FV3_HRRR_gf",
"FV3_RAP"
]
valid_vals_GFDLgrid_NUM_CELLS: [48, 96, 192, 384, 768, 1152, 3072]