From 1d53953a308361561147899fc18e1aaaf1ccab03 Mon Sep 17 00:00:00 2001
From: Walter Kolczynski - NOAA
Date: Mon, 12 Aug 2024 13:50:39 -0400
Subject: [PATCH 1/3] Add capability to run forecast in segments (#2795)

Adds the ability to run a forecast in segments instead of all at once.
To accomplish this, a new `FCST_BREAKPOINTS` setting is introduced, read
into a local `breakpnts` variable in `config.base`, containing a
comma-separated list of intermediate stopping points for the forecast.
This is combined with `FHMIN_GFS` and `FHMAX_GFS` to create a
comma-separated string `FCST_SEGMENTS` with all of the segment start/end
points, which is used by `config.fcst` and the rocoto workflow (see the
sketch following the diffstat below). The capability to parse these into
Python lists was added to wxflow in an accompanying PR. If `breakpnts`
is an empty string, the result is a single-segment forecast.

To accommodate the new segment metatasks that must be run serially, the
capability of `create_task()` was expanded to accept a dictionary key of
`is_serial`, which controls whether a metatask is parallel or serial
using pre-existing capability in rocoto. The default when not given is
parallel (i.e. most metatasks).

Resolves #2274
Refs NOAA-EMC/wxflow#39
Refs NOAA-EMC/wxflow#40
---
 ci/cases/yamls/gfs_extended_ci.yaml    |   1 +
 jobs/JGLOBAL_FORECAST                  |  13 +++-
 parm/archive/enkf.yaml.j2              |   4 +-
 parm/archive/enkf_restarta_grp.yaml.j2 |   4 +-
 parm/archive/gdas.yaml.j2              |   4 +-
 parm/archive/gdas_restarta.yaml.j2     |   2 +-
 parm/archive/gfs_netcdfa.yaml.j2       |   2 +-
 parm/archive/master_enkf.yaml.j2       |  22 ------
 parm/archive/master_gdas.yaml.j2       |  10 ---
 parm/archive/master_gfs.yaml.j2        |  12 ---
 parm/config/gefs/config.base           |   7 +-
 parm/config/gefs/config.fcst           |  13 +++-
 parm/config/gefs/yaml/defaults.yaml    |   1 +
 parm/config/gfs/config.aeroanl         |   2 +-
 parm/config/gfs/config.base            |  13 ++--
 parm/config/gfs/config.fcst            |  11 ++-
 parm/config/gfs/yaml/defaults.yaml     |   1 +
 sorc/wxflow                            |   2 +-
 ush/calcanl_gfs.py                     |   3 +-
 ush/forecast_predet.sh                 |   2 +-
 ush/python/pygfs/task/aero_analysis.py |   2 +-
 ush/python/pygfs/task/aero_prepobs.py  |   2 +-
 workflow/applications/applications.py  |  27 +++++++
 workflow/rocoto/gefs_tasks.py          | 103 +++++++++++++++++--------
 workflow/rocoto/gfs_tasks.py           |  63 +++++++++++----
 workflow/rocoto/rocoto.py              |   3 +-
 26 files changed, 209 insertions(+), 120 deletions(-)
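As an illustration of the mechanism above, a minimal Python sketch (the
workflow itself does this in bash in config.base and config.fcst; the values
are assumed for illustration, matching the extended CI case) of how
FCST_SEGMENTS is built and then consumed one segment at a time:

    # Assumed illustrative values (FCST_BREAKPOINTS="192", FHMAX_GFS=384)
    FHMIN_GFS = 0
    FHMAX_GFS = 384
    breakpnts = "192"  # FCST_BREAKPOINTS; an empty string yields one segment

    # config.base: FCST_SEGMENTS="${FHMIN_GFS},${breakpnts:+${breakpnts},}${FHMAX_GFS}"
    fcst_segments = ",".join(filter(None, [str(FHMIN_GFS), breakpnts, str(FHMAX_GFS)]))
    print(fcst_segments)  # 0,192,384

    # config.fcst: segment N runs from bound N to bound N+1
    bounds = [int(bound) for bound in fcst_segments.split(",")]
    for seg in range(len(bounds) - 1):
        fhmin, fhmax = bounds[seg], bounds[seg + 1]
        print(f"seg{seg}: FHMIN={fhmin} FHMAX={fhmax}")
    # seg0: FHMIN=0 FHMAX=192
    # seg1: FHMIN=192 FHMAX=384

Like bash's ${breakpnts:+...}, the filter(None, ...) drops the breakpoint
term when it is empty, leaving the single segment 0,384.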
diff --git a/ci/cases/yamls/gfs_extended_ci.yaml b/ci/cases/yamls/gfs_extended_ci.yaml
index 42ee612f3a..8caa942eed 100644
--- a/ci/cases/yamls/gfs_extended_ci.yaml
+++ b/ci/cases/yamls/gfs_extended_ci.yaml
@@ -9,5 +9,6 @@ base:
   DO_AWIPS: "NO"
   DO_NPOESS: "YES"
   DO_GENESIS_FSU: "NO"
+  FCST_BREAKPOINTS: 192
   FHMAX_GFS: 384
   FHMAX_HF_GFS: 120
diff --git a/jobs/JGLOBAL_FORECAST b/jobs/JGLOBAL_FORECAST
index 9998470618..e64a91d21c 100755
--- a/jobs/JGLOBAL_FORECAST
+++ b/jobs/JGLOBAL_FORECAST
@@ -116,6 +116,17 @@ fi
 # Remove the Temporary working directory
 ##########################################
 cd "${DATAROOT}" || true
-[[ "${KEEPDATA}" == "NO" ]] && rm -rf "${DATA}" "${DATArestart}" # do not remove DATAjob. It contains DATAoutput
+# do not remove DATAjob. It contains DATAoutput
+if [[ "${KEEPDATA}" == "NO" ]]; then
+  rm -rf "${DATA}"
+
+  # Determine if this is the last segment
+  commas="${FCST_SEGMENTS//[^,]}"
+  n_segs=${#commas}
+  if (( n_segs - 1 == ${FCST_SEGMENT:-0} )); then
+    # Only delete temporary restarts if it is the last segment
+    rm -rf "${DATArestart}"
+  fi
+fi
 
 exit 0
diff --git a/parm/archive/enkf.yaml.j2 b/parm/archive/enkf.yaml.j2
index bc5ef03cb8..92ed0095af 100644
--- a/parm/archive/enkf.yaml.j2
+++ b/parm/archive/enkf.yaml.j2
@@ -11,7 +11,7 @@ enkf:
     {% endfor %}
     - "logs/{{ cycle_YMDH }}/{{ RUN }}echgres.log"
     - "logs/{{ cycle_YMDH }}/{{ RUN }}esfc.log"
-    {% for grp in range(iaufhrs | length) %}
+    {% for grp in range(IAUFHRS | length) %}
     - "logs/{{ cycle_YMDH }}/{{ RUN }}ecen{{ '%03d' % grp }}.log"
     {% endfor %}
 
@@ -68,7 +68,7 @@ enkf:
 
     {% if DOIAU %}
     # IAU increments/analyses
-    {% for fhr in iaufhrs if fhr != 6 %}
+    {% for fhr in IAUFHRS if fhr != 6 %}
     {% if do_calc_increment %}
     # Store analyses instead of increments
     - "{{ COMIN_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % fhr }}.ensmean.nc"
diff --git a/parm/archive/enkf_restarta_grp.yaml.j2 b/parm/archive/enkf_restarta_grp.yaml.j2
index 41e03edc92..13c49d4239 100644
--- a/parm/archive/enkf_restarta_grp.yaml.j2
+++ b/parm/archive/enkf_restarta_grp.yaml.j2
@@ -36,14 +36,14 @@ enkf_restarta_grp:
     {% endif %}
 
     # Member increments
-    {% for iaufhr in iaufhrs if iaufhr != 6 %}
+    {% for iaufhr in IAUFHRS if iaufhr != 6 %}
     {% set iaufhr = iaufhr %}
     {% if do_calc_increment %}
     - "{{ COMIN_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % iaufhr }}.nc"
     {% else %}
     - "{{ COMIN_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}ratmi{{ '%03d' % iaufhr }}.nc"
     {% endif %}
-    {% endfor %} # iaufhr in iaufhrs
+    {% endfor %} # iaufhr in IAUFHRS
 
     # Conventional data
     {% if not lobsdiag_forenkf and not DO_JEDIATMENS %}
diff --git a/parm/archive/gdas.yaml.j2 b/parm/archive/gdas.yaml.j2
index ce5054a82f..db92141ede 100644
--- a/parm/archive/gdas.yaml.j2
+++ b/parm/archive/gdas.yaml.j2
@@ -49,7 +49,7 @@ gdas:
     - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.ensres.nc"
     {% if DOIAU %}
     # Ensemble IAU analysis residuals
-    {% for fhr in iaufhrs if fhr != 6 %}
+    {% for fhr in IAUFHRS if fhr != 6 %}
     - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % fhr }}.ensres.nc"
     {% endfor %}
     {% endif %}
@@ -108,7 +108,7 @@ gdas:
     {% endif %} # End of cycled data
 
     # Forecast and post logs
-    - "logs/{{ cycle_YMDH }}/{{ RUN }}fcst.log"
+    - "logs/{{ cycle_YMDH }}/{{ RUN }}fcst_seg0.log"
     {% for fhr in range(0, FHMAX + 1, 3) %}
     {% set fhr3 = '%03d' % fhr %}
diff --git a/parm/archive/gdas_restarta.yaml.j2 b/parm/archive/gdas_restarta.yaml.j2
index 4c0522fed7..9d86292065 100644
--- a/parm/archive/gdas_restarta.yaml.j2
+++ b/parm/archive/gdas_restarta.yaml.j2
@@ -6,7 +6,7 @@ gdas_restarta:
     # Deterministic analysis increments
     - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atminc.nc"
     # IAU increments
-    {% for iaufhr in iaufhrs if iaufhr != 6 %}
+    {% for iaufhr in IAUFHRS if iaufhr != 6 %}
     - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmi{{ "%03d" % iaufhr }}.nc"
     {% endfor %}
 
diff --git a/parm/archive/gfs_netcdfa.yaml.j2 b/parm/archive/gfs_netcdfa.yaml.j2
index 8c0d4a813f..5a51f86148 100644
--- a/parm/archive/gfs_netcdfa.yaml.j2
+++ b/parm/archive/gfs_netcdfa.yaml.j2
@@ -6,7 +6,7 @@ gfs_netcdfa:
     - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.nc"
     - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}sfcanl.nc"
     -
"{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atminc.nc" - {% for iauhr in iaufhrs if iauhr != 6 %} + {% for iauhr in IAUFHRS if iauhr != 6 %} - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmi{{ "%03d" % iauhr }}.nc" {% endfor %} optional: diff --git a/parm/archive/master_enkf.yaml.j2 b/parm/archive/master_enkf.yaml.j2 index 3ebd52dbad..bb8b36c3e0 100644 --- a/parm/archive/master_enkf.yaml.j2 +++ b/parm/archive/master_enkf.yaml.j2 @@ -4,28 +4,6 @@ {% set cycle_YMDH = current_cycle | to_YMDH %} {% set head = RUN + ".t" + cycle_HH + "z." %} -# Split IAUFHRS into a list; typically either "3,6,9" or 6 (integer) -{% if IAUFHRS is string %} - # "3,6,9" - {% set iaufhrs = [] %} - {% for iaufhr in IAUFHRS.split(",") %} - {% do iaufhrs.append(iaufhr | int) %} - {% endfor %} -{% else %} - # 6 (integer) - {% set iaufhrs = [IAUFHRS] %} -{% endif %} - -# Repeat for IAUFHRS_ENKF -{% if IAUFHRS_ENKF is string %} - {% set iaufhrs_enkf = [] %} - {% for iaufhr in IAUFHRS_ENKF.split(",") %} - {% do iaufhrs_enkf.append(iaufhr | int) %} - {% endfor %} -{% else %} - {% set iaufhrs_enkf = [IAUFHRS_ENKF] %} -{% endif %} - # Determine which data to archive datasets: {% if ENSGRP == 0 %} diff --git a/parm/archive/master_gdas.yaml.j2 b/parm/archive/master_gdas.yaml.j2 index 30a2175653..11e83d387b 100644 --- a/parm/archive/master_gdas.yaml.j2 +++ b/parm/archive/master_gdas.yaml.j2 @@ -3,16 +3,6 @@ {% set cycle_YMDH = current_cycle | to_YMDH %} {% set head = "gdas.t" + cycle_HH + "z." %} -# Split IAUFHRS into a list; typically either "3,6,9" or 6 (integer) -{% if IAUFHRS is string %} - {% set iaufhrs = [] %} - {% for iaufhr in IAUFHRS.split(",") %} - {% do iaufhrs.append(iaufhr | int) %} - {% endfor %} -{% else %} - {% set iaufhrs = [IAUFHRS] %} -{% endif %} - datasets: # Always archive atmosphere forecast/analysis data {% filter indent(width=4) %} diff --git a/parm/archive/master_gfs.yaml.j2 b/parm/archive/master_gfs.yaml.j2 index b789598fac..ab9a00c95e 100644 --- a/parm/archive/master_gfs.yaml.j2 +++ b/parm/archive/master_gfs.yaml.j2 @@ -3,18 +3,6 @@ {% set cycle_YMD = current_cycle | to_YMD %} {% set cycle_YMDH = current_cycle | to_YMDH %} -# Split IAUFHRS into a list; typically either "3,6,9" or 6 (integer) -{% if IAUFHRS is string %} - # "3,6,9" - {% set iaufhrs = [] %} - {% for iaufhr in IAUFHRS.split(",") %} - {% do iaufhrs.append(iaufhr | int) %} - {% endfor %} -{% else %} - # 6 (integer) - {% set iaufhrs = [IAUFHRS] %} -{% endif %} - # Determine which data to archive datasets: # Always archive atmosphere forecast/analysis data diff --git a/parm/config/gefs/config.base b/parm/config/gefs/config.base index 735743b568..fad9e3421a 100644 --- a/parm/config/gefs/config.base +++ b/parm/config/gefs/config.base @@ -229,8 +229,11 @@ export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: # GFS output and frequency export FHMIN_GFS=0 -export FHMIN=${FHMIN_GFS} -export FHMAX_GFS=@FHMAX_GFS@ +export FHMAX_GFS="@FHMAX_GFS@" +# Intermediate times to stop forecast when running in segments +breakpnts="@FCST_BREAKPOINTS@" +export FCST_SEGMENTS="${FHMIN_GFS},${breakpnts:+${breakpnts},}${FHMAX_GFS}" + export FHOUT_GFS=6 export FHMAX_HF_GFS=@FHMAX_HF_GFS@ export FHOUT_HF_GFS=1 diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst index e66fc15f87..e6dc335b79 100644 --- a/parm/config/gefs/config.fcst +++ b/parm/config/gefs/config.fcst @@ -30,14 +30,19 @@ string="--fv3 ${CASE}" # shellcheck disable=SC2086 source "${EXPDIR}/config.ufs" ${string} -# shellcheck 
disable=SC2153 -export FHMAX=${FHMAX_GFS} +# Convert comma-separated string into bash array +IFS=', ' read -ra segments <<< "${FCST_SEGMENTS}" +# Determine MIN and MAX based on the forecast segment +export FHMIN=${segments[${FCST_SEGMENT}]} +export FHMAX=${segments[${FCST_SEGMENT}+1]} +# Cap other FHMAX variables at FHMAX for the segment +export FHMAX_HF=$(( FHMAX_HF_GFS > FHMAX ? FHMAX : FHMAX_HF_GFS )) +export FHMAX_WAV=$(( FHMAX_WAV > FHMAX ? FHMAX : FHMAX_WAV )) # shellcheck disable=SC2153 export FHOUT=${FHOUT_GFS} -export FHMAX_HF=${FHMAX_HF_GFS} export FHOUT_HF=${FHOUT_HF_GFS} export FHOUT_OCN=${FHOUT_OCN_GFS} -export FHOUT_ICE=${FHOUT_ICE_GFS} +export FHOUT_ICE=${FHOUT_ICE_GFS} # Get task specific resources source "${EXPDIR}/config.resources" fcst diff --git a/parm/config/gefs/yaml/defaults.yaml b/parm/config/gefs/yaml/defaults.yaml index d2b486e7ca..e4666d1aba 100644 --- a/parm/config/gefs/yaml/defaults.yaml +++ b/parm/config/gefs/yaml/defaults.yaml @@ -11,5 +11,6 @@ base: DO_EXTRACTVARS: "NO" FHMAX_GFS: 120 FHMAX_HF_GFS: 0 + FCST_BREAKPOINTS: "48" REPLAY_ICS: "NO" USE_OCN_PERTURB_FILES: "false" diff --git a/parm/config/gfs/config.aeroanl b/parm/config/gfs/config.aeroanl index 24a5e92644..a1b7e1d44b 100644 --- a/parm/config/gfs/config.aeroanl +++ b/parm/config/gfs/config.aeroanl @@ -24,7 +24,7 @@ if [[ "${DOIAU}" == "YES" ]]; then export aero_bkg_times="3,6,9" export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_fgat_gfs_aero.yaml.j2" else - export aero_bkg_times="6" + export aero_bkg_times="6," # Trailing comma is necessary so this is treated as a list export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_gfs_aero.yaml.j2" fi diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base index 56005199aa..2a7ffab0dd 100644 --- a/parm/config/gfs/config.base +++ b/parm/config/gfs/config.base @@ -285,7 +285,10 @@ export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: # GFS output and frequency export FHMIN_GFS=0 -export FHMAX_GFS=@FHMAX_GFS@ +export FHMAX_GFS="@FHMAX_GFS@" +# Intermediate times to stop forecast when running in segments +breakpnts="@FCST_BREAKPOINTS@" +export FCST_SEGMENTS="${FHMIN_GFS},${breakpnts:+${breakpnts},}${FHMAX_GFS}" export FHOUT_GFS=3 # 3 for ops export FHMAX_HF_GFS=@FHMAX_HF_GFS@ export FHOUT_HF_GFS=1 @@ -384,10 +387,10 @@ fi # if 3DVAR and IAU if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then - export IAUFHRS="6" + export IAUFHRS="6," export IAU_FHROT="3" export IAU_FILTER_INCREMENTS=".true." - export IAUFHRS_ENKF="6" + export IAUFHRS_ENKF="6," fi # Generate post-processing ensemble spread files @@ -397,10 +400,10 @@ export ENKF_SPREAD="YES" if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then export IAU_OFFSET=0 export IAU_FHROT=0 - export IAUFHRS="6" + export IAUFHRS="6," fi -if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi +if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6,"; fi # Determine restart intervals # For IAU, write restarts at beginning of window also diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst index 4982b8f6e6..2743ea0745 100644 --- a/parm/config/gfs/config.fcst +++ b/parm/config/gfs/config.fcst @@ -33,11 +33,16 @@ source "${EXPDIR}/config.ufs" ${string} # Forecast length for GFS forecast case ${RUN} in *gfs) - # shellcheck disable=SC2153 - export FHMAX=${FHMAX_GFS} + # Convert comma-separated string into bash array + IFS=', ' read -ra segments <<< "${FCST_SEGMENTS}" + # Determine MIN and MAX based on the forecast segment + export FHMIN=${segments[${FCST_SEGMENT}]} + export FHMAX=${segments[${FCST_SEGMENT}+1]} + # Cap other FHMAX variables at FHMAX for the segment + export FHMAX_HF=$(( FHMAX_HF_GFS > FHMAX ? FHMAX : FHMAX_HF_GFS )) + export FHMAX_WAV=$(( FHMAX_WAV > FHMAX ? FHMAX : FHMAX_WAV )) # shellcheck disable=SC2153 export FHOUT=${FHOUT_GFS} - export FHMAX_HF=${FHMAX_HF_GFS} export FHOUT_HF=${FHOUT_HF_GFS} export FHOUT_OCN=${FHOUT_OCN_GFS} export FHOUT_ICE=${FHOUT_ICE_GFS} diff --git a/parm/config/gfs/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml index da4d587dff..24729ac43e 100644 --- a/parm/config/gfs/yaml/defaults.yaml +++ b/parm/config/gfs/yaml/defaults.yaml @@ -16,6 +16,7 @@ base: DO_METP: "YES" FHMAX_GFS: 120 FHMAX_HF_GFS: 0 + FCST_BREAKPOINTS: "" DO_VRFY_OCEANDA: "NO" GSI_SOILANAL: "NO" EUPD_CYC: "gdas" diff --git a/sorc/wxflow b/sorc/wxflow index d314e06510..e1ef697430 160000 --- a/sorc/wxflow +++ b/sorc/wxflow @@ -1 +1 @@ -Subproject commit d314e065101041a4d45e5a11ec19cd2dc5f38c67 +Subproject commit e1ef697430c09d2b1a0560f21f11c7a32ed5f3e2 diff --git a/ush/calcanl_gfs.py b/ush/calcanl_gfs.py index 5d97d25dfd..9dc6ff9fa6 100755 --- a/ush/calcanl_gfs.py +++ b/ush/calcanl_gfs.py @@ -11,6 +11,7 @@ import gsi_utils from collections import OrderedDict import datetime +from wxflow import cast_as_dtype python2fortran_bool = {True: '.true.', False: '.false.'} @@ -358,7 +359,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix, ExecAnl = os.getenv('CALCANLEXEC', './calc_analysis.x') ExecChgresInc = os.getenv('CHGRESINCEXEC', './interp_inc.x') NEMSGet = os.getenv('NEMSIOGET', 'nemsio_get') - IAUHrs = list(map(int, os.getenv('IAUFHRS', '6').split(','))) + IAUHrs = cast_as_dtype(os.getenv('IAUFHRS', '6,')) Run = os.getenv('RUN', 'gdas') JEDI = gsi_utils.isTrue(os.getenv('DO_JEDIATMVAR', 'YES')) diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index ebf7cfd282..6b72f574d8 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -77,6 +77,7 @@ common_predet(){ CDATE=${CDATE:-"${PDY}${cyc}"} ENSMEM=${ENSMEM:-000} + MEMBER=$(( 10#${ENSMEM:-"-1"} )) # -1: control, 0: ensemble mean, >0: ensemble member $MEMBER # Define significant cycles half_window=$(( assim_freq / 2 )) @@ -154,7 +155,6 @@ FV3_predet(){ FV3_OUTPUT_FH="${FV3_OUTPUT_FH} $(seq -s ' ' "${fhr}" "${FHOUT}" "${FHMAX}")" # Other options - MEMBER=$(( 10#${ENSMEM:-"-1"} )) # -1: control, 0: ensemble mean, >0: ensemble member $MEMBER PREFIX_ATMINC=${PREFIX_ATMINC:-""} # allow ensemble to use recentered increment # IAU options diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py index 69a992d7d4..ccc5fb601a 100644 --- a/ush/python/pygfs/task/aero_analysis.py +++ 
b/ush/python/pygfs/task/aero_analysis.py @@ -46,7 +46,7 @@ def __init__(self, config): 'npz_anl': self.task_config['LEVS'] - 1, 'AERO_WINDOW_BEGIN': _window_begin, 'AERO_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", - 'aero_bkg_fhr': map(int, str(self.task_config['aero_bkg_times']).split(',')), + 'aero_bkg_fhr': self.task_config['aero_bkg_times'], 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", diff --git a/ush/python/pygfs/task/aero_prepobs.py b/ush/python/pygfs/task/aero_prepobs.py index d8396fe3ca..be58fa43a5 100644 --- a/ush/python/pygfs/task/aero_prepobs.py +++ b/ush/python/pygfs/task/aero_prepobs.py @@ -31,7 +31,7 @@ def __init__(self, config: Dict[str, Any]) -> None: { 'window_begin': _window_begin, 'window_end': _window_end, - 'sensors': str(self.task_config['SENSORS']).split(','), + 'sensors': self.task_config['SENSORS'], 'data_dir': self.task_config['VIIRS_DATA_DIR'], 'input_files': '', 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", diff --git a/workflow/applications/applications.py b/workflow/applications/applications.py index 97a77c2c21..8c1f69735e 100644 --- a/workflow/applications/applications.py +++ b/workflow/applications/applications.py @@ -75,6 +75,10 @@ def __init__(self, conf: Configuration) -> None: self.do_hpssarch = _base.get('HPSSARCH', False) self.nens = _base.get('NMEM_ENS', 0) + self.fcst_segments = _base.get('FCST_SEGMENTS', None) + + if not AppConfig.is_monotonic(self.fcst_segments): + raise ValueError(f'Forecast segments do not increase monotonically: {",".join(self.fcst_segments)}') self.wave_runs = None if self.do_wave: @@ -208,3 +212,26 @@ def get_gfs_interval(gfs_cyc: int) -> timedelta: return to_timedelta(gfs_internal_map[str(gfs_cyc)]) except KeyError: raise KeyError(f'Invalid gfs_cyc = {gfs_cyc}') + + @staticmethod + def is_monotonic(test_list: List, check_decreasing: bool = False) -> bool: + """ + Determine if an array is monotonically increasing or decreasing + + TODO: Move this into wxflow somewhere + + Inputs + test_list: List + A list of comparable values to check + check_decreasing: bool [default: False] + Check whether list is monotonically decreasing + + Returns + bool: Whether the list is monotonically increasing (if check_decreasing + if False) or decreasing (if check_decreasing is True) + + """ + if check_decreasing: + return all(x > y for x, y in zip(test_list, test_list[1:])) + else: + return all(x < y for x, y in zip(test_list, test_list[1:])) diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py index e78ac96d83..f0f73d1173 100644 --- a/workflow/rocoto/gefs_tasks.py +++ b/workflow/rocoto/gefs_tasks.py @@ -138,19 +138,34 @@ def fcst(self): dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) + num_fcst_segments = len(self.app_config.fcst_segments) - 1 + + fcst_vars = self.envars.copy() + fcst_envars_dict = {'FCST_SEGMENT': '#seg#'} + for key, value in fcst_envars_dict.items(): + fcst_vars.append(rocoto.create_envar(name=key, value=str(value))) + resources = self.get_resource('fcst') - task_name = f'fcst_mem000' + task_name = f'fcst_mem000_seg#seg#' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, - 'envars': self.envars, + 'envars': fcst_vars, 'cycledef': 'gefs', 'command': f'{self.HOMEgfs}/jobs/rocoto/fcst.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': 
f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' } - task = rocoto.create_task(task_dict) + + seg_var_dict = {'seg': ' '.join([f"{seg}" for seg in range(0, num_fcst_segments)])} + metatask_dict = {'task_name': f'fcst_mem000', + 'is_serial': True, + 'var_dict': seg_var_dict, + 'task_dict': task_dict + } + + task = rocoto.create_task(metatask_dict) return task @@ -169,36 +184,60 @@ def efcs(self): dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) - efcsenvars = self.envars.copy() - efcsenvars_dict = {'ENSMEM': '#member#', - 'MEMDIR': 'mem#member#' - } - for key, value in efcsenvars_dict.items(): - efcsenvars.append(rocoto.create_envar(name=key, value=str(value))) - + num_fcst_segments = len(self.app_config.fcst_segments) - 1 resources = self.get_resource('efcs') - task_name = f'fcst_mem#member#' - task_dict = {'task_name': task_name, - 'resources': resources, - 'dependency': dependencies, - 'envars': efcsenvars, - 'cycledef': 'gefs', - 'command': f'{self.HOMEgfs}/jobs/rocoto/fcst.sh', - 'job_name': f'{self.pslot}_{task_name}_@H', - 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', - 'maxtries': '&MAXTRIES;' - } - - member_var_dict = {'member': ' '.join([f"{mem:03d}" for mem in range(1, self.nmem + 1)])} - metatask_dict = {'task_name': 'fcst_ens', - 'var_dict': member_var_dict, - 'task_dict': task_dict + # Kludge to work around bug in rocoto with serial metatasks nested + # in a parallel one (see christopherwharrop/rocoto#109). For now, + # loop over member to create a separate metatask for each instead + # of a metatask of a metatask. + # + tasks = [] + for member in [f"{mem:03d}" for mem in range(1, self.nmem + 1)]: + + efcsenvars = self.envars.copy() + efcsenvars_dict = {'ENSMEM': f'{member}', + 'MEMDIR': f'mem{member}', + 'FCST_SEGMENT': '#seg#' + } + for key, value in efcsenvars_dict.items(): + efcsenvars.append(rocoto.create_envar(name=key, value=str(value))) + + task_name = f'fcst_mem{member}_seg#seg#' + task_dict = {'task_name': task_name, + 'resources': resources, + 'dependency': dependencies, + 'envars': efcsenvars, + 'cycledef': 'gefs', + 'command': f'{self.HOMEgfs}/jobs/rocoto/fcst.sh', + 'job_name': f'{self.pslot}_{task_name}_@H', + 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', + 'maxtries': '&MAXTRIES;' } - task = rocoto.create_task(metatask_dict) + seg_var_dict = {'seg': ' '.join([f"{seg}" for seg in range(0, num_fcst_segments)])} + seg_metatask_dict = {'task_name': f'fcst_mem{member}', + 'is_serial': True, + 'var_dict': seg_var_dict, + 'task_dict': task_dict + } - return task + tasks.append(rocoto.create_task(seg_metatask_dict)) + + return '\n'.join(tasks) + + # Keeping this in hopes the kludge is no longer necessary at some point + # + # member_var_dict = {'member': ' '.join([f"{mem:03d}" for mem in range(1, self.nmem + 1)])} + # mem_metatask_dict = {'task_name': 'fcst_ens', + # 'is_serial': False, + # 'var_dict': member_var_dict, + # 'task_dict': seg_metatask_dict + # } + + # task = rocoto.create_task(mem_metatask_dict) + + # return task def atmos_prod(self): return self._atmosoceaniceprod('atmos') @@ -236,7 +275,7 @@ def _atmosoceaniceprod(self, component: str): if component in ['ocean']: dep_dict = {'type': 'data', 'data': data, 'age': 120} deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': 'fcst_mem#member#'} + dep_dict = {'type': 'metatask', 'name': 'fcst_mem#member#'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps, 
dep_condition='or') elif component in ['ice']: @@ -384,7 +423,7 @@ def wavepostsbs(self): def wavepostbndpnt(self): deps = [] - dep_dict = {'type': 'task', 'name': f'fcst_mem#member#'} + dep_dict = {'type': 'metatask', 'name': f'fcst_mem#member#'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) @@ -429,7 +468,7 @@ def wavepostbndpntbll(self): dep_dict = {'type': 'data', 'data': data} deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'fcst_mem#member#'} + dep_dict = {'type': 'metatask', 'name': f'fcst_mem#member#'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) @@ -465,7 +504,7 @@ def wavepostbndpntbll(self): def wavepostpnt(self): deps = [] - dep_dict = {'type': 'task', 'name': f'fcst_mem#member#'} + dep_dict = {'type': 'metatask', 'name': f'fcst_mem#member#'} deps.append(rocoto.add_dependency(dep_dict)) if self.app_config.do_wave_bnd: dep_dict = {'type': 'task', 'name': f'wave_post_bndpnt_bull_mem#member#'} diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 960a7548ab..9d9b28fb17 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -688,7 +688,7 @@ def ocnanalprep(self): deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.run}marinebmat'} deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': 'gdasfcst', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + dep_dict = {'type': 'metatask', 'name': 'gdasfcst', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) @@ -880,12 +880,22 @@ def _fcst_forecast_only(self): dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) + if self.run in ['gfs']: + num_fcst_segments = len(self.app_config.fcst_segments) - 1 + else: + num_fcst_segments = 1 + + fcst_vars = self.envars.copy() + fcst_envars_dict = {'FCST_SEGMENT': '#seg#'} + for key, value in fcst_envars_dict.items(): + fcst_vars.append(rocoto.create_envar(name=key, value=str(value))) + resources = self.get_resource('fcst') - task_name = f'{self.run}fcst' + task_name = f'{self.run}fcst_seg#seg#' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, - 'envars': self.envars, + 'envars': fcst_vars, 'cycledef': self.run.replace('enkf', ''), 'command': f'{self.HOMEgfs}/jobs/rocoto/fcst.sh', 'job_name': f'{self.pslot}_{task_name}_@H', @@ -893,7 +903,14 @@ def _fcst_forecast_only(self): 'maxtries': '&MAXTRIES;' } - task = rocoto.create_task(task_dict) + seg_var_dict = {'seg': ' '.join([f"{seg}" for seg in range(0, num_fcst_segments)])} + metatask_dict = {'task_name': f'{self.run}fcst', + 'is_serial': True, + 'var_dict': seg_var_dict, + 'task_dict': task_dict + } + + task = rocoto.create_task(metatask_dict) return task @@ -929,12 +946,22 @@ def _fcst_cycled(self): cycledef = 'gdas_half,gdas' if self.run in ['gdas'] else self.run + if self.run in ['gfs']: + num_fcst_segments = len(self.app_config.fcst_segments) - 1 + else: + num_fcst_segments = 1 + + fcst_vars = self.envars.copy() + fcst_envars_dict = {'FCST_SEGMENT': '#seg#'} + for key, value in fcst_envars_dict.items(): + fcst_vars.append(rocoto.create_envar(name=key, value=str(value))) + resources = self.get_resource('fcst') - task_name = f'{self.run}fcst' + task_name = f'{self.run}fcst_seg#seg#' 
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
-                     'envars': self.envars,
+                     'envars': fcst_vars,
                      'cycledef': cycledef,
                      'command': f'{self.HOMEgfs}/jobs/rocoto/fcst.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
@@ -942,7 +969,14 @@
                      'maxtries': '&MAXTRIES;'
                      }
 
-        task = rocoto.create_task(task_dict)
+        seg_var_dict = {'seg': ' '.join([f"{seg}" for seg in range(0, num_fcst_segments)])}
+        metatask_dict = {'task_name': f'{self.run}fcst',
+                         'is_serial': True,
+                         'var_dict': seg_var_dict,
+                         'task_dict': task_dict
+                         }
+
+        task = rocoto.create_task(metatask_dict)
 
         return task
 
@@ -1104,7 +1138,7 @@ def _atmosoceaniceprod(self, component: str):
             data = f'{history_path}/{history_file_tmpl}'
             dep_dict = {'type': 'data', 'data': data, 'age': 120}
             deps.append(rocoto.add_dependency(dep_dict))
-            dep_dict = {'type': 'task', 'name': f'{self.run}fcst'}
+            dep_dict = {'type': 'metatask', 'name': f'{self.run}fcst'}
             deps.append(rocoto.add_dependency(dep_dict))
             dependencies = rocoto.create_dependency(dep=deps, dep_condition='or')
 
@@ -1169,7 +1203,7 @@ def wavepostsbs(self):
 
     def wavepostbndpnt(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.run}fcst'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}fcst'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
@@ -1221,7 +1255,7 @@ def wavepostbndpntbll(self):
 
     def wavepostpnt(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.run}fcst'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}fcst'}
         deps.append(rocoto.add_dependency(dep_dict))
         if self.app_config.do_wave_bnd:
             dep_dict = {'type': 'task', 'name': f'{self.run}wavepostbndpntbll'}
@@ -1318,7 +1352,7 @@ def waveawipsgridded(self):
 
     def postsnd(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.run}fcst'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run}fcst'}
        deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
 
@@ -1824,8 +1858,9 @@ def metp(self):
                      }
 
         metatask_dict = {'task_name': f'{self.run}metp',
+                         'is_serial': True,
                          'task_dict': task_dict,
-                         'var_dict': var_dict
+                         'var_dict': var_dict,
                          }
 
         task = rocoto.create_task(metatask_dict)
@@ -2524,7 +2559,7 @@ def ecen(self):
 
         def _get_ecengroups():
 
             if self._base.get('DOIAU_ENKF', False):
-                fhrs = list(self._base.get('IAUFHRS', '6').split(','))
+                fhrs = self._base.get('IAUFHRS', '[6]')
 
                 necengrp = self._configs['ecen']['NECENGRP']
                 ngrps = necengrp if len(fhrs) > necengrp else len(fhrs)
@@ -2666,7 +2701,7 @@ def echgres(self):
         self._is_this_a_gdas_task(self.run, 'echgres')
 
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}fcst'}
+        dep_dict = {'type': 'metatask', 'name': f'{self.run.replace("enkf","")}fcst'}
         deps.append(rocoto.add_dependency(dep_dict))
         dep_dict = {'type': 'task', 'name': f'{self.run}fcst_mem001'}
         deps.append(rocoto.add_dependency(dep_dict))
diff --git a/workflow/rocoto/rocoto.py b/workflow/rocoto/rocoto.py
index 0abb56cafb..2a20820da8 100644
--- a/workflow/rocoto/rocoto.py
+++ b/workflow/rocoto/rocoto.py
@@ -56,9 +56,10 @@ def create_task(task_dict: Dict[str, Any]) -> List[str]:
     else:
         # There is a nested task_dict, so this is a metatask
         metataskname = f"{task_dict.get('task_name', 'demometatask')}"
+        metataskmode = 'serial' if task_dict.get('is_serial', False) else 'parallel'
         var_dict = task_dict.get('var_dict', None)
 
-        strings = [f'<metatask name="{metataskname}">\n',
+        strings = [f'<metatask name="{metataskname}" mode="{metataskmode}">\n',
                    '\n']
 
     if var_dict is None:
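A minimal sketch (not the actual create_task() implementation) of what the
new 'is_serial' key selects: rocoto's pre-existing metatask mode attribute,
so the per-segment metatasks run one after another:

    from typing import Any, Dict

    def metatask_open_tag(task_dict: Dict[str, Any]) -> str:
        # Mirrors the mode-selection logic added to create_task() above
        metataskname = task_dict.get('task_name', 'demometatask')
        metataskmode = 'serial' if task_dict.get('is_serial', False) else 'parallel'
        return f'<metatask name="{metataskname}" mode="{metataskmode}">'

    # Forecast segments must run in order, so their metatask is serial:
    print(metatask_open_tag({'task_name': 'gfsfcst', 'is_serial': True}))
    # <metatask name="gfsfcst" mode="serial">

    # Most metatasks omit 'is_serial' and keep the parallel default:
    print(metatask_open_tag({'task_name': 'fcst_ens'}))
    # <metatask name="fcst_ens" mode="parallel">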
From 56991675246cfae1d91795d1db653fc170c20086 Mon Sep 17 00:00:00 2001
From: Eric Sinsky - NOAA <48259628+EricSinsky-NOAA@users.noreply.github.com>
Date: Mon, 12 Aug 2024 23:12:21 -0400
Subject: [PATCH 2/3] Add fixes to products for when REPLAY ICs are used (#2755)

This PR fixes a couple of issues that arise when replay initial conditions
are used. These issues only occur when `REPLAY_ICS` is set to `YES` and
`OFFSET_START_HOUR` is greater than `0`. The following items are addressed
in this PR.

1. Fix an issue that causes ocean_prod tasks not to be triggered (issue
   [#2725](https://github.com/NOAA-EMC/global-workflow/issues/2725)). A new
   diag_table was added (called `diag_table_replay`) that is used only when
   `REPLAY_ICS` is set to `YES`. This diag_table accounts for the offset
   that occurs when using replay ICs.
2. Fix an issue that causes atmos_prod tasks not to be triggered for the
   first lead time (e.g. f003) (issue
   [#2754](https://github.com/NOAA-EMC/global-workflow/issues/2754)). When
   `OFFSET_START_HOUR` is greater than `0`, the first `fhr` is
   `${OFFSET_START_HOUR}+(${DELTIM}/3600)`, which is defined in
   `forecast_predet.sh` and allows data for the first lead time to be
   generated. The filename with this lead time will still be labelled with
   `OFFSET_START_HOUR` (see the sketch following the diffs below).
3. Minor modifications were made to the extractvars task so that atmos data
   from replay cases can be processed.

This PR was split from PR #2680.

Refs #2725, #2754

---------

Co-authored-by: Walter Kolczynski - NOAA
---
 parm/config/gefs/config.extractvars   |  10 +-
 parm/config/gefs/config.fcst          |   6 +-
 parm/config/gefs/config.resources     |   2 +-
 parm/post/oceanice_products_gefs.yaml |  14 +-
 parm/ufs/fv3/diag_table_replay        | 337 ++++++++++++++++++++++++++
 ush/atmos_extractvars.sh              |  25 +-
 ush/forecast_postdet.sh               |  36 ++-
 ush/forecast_predet.sh                |  30 +++
 ush/ocnice_extractvars.sh             |   4 +-
 ush/parsing_model_configure_FV3.sh    |   2 +-
 ush/parsing_namelists_FV3.sh          |  12 +
 11 files changed, 449 insertions(+), 29 deletions(-)
 create mode 100644 parm/ufs/fv3/diag_table_replay

diff --git a/parm/config/gefs/config.extractvars b/parm/config/gefs/config.extractvars
index 706fe18450..cc93fcf5e0 100644
--- a/parm/config/gefs/config.extractvars
+++ b/parm/config/gefs/config.extractvars
@@ -9,12 +9,12 @@ echo "BEGIN: config.extractvars"
 
 export COMPRSCMD=${COMPRSCMD:-bzip2}
 
-export compress_ocn=0 #1: Compress extracted ocean product, 0: Do not compress extracted ocean product
-export compress_ice=0 #1: Compress extracted ice product, 0: Do not compress extracted ice product
+export compress_ocn=1 #1: Compress extracted ocean product, 0: Do not compress extracted ocean product
+export compress_ice=1 #1: Compress extracted ice product, 0: Do not compress extracted ice product
 
-export ocnres="5p00" # Resolution of ocean products
-export iceres="5p00" # Resolution of ice products
-export wavres="5p00" # Resolution of wave products
+export ocnres="1p00" # Resolution of ocean products
+export iceres="native" # Resolution of ice products
+export wavres="0p25" # Resolution of wave products
 
 export depthvar_name="z_l" # Name of depth variable in NetCDF ocean products
 export zmin="0."
# Minimum depth to extract from NetCDF ocean products diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst index e6dc335b79..407e48496e 100644 --- a/parm/config/gefs/config.fcst +++ b/parm/config/gefs/config.fcst @@ -247,7 +247,11 @@ export FSICS="0" #--------------------------------------------------------------------- # Write more variables to output -export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table" +if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then + export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_replay" +else + export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table" +fi # Write gfs restart files to rerun fcst from any break point export restart_interval=${restart_interval_gfs:-12} diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources index 8c3ba88940..297bc08c05 100644 --- a/parm/config/gefs/config.resources +++ b/parm/config/gefs/config.resources @@ -272,7 +272,7 @@ case ${step} in export walltime_gefs="00:30:00" export ntasks_gefs=1 export threads_per_task_gefs=1 - export tasks_per_node_gefs="${ntasks}" + export tasks_per_node_gefs="${ntasks_gefs}" export walltime_gfs="${walltime_gefs}" export ntasks_gfs="${ntasks_gefs}" export threads_per_tasks_gfs="${threads_per_task_gefs}" diff --git a/parm/post/oceanice_products_gefs.yaml b/parm/post/oceanice_products_gefs.yaml index 74c0f0653b..fea88df2bb 100644 --- a/parm/post/oceanice_products_gefs.yaml +++ b/parm/post/oceanice_products_gefs.yaml @@ -39,14 +39,15 @@ ocean: - ["{{ COM_OCEAN_HISTORY }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ocean.nc"] data_out: mkdir: - - "{{ COM_OCEAN_NETCDF }}" + - "{{ COM_OCEAN_NETCDF }}/native" {% for grid in product_grids %} + - "{{ COM_OCEAN_NETCDF }}/{{ grid }}" - "{{ COM_OCEAN_GRIB }}/{{ grid }}" {% endfor %} copy: - - ["{{ DATA }}/ocean_subset.nc", "{{ COM_OCEAN_NETCDF }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"] + - ["{{ DATA }}/ocean_subset.nc", "{{ COM_OCEAN_NETCDF }}/native/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"] {% for grid in product_grids %} - - ["{{ DATA }}/ocean.{{ grid }}.nc", "{{ COM_OCEAN_NETCDF }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.nc"] + - ["{{ DATA }}/ocean.{{ grid }}.nc", "{{ COM_OCEAN_NETCDF }}/{{ grid }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.nc"] {% endfor %} ice: @@ -62,12 +63,13 @@ ice: - ["{{ COM_ICE_HISTORY }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ice.nc"] data_out: mkdir: - - "{{ COM_ICE_NETCDF }}" + - "{{ COM_ICE_NETCDF }}/native" {% for grid in product_grids %} + - "{{ COM_ICE_NETCDF }}/{{ grid }}" - "{{ COM_ICE_GRIB }}/{{ grid }}" {% endfor %} copy: - - ["{{ DATA }}/ice_subset.nc", "{{ COM_ICE_NETCDF }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"] + - ["{{ DATA }}/ice_subset.nc", "{{ COM_ICE_NETCDF }}/native/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"] {% for grid in product_grids %} - - ["{{ DATA }}/ice.{{ grid }}.nc", "{{ COM_ICE_NETCDF }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.nc"] + - ["{{ DATA }}/ice.{{ grid }}.nc", "{{ COM_ICE_NETCDF }}/{{ grid }}/{{ RUN }}.ice.t{{ current_cycle | 
strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.nc"] {% endfor %} diff --git a/parm/ufs/fv3/diag_table_replay b/parm/ufs/fv3/diag_table_replay new file mode 100644 index 0000000000..01f2cf9794 --- /dev/null +++ b/parm/ufs/fv3/diag_table_replay @@ -0,0 +1,337 @@ +"fv3_history", 0, "hours", 1, "hours", "time" +"fv3_history2d", 0, "hours", 1, "hours", "time" +"@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", @[FHOUT_OCN], "hours", 1, "hours", "time", @[FHOUT_OCN], "hours", "@[SYEAR] @[SMONTH] @[SDAY] @[CHOUR_offset] 0 0", @[FHOUT_OCN], "hours" +"@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", @[FHOUT_OCN], "hours", 1, "hours", "time", @[FHOUT_OCN], "hours", "@[SYEAR1] @[SMONTH1] @[SDAY1] @[CHOUR1] 0 0" + +############## +# Ocean fields first lead time +############## +# static fields +"ocean_model", "geolon", "geolon", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolat", "geolat", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolon_c", "geolon_c", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolat_c", "geolat_c", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolon_u", "geolon_u", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolat_u", "geolat_u", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolon_v", "geolon_v", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolat_v", "geolat_v", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +#"ocean_model", "depth_ocean", "depth_ocean", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +#"ocean_model", "wet", "wet", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "wet_c", "wet_c", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "wet_u", "wet_u", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "wet_v", "wet_v", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "sin_rot", "sin_rot", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "cos_rot", "cos_rot", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 + +# ocean output TSUV and others +"ocean_model", "SSH", "SSH", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "SST", "SST", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "SSS", "SSS", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "speed", "speed", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "SSU", "SSU", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "SSV", "SSV", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "frazil", "frazil", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "ePBL_h_ML", "ePBL", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "MLD_003", "MLD_003", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "MLD_0125", "MLD_0125", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "tob", "tob", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 + +# Z-Space Fields Provided for 
CMIP6 (CMOR Names): +"ocean_model_z", "uo", "uo", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model_z", "vo", "vo", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model_z", "so", "so", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model_z", "temp", "temp", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 + +# forcing +"ocean_model", "taux", "taux", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "tauy", "tauy", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "latent", "latent", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "sensible", "sensible", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "SW", "SW", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "LW", "LW", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "evap", "evap", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "lprec", "lprec", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "lrunoff", "lrunoff", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +#"ocean_model", "frunoff", "frunoff", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "fprec", "fprec", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "LwLatSens", "LwLatSens", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "Heat_PmE", "Heat_PmE", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + +############## +# Ocean fields second lead time and after +############# +# static fields +ocean_model, "geolon", "geolon", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +ocean_model, "geolat", "geolat", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +ocean_model, "geolon_c", "geolon_c", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +ocean_model, "geolat_c", "geolat_c", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +ocean_model, "geolon_u", "geolon_u", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +ocean_model, "geolat_u", "geolat_u", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +ocean_model, "geolon_v", "geolon_v", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +ocean_model, "geolat_v", "geolat_v", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +#"ocean_model", "depth_ocean", "depth_ocean", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +#"ocean_model", "wet", "wet", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +ocean_model, "wet_c", "wet_c", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +ocean_model, "wet_u", "wet_u", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +ocean_model, "wet_v", "wet_v", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +ocean_model, "sin_rot", "sin_rot", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +ocean_model, "cos_rot", "cos_rot", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2 + +# ocean output TSUV and others +ocean_model, "SSH", "SSH", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", 
"all", .true., "none", 2 +ocean_model, "SST", "SST", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +ocean_model, "SSS", "SSS", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +ocean_model, "speed", "speed", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +ocean_model, "SSU", "SSU", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +ocean_model, "SSV", "SSV", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +ocean_model, "frazil", "frazil", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +ocean_model, "ePBL_h_ML", "ePBL", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +ocean_model, "MLD_003", "MLD_003", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +ocean_model, "MLD_0125", "MLD_0125", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +ocean_model, "tob", "tob", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2 + +# Z-Space Fields Provided for CMIP6 (CMOR Names): +ocean_model_z, "uo", "uo", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +ocean_model_z, "vo", "vo", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +ocean_model_z, "so", "so", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +ocean_model_z, "temp", "temp", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2 + +# forcing +ocean_model, "taux", "taux", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2 +ocean_model, "tauy", "tauy", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2 +ocean_model, "latent", "latent", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2 +ocean_model, "sensible", "sensible", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2 +ocean_model, "SW", "SW", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2 +ocean_model, "LW", "LW", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2 +ocean_model, "evap", "evap", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2 +ocean_model, "lprec", "lprec", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2 +ocean_model, "lrunoff", "lrunoff", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2 +#"ocean_model", "frunoff", "frunoff", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2 +ocean_model, "fprec", "fprec", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2 +ocean_model, "LwLatSens", "LwLatSens", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2 +ocean_model, "Heat_PmE", "Heat_PmE", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2 + +################### +# Atmosphere fields +################### +"gfs_dyn", "ucomp", "ugrd", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "vcomp", "vgrd", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "sphum", "spfh", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "temp", "tmp", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "liq_wat", "clwmr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "ice_wat", "icmr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "snowwat", "snmr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "rainwat", "rwmr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "graupel", "grle", "fv3_history", 
"all", .false., "none", 2 +"gfs_dyn", "ice_nc", "nccice", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "rain_nc", "nconrd", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "o3mr", "o3mr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "cld_amt", "cld_amt", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "delp", "dpres", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "delz", "delz", "fv3_history", "all", .false., "none", 2 +#"gfs_dyn", "pfhy", "preshy", "fv3_history", "all", .false., "none", 2 +#"gfs_dyn", "pfnh", "presnh", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 +"gfs_phys", "refl_10cm", "refl_10cm", "fv3_history", "all", .false., "none", 2 + +"gfs_phys", "cldfra", "cldfra", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "frzr", "frzr", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "frzrb", "frzrb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "frozr", "frozr", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "frozrb", "frozrb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tsnowp", "tsnowp", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tsnowpb", "tsnowpb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "rhonewsn", "rhonewsn", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ALBDO_ave", "albdo_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cnvprcp_ave", "cprat_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cnvprcpb_ave", "cpratb_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "totprcp_ave", "prate_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "totprcpb_ave", "prateb_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DLWRF", "dlwrf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DLWRFI", "dlwrf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ULWRF", "ulwrf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ULWRFI", "ulwrf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DSWRF", "dswrf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DSWRFI", "dswrf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "USWRF", "uswrf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "USWRFI", "uswrf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DSWRFtoa", "dswrf_avetoa", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "USWRFtoa", "uswrf_avetoa", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ULWRFtoa", "ulwrf_avetoa", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "gflux_ave", "gflux_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "hpbl", "hpbl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "lhtfl_ave", "lhtfl_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "shtfl_ave", "shtfl_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "pwat", "pwat", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "soilm", "soilm", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_aveclm", "tcdc_aveclm", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_avebndcl", "tcdc_avebndcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_avelcl", "tcdc_avelcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_avemcl", "tcdc_avemcl", 
"fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_avehcl", "tcdc_avehcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDCcnvcl", "tcdccnvcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avelct", "pres_avelct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avelcb", "pres_avelcb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avemct", "pres_avemct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avemcb", "pres_avemcb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avehct", "pres_avehct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avehcb", "pres_avehcb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PREScnvclt", "prescnvclt", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PREScnvclb", "prescnvclb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TEMP_avehct", "tmp_avehct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TEMP_avemct", "tmp_avemct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TEMP_avelct", "tmp_avelct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "u-gwd_ave", "u-gwd_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "v-gwd_ave", "v-gwd_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "dusfc", "uflx_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "dvsfc", "vflx_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "acond", "acond", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cduvb_ave", "cduvb_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cpofp", "cpofp", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "duvb_ave", "duvb_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csdlf_ave", "csdlf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csusf_ave", "csusf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csusf_avetoa", "csusftoa", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csdsf_ave", "csdsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csulf_ave", "csulf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csulf_avetoa", "csulftoa", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cwork_ave", "cwork_aveclm", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "evbs_ave", "evbs_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "evcw_ave", "evcw_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "fldcp", "fldcp", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "hgt_hyblev1", "hgt_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "spfh_hyblev1", "spfh_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ugrd_hyblev1", "ugrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "vgrd_hyblev1", "vgrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tmp_hyblev1", "tmp_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "gfluxi", "gflux", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "lhtfl", "lhtfl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "shtfl", "shtfl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "pevpr", "pevpr", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "pevpr_ave", "pevpr_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "sbsno_ave", "sbsno_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "sfexc", "sfexc", "fv3_history2d", "all", .false., "none", 2 
+"gfs_phys", "snohf", "snohf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "snowc_ave", "snowc_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "spfhmax2m", "spfhmax_max2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "spfhmin2m", "spfhmin_min2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tmpmax2m", "tmax_max2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tmpmin2m", "tmin_min2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ssrun_acc", "ssrun_acc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "sunsd_acc", "sunsd_acc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "watr_acc", "watr_acc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "wilt", "wilt", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "vbdsf_ave", "vbdsf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "vddsf_ave", "vddsf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "nbdsf_ave", "nbdsf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "nddsf_ave", "nddsf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "trans_ave", "trans_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "psurf", "pressfc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "u10m", "ugrd10m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "v10m", "vgrd10m", "fv3_history2d", "all", .false., "none", 2 + +"gfs_phys", "pahi", "pahi", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "pah_ave", "pah_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ecan_acc", "ecan_acc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "etran_acc", "etran_acc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "edir_acc", "edir_acc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "wa_acc", "wa_acc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "lfrac", "lfrac", "fv3_history2d", "all", .false., "none", 2 + +"gfs_sfc", "crain", "crain", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tprcp", "tprcp", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "rainc", "cnvprcp", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "hgtsfc", "orog", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "weasd", "weasd", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "f10m", "f10m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "q2m", "spfh2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "t2m", "tmp2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tsfc", "tmpsfc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "vtype", "vtype", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "stype", "sotyp", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slmsksfc", "land", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "vfracsfc", "veg", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "zorlsfc", "sfcr", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "uustar", "fricv", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt1", "soilt1" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt2", "soilt2" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt3", "soilt3" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt4", "soilt4" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw1", "soilw1" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw2", "soilw2" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw3", 
"soilw3" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw4", "soilw4" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_1", "soill1", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_2", "soill2", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_3", "soill3", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_4", "soill4", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slope", "sltyp", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alnsf", "alnsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alnwf", "alnwf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alvsf", "alvsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alvwf", "alvwf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "canopy", "cnwat", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "facsf", "facsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "facwf", "facwf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "ffhh", "ffhh", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "ffmm", "ffmm", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "fice", "icec", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "hice", "icetk", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "snoalb", "snoalb", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "shdmax", "shdmax", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "shdmin", "shdmin", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "snowd", "snod", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tg3", "tg3", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tisfc", "tisfc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tref", "tref", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "z_c", "zc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "c_0", "c0", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "c_d", "cd", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "w_0", "w0", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "w_d", "wd", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xt", "xt", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xz", "xz", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "dt_cool", "dtcool", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xs", "xs", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xu", "xu", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xv", "xv", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xtts", "xtts", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xzts", "xzts", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "d_conv", "dconv", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "qrain", "qrain", "fv3_history2d", "all", .false., "none", 2 + +#============================================================================================= +# +#====> This file can be used with diag_manager/v2.0a (or higher) <==== +# +# +# FORMATS FOR FILE ENTRIES (not all input values are used) +# ------------------------ +# +#"file_name", output_freq, "output_units", format, "time_units", "long_name", +# +# +#output_freq: > 0 output frequency in "output_units" +# = 0 output frequency every time step +# =-1 output frequency at end of run +# +#output_units = units used for output frequency +# (years, months, days, minutes, hours, seconds) +# +#time_units = units used to label the time axis +# (days, minutes, hours, seconds) +# +# +# FORMAT FOR FIELD ENTRIES 
+# FORMAT FOR FIELD ENTRIES (not all input values are used)
+# ------------------------
+#
+#"module_name", "field_name", "output_name", "file_name", "time_sampling", time_avg, "other_opts", packing
+#
+#time_avg = .true. or .false.
+#
+#packing  = 1  double precision
+#          = 2  float
+#          = 4  packed 16-bit integers
+#          = 8  packed 1-byte (not tested?)
diff --git a/ush/atmos_extractvars.sh b/ush/atmos_extractvars.sh
index 70e86b2f4e..5fea8497c6 100755
--- a/ush/atmos_extractvars.sh
+++ b/ush/atmos_extractvars.sh
@@ -31,7 +31,16 @@ for outtype in "f2d" "f3d"; do
   outdirpre="${subdata}/${outtype}"
   [[ -d "${outdirpre}" ]] || mkdir -p "${outdirpre}"

-  nh=${FHMIN}
+  if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
+    if [[ "${outtype}" == "f2d" ]]; then
+      nh=${OFFSET_START_HOUR}
+    elif [[ "${outtype}" == "f3d" ]]; then
+      nh=${FHOUT_GFS}
+    fi
+  else
+    nh=${FHMIN}
+  fi
+
   while (( nh <= FHMAX_GFS )); do
     fnh=$(printf "%3.3d" "${nh}")
@@ -45,11 +54,15 @@ for outtype in "f2d" "f3d"; do
       outres="1p00"
     fi

-    if (( nh <= FHMAX_HF_GFS )); then
-      outfreq=${FHOUT_HF_GFS}
-    else
-      outfreq=${FHOUT_GFS}
-    fi
+    if [[ "${outtype}" == "f2d" ]]; then
+      if (( nh < FHMAX_HF_GFS )); then
+        outfreq=${FHOUT_HF_GFS}
+      else
+        outfreq=${FHOUT_GFS}
+      fi
+    elif [[ "${outtype}" == "f3d" ]]; then
+      outfreq=${FHOUT_GFS}
+    fi

     com_var="COMIN_ATMOS_GRIB_${outres}"
     infile1="${!com_var}/${RUN}.t${cyc}z.pgrb2.${outres}.f${fnh}"
diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh
index e659d2ce80..8af9054972 100755
--- a/ush/forecast_postdet.sh
+++ b/ush/forecast_postdet.sh
@@ -206,11 +206,24 @@ EOF
  for fhr in ${FV3_OUTPUT_FH}; do
    FH3=$(printf %03i "${fhr}")
    FH2=$(printf %02i "${fhr}")
-    ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc" "atmf${FH3}.nc"
-    ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" "sfcf${FH3}.nc"
-    ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt" "log.atm.f${FH3}"
-    ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_atmf${FH3}.nc" "cubed_sphere_grid_atmf${FH3}.nc"
-    ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_sfcf${FH3}.nc" "cubed_sphere_grid_sfcf${FH3}.nc"
+    # When replaying, the model writes the time in the filename as HHH-MM-SS
+    # because the first fhr is a decimal number
+    if [[ ${REPLAY_ICS:-NO} == "YES" ]] && (( fhr >= OFFSET_START_HOUR )); then
+      local hhmmss_substring=${FV3_OUTPUT_FH_hhmmss/" ${FH3}-"*/}  # Substring of all lead times up to one space before the target HHH-MM-SS
+      local hhmmss_substring_len=$(( ${#hhmmss_substring} + 1 ))   # Size of that substring plus 1 to account for the space
+      local f_hhmmss=${FV3_OUTPUT_FH_hhmmss:${hhmmss_substring_len}:9}  # Extract HHH-MM-SS for the target lead time
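+      # Illustrative walk-through with hypothetical values: if
+      # FV3_OUTPUT_FH_hhmmss="000-15-00 003-00-00 006-00-00" and FH3=003,
+      # the substring before " 003-" is "000-15-00" (9 characters), the
+      # offset becomes 10, and f_hhmmss resolves to "003-00-00"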
"${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_atmf${FH3}.nc" "cubed_sphere_grid_atmf${FH3}.nc" + ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_sfcf${FH3}.nc" "cubed_sphere_grid_sfcf${FH3}.nc" + fi if [[ "${WRITE_DOPOST}" == ".true." ]]; then ${NLN} "${COMOUT_ATMOS_MASTER}/${RUN}.t${cyc}z.master.grb2f${FH3}" "GFSPRS.GrbF${FH2}" ${NLN} "${COMOUT_ATMOS_MASTER}/${RUN}.t${cyc}z.sfluxgrbf${FH3}.grib2" "GFSFLX.GrbF${FH2}" @@ -454,10 +467,19 @@ MOM6_postdet() { (( midpoint = last_fhr + interval/2 )) vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) - vdate_mid=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${midpoint} hours" +%Y%m%d%H) + #If OFFSET_START_HOUR is greater than 0, OFFSET_START_HOUR should be added to the midpoint for first lead time + if (( OFFSET_START_HOUR > 0 )) && (( fhr == FHOUT_OCN ));then + vdate_mid=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + $(( midpoint + OFFSET_START_HOUR )) hours" +%Y%m%d%H) + else + vdate_mid=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${midpoint} hours" +%Y%m%d%H) + fi # Native model output uses window midpoint in the filename, but we are mapping that to the end of the period for COM - source_file="ocn_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc" + if (( OFFSET_START_HOUR > 0 )) && (( fhr == FHOUT_OCN ));then + source_file="ocn_lead1_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc" + else + source_file="ocn_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc" + fi dest_file="${RUN}.ocean.t${cyc}z.${interval}hr_avg.f${fhr3}.nc" ${NLN} "${COMOUT_OCEAN_HISTORY}/${dest_file}" "${DATA}/MOM6_OUTPUT/${source_file}" diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index 6b72f574d8..d1a332716a 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -146,6 +146,7 @@ FV3_predet(){ fi # Convert output settings into an explicit list for FV3 + # Create an FV3 fhr list to be used in the filenames FV3_OUTPUT_FH="" local fhr=${FHMIN} if (( FHOUT_HF > 0 && FHMAX_HF > 0 )); then @@ -154,6 +155,35 @@ FV3_predet(){ fi FV3_OUTPUT_FH="${FV3_OUTPUT_FH} $(seq -s ' ' "${fhr}" "${FHOUT}" "${FHMAX}")" + # Create an FV3 fhr list to be used in the namelist + # The FV3 fhr list for the namelist and the FV3 fhr list for the filenames + # are only different when REPLAY_ICS is set to YES + if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then + local FV3_OUTPUT_FH_s + FV3_OUTPUT_FH_NML="$(echo "scale=5; ${OFFSET_START_HOUR}+(${DELTIM}/3600)" | bc -l)" + FV3_OUTPUT_FH_s=$(( OFFSET_START_HOUR * 3600 + DELTIM )) + local fhr=${FHOUT} + if (( FHOUT_HF > 0 && FHMAX_HF > 0 )); then + FV3_OUTPUT_FH_NML="${FV3_OUTPUT_FH_NML} $(seq -s ' ' "$(( OFFSET_START_HOUR + FHOUT_HF ))" "${FHOUT_HF}" "${FHMAX_HF}")" + FV3_OUTPUT_FH_s="${FV3_OUTPUT_FH_s} $(seq -s ' ' "$(( OFFSET_START_HOUR * 3600 + FHOUT_HF * 3600 ))" "$(( FHOUT_HF * 3600 ))" "$(( FHMAX_HF * 3600 ))")" + fhr=${FHMAX_HF} + fi + FV3_OUTPUT_FH_NML="${FV3_OUTPUT_FH_NML} $(seq -s ' ' "${fhr}" "${FHOUT}" "${FHMAX}")" + FV3_OUTPUT_FH_s="${FV3_OUTPUT_FH_s} $(seq -s ' ' "$(( fhr * 3600 ))" "$(( FHOUT * 3600 ))" "$(( FHMAX * 3600 ))")" + local hh mm ss s_total + FV3_OUTPUT_FH_hhmmss="" + for s_total in ${FV3_OUTPUT_FH_s}; do + # Convert seconds to HHH:MM:SS + (( ss = s_total, mm = ss / 60, ss %= 60, hh = mm / 60, mm %= 60 )) || true + FV3_OUTPUT_FH_hhmmss="${FV3_OUTPUT_FH_hhmmss} $(printf "%03d-%02d-%02d" "${hh}" "${mm}" "${ss}")" + done + # Create 
+  else  # If non-replay ICs are being used
+    # The FV3 fhr list for the namelist and the FV3 fhr list for the filenames
+    # are identical when REPLAY_ICS is set to NO
+    FV3_OUTPUT_FH_NML="${FV3_OUTPUT_FH}"
+  fi
+
  # Other options
  PREFIX_ATMINC=${PREFIX_ATMINC:-""} # allow ensemble to use recentered increment
diff --git a/ush/ocnice_extractvars.sh b/ush/ocnice_extractvars.sh
index f0660bb6ec..51276172b9 100755
--- a/ush/ocnice_extractvars.sh
+++ b/ush/ocnice_extractvars.sh
@@ -25,11 +25,11 @@ for (( nh = FHMIN_GFS; nh <= FHMAX_GFS; nh = nh + fhout_ocnice )); do
  fnh=$(printf "%3.3d" "${nh}")

  if [[ ${component_name} == "ocn" ]]; then
-    infile=${COMIN_OCEAN_NETCDF}/${RUN}.ocean.t${cyc}z.${datares}.f${fnh}.nc
+    infile=${COMIN_OCEAN_NETCDF}/${datares}/${RUN}.ocean.t${cyc}z.${datares}.f${fnh}.nc
    # For ocean products, add an argument to extract a subset of levels
    otherargs=(-d "${depthvar_name},""${zmin},""${zmax}")
  elif [[ ${component_name} == "ice" ]]; then
-    infile=${COMIN_ICE_NETCDF}/${RUN}.ice.t${cyc}z.${datares}.f${fnh}.nc
+    infile=${COMIN_ICE_NETCDF}/${datares}/${RUN}.ice.t${cyc}z.${datares}.f${fnh}.nc
    otherargs=()
  fi
  outfile=${subdata}/${RUN}.${component_name}.t${cyc}z.${datares}.f${fnh}.nc
diff --git a/ush/parsing_model_configure_FV3.sh b/ush/parsing_model_configure_FV3.sh
index d2dd434fff..7e8e065d26 100755
--- a/ush/parsing_model_configure_FV3.sh
+++ b/ush/parsing_model_configure_FV3.sh
@@ -48,7 +48,7 @@ local JCHUNK3D=$((2*restile))
 local KCHUNK3D=1
 local IMO=${LONB_IMO}
 local JMO=${LATB_JMO}
-local OUTPUT_FH=${FV3_OUTPUT_FH}
+local OUTPUT_FH=${FV3_OUTPUT_FH_NML}
 local IAU_OFFSET=${IAU_OFFSET:-0}

 # Ensure the template exists
diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh
index 6101c2f5e1..60f44a721a 100755
--- a/ush/parsing_namelists_FV3.sh
+++ b/ush/parsing_namelists_FV3.sh
@@ -42,6 +42,18 @@ local SDAY=${current_cycle:6:2}
 local CHOUR=${current_cycle:8:2}
 local MOM6_OUTPUT_DIR="./MOM6_OUTPUT"

+if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
+  local current_cycle_p1
+  current_cycle_p1=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${FHOUT_OCN} hours" +%Y%m%d%H)
+  local current_cycle_offset
+  current_cycle_offset=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${OFFSET_START_HOUR} hours" +%Y%m%d%H)
+  local SYEAR1=${current_cycle_p1:0:4}
+  local SMONTH1=${current_cycle_p1:4:2}
+  local SDAY1=${current_cycle_p1:6:2}
+  local CHOUR1=${current_cycle_p1:8:2}
+  local CHOUR_offset=${current_cycle_offset:8:2}
+fi
+
 atparse < "${template}" >> "diag_table"

From eba813f89f2d20bfc0326b2b96a474a0a9b3a710 Mon Sep 17 00:00:00 2001
From: Wei Huang
Date: Tue, 13 Aug 2024 06:57:25 -0600
Subject: [PATCH 3/3] Add support for forecast-only runs on AWS (#2711)

The purpose of this PR is to merge code that allows the global-workflow to
run on AWS, focusing first on ATM forecast-only runs.
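With these changes, jobs on AWS are launched through Slurm; the forecast
step, for example, runs along these lines (illustrative command only, the
actual arguments come from the env and resource settings):

    srun --mpi=pmi2 -l ./ufs_model.x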
Resolves: #2709

---------

Co-authored-by: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com>
---
 env/AWSPW.env                            |  6 ++-
 modulefiles/module_base.noaacloud.lua    | 49 ++++++++++++++++++++++++
 modulefiles/module_gwci.noaacloud.lua    | 15 ++++++++
 modulefiles/module_gwsetup.noaacloud.lua | 20 ++++++++++
 parm/config/gfs/config.base              |  8 ++++
 parm/config/gfs/config.resources         |  3 +-
 parm/config/gfs/config.resources.AWSPW   | 10 +++++
 sorc/build_all.sh                        |  2 +-
 sorc/build_ufs.sh                        | 29 ++-------------
 sorc/link_workflow.sh                    |  1 +
 ush/load_fv3gfs_modules.sh               |  2 +-
 ush/load_ufswm_modules.sh                | 45 +++++++----------------
 ush/module-setup.sh                      |  6 +--
 versions/build.noaacloud.ver             |  5 +++
 versions/run.noaacloud.ver               |  8 ++++
 workflow/hosts.py                        |  2 +-
 workflow/hosts/awspw.yaml                | 21 +++++-----
 workflow/rocoto/tasks.py                 |  5 ++-
 workflow/rocoto/workflow_xml.py          | 14 +++++--
 19 files changed, 169 insertions(+), 82 deletions(-)
 create mode 100644 modulefiles/module_base.noaacloud.lua
 create mode 100644 modulefiles/module_gwci.noaacloud.lua
 create mode 100644 modulefiles/module_gwsetup.noaacloud.lua
 create mode 100644 parm/config/gfs/config.resources.AWSPW
 create mode 100644 versions/build.noaacloud.ver
 create mode 100644 versions/run.noaacloud.ver

diff --git a/env/AWSPW.env b/env/AWSPW.env
index 867b9220ba..992281a1d7 100755
--- a/env/AWSPW.env
+++ b/env/AWSPW.env
@@ -9,8 +9,8 @@ fi

 step=$1

-export launcher="mpiexec.hydra"
-export mpmd_opt=""
+export launcher="srun -l --export=ALL"
+export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out"

 # Configure MPI environment
 export OMP_STACKSIZE=2048000
@@ -35,6 +35,8 @@ fi

 if [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then

+  export launcher="srun --mpi=pmi2 -l"
+
   (( nnodes = (ntasks+tasks_per_node-1)/tasks_per_node ))
   (( ufs_ntasks = nnodes*tasks_per_node ))
   # With ESMF threading, the model wants to use the full node
diff --git a/modulefiles/module_base.noaacloud.lua b/modulefiles/module_base.noaacloud.lua
new file mode 100644
index 0000000000..7997b618e4
--- /dev/null
+++ b/modulefiles/module_base.noaacloud.lua
@@ -0,0 +1,49 @@
+help([[
+Load environment to run GFS on noaacloud
+]])
+
+local spack_mod_path=(os.getenv("spack_mod_path") or "None")
+prepend_path("MODULEPATH", spack_mod_path)
+
+load(pathJoin("stack-intel", (os.getenv("stack_intel_ver") or "None")))
+load(pathJoin("stack-intel-oneapi-mpi", (os.getenv("stack_impi_ver") or "None")))
+load(pathJoin("python", (os.getenv("python_ver") or "None")))
+
+load(pathJoin("jasper", (os.getenv("jasper_ver") or "None")))
+load(pathJoin("libpng", (os.getenv("libpng_ver") or "None")))
+load(pathJoin("cdo", (os.getenv("cdo_ver") or "None")))
+--load(pathJoin("R", (os.getenv("R_ver") or "None")))
+
+load(pathJoin("hdf5", (os.getenv("hdf5_ver") or "None")))
+load(pathJoin("netcdf-c", (os.getenv("netcdf_c_ver") or "None")))
+load(pathJoin("netcdf-fortran", (os.getenv("netcdf_fortran_ver") or "None")))
+
+load(pathJoin("nco", (os.getenv("nco_ver") or "None")))
+load(pathJoin("prod_util", (os.getenv("prod_util_ver") or "None")))
+load(pathJoin("grib-util", (os.getenv("grib_util_ver") or "None")))
+load(pathJoin("g2tmpl", (os.getenv("g2tmpl_ver") or "None")))
+load(pathJoin("gsi-ncdiag", (os.getenv("gsi_ncdiag_ver") or "None")))
+load(pathJoin("crtm", (os.getenv("crtm_ver") or "None")))
+load(pathJoin("bufr", (os.getenv("bufr_ver") or "None")))
+load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None")))
+load(pathJoin("py-f90nml", (os.getenv("py_f90nml_ver") or "None")))
(os.getenv("py_netcdf4_ver") or "None"))) +load(pathJoin("py-pyyaml", (os.getenv("py_pyyaml_ver") or "None"))) +load(pathJoin("py-jinja2", (os.getenv("py_jinja2_ver") or "None"))) +load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None"))) +load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None"))) +--load(pathJoin("met", (os.getenv("met_ver") or "None"))) +--load(pathJoin("metplus", (os.getenv("metplus_ver") or "None"))) +load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None"))) + +setenv("WGRIB2","wgrib2") +setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None")) + +--prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles")) +--prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) +--load(pathJoin("prepobs", (os.getenv("prepobs_run_ver") or "None"))) + +--prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles")) +--load(pathJoin("fit2obs", (os.getenv("fit2obs_ver") or "None"))) + +whatis("Description: GFS run environment") diff --git a/modulefiles/module_gwci.noaacloud.lua b/modulefiles/module_gwci.noaacloud.lua new file mode 100644 index 0000000000..c3142cd60d --- /dev/null +++ b/modulefiles/module_gwci.noaacloud.lua @@ -0,0 +1,15 @@ +help([[ +Load environment to run GFS workflow setup scripts on noaacloud +]]) + +prepend_path("MODULEPATH", "/contrib/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core") + +load(pathJoin("stack-intel", os.getenv("2021.3.0"))) +load(pathJoin("stack-intel-oneapi-mpi", os.getenv("2021.3.0"))) + +load(pathJoin("netcdf-c", os.getenv("4.9.2"))) +load(pathJoin("netcdf-fortran", os.getenv("4.6.1"))) +load(pathJoin("nccmp","1.9.0.1")) +load(pathJoin("wgrib2", "2.0.8")) + +whatis("Description: GFS run setup CI environment") diff --git a/modulefiles/module_gwsetup.noaacloud.lua b/modulefiles/module_gwsetup.noaacloud.lua new file mode 100644 index 0000000000..f3845e8d72 --- /dev/null +++ b/modulefiles/module_gwsetup.noaacloud.lua @@ -0,0 +1,20 @@ +help([[ +Load environment to run GFS workflow setup scripts on noaacloud +]]) + +load(pathJoin("rocoto")) + +prepend_path("MODULEPATH", "/contrib/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core") + +local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.3.0" +local python_ver=os.getenv("python_ver") or "3.10.3" + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("python", python_ver)) +load("py-jinja2") +load("py-pyyaml") +load("py-numpy") +local git_ver=os.getenv("git_ver") or "1.8.3.1" +load(pathJoin("git", git_ver)) + +whatis("Description: GFS run setup environment") diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base index 2a7ffab0dd..e6a626cfe3 100644 --- a/parm/config/gfs/config.base +++ b/parm/config/gfs/config.base @@ -483,4 +483,12 @@ export OFFSET_START_HOUR=0 # Number of regional collectives to create soundings for export NUM_SND_COLLECTIVES=${NUM_SND_COLLECTIVES:-9} +# The tracker, genesis, and METplus jobs are not supported on AWS yet +# TODO: we should place these in workflow/hosts/awspw.yaml as part of AWS setup, not for general. 
+if [[ "${machine}" == "AWSPW" ]]; then + export DO_TRACKER="NO" + export DO_GENESIS="NO" + export DO_METP="NO" +fi + echo "END: config.base" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index a596629e76..cec2aef238 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -106,7 +106,8 @@ case ${machine} in ;; "AWSPW") export PARTITION_BATCH="compute" - max_tasks_per_node=40 + npe_node_max=36 + max_tasks_per_node=36 # TODO Supply a max mem/node value for AWS # shellcheck disable=SC2034 mem_node_max="" diff --git a/parm/config/gfs/config.resources.AWSPW b/parm/config/gfs/config.resources.AWSPW new file mode 100644 index 0000000000..8649713bb7 --- /dev/null +++ b/parm/config/gfs/config.resources.AWSPW @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +# AWS-specific job resources + +export is_exclusive="True" + +# shellcheck disable=SC2312 +for mem_var in $(env | grep '^memory_' | cut -d= -f1); do + unset "${mem_var}" +done diff --git a/sorc/build_all.sh b/sorc/build_all.sh index 28f52fd306..b6c4e6cc1c 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -145,7 +145,7 @@ build_opts["ww3prepost"]="${_wave_opt} ${_verbose_opt} ${_build_ufs_opt} ${_buil # Optional DA builds if [[ "${_build_ufsda}" == "YES" ]]; then - if [[ "${MACHINE_ID}" != "orion" && "${MACHINE_ID}" != "hera" && "${MACHINE_ID}" != "hercules" && "${MACHINE_ID}" != "wcoss2" ]]; then + if [[ "${MACHINE_ID}" != "orion" && "${MACHINE_ID}" != "hera" && "${MACHINE_ID}" != "hercules" && "${MACHINE_ID}" != "wcoss2" && "${MACHINE_ID}" != "noaacloud" ]]; then echo "NOTE: The GDAS App is not supported on ${MACHINE_ID}. Disabling build." else build_jobs["gdas"]=8 diff --git a/sorc/build_ufs.sh b/sorc/build_ufs.sh index 7e84eaebc2..44c8c7a2ad 100755 --- a/sorc/build_ufs.sh +++ b/sorc/build_ufs.sh @@ -41,30 +41,9 @@ COMPILE_NR=0 CLEAN_BEFORE=YES CLEAN_AFTER=NO -if [[ "${MACHINE_ID}" != "noaacloud" ]]; then - BUILD_JOBS=${BUILD_JOBS:-8} ./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "intel" "${CLEAN_BEFORE}" "${CLEAN_AFTER}" - mv "./tests/fv3_${COMPILE_NR}.exe" ./tests/ufs_model.x - mv "./tests/modules.fv3_${COMPILE_NR}.lua" ./tests/modules.ufs_model.lua - cp "./modulefiles/ufs_common.lua" ./tests/ufs_common.lua -else - - if [[ "${PW_CSP:-}" == "aws" ]]; then - set +x - # TODO: This will need to be addressed further when the EPIC stacks are available/supported. - module use /contrib/spack-stack/envs/ufswm/install/modulefiles/Core - module load stack-intel - module load stack-intel-oneapi-mpi - module load ufs-weather-model-env/1.0.0 - # TODO: It is still uncertain why this is the only module that is - # missing; check the spack build as this needed to be added manually. - module load w3emc/2.9.2 # TODO: This has similar issues for the EPIC stack. 
-    module list
-    set -x
-  fi
-
-  export CMAKE_FLAGS="${MAKE_OPT}"
-  BUILD_JOBS=${BUILD_JOBS:-8} ./build.sh
-  mv "${cwd}/ufs_model.fd/build/ufs_model" "${cwd}/ufs_model.fd/tests/ufs_model.x"
-fi
+BUILD_JOBS=${BUILD_JOBS:-8} ./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "intel" "${CLEAN_BEFORE}" "${CLEAN_AFTER}"
+mv "./tests/fv3_${COMPILE_NR}.exe" ./tests/ufs_model.x
+mv "./tests/modules.fv3_${COMPILE_NR}.lua" ./tests/modules.ufs_model.lua
+cp "./modulefiles/ufs_common.lua" ./tests/ufs_common.lua

 exit 0
diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh
index be912292fe..ae30e7a645 100755
--- a/sorc/link_workflow.sh
+++ b/sorc/link_workflow.sh
@@ -76,6 +76,7 @@ case "${machine}" in
   "jet") FIX_DIR="/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix" ;;
   "s4") FIX_DIR="/data/prod/glopara/fix" ;;
   "gaea") FIX_DIR="/gpfs/f5/ufs-ard/world-shared/global/glopara/data/fix" ;;
+  "noaacloud") FIX_DIR="/contrib/global-workflow-shared-data/fix" ;;
   *)
     echo "FATAL: Unknown target machine ${machine}, couldn't set FIX_DIR"
     exit 1
diff --git a/ush/load_fv3gfs_modules.sh b/ush/load_fv3gfs_modules.sh
index 5f6afb7e35..ff6f64cece 100755
--- a/ush/load_fv3gfs_modules.sh
+++ b/ush/load_fv3gfs_modules.sh
@@ -20,7 +20,7 @@ source "${HOMEgfs}/versions/run.ver"
 module use "${HOMEgfs}/modulefiles"

 case "${MACHINE_ID}" in
-  "wcoss2" | "hera" | "orion" | "hercules" | "gaea" | "jet" | "s4")
+  "wcoss2" | "hera" | "orion" | "hercules" | "gaea" | "jet" | "s4" | "noaacloud")
     module load "module_base.${MACHINE_ID}"
     ;;
   *)
diff --git a/ush/load_ufswm_modules.sh b/ush/load_ufswm_modules.sh
index 6477a8ff39..f00358095d 100755
--- a/ush/load_ufswm_modules.sh
+++ b/ush/load_ufswm_modules.sh
@@ -11,40 +11,21 @@ ulimit_s=$( ulimit -S -s )
 source "${HOMEgfs}/ush/detect_machine.sh"
 source "${HOMEgfs}/ush/module-setup.sh"

-if [[ "${MACHINE_ID}" != "noaacloud" ]]; then
-  module use "${HOMEgfs}/sorc/ufs_model.fd/modulefiles"
-  module load "ufs_${MACHINE_ID}.intel"
-  module load prod_util
-  if [[ "${MACHINE_ID}" = "wcoss2" ]]; then
-    module load cray-pals
-    module load cfp
-    module load libjpeg
-    module load craype-network-ucx
-    module load cray-mpich-ucx
-  else
-    export UTILROOT=${prod_util_ROOT}
-  fi
-  module load wgrib2
-  export WGRIB2=wgrib2
-fi
-if [[ "${MACHINE_ID}" == "noaacloud" ]]; then
-  if [[ "${PW_CSP:-}" = "aws" ]]; then
-    # TODO: This can be cleaned-up; most of this is a hack for now.
-    module use "/contrib/spack-stack/envs/ufswm/install/modulefiles/Core"
-    module load "stack-intel"
-    module load "stack-intel-oneapi-mpi"
-    module use -a "/contrib/spack-stack/miniconda/modulefiles/miniconda/"
-    module load "py39_4.12.0"
-    module load "ufs-weather-model-env/1.0.0"
-    export NETCDF="/contrib/spack-stack/miniconda/apps/miniconda/py39_4.12.0"
-    # TODO: Are there plans for EPIC to maintain this package or should GW provide support?
- export UTILROOT="/contrib/global-workflow/NCEPLIBS-prod_util" - export PATH="${PATH}:/contrib/global-workflow/bin" - ndate_path="$(command -v ndate)" - export NDATE="${ndate_path}" - fi +module use "${HOMEgfs}/sorc/ufs_model.fd/modulefiles" +module load "ufs_${MACHINE_ID}.intel" +module load prod_util +if [[ "${MACHINE_ID}" = "wcoss2" ]]; then + module load cray-pals + module load cfp + module load libjpeg + module load craype-network-ucx + module load cray-mpich-ucx +else + export UTILROOT=${prod_util_ROOT} fi +module load wgrib2 +export WGRIB2=wgrib2 module list unset MACHINE_ID diff --git a/ush/module-setup.sh b/ush/module-setup.sh index b4ec3edafa..398562652d 100755 --- a/ush/module-setup.sh +++ b/ush/module-setup.sh @@ -92,10 +92,8 @@ elif [[ ${MACHINE_ID} = discover* ]]; then # TODO: This can likely be made more general once other cloud # platforms come online. elif [[ ${MACHINE_ID} = "noaacloud" ]]; then - - export SPACK_ROOT=/contrib/global-workflow/spack-stack/spack - export PATH=${PATH}:${SPACK_ROOT}/bin - . "${SPACK_ROOT}"/share/spack/setup-env.sh + # We are on NOAA Cloud + module purge else echo WARNING: UNKNOWN PLATFORM 1>&2 diff --git a/versions/build.noaacloud.ver b/versions/build.noaacloud.ver new file mode 100644 index 0000000000..ba47313675 --- /dev/null +++ b/versions/build.noaacloud.ver @@ -0,0 +1,5 @@ +export stack_intel_ver=2021.3.0 +export stack_impi_ver=2021.3.0 +export spack_env=gsi-addon-env +source "${HOMEgfs:-}/versions/build.spack.ver" +export spack_mod_path="/contrib/spack-stack/spack-stack-${spack_stack_ver}/envs/gsi-addon-env/install/modulefiles/Core" diff --git a/versions/run.noaacloud.ver b/versions/run.noaacloud.ver new file mode 100644 index 0000000000..4c9ac3cd42 --- /dev/null +++ b/versions/run.noaacloud.ver @@ -0,0 +1,8 @@ +export stack_intel_ver=2021.3.0 +export stack_impi_ver=2021.3.0 +export spack_env=gsi-addon-env + +source "${HOMEgfs:-}/versions/run.spack.ver" +export spack_mod_path="/contrib/spack-stack/spack-stack-${spack_stack_ver}/envs/gsi-addon-env/install/modulefiles/Core" + +export cdo_ver=2.2.0 diff --git a/workflow/hosts.py b/workflow/hosts.py index cd0cfe0083..eced460fd1 100644 --- a/workflow/hosts.py +++ b/workflow/hosts.py @@ -52,7 +52,7 @@ def detect(cls): elif container is not None: machine = 'CONTAINER' elif pw_csp is not None: - if pw_csp.lower() not in ['azure', 'aws', 'gcp']: + if pw_csp.lower() not in ['azure', 'aws', 'google']: raise ValueError( f'NOAA cloud service provider "{pw_csp}" is not supported.') machine = f"{pw_csp.upper()}PW" diff --git a/workflow/hosts/awspw.yaml b/workflow/hosts/awspw.yaml index 046dafcfa7..f925f54008 100644 --- a/workflow/hosts/awspw.yaml +++ b/workflow/hosts/awspw.yaml @@ -1,12 +1,12 @@ -BASE_GIT: '/scratch1/NCEPDEV/global/glopara/git' #TODO: This does not yet exist. -DMPDIR: '/scratch1/NCEPDEV/global/glopara/dump' # TODO: This does not yet exist. -PACKAGEROOT: '/scratch1/NCEPDEV/global/glopara/nwpara' #TODO: This does not yet exist. -COMINsyn: '/scratch1/NCEPDEV/global/glopara/com/gfs/prod/syndat' #TODO: This does not yet exist. +BASE_GIT: '' #TODO: This does not yet exist. +DMPDIR: '' # TODO: This does not yet exist. +PACKAGEROOT: '' #TODO: This does not yet exist. +COMINsyn: '' #TODO: This does not yet exist. 
 HOMEDIR: '/contrib/${USER}'
-STMP: '/lustre/${USER}/stmp2/'
-PTMP: '/lustre/${USER}/stmp4/'
-NOSCRUB: ${HOMEDIR}
-ACCOUNT: hwufscpldcld
+STMP: '/lustre/${USER}/stmp/'
+PTMP: '/lustre/${USER}/ptmp/'
+NOSCRUB: '${HOMEDIR}'
+ACCOUNT: '${USER}'
 SCHEDULER: slurm
 QUEUE: batch
 QUEUE_SERVICE: batch
@@ -16,10 +16,11 @@ RESERVATION: ''
 CLUSTERS: ''
 CHGRP_RSTPROD: 'YES'
 CHGRP_CMD: 'chgrp rstprod' # TODO: This is not yet supported.
-HPSSARCH: 'YES'
+HPSSARCH: 'NO'
 HPSS_PROJECT: emc-global #TODO: See `ATARDIR` below.
+BASE_CPLIC: '/bucket/global-workflow-shared-data/ICSDIR/prototype_ICs'
 LOCALARCH: 'NO'
-ATARDIR: '/NCEPDEV/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}' # TODO: This will not yet work from AWS.
+ATARDIR: '' # TODO: This will not yet work from AWS.
 MAKE_NSSTBUFR: 'NO'
 MAKE_ACFTBUFR: 'NO'
 SUPPORTED_RESOLUTIONS: ['C48', 'C96'] # TODO: Test and support all cubed-sphere resolutions.
diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py
index 353d2aa943..64952498d4 100644
--- a/workflow/rocoto/tasks.py
+++ b/workflow/rocoto/tasks.py
@@ -209,7 +209,10 @@ def get_resource(self, task_name):
             else:
                 native += ':shared'
         elif scheduler in ['slurm']:
-            native = '--export=NONE'
+            if task_config.get('is_exclusive', False):
+                native = '--exclusive'
+            else:
+                native = '--export=NONE'
             if task_config['RESERVATION'] != "":
                 native += '' if task_name in Tasks.SERVICE_TASKS else ' --reservation=' + task_config['RESERVATION']
             if task_config.get('CLUSTERS', "") not in ["", '@CLUSTERS@']:
diff --git a/workflow/rocoto/workflow_xml.py b/workflow/rocoto/workflow_xml.py
index 11b2cdfc45..ca54f3a5bb 100644
--- a/workflow/rocoto/workflow_xml.py
+++ b/workflow/rocoto/workflow_xml.py
@@ -157,10 +157,16 @@ def _write_crontab(self, crontab_file: str = None, cronint: int = 5) -> None:

         strings = ['',
                    f'#################### {pslot} ####################',
-                   f'MAILTO="{replyto}"',
-                   f'{cronintstr} {rocotorunstr}',
-                   '#################################################################',
-                   '']
+                   f'MAILTO="{replyto}"'
+                   ]
+        # AWS needs 'SHELL' and 'BASH_ENV' defined in the crontab, or the job won't start
+        if os.environ.get('PW_CSP', None) in ['aws', 'azure', 'google']:
+            strings.extend(['SHELL="/bin/bash"',
+                            'BASH_ENV="/etc/bashrc"'
+                            ])
+        strings.extend([f'{cronintstr} {rocotorunstr}',
+                        '#################################################################',
+                        ''])

         if crontab_file is None:
             crontab_file = f"{expdir}/{pslot}.crontab"
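For reference, a sketch of the crontab header this now produces on AWS
(hypothetical PSLOT "test" and paths; the actual entry comes from
rocotorunstr, and cronint defaults to 5 minutes):

    #################### test ####################
    MAILTO=""
    SHELL="/bin/bash"
    BASH_ENV="/etc/bashrc"
    */5 * * * * rocotorun -d /path/to/test.db -w /path/to/test.xml
    #################################################################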