Add fixes to products for when REPLAY IC's are used (#2755)
This PR fixes a couple of issues that arise when replay initial conditions
are used. These issues occur only when `REPLAY_ICS` is set to `YES` and
`OFFSET_START_HOUR` is greater than `0`. The following items are
addressed in this PR:

1. Fix an issue that prevents the ocean_prod tasks from being triggered (issue
[#2725](#2725)). A new
diag_table (called `diag_table_replay`) was added that is used only when
`REPLAY_ICS` is set to `YES`. This diag_table accounts for the offset
that occurs when replay ICs are used.
2. Fix an issue that prevents the atmos_prod tasks from being triggered for the
first lead time (e.g. f003) (issue
[#2754](#2754)). When
`OFFSET_START_HOUR` is greater than `0`, the first `fhr` is set to
`${OFFSET_START_HOUR}+(${DELTIM}/3600)` in `forecast_predet.sh`, which
allows data for the first lead time to be generated. The filename for
this lead time is still labelled with `OFFSET_START_HOUR` (a worked
example is sketched below this list).
3. Minor modifications were made to the extractvars task so that atmos
data from replay cases can be processed.
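
As an illustration of item 2, here is a minimal, hypothetical sketch of how the first output hour and its filename label relate when `OFFSET_START_HOUR` is greater than `0`. The values below (`OFFSET_START_HOUR=3`, `DELTIM=450`) are chosen for illustration only; the sketch simply mirrors the arithmetic added to `forecast_predet.sh` and the linking logic in `forecast_postdet.sh`.

```bash
#!/usr/bin/env bash
# Hypothetical values, for illustration only
OFFSET_START_HOUR=3   # replay offset in hours
DELTIM=450            # model time step in seconds

# First output hour written to the FV3 namelist: OFFSET_START_HOUR + DELTIM/3600
first_fhr=$(echo "scale=5; ${OFFSET_START_HOUR}+(${DELTIM}/3600)" | bc -l)
echo "first namelist output hour: ${first_fhr}"    # 3.12500

# Because this first hour is fractional, the model labels its history file
# with an HHH-MM-SS token rather than a whole hour.
s_total=$(( OFFSET_START_HOUR * 3600 + DELTIM ))
(( ss = s_total, mm = ss / 60, ss %= 60, hh = mm / 60, mm %= 60 )) || true
printf 'model history token: %03d-%02d-%02d\n' "${hh}" "${mm}" "${ss}"   # 003-07-30

# The file copied to COM keeps the whole-hour label
printf 'COM label: atmf%03d.nc\n' "${OFFSET_START_HOUR}"                 # atmf003.nc
```

With these example values the model writes `atmf003-07-30.nc`, which `forecast_postdet.sh` links to the COM file labelled `f003`.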

This PR was split from PR #2680. 

Refs #2725, #2754 

---------

Co-authored-by: Walter Kolczynski - NOAA <[email protected]>
EricSinsky-NOAA and WalterKolczynski-NOAA committed Aug 13, 2024
1 parent 1d53953 commit 5699167
Showing 11 changed files with 449 additions and 29 deletions.
10 changes: 5 additions & 5 deletions parm/config/gefs/config.extractvars
@@ -9,12 +9,12 @@ echo "BEGIN: config.extractvars"

export COMPRSCMD=${COMPRSCMD:-bzip2}

-export compress_ocn=0 #1: Compress extracted ocean product, 0: Do not compress extracted ocean product
-export compress_ice=0 #1: Compress extracted ice product, 0: Do not compress extracted ice product
+export compress_ocn=1 #1: Compress extracted ocean product, 0: Do not compress extracted ocean product
+export compress_ice=1 #1: Compress extracted ice product, 0: Do not compress extracted ice product

export ocnres="5p00" # Resolution of ocean products
export iceres="5p00" # Resolution of ice products
export wavres="5p00" # Resolution of wave products
export ocnres="1p00" # Resolution of ocean products
export iceres="native" # Resolution of ice products
export wavres="0p25" # Resolution of wave products

export depthvar_name="z_l" # Name of depth variable in NetCDF ocean products
export zmin="0." # Minimum depth to extract from NetCDF ocean products
6 changes: 5 additions & 1 deletion parm/config/gefs/config.fcst
@@ -247,7 +247,11 @@ export FSICS="0"

#---------------------------------------------------------------------
# Write more variables to output
-export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table"
+if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
+export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_replay"
+else
+export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table"
+fi

# Write gfs restart files to rerun fcst from any break point
export restart_interval=${restart_interval_gfs:-12}
2 changes: 1 addition & 1 deletion parm/config/gefs/config.resources
@@ -272,7 +272,7 @@ case ${step} in
export walltime_gefs="00:30:00"
export ntasks_gefs=1
export threads_per_task_gefs=1
export tasks_per_node_gefs="${ntasks}"
export tasks_per_node_gefs="${ntasks_gefs}"
export walltime_gfs="${walltime_gefs}"
export ntasks_gfs="${ntasks_gefs}"
export threads_per_tasks_gfs="${threads_per_task_gefs}"
14 changes: 8 additions & 6 deletions parm/post/oceanice_products_gefs.yaml
@@ -39,14 +39,15 @@ ocean:
- ["{{ COM_OCEAN_HISTORY }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ocean.nc"]
data_out:
mkdir:
- "{{ COM_OCEAN_NETCDF }}"
- "{{ COM_OCEAN_NETCDF }}/native"
{% for grid in product_grids %}
- "{{ COM_OCEAN_NETCDF }}/{{ grid }}"
- "{{ COM_OCEAN_GRIB }}/{{ grid }}"
{% endfor %}
copy:
- ["{{ DATA }}/ocean_subset.nc", "{{ COM_OCEAN_NETCDF }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
- ["{{ DATA }}/ocean_subset.nc", "{{ COM_OCEAN_NETCDF }}/native/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
{% for grid in product_grids %}
- ["{{ DATA }}/ocean.{{ grid }}.nc", "{{ COM_OCEAN_NETCDF }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.nc"]
- ["{{ DATA }}/ocean.{{ grid }}.nc", "{{ COM_OCEAN_NETCDF }}/{{ grid }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.nc"]
{% endfor %}

ice:
@@ -62,12 +63,13 @@
- ["{{ COM_ICE_HISTORY }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ice.nc"]
data_out:
mkdir:
- "{{ COM_ICE_NETCDF }}"
- "{{ COM_ICE_NETCDF }}/native"
{% for grid in product_grids %}
- "{{ COM_ICE_NETCDF }}/{{ grid }}"
- "{{ COM_ICE_GRIB }}/{{ grid }}"
{% endfor %}
copy:
- ["{{ DATA }}/ice_subset.nc", "{{ COM_ICE_NETCDF }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
- ["{{ DATA }}/ice_subset.nc", "{{ COM_ICE_NETCDF }}/native/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
{% for grid in product_grids %}
- ["{{ DATA }}/ice.{{ grid }}.nc", "{{ COM_ICE_NETCDF }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.nc"]
- ["{{ DATA }}/ice.{{ grid }}.nc", "{{ COM_ICE_NETCDF }}/{{ grid }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.nc"]
{% endfor %}
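
For context, the revised `mkdir` and `copy` targets above place each ocean and ice netCDF product under a per-grid subdirectory of the netCDF COM directory instead of a single flat directory. Assuming hypothetical values (`RUN=gefs`, cycle `t00z`, a single product grid `1p00`, forecast hour 6), the resulting layout would look roughly like this:

```text
COM_OCEAN_NETCDF/
  native/gefs.ocean.t00z.native.f006.nc
  1p00/gefs.ocean.t00z.1p00.f006.nc
COM_ICE_NETCDF/
  native/gefs.ice.t00z.native.f006.nc
  1p00/gefs.ice.t00z.1p00.f006.nc
```

The change to `ush/ocnice_extractvars.sh` later in this commit reads from the matching `${datares}` subdirectory.
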
337 changes: 337 additions & 0 deletions parm/ufs/fv3/diag_table_replay

Large diffs are not rendered by default.

25 changes: 19 additions & 6 deletions ush/atmos_extractvars.sh
@@ -31,7 +31,16 @@ for outtype in "f2d" "f3d"; do
outdirpre="${subdata}/${outtype}"
[[ -d "${outdirpre}" ]] || mkdir -p "${outdirpre}"

-nh=${FHMIN}
+if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
+if [[ "${outtype}" == "f2d" ]]; then
+nh=${OFFSET_START_HOUR}
+elif [[ "${outtype}" == "f3d" ]]; then
+nh=${FHOUT_GFS}
+fi
+else
+nh=${FHMIN}
+fi
+
while (( nh <= FHMAX_GFS )); do
fnh=$(printf "%3.3d" "${nh}")

@@ -45,11 +54,15 @@ for outtype in "f2d" "f3d"; do
outres="1p00"
fi

-if (( nh <= FHMAX_HF_GFS )); then
-outfreq=${FHOUT_HF_GFS}
-else
-outfreq=${FHOUT_GFS}
-fi
+if [[ "${outtype}" == "f2d" ]]; then
+if (( nh < FHMAX_HF_GFS )); then
+outfreq=${FHOUT_HF_GFS}
+else
+outfreq=${FHOUT_GFS}
+fi
+elif [[ "${outtype}" == "f3d" ]]; then
+outfreq=${FHOUT_GFS}
+fi

com_var="COMIN_ATMOS_GRIB_${outres}"
infile1="${!com_var}/${RUN}.t${cyc}z.pgrb2.${outres}.f${fnh}"
36 changes: 29 additions & 7 deletions ush/forecast_postdet.sh
@@ -206,11 +206,24 @@ EOF
for fhr in ${FV3_OUTPUT_FH}; do
FH3=$(printf %03i "${fhr}")
FH2=$(printf %02i "${fhr}")
${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc" "atmf${FH3}.nc"
${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" "sfcf${FH3}.nc"
${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt" "log.atm.f${FH3}"
${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_atmf${FH3}.nc" "cubed_sphere_grid_atmf${FH3}.nc"
${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_sfcf${FH3}.nc" "cubed_sphere_grid_sfcf${FH3}.nc"
# When replaying, the time format outputted by model in filename is HH-MM-SS
# because first fhr is a decimal number
if [[ ${REPLAY_ICS:-NO} == "YES" ]] && (( fhr >= OFFSET_START_HOUR )); then
local hhmmss_substring=${FV3_OUTPUT_FH_hhmmss/" ${FH3}-"*/} # Extract substring that contains all lead times up to the one space before target lead HHH-MM-SS
local hhmmss_substring_len=$(( ${#hhmmss_substring} + 1 )) # Get the size of the substring and add 1 to account for space
local f_hhmmss=${FV3_OUTPUT_FH_hhmmss:${hhmmss_substring_len}:9} # extract HHH-MM-SS for target lead time
${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc" "atmf${f_hhmmss}.nc"
${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" "sfcf${f_hhmmss}.nc"
${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt" "log.atm.f${f_hhmmss}"
${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_atmf${FH3}.nc" "cubed_sphere_grid_atmf${f_hhmmss}.nc"
${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_sfcf${FH3}.nc" "cubed_sphere_grid_sfcf${f_hhmmss}.nc"
else
${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc" "atmf${FH3}.nc"
${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" "sfcf${FH3}.nc"
${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt" "log.atm.f${FH3}"
${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_atmf${FH3}.nc" "cubed_sphere_grid_atmf${FH3}.nc"
${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_sfcf${FH3}.nc" "cubed_sphere_grid_sfcf${FH3}.nc"
fi
if [[ "${WRITE_DOPOST}" == ".true." ]]; then
${NLN} "${COMOUT_ATMOS_MASTER}/${RUN}.t${cyc}z.master.grb2f${FH3}" "GFSPRS.GrbF${FH2}"
${NLN} "${COMOUT_ATMOS_MASTER}/${RUN}.t${cyc}z.sfluxgrbf${FH3}.grib2" "GFSFLX.GrbF${FH2}"
@@ -454,10 +467,19 @@ MOM6_postdet() {
(( midpoint = last_fhr + interval/2 ))

vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
-vdate_mid=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${midpoint} hours" +%Y%m%d%H)
+#If OFFSET_START_HOUR is greater than 0, OFFSET_START_HOUR should be added to the midpoint for first lead time
+if (( OFFSET_START_HOUR > 0 )) && (( fhr == FHOUT_OCN ));then
+vdate_mid=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + $(( midpoint + OFFSET_START_HOUR )) hours" +%Y%m%d%H)
+else
+vdate_mid=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${midpoint} hours" +%Y%m%d%H)
+fi

# Native model output uses window midpoint in the filename, but we are mapping that to the end of the period for COM
source_file="ocn_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc"
if (( OFFSET_START_HOUR > 0 )) && (( fhr == FHOUT_OCN ));then
source_file="ocn_lead1_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc"
else
source_file="ocn_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc"
fi
dest_file="${RUN}.ocean.t${cyc}z.${interval}hr_avg.f${fhr3}.nc"
${NLN} "${COMOUT_OCEAN_HISTORY}/${dest_file}" "${DATA}/MOM6_OUTPUT/${source_file}"

30 changes: 30 additions & 0 deletions ush/forecast_predet.sh
@@ -146,6 +146,7 @@ FV3_predet(){
fi

# Convert output settings into an explicit list for FV3
+# Create an FV3 fhr list to be used in the filenames
FV3_OUTPUT_FH=""
local fhr=${FHMIN}
if (( FHOUT_HF > 0 && FHMAX_HF > 0 )); then
@@ -154,6 +155,35 @@
fi
FV3_OUTPUT_FH="${FV3_OUTPUT_FH} $(seq -s ' ' "${fhr}" "${FHOUT}" "${FHMAX}")"

+# Create an FV3 fhr list to be used in the namelist
+# The FV3 fhr list for the namelist and the FV3 fhr list for the filenames
+# are only different when REPLAY_ICS is set to YES
+if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
+local FV3_OUTPUT_FH_s
+FV3_OUTPUT_FH_NML="$(echo "scale=5; ${OFFSET_START_HOUR}+(${DELTIM}/3600)" | bc -l)"
+FV3_OUTPUT_FH_s=$(( OFFSET_START_HOUR * 3600 + DELTIM ))
+local fhr=${FHOUT}
+if (( FHOUT_HF > 0 && FHMAX_HF > 0 )); then
+FV3_OUTPUT_FH_NML="${FV3_OUTPUT_FH_NML} $(seq -s ' ' "$(( OFFSET_START_HOUR + FHOUT_HF ))" "${FHOUT_HF}" "${FHMAX_HF}")"
+FV3_OUTPUT_FH_s="${FV3_OUTPUT_FH_s} $(seq -s ' ' "$(( OFFSET_START_HOUR * 3600 + FHOUT_HF * 3600 ))" "$(( FHOUT_HF * 3600 ))" "$(( FHMAX_HF * 3600 ))")"
+fhr=${FHMAX_HF}
+fi
+FV3_OUTPUT_FH_NML="${FV3_OUTPUT_FH_NML} $(seq -s ' ' "${fhr}" "${FHOUT}" "${FHMAX}")"
+FV3_OUTPUT_FH_s="${FV3_OUTPUT_FH_s} $(seq -s ' ' "$(( fhr * 3600 ))" "$(( FHOUT * 3600 ))" "$(( FHMAX * 3600 ))")"
+local hh mm ss s_total
+FV3_OUTPUT_FH_hhmmss=""
+for s_total in ${FV3_OUTPUT_FH_s}; do
+# Convert seconds to HHH:MM:SS
+(( ss = s_total, mm = ss / 60, ss %= 60, hh = mm / 60, mm %= 60 )) || true
+FV3_OUTPUT_FH_hhmmss="${FV3_OUTPUT_FH_hhmmss} $(printf "%03d-%02d-%02d" "${hh}" "${mm}" "${ss}")"
+done
+# Create a string from an array
+else # If non-replay ICs are being used
+# The FV3 fhr list for the namelist and the FV3 fhr list for the filenames
+# are identical when REPLAY_ICS is set to NO
+FV3_OUTPUT_FH_NML="${FV3_OUTPUT_FH}"
+fi
+
# Other options
PREFIX_ATMINC=${PREFIX_ATMINC:-""} # allow ensemble to use recentered increment

4 changes: 2 additions & 2 deletions ush/ocnice_extractvars.sh
@@ -25,11 +25,11 @@ for (( nh = FHMIN_GFS; nh <= FHMAX_GFS; nh = nh + fhout_ocnice )); do
fnh=$(printf "%3.3d" "${nh}")

if [[ ${component_name} == "ocn" ]]; then
-infile=${COMIN_OCEAN_NETCDF}/${RUN}.ocean.t${cyc}z.${datares}.f${fnh}.nc
+infile=${COMIN_OCEAN_NETCDF}/${datares}/${RUN}.ocean.t${cyc}z.${datares}.f${fnh}.nc
# For ocean products, add an argument to extract a subset of levels
otherargs=(-d "${depthvar_name},""${zmin},""${zmax}")
elif [[ ${component_name} == "ice" ]]; then
-infile=${COMIN_ICE_NETCDF}/${RUN}.ice.t${cyc}z.${datares}.f${fnh}.nc
+infile=${COMIN_ICE_NETCDF}/${datares}/${RUN}.ice.t${cyc}z.${datares}.f${fnh}.nc
otherargs=()
fi
outfile=${subdata}/${RUN}.${component_name}.t${cyc}z.${datares}.f${fnh}.nc
2 changes: 1 addition & 1 deletion ush/parsing_model_configure_FV3.sh
@@ -48,7 +48,7 @@ local JCHUNK3D=$((2*restile))
local KCHUNK3D=1
local IMO=${LONB_IMO}
local JMO=${LATB_JMO}
-local OUTPUT_FH=${FV3_OUTPUT_FH}
+local OUTPUT_FH=${FV3_OUTPUT_FH_NML}
local IAU_OFFSET=${IAU_OFFSET:-0}

# Ensure the template exists
12 changes: 12 additions & 0 deletions ush/parsing_namelists_FV3.sh
@@ -42,6 +42,18 @@ local SDAY=${current_cycle:6:2}
local CHOUR=${current_cycle:8:2}
local MOM6_OUTPUT_DIR="./MOM6_OUTPUT"

if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
local current_cycle_p1
current_cycle_p1=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${FHOUT_OCN} hours" +%Y%m%d%H)
local current_cycle_offset
current_cycle_offset=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${OFFSET_START_HOUR} hours" +%Y%m%d%H)
local SYEAR1=${current_cycle_p1:0:4}
local SMONTH1=${current_cycle_p1:4:2}
local SDAY1=${current_cycle_p1:6:2}
local CHOUR1=${current_cycle_p1:8:2}
local CHOUR_offset=${current_cycle_offset:8:2}
fi

atparse < "${template}" >> "diag_table"


