Skip to content
Open
Show file tree
Hide file tree
Changes from 15 commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
90f0c6d
initial commits for reducing size of ocean and sea ice output
NeilBarton-NOAA Oct 30, 2025
c9483c1
edits to MOM6 diag_table
NeilBarton-NOAA Oct 30, 2025
76f89d0
edits for file name differences
NeilBarton-NOAA Oct 31, 2025
f8f6fe8
changing to 30L zgrid MOM6 file
NeilBarton-NOAA Oct 31, 2025
5ad905c
gfs test and edits
NeilBarton-NOAA Nov 3, 2025
94237f7
Merge branch 'develop' into ocnice_products
NeilBarton-NOAA Nov 3, 2025
d7abd79
fix python code style
NeilBarton-NOAA Nov 3, 2025
d2ed372
remove trailing white space
NeilBarton-NOAA Nov 3, 2025
90875e1
Merge branch 'ocnice_products' of github.com:NeilBarton-NOAA/global-w…
NeilBarton-NOAA Nov 3, 2025
46b0bc2
fixing python errors
NeilBarton-NOAA Nov 3, 2025
b7b220f
fix python style errors
NeilBarton-NOAA Nov 3, 2025
6fcf46f
try again
NeilBarton-NOAA Nov 3, 2025
c9ad5e3
white space
NeilBarton-NOAA Nov 3, 2025
156580a
Update parm/archive/ice_native.yaml.j2
NeilBarton-NOAA Nov 3, 2025
486f690
Update parm/archive/ocean_native.yaml.j2
NeilBarton-NOAA Nov 3, 2025
386a01d
corrected and to or
NeilBarton-NOAA Nov 3, 2025
b41f6b6
Merge branch 'ocnice_products' of github.com:NeilBarton-NOAA/global-w…
NeilBarton-NOAA Nov 3, 2025
1943a47
Update parm/post/oceanice_products_gfs.yaml
NeilBarton-NOAA Nov 3, 2025
3091fa1
Update subset list in oceanice_products_gfs.yaml
NeilBarton-NOAA Nov 5, 2025
93a10fe
removing compressing of COMROT files and adding namelist options
NeilBarton-NOAA Nov 6, 2025
fecb2ab
Update dev/parm/config/sfs/config.nsst.j2
NeilBarton-NOAA Nov 6, 2025
b3f10b3
addressing comments
NeilBarton-NOAA Nov 6, 2025
b7e343a
Update parm/post/oceanice_products_sfs.yaml
NeilBarton-NOAA Nov 12, 2025
0246dd6
changing to or
NeilBarton-NOAA Nov 12, 2025
29f7357
remove compressed in COMOUT overwrite
NeilBarton-NOAA Nov 12, 2025
2b705db
defining variable outside of yaml
NeilBarton-NOAA Nov 12, 2025
98b205b
Merge branch 'develop' into ocnice_products
NeilBarton-NOAA Nov 12, 2025
08f4a5a
add option to copy netcdf lat/lon products to COMROT
NeilBarton-NOAA Nov 12, 2025
e09e4d1
Merge branch 'ocnice_products' of github.com:NeilBarton-NOAA/global-w…
NeilBarton-NOAA Nov 12, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion dev/parm/config/gefs/config.nsst
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ echo "BEGIN: config.nsst"
export NST_MODEL=2

# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON,
export NST_SPINUP=0
export NST_SPINUP="{{ NST_SPINUP }}"

# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON
export NST_RESV=0
Expand Down
3 changes: 2 additions & 1 deletion dev/parm/config/gefs/config.oceanice_products
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,10 @@ echo "BEGIN: config.oceanice_products"
# Get task specific resources
source "${EXPDIR}/config.resources" oceanice_products

export write_grib2=False
export write_netcdf=False
export OCEANICEPRODUCTS_CONFIG="${PARMgfs}/post/oceanice_products_${NET}.yaml"

# Maximum number of rocoto tasks per member
export MAX_TASKS=25

echo "END: config.oceanice_products"
2 changes: 2 additions & 0 deletions dev/parm/config/gfs/config.oceanice_products
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@ source "${EXPDIR}/config.resources" oceanice_products
# Maximum number of rocoto tasks per member
export MAX_TASKS=25

export write_grib2=False
export write_netcdf=False
export OCEANICEPRODUCTS_CONFIG="${PARMgfs}/post/oceanice_products_gfs.yaml"

# No. of forecast hours to process in a single job
Expand Down
6 changes: 3 additions & 3 deletions dev/parm/config/gfs/config.ufs
Original file line number Diff line number Diff line change
Expand Up @@ -451,7 +451,7 @@ if [[ "${skip_mom6}" == "false" ]]; then
TOPOEDITS="ufs.topo_edits_011818.nc"
case ${RUN} in
gfs|gefs|sfs|gcafs)
MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_30L.nc"
MOM6_DIAG_MISVAL="-1e34"
;;
gdas|enkfgdas|enkfgfs)
Expand Down Expand Up @@ -479,7 +479,7 @@ if [[ "${skip_mom6}" == "false" ]]; then
eps_imesh="1.0e-1"
case ${RUN} in
gfs|gefs|sfs|gcafs)
MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_30L.nc"
MOM6_DIAG_MISVAL="-1e34"
;;
gdas|enkfgdas|enkfgfs)
Expand Down Expand Up @@ -516,7 +516,7 @@ if [[ "${skip_mom6}" == "false" ]]; then
eps_imesh="1.0e-1"
case ${RUN} in
gfs|gefs|sfs|gcafs)
MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_30L.nc"
MOM6_DIAG_MISVAL="-1e34"
;;
gdas|enkfgdas|enkfgfs)
Expand Down
19 changes: 2 additions & 17 deletions dev/parm/config/sfs/config.base.j2
Original file line number Diff line number Diff line change
Expand Up @@ -318,23 +318,8 @@ export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.
export DO_FIT2OBS="NO" # Run fit to observations package

# Archiving options
export HPSSARCH="{{ HPSSARCH }}" # save data to HPSS archive
export LOCALARCH="{{ LOCALARCH }}" # save data to local archive
if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then
echo "Both HPSS and local archiving selected. Please choose one or the other."
exit 3
elif [[ ${HPSSARCH} = "YES" ]] || [[ ${LOCALARCH} = "YES" ]]; then
export DO_ARCHCOM="YES"
else
export DO_ARCHCOM="NO"
fi
export ARCH_CYC=00 # Archive data at this cycle for warm start and/or forecast-only capabilities
export ARCH_WARMICFREQ=4 # Archive frequency in days for warm start capability
export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability
export ARCH_EXPDIR='YES' # Archive the EXPDIR configs, XML, and database
export ARCH_EXPDIR_FREQ=0 # How often to archive the EXPDIR in hours or 0 for first and last cycle only
export ARCH_HASHES='YES' # Archive the hashes of the GW and submodules and 'git status' for each; requires ARCH_EXPDIR
export ARCH_DIFFS='NO' # Archive the output of 'git diff' for the GW; requires ARCH_EXPDIR
export DO_ARCHCOM="{{ DO_ARCHCOM }}" # Tar and archive the COM directories
export ARCHCOM_TO="{{ ARCHCOM_TO }}" # Valid options are hpss, globus_hpss, and local

# Number of regional collectives to create soundings for
export NUM_SND_COLLECTIVES=${NUM_SND_COLLECTIVES:-9}
Expand Down
30 changes: 30 additions & 0 deletions dev/parm/config/sfs/config.nsst.j2
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
#! /usr/bin/env bash

########## config.nsst ##########
# NSST specific
# NOTE(review): this is a jinja2 template (.j2); the {{ NST_SPINUP }}
# placeholder below is presumably rendered at experiment setup, before the
# file is sourced by the workflow — confirm against the setup scripts.

echo "BEGIN: config.nsst"

# NSST parameters contained within nstf_name

# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled
export NST_MODEL=2

# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON,
# Value is supplied via jinja2 templating rather than hard-coded.
export NST_SPINUP="{{ NST_SPINUP }}"

# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON
export NST_RESV=0

# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction)
export ZSEA1=0
export ZSEA2=0

export NST_GSI=3 # default 0: No NST info at all;
# 1: Input NST info but not used in GSI;
# 2: Input NST info, used in CRTM simulation, no Tr analysis
# 3: Input NST info, used in both CRTM simulation and Tr analysis
export NSTINFO=0 # number of elements added in obs. data array (default = 0)
# Any nonzero NST_GSI requires 4 extra elements in the obs data array.
if (( NST_GSI > 0 )); then export NSTINFO=4; fi

echo "END: config.nsst"
8 changes: 7 additions & 1 deletion dev/workflow/applications/sfs.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,8 @@ def _get_app_configs(self, run):

if options['do_archcom']:
configs += ['arch_tars']
if options['do_globusarch']:
configs += ['globus']

configs += ['arch_vrfy', 'cleanup']

Expand Down Expand Up @@ -164,7 +166,11 @@ def get_task_names(self):
if options['do_extractvars']:
tasks += ['extractvars']

# TODO: Add archive
if options['do_archcom']:
tasks += ['arch_tars']
if options['do_globusarch']:
tasks += ['globus']

tasks += ['cleanup']

return {f"{self.run}": tasks}
4 changes: 2 additions & 2 deletions dev/workflow/rocoto/gfs_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -2259,12 +2259,12 @@ def arch_tars(self):
tarball_types.append('chem')

if self.options['do_ocean']:
tarball_types.extend(['ocean_6hravg', 'ocean_grib2', 'gfs_flux_1p00'])
tarball_types.extend(['ocean_6hravg', 'ocean_native', 'gfs_flux_1p00'])
if self.options.get('do_jediocnvar', False) and self.app_config.mode == 'cycled':
tarball_types.append('gfsocean_analysis')

if self.options['do_ice']:
tarball_types.extend(['ice_6hravg', 'ice_grib2'])
tarball_types.extend(['ice_6hravg', 'ice_native'])

if self.options['do_bufrsnd']:
tarball_types.append('gfs_downstream')
Expand Down
31 changes: 31 additions & 0 deletions dev/workflow/rocoto/sfs_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -612,6 +612,30 @@ def arch_tars(self):

return task

# Globus transfer for HPSS archiving
def globus(self):
    """Build the rocoto task that pushes archive tarballs to HPSS via Globus.

    The transfer can only start once the tarballs exist, so the task
    depends on this run's arch_tars task.
    """
    # Single upstream dependency: the tarball-creation task.
    arch_dep = {'type': 'task', 'name': f'{self.run}_arch_tars'}
    dependencies = rocoto.create_dependency(dep=[rocoto.add_dependency(arch_dep)])

    task_name = f'{self.run}_globus_arch'
    task = rocoto.create_task({
        'task_name': task_name,
        'resources': self.get_resource('globus'),
        'dependency': dependencies,
        'envars': self.envars,
        'cycledef': self.run,
        'command': f'{self.HOMEgfs}/dev/jobs/globus_arch.sh',
        'job_name': f'{self.pslot}_{task_name}_@H',
        'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
        'maxtries': '&MAXTRIES;',
    })

    return task

def cleanup(self):
deps = []
dep_dict = {'type': 'metatask', 'name': f'{self.run}_atmos_prod'}
Expand All @@ -637,6 +661,13 @@ def cleanup(self):
if self.options['do_extractvars']:
dep_dict = {'type': 'metatask', 'name': f'{self.run}_extractvars'}
deps.append(rocoto.add_dependency(dep_dict))
if self.options['do_archcom']:
if self.options['do_globusarch']:
dep_dict = {'type': 'task', 'name': f'{self.run}_globus_arch'}
else:
dep_dict = {'type': 'task', 'name': f'{self.run}_arch_tars'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
dependencies = rocoto.create_dependency(dep=deps, dep_condition='and')
resources = self.get_resource('cleanup')
task_name = f'{self.run}_cleanup'
Expand Down
1 change: 1 addition & 0 deletions jobs/JGLOBAL_ARCHIVE_TARS
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
COMIN_ICE_INPUT:COM_ICE_INPUT_TMPL \
COMIN_ICE_RESTART:COM_ICE_RESTART_TMPL \
COMIN_ICE_GRIB:COM_ICE_GRIB_TMPL \
COMIN_ICE_NETCDF:COM_ICE_NETCDF_TMPL \
COMIN_OBS:COM_OBS_TMPL \
COMIN_TOP:COM_TOP_TMPL \
COMIN_OCEAN_HISTORY:COM_OCEAN_HISTORY_TMPL \
Expand Down
7 changes: 7 additions & 0 deletions parm/archive/ice_native.yaml.j2
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
{# Archive spec: native (tripolar) grid sea-ice netcdf products. #}
{# One file per ice output hour, from FHOUT_ICE_GFS through FHMAX_GFS. #}
ice_native:
name: "ICE_NATIVE"
target: "{{ ATARDIR }}/{{ cycle_YMDH }}/ice_native_subset.tar"
required:
{% for fhr in range(FHOUT_ICE_GFS, FHMAX_GFS + FHOUT_ICE_GFS, FHOUT_ICE_GFS) %}
- "{{ COMIN_ICE_NETCDF | relpath(ROTDIR) }}/native/{{ RUN }}.t{{ cycle_HH }}z.native.f{{ '%03d' % fhr }}.nc"
{% endfor %}
7 changes: 7 additions & 0 deletions parm/archive/ocean_native.yaml.j2
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
{# Archive spec: native (tripolar) grid ocean netcdf products. #}
ocean_native:
name: "OCEAN_NATIVE"
target: "{{ ATARDIR }}/{{ cycle_YMDH }}/ocean_native_subset.tar"
required:
{# Ocean products follow the ocean output cadence (FHOUT_OCN_GFS), not the ice one. #}
{% for fhr in range(FHOUT_OCN_GFS, FHMAX_GFS + FHOUT_OCN_GFS, FHOUT_OCN_GFS) %}
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Shouldn't these be FHOUT_OCN_GFS?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

yes, change uploaded

- "{{ COMIN_OCEAN_NETCDF | relpath(ROTDIR) }}/native/{{ RUN }}.t{{ cycle_HH }}z.native.f{{ '%03d' % fhr }}.nc"
{% endfor %}
42 changes: 30 additions & 12 deletions parm/post/oceanice_products_gfs.yaml
Original file line number Diff line number Diff line change
@@ -1,16 +1,17 @@
ocnicepost:
executable: "ocnicepost.x"
namelist:
write_grib2: True
write_netcdf: False
write_grib2: {{ write_grib2 }}
write_netcdf: {{ write_netcdf }}
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This should stay False. When this is True, the program will run and create the interpolated netcdf files, but they remain in DATA, which gets removed when the job ends.

Suggested change
write_netcdf: {{ write_netcdf }}
write_netcdf: False

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

debug: False
fix_data:
mkdir:
- "{{ DATA }}"
copy:
- ["{{ EXECgfs }}/ocnicepost.x", "{{ DATA }}/"]
- ["{{ PARMgfs }}/post/ocnicepost.nml.jinja2", "{{ DATA }}/"]
{% if write_grib2 and write_netcdf %}
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Shouldn't this be or instead of and?
Also, shouldn't the ocnicepost.nml.jinja2 file be conditioned based on the if-block?
I think the reason it is outside is because the yaml wants at least one item in the copy block.
In that case, the copy block itself should be conditioned on the if statement, no?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

yes, I uploaded the change

- ["{{ PARMgfs }}/post/{{ component }}_gfs.csv", "{{ DATA }}/{{ component }}.csv"]
- ["{{ EXECgfs }}/ocnicepost.x", "{{ DATA }}/"]
- ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Bu.to.Ct.bilinear.nc", "{{ DATA }}/"]
- ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cu.to.Ct.bilinear.nc", "{{ DATA }}/"]
- ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cv.to.Ct.bilinear.nc", "{{ DATA }}/"]
Expand All @@ -19,6 +20,7 @@ ocnicepost:
- ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.conserve.nc", "{{ DATA }}/"]
- ["{{ FIXgfs }}/mom6/post/template.global.{{ grid }}.gb2", "{{ DATA }}/"]
{% endfor %}
{% endif %}

ocean:
namelist:
Expand All @@ -28,26 +30,34 @@ ocean:
cosvar: "cos_rot"
angvar: ""
{% if model_grid == 'mx025' or model_grid == 'mx050' or model_grid == 'mx100' %}
ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 226, 241, 267, 309, 374, 467, 594, 757, 960, 1204, 1490, 1817, 2184, 2587, 3024, 3489, 3977, 4481]
ocean_levels: [1, 3, 5, 10, 20, 30, 50, 100, 200, 500, 1000]
{% elif model_grid == 'mx500' %}
ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 226, 241, 267]
{% endif %}
subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible', 'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy', 'temp', 'so', 'uo', 'vo']
subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'SSU', 'SSV', 'temp', 'tob', 'so', 'uo', 'vo']
data_in:
copy:
- ["{{ COMIN_OCEAN_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.6hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ocean.nc"]
- ["{{ COMIN_OCEAN_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ocean.nc"]
data_out:
mkdir:
- "{{ COMOUT_OCEAN_NETCDF }}"
- "{{ COMOUT_OCEAN_NETCDF }}/native"
{% for grid in product_grids %}
{% if write_netcdf %}
- "{{ COMOUT_OCEAN_NETCDF }}/{{ grid }}"
{% endif %}
{% if write_grib2 %}
- "{{ COMOUT_OCEAN_GRIB }}/{{ grid }}"
{% endif %}
{% endfor %}
copy:
- ["{{ DATA }}/ocean_subset.nc", "{{ COMOUT_OCEAN_NETCDF }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.tripolar.f{{ '%03d' % forecast_hour }}.nc"]
- ["{{ DATA }}/ocean_subset.nc", "{{ COMOUT_OCEAN_NETCDF }}/native/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
- ["{{ DATA }}/ocean_compressed.nc", "{{ COMIN_OCEAN_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc"]
{% if write_grib2 %}
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Where is the data from write_netcdf being copied to?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't know. I don't think it was defined in the yaml file before my edits.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Correct. The requirement was to provide grib2 output to the users. The netcdf was considered as an intermediate by-product of ocnicepost.x
write_netcdf=True is only needed if we are going to ever give out interpolated netcdf products.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

reminder that we need to add the section when write_netcdf = True to copy the interpolated netcdf files

{% for grid in product_grids %}
- ["{{ DATA }}/ocean.{{ grid }}.grib2", "{{ COMOUT_OCEAN_GRIB }}/{{ grid }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.grib2"]
- ["{{ DATA }}/ocean.{{ grid }}.grib2.idx", "{{ COMOUT_OCEAN_GRIB }}/{{ grid }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.grib2.idx"]
{% endfor %}
{% endif %}

ice:
namelist:
Expand All @@ -56,19 +66,27 @@ ice:
sinvar: ""
cosvar: ""
angvar: "ANGLET"
subset: ['hi_h', 'hs_h', 'aice_h', 'Tsfc_h', 'uvel_h', 'vvel_h', 'frzmlt_h', 'albsni_h', 'mlt_onset_h', 'frz_onset_h']
subset: ['hi_h', 'hs_h', 'aice_h', 'Tsfc_h', 'uvel_h', 'vvel_h', 'albsni_h']
data_in:
copy:
- ["{{ COMIN_ICE_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.6hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ice.nc"]
- ["{{ COMIN_ICE_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ice.nc"]
data_out:
mkdir:
- "{{ COMOUT_ICE_NETCDF }}"
- "{{ COMOUT_ICE_NETCDF }}/native"
{% for grid in product_grids %}
{% if write_netcdf %}
- "{{ COMOUT_ICE_NETCDF }}/{{ grid }}"
{% endif %}
{% if write_grib2 %}
- "{{ COMOUT_ICE_GRIB }}/{{ grid }}"
{% endif %}
{% endfor %}
copy:
- ["{{ DATA }}/ice_subset.nc", "{{ COMOUT_ICE_NETCDF }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.tripolar.f{{ '%03d' % forecast_hour }}.nc"]
- ["{{ DATA }}/ice_subset.nc", "{{ COMOUT_ICE_NETCDF }}/native/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
- ["{{ DATA }}/ice_compressed.nc", "{{ COMIN_ICE_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc"]
{% for grid in product_grids %}
{% if write_grib2 %}
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Shouldn't the interpolated output data from write_netcdf=True be copied to COMOUT_ICE_NETCDF/grid?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

same as above

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

same reminder as above.

- ["{{ DATA }}/ice.{{ grid }}.grib2", "{{ COMOUT_ICE_GRIB }}/{{ grid }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.grib2"]
- ["{{ DATA }}/ice.{{ grid }}.grib2.idx", "{{ COMOUT_ICE_GRIB }}/{{ grid }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.grib2.idx"]
{% endif %}
{% endfor %}
Loading