From 3a8697d4e289306e8e3443cfc0401965714bb54a Mon Sep 17 00:00:00 2001 From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> Date: Mon, 9 Dec 2024 09:33:00 -0500 Subject: [PATCH 1/3] Archive the experiment directory along with git status/diff output (#3105) # Description This adds the capability to archive the experiment directory. Additionally, this adds options to run `git status` and `git diff` on the `HOMEgfs` global workflow (but not the submodules) and store that information within the experiment directory's archive. These options are specified in `config.base` with the following defaults: ```bash export ARCH_EXPDIR='YES' # Archive the EXPDIR configs, XML, and database export ARCH_EXPDIR_FREQ=0 # How often to archive the EXPDIR in hours or 0 for first and last cycle only export ARCH_HASHES='YES' # Archive the hashes of the GW and submodules and 'git status' for each; requires ARCH_EXPDIR export ARCH_DIFFS='NO' # Archive the output of 'git diff' for the GW; requires ARCH_EXPDIR ``` Resolves #2994 # Type of change - [x] New feature (adds functionality) # Change characteristics - Is this a breaking change (a change in existing functionality)? NO - Does this change require a documentation update? YES - Does this change require an update to any of the following submodules? YES (If YES, please add a link to any PRs that are pending.) - [x] wxflow https://github.com/NOAA-EMC/wxflow/pull/45 # How has this been tested? 
- [x] Local archiving on Hercules for a C48_ATM case - [x] Cycled testing on Hercules with `ARCH_DIFFS=YES` and `ARCH_EXPDIR_FREQ=6,12` - [x] Testing with `ARCH_EXPDIR=NO` or `ARCH_HASHES=NO` # Checklist - [x] Any dependent changes have been merged and published - [x] My code follows the style guidelines of this project - [x] I have performed a self-review of my own code - [x] I have commented my code, particularly in hard-to-understand areas - [x] I have documented my code, including function, input, and output descriptions - [x] My changes generate no new warnings - [x] New and existing tests pass with my changes - [x] This change is covered by an existing CI test or a new one has been added - [x] Any new scripts have been added to the .github/CODEOWNERS file with owners - [x] I have made corresponding changes to the system documentation if necessary --------- Co-authored-by: Walter Kolczynski - NOAA --- .flake8 | 3 + docs/source/configure.rst | 13 ++- parm/archive/expdir.yaml.j2 | 24 +++++ parm/archive/master_gdas.yaml.j2 | 9 +- parm/archive/master_gefs.yaml.j2 | 7 ++ parm/archive/master_gfs.yaml.j2 | 7 ++ parm/config/gefs/config.base | 8 +- parm/config/gfs/config.base | 8 +- scripts/exglobal_archive.py | 26 ++--- sorc/wxflow | 2 +- ush/python/pygfs/task/archive.py | 175 ++++++++++++++++++++++++++++++- 11 files changed, 253 insertions(+), 29 deletions(-) create mode 100644 .flake8 create mode 100644 parm/archive/expdir.yaml.j2 diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000000..e3a4e3e47e --- /dev/null +++ b/.flake8 @@ -0,0 +1,3 @@ +[flake8] +exclude = .git,.github,venv,__pycache__,old,build,dist +max-line-length = 160 diff --git a/docs/source/configure.rst b/docs/source/configure.rst index 439c5df110..bc37bbf833 100644 --- a/docs/source/configure.rst +++ b/docs/source/configure.rst @@ -48,12 +48,15 @@ The global-workflow configs contain switches that change how the system runs. Ma | | (.true.) or cold (.false)? 
| | | be set when running ``setup_expt.py`` script with | | | | | | the ``--start`` flag (e.g. ``--start warm``) | +------------------+----------------------------------+---------------+-------------+---------------------------------------------------+ -| HPSSARCH | Archive to HPPS | NO | Possibly | Whether to save output to tarballs on HPPS | +| HPSSARCH | Archive to HPSS | NO | NO | Whether to save output to tarballs on HPSS. | +------------------+----------------------------------+---------------+-------------+---------------------------------------------------+ -| LOCALARCH | Archive to a local directory | NO | Possibly | Instead of archiving data to HPSS, archive to a | -| | | | | local directory, specified by ATARDIR. If | -| | | | | LOCALARCH=YES, then HPSSARCH must =NO. Changing | -| | | | | HPSSARCH from YES to NO will adjust the XML. | +| LOCALARCH | Archive to a local directory | NO | NO | Whether to save output to tarballs locally. For | +| | | | | HPSSARCH and LOCALARCH, ARCDIR specifies the | +| | | | | directory. These options are mutually exclusive. | ++------------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| ARCH_EXPDIR | Archive the EXPDIR | NO | NO | Whether to create a tarball of the EXPDIR. | +| | | | | ARCH_HASHES and ARCH_DIFFS generate text files | +| | | | | of git output that are archived with the EXPDIR. | +------------------+----------------------------------+---------------+-------------+---------------------------------------------------+ | QUILTING | Use I/O quilting | .true. | NO | If .true.
choose OUTPUT_GRID as cubed_sphere_grid | | | | | | in netcdf or gaussian_grid | diff --git a/parm/archive/expdir.yaml.j2 b/parm/archive/expdir.yaml.j2 new file mode 100644 index 0000000000..e2ec3f4736 --- /dev/null +++ b/parm/archive/expdir.yaml.j2 @@ -0,0 +1,24 @@ +{% set cycle_YMDH = current_cycle | to_YMDH %} + +expdir: + name: "EXPDIR" + # Copy the experiment files from the EXPDIR into the ROTDIR for archiving + {% set copy_expdir = "expdir." ~ cycle_YMDH %} + FileHandler: + mkdir: + - "{{ ROTDIR }}/{{ copy_expdir }}" + copy: + {% for config in glob(EXPDIR ~ "/config.*") %} + - [ "{{ config }}", "{{ ROTDIR }}/{{ copy_expdir }}/." ] + {% endfor %} + - [ "{{ EXPDIR }}/{{ PSLOT }}.xml", "{{ ROTDIR }}/{{ copy_expdir }}/." ] + {% if ARCH_HASHES or ARCH_DIFFS %} + - [ "{{ EXPDIR }}/git_info.log", "{{ ROTDIR }}/{{ copy_expdir }}/." ] + {% endif %} + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/expdir.tar" + required: + - "{{ copy_expdir }}/config.*" + - "{{ copy_expdir }}/{{ PSLOT }}.xml" + {% if ARCH_HASHES or ARCH_DIFFS %} + - "{{ copy_expdir }}/git_info.log" + {% endif %} diff --git a/parm/archive/master_gdas.yaml.j2 b/parm/archive/master_gdas.yaml.j2 index 11e83d387b..b3d6560012 100644 --- a/parm/archive/master_gdas.yaml.j2 +++ b/parm/archive/master_gdas.yaml.j2 @@ -40,7 +40,7 @@ datasets: # Determine if we will save restart ICs or not (only valid for cycled) {% set save_warm_start_forecast, save_warm_start_cycled = ( False, False ) %} - {% if ARCH_CYC == cycle_HH | int%} + {% if ARCH_CYC == cycle_HH | int %} # Save the forecast-only cycle ICs every ARCH_WARMICFREQ or ARCH_FCSTICFREQ days {% if (current_cycle - SDATE).days % ARCH_WARMICFREQ == 0 %} {% set save_warm_start_forecast = True %} @@ -97,3 +97,10 @@ datasets: # End of restart checking {% endif %} + +# Archive the EXPDIR if requested +{% if archive_expdir %} +{% filter indent(width=4) %} +{% include "expdir.yaml.j2" %} +{% endfilter %} +{% endif %} diff --git a/parm/archive/master_gefs.yaml.j2 
b/parm/archive/master_gefs.yaml.j2 index 5dc046dcfd..e76d7c9f7a 100644 --- a/parm/archive/master_gefs.yaml.j2 +++ b/parm/archive/master_gefs.yaml.j2 @@ -10,3 +10,10 @@ datasets: {% include "gefs_extracted_ice.yaml.j2" %} {% include "gefs_extracted_wave.yaml.j2" %} {% endfilter %} + +# Archive the EXPDIR if requested +{% if archive_expdir %} +{% filter indent(width=4) %} +{% include "expdir.yaml.j2" %} +{% endfilter %} +{% endif %} diff --git a/parm/archive/master_gfs.yaml.j2 b/parm/archive/master_gfs.yaml.j2 index e7187d70d5..dc8c0640e5 100644 --- a/parm/archive/master_gfs.yaml.j2 +++ b/parm/archive/master_gfs.yaml.j2 @@ -98,3 +98,10 @@ datasets: {% endfilter %} {% endif %} {% endif %} + +# Archive the EXPDIR if requested +{% if archive_expdir %} +{% filter indent(width=4) %} +{% include "expdir.yaml.j2" %} +{% endfilter %} +{% endif %} diff --git a/parm/config/gefs/config.base b/parm/config/gefs/config.base index 2bafde04f5..44074d0410 100644 --- a/parm/config/gefs/config.base +++ b/parm/config/gefs/config.base @@ -333,9 +333,13 @@ if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then echo "Both HPSS and local archiving selected. Please choose one or the other." 
exit 3 fi -export ARCH_CYC=00 # Archive data at this cycle for warm_start capability -export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_CYC=00 # Archive data at this cycle for warm start and/or forecast-only capabilities +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm start capability export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability +export ARCH_EXPDIR='YES' # Archive the EXPDIR configs, XML, and database +export ARCH_EXPDIR_FREQ=0 # How often to archive the EXPDIR in hours or 0 for first and last cycle only +export ARCH_HASHES='YES' # Archive the hashes of the GW and submodules and 'git status' for each; requires ARCH_EXPDIR +export ARCH_DIFFS='NO' # Archive the output of 'git diff' for the GW; requires ARCH_EXPDIR export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base index 8e54bc321c..f51acf2b35 100644 --- a/parm/config/gfs/config.base +++ b/parm/config/gfs/config.base @@ -479,9 +479,13 @@ if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then echo "FATAL ERROR: Both HPSS and local archiving selected. Please choose one or the other." 
exit 4 fi -export ARCH_CYC=00 # Archive data at this cycle for warm_start capability -export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_CYC=00 # Archive data at this cycle for warm start and/or forecast-only capabilities +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm start capability export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability +export ARCH_EXPDIR='YES' # Archive the EXPDIR configs, XML, and database +export ARCH_EXPDIR_FREQ=0 # How often to archive the EXPDIR in hours or 0 for first and last cycle only +export ARCH_HASHES='YES' # Archive the hashes of the GW and submodules and 'git status' for each; requires ARCH_EXPDIR +export ARCH_DIFFS='NO' # Archive the output of 'git diff' for the GW; requires ARCH_EXPDIR # The monitor jobs are not yet supported for JEDIATMVAR. if [[ ${DO_JEDIATMVAR} = "YES" ]]; then diff --git a/scripts/exglobal_archive.py b/scripts/exglobal_archive.py index 6998831366..f477548319 100755 --- a/scripts/exglobal_archive.py +++ b/scripts/exglobal_archive.py @@ -3,7 +3,7 @@ import os from pygfs.task.archive import Archive -from wxflow import AttrDict, Logger, cast_strdict_as_dtypedict, logit +from wxflow import AttrDict, Logger, cast_strdict_as_dtypedict, logit, chdir # initialize root logger logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True) @@ -32,7 +32,8 @@ def main(): 'DO_AERO_ANL', 'DO_AERO_FCST', 'DO_CA', 'DOIBP_WAV', 'DO_JEDIOCNVAR', 'NMEM_ENS', 'DO_JEDIATMVAR', 'DO_VRFY_OCEANDA', 'FHMAX_FITS', 'waveGRD', 'IAUFHRS', 'DO_FIT2OBS', 'NET', 'FHOUT_HF_GFS', 'FHMAX_HF_GFS', 'REPLAY_ICS', - 'OFFSET_START_HOUR'] + 'OFFSET_START_HOUR', 'ARCH_EXPDIR', 'EXPDIR', 'ARCH_EXPDIR_FREQ', 'ARCH_HASHES', + 'ARCH_DIFFS', 'SDATE', 'EDATE', 'HOMEgfs'] archive_dict = AttrDict() for key in keys: @@ -47,21 +48,20 @@ def main(): if archive_dict[key] is None: print(f"Warning: key ({key}) not found in task_config!") - cwd = 
os.getcwd() + with chdir(config.ROTDIR): - os.chdir(config.ROTDIR) + # Determine which archives to create + arcdir_set, atardir_sets = archive.configure(archive_dict) - # Determine which archives to create - arcdir_set, atardir_sets = archive.configure(archive_dict) + # Populate the product archive (ARCDIR) + archive.execute_store_products(arcdir_set) - # Populate the product archive (ARCDIR) - archive.execute_store_products(arcdir_set) + # Create the backup tarballs and store in ATARDIR + for atardir_set in atardir_sets: + archive.execute_backup_dataset(atardir_set) - # Create the backup tarballs and store in ATARDIR - for atardir_set in atardir_sets: - archive.execute_backup_dataset(atardir_set) - - os.chdir(cwd) + # Clean up any temporary files + archive.clean() if __name__ == '__main__': diff --git a/sorc/wxflow b/sorc/wxflow index e1ef697430..a7b49e9cc7 160000 --- a/sorc/wxflow +++ b/sorc/wxflow @@ -1 +1 @@ -Subproject commit e1ef697430c09d2b1a0560f21f11c7a32ed5f3e2 +Subproject commit a7b49e9cc76ef4b50cc1c28d4b7959ebde99c5f5 diff --git a/ush/python/pygfs/task/archive.py b/ush/python/pygfs/task/archive.py index f1d8cdf865..c6376206b3 100644 --- a/ush/python/pygfs/task/archive.py +++ b/ush/python/pygfs/task/archive.py @@ -7,10 +7,11 @@ from logging import getLogger from typing import Any, Dict, List -from wxflow import (AttrDict, FileHandler, Hsi, Htar, Task, - chgrp, get_gid, logit, mkdir_p, parse_j2yaml, rm_p, strftime, - to_YMDH) +from wxflow import (AttrDict, FileHandler, Hsi, Htar, Task, to_timedelta, + chgrp, get_gid, logit, mkdir_p, parse_j2yaml, rm_p, rmdir, + strftime, to_YMDH, which, chdir, ProcessError) +git_filename = "git_info.log" logger = getLogger(__name__.split('.')[-1]) @@ -43,6 +44,9 @@ def __init__(self, config: Dict[str, Any]) -> None: # Extend task_config with path_dict self.task_config = AttrDict(**self.task_config, **path_dict) + # Boolean used for cleanup if the EXPDIR was archived + self.archive_expdir = False + @logit(logger) def 
configure(self, arch_dict: Dict[str, Any]) -> (Dict[str, Any], List[Dict[str, Any]]): """Determine which tarballs will need to be created. @@ -109,6 +113,16 @@ def configure(self, arch_dict: Dict[str, Any]) -> (Dict[str, Any], List[Dict[str self.tar_cmd = "" return arcdir_set, [] + # Determine if we are archiving the EXPDIR this cycle (always skip for ensembles) + if "enkf" not in arch_dict.RUN and arch_dict.ARCH_EXPDIR: + self.archive_expdir = self._archive_expdir(arch_dict) + arch_dict.archive_expdir = self.archive_expdir + + if self.archive_expdir: + # If requested, get workflow hashes/statuses/diffs for EXPDIR archiving + if arch_dict.ARCH_HASHES or arch_dict.ARCH_DIFFS: + self._pop_git_info(arch_dict) + master_yaml = "master_" + arch_dict.RUN + ".yaml.j2" parsed_sets = parse_j2yaml(os.path.join(archive_parm, master_yaml), @@ -195,6 +209,12 @@ def _create_fileset(atardir_set: Dict[str, Any]) -> List: """ fileset = [] + # Check if any external files need to be brought into the ROTDIR (i.e. EXPDIR contents) + if "FileHandler" in atardir_set: + # Run the file handler to stage files for archiving + FileHandler(atardir_set["FileHandler"]).sync() + + # Check that all required files are present and add them to the list of files to archive if "required" in atardir_set: if atardir_set.required is not None: for item in atardir_set.required: @@ -204,6 +224,7 @@ def _create_fileset(atardir_set: Dict[str, Any]) -> List: for entry in glob_set: fileset.append(entry) + # Check for optional files and add found items to the list of files to archive if "optional" in atardir_set: if atardir_set.optional is not None: for item in atardir_set.optional: @@ -244,7 +265,7 @@ def _has_rstprod(fileset: List) -> bool: return False @logit(logger) - def _protect_rstprod(self, atardir_set: Dict[str, any]) -> None: + def _protect_rstprod(self, atardir_set: Dict[str, Any]) -> None: """ Changes the group of the target tarball to rstprod and the permissions to 640. 
If this fails for any reason, attempt to delete the file before exiting. @@ -289,7 +310,7 @@ def _create_tarball(target: str, fileset: List) -> None: tarball.add(filename) @logit(logger) - def _gen_relative_paths(self, root_path: str) -> Dict: + def _gen_relative_paths(self, root_path: str) -> Dict[str, Any]: """Generate a dict of paths in self.task_config relative to root_path Parameters @@ -417,3 +438,147 @@ def replace_string_from_to_file(filename_in, filename_out, search_str, replace_s replace_string_from_to_file(in_track_p_file, out_track_p_file, "AVNO", pslot4) return + + @logit(logger) + def _archive_expdir(self, arch_dict: Dict[str, Any]) -> bool: + """ + This function checks if the EXPDIR should be archived this RUN/cycle + and returns the temporary path in the ROTDIR where the EXPDIR will be + copied to for archiving. + + Parameters + ---------- + arch_dict: Dict + Dictionary with required parameters, including the following: + + current_cycle: Datetime + Date of the current cycle. + SDATE: Datetime + Starting cycle date. + EDATE: Datetime + Ending cycle date. 
+ NET: str + The workflow type (gfs or gefs) + ARCH_EXPDIR_FREQ: int + Frequency to perform EXPDIR archiving + ROTDIR: str + Full path to the ROTDIR + """ + + # Get commonly used variables + current_cycle = arch_dict.current_cycle + sdate = arch_dict.SDATE + edate = arch_dict.EDATE + mode = arch_dict.MODE + assim_freq = to_timedelta(f"+{arch_dict.assim_freq}H") + # Convert frequency to seconds from hours + freq = arch_dict.ARCH_EXPDIR_FREQ * 3600 + + # Skip gfs and enkf cycled RUNs (only archive during gdas RUNs) + # (do not skip forecast-only, regardless of RUN) + if arch_dict.NET == "gfs" and arch_dict.MODE == "cycled" and arch_dict.RUN != "gdas": + return False + + # Determine if we should skip this cycle + # If the frequency is set to 0, only run on sdate (+assim_freq for cycled) and edate + first_full = sdate + if mode in ["cycled"]: + first_full += assim_freq + if current_cycle in [first_full, edate]: + # Always save the first and last + return True + elif (current_cycle - first_full).total_seconds() % freq == 0: + # Otherwise, the frequency is in hours + return True + else: + return False + + @logit(logger) + def _pop_git_info(self, arch_dict: Dict[str, Any]) -> Dict[str, Any]: + """ + This function checks the configuration options ARCH_HASHES and ARCH_DIFFS + and ARCH_EXPDIR_FREQ to determine if the git hashes and/or diffs should be + added to the EXPDIR for archiving and execute the commands. The hashes and + diffs will be stored in EXPDIR/git_info.log. 
+ + Parameters + ---------- + arch_dict: Dict + Dictionary with required parameters, including the following: + + EXPDIR: str + Location of the EXPDIR + HOMEgfs: str + Location of the HOMEgfs (the global workflow) + ARCH_HASHES: bool + Whether to archive git hashes of the workflow and submodules + ARCH_DIFFS: bool + Whether to archive git diffs of the workflow and submodules + """ + + # Get commonly used variables + arch_hashes = arch_dict.ARCH_HASHES + arch_diffs = arch_dict.ARCH_DIFFS + homegfs = arch_dict.HOMEgfs + expdir = arch_dict.EXPDIR + + # Find the git command + git = which('git') + if git is None: + raise FileNotFoundError("FATAL ERROR: the git command could not be found!") + + output = "" + # Navigate to HOMEgfs to run the git commands + with chdir(homegfs): + + # Are we running git to get hashes? + if arch_hashes: + output += "Global workflow hash:\n" + + try: + output += git("rev-parse", "HEAD", output=str) + output += "\nSubmodule hashes:\n" + output += git("submodule", "status", output=str) + except ProcessError as pe: + raise OSError("FATAL ERROR Failed to run git") from pe + + # Are we running git to get diffs? + if arch_diffs: + output += "Global workflow diffs:\n" + # This command will only work on git v2.14+ + try: + output += git("diff", "--submodule=diff", output=str) + except ProcessError: + # The version of git may be too old. See if we can run just a surface diff. 
+ try: + output += git("diff", output=str) + print("WARNING git was unable to do a recursive diff.\n" + "Only a top level diff was performed.\n" + "Note that the git version must be >= 2.14 for this feature.") + except ProcessError as pe: + raise OSError("FATAL ERROR Failed to run 'git diff'") from pe + + # Write out to the log file + try: + with open(os.path.join(expdir, git_filename), 'w') as output_file: + output_file.write(output) + except OSError as ose: + fname = os.path.join(expdir, git_filename) + raise OSError(f"FATAL ERROR Unable to write git output to '{fname}'") from ose + + return + + @logit(logger) + def clean(self): + """ + Remove the temporary directories/files created by the Archive task. + Presently, this is only the ROTDIR/expdir directory if EXPDIR archiving + was performed. + """ + + if self.archive_expdir: + temp_expdir_path = os.path.join(self.task_config.ROTDIR, "expdir." + + to_YMDH(self.task_config.current_cycle)) + rmdir(temp_expdir_path) + + return From 6585798434ba583b62a328017e4657b619f7880c Mon Sep 17 00:00:00 2001 From: Guillaume Vernieres Date: Mon, 9 Dec 2024 14:29:19 -0500 Subject: [PATCH 2/3] Add marine hybrid envar (#3041) Add marine hybrid ensemble var: - A new possible `ci test` that runs 1.5 cycle of the hybrid envar with the coupled UFS - `yamls` to allow running the hybrid envar GFSv17 prototype at c384/0.25 for the det and C192/0.25 for the ens. 
members - a few bug and dependency fixes to allow cycling with an ensemble - an option to turn off the direct insertion of the sea-ice ensemble member analysis/recentering Resolves NOAA-EMC/GDASApp#1289 Resolves NOAA-EMC/GDASApp#1357 --------- Co-authored-by: AndrewEichmann-NOAA <58948505+AndrewEichmann-NOAA@users.noreply.github.com> Co-authored-by: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> Co-authored-by: Walter Kolczynski - NOAA Co-authored-by: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com> --- ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml | 9 +++-- ci/cases/gfsv17/C384mx025_hybAOWCDA.yaml | 19 +++++++++ .../gfsv17/{ocnanal.yaml => marine3dvar.yaml} | 5 --- ci/cases/gfsv17/marinehyb.yaml | 21 ++++++++++ ci/cases/pr/C48mx500_3DVarAOWCDA.yaml | 5 +-- ci/cases/pr/C48mx500_hybAOWCDA.yaml | 14 +++---- ci/cases/yamls/soca_gfs_defaults_ci.yaml | 3 ++ ci/cases/yamls/soca_hyb_gfs_defaults_ci.yaml | 8 ++++ env/HERCULES.env | 9 +---- env/WCOSS2.env | 3 +- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN | 11 +++-- jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT | 1 + jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE | 1 + jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE | 1 + jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL | 1 + parm/archive/gdas.yaml.j2 | 2 +- parm/config/gefs/config.base | 1 + parm/config/gfs/config.base | 4 +- parm/config/gfs/yaml/defaults.yaml | 4 +- parm/stage/ice.yaml.j2 | 9 ++++- scripts/exglobal_archive.py | 2 +- scripts/exglobal_stage_ic.py | 3 +- sorc/gdas.cd | 2 +- ush/forecast_postdet.sh | 20 +++++----- ush/python/pygfs/task/marine_analysis.py | 40 +++++++++---------- ush/python/pygfs/task/marine_bmat.py | 17 +++----- ush/python/pygfs/task/marine_letkf.py | 2 +- workflow/applications/gfs_cycled.py | 1 + workflow/rocoto/gfs_tasks.py | 10 ++++- 29 files changed, 146 insertions(+), 82 deletions(-) create mode 100644 ci/cases/gfsv17/C384mx025_hybAOWCDA.yaml rename ci/cases/gfsv17/{ocnanal.yaml => marine3dvar.yaml} (72%) create mode 100644 
ci/cases/gfsv17/marinehyb.yaml create mode 100644 ci/cases/yamls/soca_hyb_gfs_defaults_ci.yaml diff --git a/ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml b/ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml index 99ba7c3661..4147249a4c 100644 --- a/ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml +++ b/ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml @@ -9,10 +9,11 @@ arguments: resdetocean: 0.25 nens: 0 interval: 6 - start: cold + start: warm comroot: {{ 'RUNTESTS' | getenv }}/COMROOT expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR - idate: 2021063000 + idate: 2021063018 edate: 2021070306 - icsdir: /scratch1/NCEPDEV/climate/Jessica.Meixner/cycling/IC_2021063000_V2 - yaml: {{ HOMEgfs }}/ci/cases/gfsv17/ocnanal.yaml + #icsdir: /scratch1/NCEPDEV/climate/Jessica.Meixner/cycling/IC_2021063000_V2 + icsdir: /work/noaa/da/gvernier/ensda/ictest/1440x1080x75/ + yaml: {{ HOMEgfs }}/ci/cases/gfsv17/marine3dvar.yaml diff --git a/ci/cases/gfsv17/C384mx025_hybAOWCDA.yaml b/ci/cases/gfsv17/C384mx025_hybAOWCDA.yaml new file mode 100644 index 0000000000..f0e0b42c28 --- /dev/null +++ b/ci/cases/gfsv17/C384mx025_hybAOWCDA.yaml @@ -0,0 +1,19 @@ +experiment: + system: gfs + mode: cycled + +arguments: + pslot: {{ 'pslot' | getenv }} + app: S2S + resdetatmos: 384 + resensatmos: 192 + resdetocean: 0.25 + nens: 30 + interval: 0 + start: warm + comroot: {{ 'RUNTESTS' | getenv }}/COMROOT + expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR + idate: 2021063018 + edate: 2021070306 + icsdir: /work/noaa/da/gvernier/ensda/ictest/1440x1080x75/ + yaml: {{ HOMEgfs }}/ci/cases/gfsv17/marinehyb.yaml diff --git a/ci/cases/gfsv17/ocnanal.yaml b/ci/cases/gfsv17/marine3dvar.yaml similarity index 72% rename from ci/cases/gfsv17/ocnanal.yaml rename to ci/cases/gfsv17/marine3dvar.yaml index b0605c9c16..abf86f0aa8 100644 --- a/ci/cases/gfsv17/ocnanal.yaml +++ b/ci/cases/gfsv17/marine3dvar.yaml @@ -21,8 +21,3 @@ marineanl: SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca SOCA_OBS_LIST: {{ HOMEgfs 
}}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml SOCA_NINNER: 100 - -prepoceanobs: - SOCA_OBS_LIST: {{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml - OBSPREP_YAML: {{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obsprep/obsprep_config.yaml - DMPDIR: /scratch1/NCEPDEV/da/common/ diff --git a/ci/cases/gfsv17/marinehyb.yaml b/ci/cases/gfsv17/marinehyb.yaml new file mode 100644 index 0000000000..ed62e607a8 --- /dev/null +++ b/ci/cases/gfsv17/marinehyb.yaml @@ -0,0 +1,21 @@ +defaults: + !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml + +base: + DOIAU: "YES" + DO_JEDIATMVAR: "NO" + DO_JEDIATMENS: "NO" + DO_JEDIOCNVAR: "YES" + DO_JEDISNOWDA: "NO" + DO_MERGENSST: "NO" + DOHYBVAR_OCN: "YES" + DO_FIT2OBS: "YES" + DO_VERFOZN: "YES" + DO_VERFRAD: "YES" + DO_VRFY_OCEANDA: "NO" + FHMAX_GFS: 240 + ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }} + +marineanl: + SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca + SOCA_NINNER: 20 # revert to ~100 after the memory leak is fixed diff --git a/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml b/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml index 762d2c3fbe..9cc3d3c03a 100644 --- a/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml +++ b/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml @@ -10,8 +10,8 @@ arguments: comroot: {{ 'RUNTESTS' | getenv }}/COMROOT expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C48mx500/20241120 - idate: 2021032412 - edate: 2021032418 + idate: 2021032418 + edate: 2021032500 nens: 0 interval: 0 start: warm @@ -21,4 +21,3 @@ skip_ci_on_hosts: - wcoss2 - gaea - orion - - hercules diff --git a/ci/cases/pr/C48mx500_hybAOWCDA.yaml b/ci/cases/pr/C48mx500_hybAOWCDA.yaml index ca477b5fba..d0fe13a689 100644 --- a/ci/cases/pr/C48mx500_hybAOWCDA.yaml +++ b/ci/cases/pr/C48mx500_hybAOWCDA.yaml @@ -6,21 +6,19 @@ arguments: pslot: {{ 'pslot' | getenv }} app: S2S resdetatmos: 48 - resdetocean: 5.0 resensatmos: 48 + resdetocean: 5.0 comroot: {{ 'RUNTESTS' | getenv }}/COMROOT expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR icsdir: {{ 'ICSDIR_ROOT' 
| getenv }}/C48mx500/20241120 - idate: 2021032412 - edate: 2021032418 - nens: 3 + idate: 2021032418 + edate: 2021032500 + nens: 2 interval: 0 start: warm - yaml: {{ HOMEgfs }}/ci/cases/yamls/soca_gfs_defaults_ci.yaml + yaml: {{ HOMEgfs }}/ci/cases/yamls/soca_hyb_gfs_defaults_ci.yaml skip_ci_on_hosts: - wcoss2 - - orion - - hercules - - hera - gaea + - orion diff --git a/ci/cases/yamls/soca_gfs_defaults_ci.yaml b/ci/cases/yamls/soca_gfs_defaults_ci.yaml index 38d55e3574..c18eac9196 100644 --- a/ci/cases/yamls/soca_gfs_defaults_ci.yaml +++ b/ci/cases/yamls/soca_gfs_defaults_ci.yaml @@ -3,4 +3,7 @@ defaults: base: ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }} DO_JEDIOCNVAR: "YES" + +marineanl: + SOCA_NINNER: 1 DO_TEST_MODE: "YES" diff --git a/ci/cases/yamls/soca_hyb_gfs_defaults_ci.yaml b/ci/cases/yamls/soca_hyb_gfs_defaults_ci.yaml new file mode 100644 index 0000000000..bad760aca0 --- /dev/null +++ b/ci/cases/yamls/soca_hyb_gfs_defaults_ci.yaml @@ -0,0 +1,8 @@ +defaults: + !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml +base: + ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }} + DO_JEDIOCNVAR: "YES" + DOHYBVAR_OCN: "YES" +marineanl: + SOCA_NINNER: 1 diff --git a/env/HERCULES.env b/env/HERCULES.env index fccc2f87a5..3a59b1992d 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -138,14 +138,9 @@ case ${step} in ;; "ocnanalecen") - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - max_threads_per_task=$((max_tasks_per_node / tasks_per_node_ocnanalecen)) + export APRUN_OCNANALECEN="${APRUN_default}" +;; - export NTHREADS_OCNANALECEN=${threads_per_task_ocnanalecen:-${max_threads_per_task}} - [[ ${NTHREADS_OCNANALECEN} -gt ${max_threads_per_task} ]] && export NTHREADS_OCNANALECEN=${max_threads_per_task} - export APRUN_OCNANALECEN="${launcher} -n ${ntasks_ocnanalecen} --cpus-per-task=${NTHREADS_OCNANALECEN}" - ;; "marineanlchkpt") export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 27001bebd7..4e8d1ddfea 100755 --- 
a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -115,6 +115,7 @@ elif [[ "${step}" = "marineanlvar" ]]; then elif [[ "${step}" = "marineanlchkpt" ]]; then export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + export APRUN_MARINEANLCHKPT="${APRUN_default}" elif [[ "${step}" = "ocnanalecen" ]]; then @@ -126,7 +127,7 @@ elif [[ "${step}" = "marineanlletkf" ]]; then export NTHREADS_MARINEANLLETKF=${NTHREADSmax} export APRUN_MARINEANLLETKF="${APRUN_default}" - + elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN index 7b8bb84809..098ad0d06c 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN @@ -1,6 +1,6 @@ #!/bin/bash source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalecen" -c "base ocnanal ocnanalecen" +source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalecen" -c "base marineanl ocnanalecen" ############################################## # Set variables used in the script @@ -12,8 +12,13 @@ export gPDY=${GDATE:0:8} export gcyc=${GDATE:8:2} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ - COM_OCEAN_HISTORY_PREV:COM_OCEAN_HISTORY_TMPL \ - COM_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL + COMIN_OCEAN_HISTORY_PREV:COM_OCEAN_HISTORY_TMPL \ + COMIN_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL + +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COMOUT_OCEAN_ANALYSIS:COM_OCEAN_ANALYSIS_TMPL \ + COMIN_ICE_RESTART:COM_ICE_RESTART_TMPL \ + COMOUT_ICE_ANALYSIS:COM_ICE_ANALYSIS_TMPL ############################################## # Begin JOB SPECIFIC work diff --git a/jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT b/jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT index 8cd7b1ab7c..7537937f82 100755 --- a/jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT +++ b/jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT @@ -2,6 +2,7 @@ source "${HOMEgfs}/ush/preamble.sh" export WIPE_DATA="NO" export 
DATAjob="${DATAROOT}/${RUN}marineanalysis.${PDY:-}${cyc}" +export DATAens="${DATAjob}/ensdata" export DATA="${DATAjob}/marinevariational" source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanlchkpt" -c "base marineanl marineanlchkpt" diff --git a/jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE b/jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE index 2614639184..cdc6dfecc8 100755 --- a/jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE +++ b/jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE @@ -2,6 +2,7 @@ source "${HOMEgfs}/ush/preamble.sh" export WIPE_DATA="NO" export DATAjob="${DATAROOT}/${RUN}marineanalysis.${PDY:-}${cyc}" +export DATAens="${DATAjob}/ensdata" export DATA="${DATAjob}/marinevariational" source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanlfinal" -c "base marineanl marineanlfinal" diff --git a/jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE index eb167af94d..14f5490a70 100755 --- a/jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE +++ b/jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE @@ -2,6 +2,7 @@ source "${HOMEgfs}/ush/preamble.sh" export DATAjob="${DATAROOT}/${RUN}marineanalysis.${PDY:-}${cyc}" +export DATAens="${DATAjob}/ensdata" export DATA="${DATAjob}/marinevariational" source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanlinit" -c "base marineanl marineanlinit" diff --git a/jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL b/jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL index 7780353294..1bc476bffc 100755 --- a/jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL +++ b/jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL @@ -3,6 +3,7 @@ source "${HOMEgfs}/ush/preamble.sh" export WIPE_DATA="NO" export DATAjob="${DATAROOT}/${RUN}marineanalysis.${PDY:-}${cyc}" +export DATAens="${DATAjob}/ensdata" export DATA="${DATAjob}/marinevariational" source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanlvar" -c "base marineanl marineanlvar" diff --git a/parm/archive/gdas.yaml.j2 b/parm/archive/gdas.yaml.j2 index fa8919a589..140631feb5 100644 --- a/parm/archive/gdas.yaml.j2 +++ b/parm/archive/gdas.yaml.j2 @@ -26,7 +26,7 @@ gdas: 
- "logs/{{ cycle_YMDH }}/{{ RUN }}_marineanlvar.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}_marineanlfinal.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}_marineanlchkpt.log" - {% if DOHYBVAR %} + {% if DOHYBVAR_OCN %} - "logs/{{ cycle_YMDH }}/{{ RUN }}_ocnanalecen.log" {% endif %} {% endif %} diff --git a/parm/config/gefs/config.base b/parm/config/gefs/config.base index 44074d0410..8d5852a15b 100644 --- a/parm/config/gefs/config.base +++ b/parm/config/gefs/config.base @@ -282,6 +282,7 @@ export DO_JEDIATMENS="NO" export DO_JEDIOCNVAR="NO" export DO_JEDISNOWDA="NO" export DO_MERGENSST="NO" +export DO_STARTMEM_FROM_JEDIICE="NO" # Hybrid related export NMEM_ENS=@NMEM_ENS@ diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base index f51acf2b35..4781f97274 100644 --- a/parm/config/gfs/config.base +++ b/parm/config/gfs/config.base @@ -368,9 +368,11 @@ export DO_JEDIATMENS="@DO_JEDIATMENS@" export DO_JEDIOCNVAR="@DO_JEDIOCNVAR@" export DO_JEDISNOWDA="@DO_JEDISNOWDA@" export DO_MERGENSST="@DO_MERGENSST@" +export DO_STARTMEM_FROM_JEDIICE="@DO_STARTMEM_FROM_JEDIICE@" # Hybrid related export DOHYBVAR="@DOHYBVAR@" +export DOHYBVAR_OCN="@DOHYBVAR_OCN@" export NMEM_ENS=@NMEM_ENS@ export SMOOTH_ENKF="NO" export l4densvar=".true." @@ -489,7 +491,7 @@ export ARCH_DIFFS='NO' # Archive the output of 'git diff' for the GW; requ # The monitor jobs are not yet supported for JEDIATMVAR. 
if [[ ${DO_JEDIATMVAR} = "YES" ]]; then - export DO_FIT2OBS="NO" # Run fit to observations package + export DO_FIT2OBS="NO" # Run fit to observations package export DO_VERFOZN="NO" # Ozone data assimilation monitoring export DO_VERFRAD="NO" # Radiance data assimilation monitoring export DO_VMINMON="NO" # GSI minimization monitoring diff --git a/parm/config/gfs/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml index 05dfc90332..c9ecd11f5b 100644 --- a/parm/config/gfs/yaml/defaults.yaml +++ b/parm/config/gfs/yaml/defaults.yaml @@ -5,6 +5,7 @@ base: DO_JEDIOCNVAR: "NO" DO_JEDISNOWDA: "NO" DO_MERGENSST: "NO" + DO_STARTMEM_FROM_JEDIICE: "NO" DO_GOES: "NO" DO_BUFRSND: "NO" DO_GEMPAK: "NO" @@ -21,6 +22,7 @@ base: GSI_SOILANAL: "NO" EUPD_CYC: "gdas" FHMAX_ENKF_GFS: 12 + DOHYBVAR_OCN: "NO" DO_TEST_MODE: "NO" atmanl: @@ -39,7 +41,7 @@ atmensanl: LAYOUT_Y_ATMENSANL: 8 IO_LAYOUT_X: 1 IO_LAYOUT_Y: 1 - + aeroanl: IO_LAYOUT_X: 1 IO_LAYOUT_Y: 1 diff --git a/parm/stage/ice.yaml.j2 b/parm/stage/ice.yaml.j2 index 0e0aa40c7f..241f45a35b 100644 --- a/parm/stage/ice.yaml.j2 +++ b/parm/stage/ice.yaml.j2 @@ -1,5 +1,12 @@ +{% set START_ICE_FROM_ANA = False %} +{% if DO_JEDIOCNVAR == True and RUN == 'gdas' %} + {% set START_ICE_FROM_ANA = True %} +{% endif %} +{% if DO_STARTMEM_FROM_JEDIICE == False and RUN == 'enkfgdas' %} + {% set START_ICE_FROM_ANA = False %} +{% endif %} ice: - {% if DO_JEDIOCNVAR == True %} + {% if START_ICE_FROM_ANA == True %} mkdir: {% for mem in range(first_mem, last_mem + 1) %} {% set imem = mem - first_mem %} diff --git a/scripts/exglobal_archive.py b/scripts/exglobal_archive.py index f477548319..5ae57ca7e3 100755 --- a/scripts/exglobal_archive.py +++ b/scripts/exglobal_archive.py @@ -29,7 +29,7 @@ def main(): 'DOIAU', 'OCNRES', 'ICERES', 'NUM_SND_COLLECTIVES', 'FHOUT_WAV', 'FHOUT_HF_WAV', 'FHMAX_WAV', 'FHMAX_HF_WAV', 'FHMAX_WAV_GFS', 'restart_interval_gdas', 'restart_interval_gfs', - 'DO_AERO_ANL', 'DO_AERO_FCST', 'DO_CA', 'DOIBP_WAV', 'DO_JEDIOCNVAR', + 
'DO_AERO_ANL', 'DO_AERO_FCST', 'DO_CA', 'DOIBP_WAV', 'DO_JEDIOCNVAR', 'DOHYBVAR_OCN', 'NMEM_ENS', 'DO_JEDIATMVAR', 'DO_VRFY_OCEANDA', 'FHMAX_FITS', 'waveGRD', 'IAUFHRS', 'DO_FIT2OBS', 'NET', 'FHOUT_HF_GFS', 'FHMAX_HF_GFS', 'REPLAY_ICS', 'OFFSET_START_HOUR', 'ARCH_EXPDIR', 'EXPDIR', 'ARCH_EXPDIR_FREQ', 'ARCH_HASHES', diff --git a/scripts/exglobal_stage_ic.py b/scripts/exglobal_stage_ic.py index 9d74d227fc..bf4217f45f 100755 --- a/scripts/exglobal_stage_ic.py +++ b/scripts/exglobal_stage_ic.py @@ -21,7 +21,8 @@ def main(): keys = ['RUN', 'MODE', 'EXP_WARM_START', 'NMEM_ENS', 'assim_freq', 'current_cycle', 'previous_cycle', 'ROTDIR', 'ICSDIR', 'STAGE_IC_YAML_TMPL', 'DO_JEDIATMVAR', - 'OCNRES', 'waveGRD', 'ntiles', 'DOIAU', 'DO_JEDIOCNVAR', + 'OCNRES', 'waveGRD', 'ntiles', 'DOIAU', + 'DO_JEDIOCNVAR', 'DO_STARTMEM_FROM_JEDIICE', 'REPLAY_ICS', 'DO_WAVE', 'DO_OCN', 'DO_ICE', 'DO_NEST', 'DO_CA', 'USE_ATM_ENS_PERTURB_FILES', 'USE_OCN_ENS_PERTURB_FILES'] diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 9ab7994a0c..d91663bb58 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 9ab7994a0caf6b201613dd7e7ceae482ffa600e0 +Subproject commit d91663bb585fbfa30db99d6126a1d4f24906b69b diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 532e0bb883..432e6f690d 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -471,14 +471,10 @@ MOM6_postdet() { || ( echo "FATAL ERROR: Unable to copy MOM6 increment, ABORT!"; exit 1 ) fi - # GEFS perturbations - if [[ "${RUN}" == "gefs" ]]; then - # to ensure it does not interfere with the GFS - if (( MEMBER > 0 )) && [[ "${ODA_INCUPD:-False}" == "True" ]]; then - ${NCP} "${COMIN_OCEAN_ANALYSIS}/${RUN}.t${cyc}z.ocninc.nc" "${DATA}/INPUT/mom6_increment.nc" \ - || ( echo "FATAL ERROR: Unable to copy ensemble MOM6 increment, ABORT!"; exit 1 ) - fi - fi # if [[ "${RUN}" == "gefs" ]]; then + if (( MEMBER > 0 )) && [[ "${ODA_INCUPD:-False}" == "True" ]]; then + ${NCP} 
"${COMIN_OCEAN_ANALYSIS}/${RUN}.t${cyc}z.ocninc.nc" "${DATA}/INPUT/mom6_increment.nc" \ + || ( echo "FATAL ERROR: Unable to copy ensemble MOM6 increment, ABORT!"; exit 1 ) + fi fi # if [[ "${RERUN}" == "NO" ]]; then # Link output files @@ -599,7 +595,13 @@ CICE_postdet() { restart_date="${model_start_date_current_cycle}" cice_restart_file="${COMIN_ICE_RESTART_PREV}/${restart_date:0:8}.${restart_date:8:2}0000.cice_model.res.nc" if [[ "${DO_JEDIOCNVAR:-NO}" == "YES" ]]; then - cice_restart_file="${COMIN_ICE_ANALYSIS}/${restart_date:0:8}.${restart_date:8:2}0000.cice_model_anl.res.nc" + if (( MEMBER == 0 )); then + # Start the deterministic from the JEDI/SOCA analysis if the Marine DA in ON + cice_restart_file="${COMIN_ICE_ANALYSIS}/${restart_date:0:8}.${restart_date:8:2}0000.cice_model_anl.res.nc" + elif (( MEMBER > 0 )) && [[ "${DO_STARTMEM_FROM_JEDIICE:-NO}" == "YES" ]]; then + # Ignore the JEDI/SOCA ensemble analysis for the ensemble members if DO_START_FROM_JEDIICE is OFF + cice_restart_file="${COMIN_ICE_ANALYSIS}/${restart_date:0:8}.${restart_date:8:2}0000.cice_model_anl.res.nc" + fi fi fi diff --git a/ush/python/pygfs/task/marine_analysis.py b/ush/python/pygfs/task/marine_analysis.py index 4f8fa760c0..ebb1502634 100644 --- a/ush/python/pygfs/task/marine_analysis.py +++ b/ush/python/pygfs/task/marine_analysis.py @@ -51,9 +51,9 @@ def __init__(self, config): _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config.assim_freq}H") / 2) _window_end = add_to_datetime(self.task_config.current_cycle, to_timedelta(f"{self.task_config.assim_freq}H") / 2) - # compute the relative path from self.task_config.DATA to self.task_config.DATAenspert + # compute the relative path from self.task_config.DATA to self.task_config.DATAens if self.task_config.NMEM_ENS > 0: - _enspert_relpath = os.path.relpath(self.task_config.DATAenspert, self.task_config.DATA) + _enspert_relpath = os.path.relpath(self.task_config.DATAens, self.task_config.DATA) 
else: _enspert_relpath = None @@ -69,7 +69,8 @@ def __init__(self, config): 'MARINE_WINDOW_MIDDLE_ISO': self.task_config.current_cycle.strftime('%Y-%m-%dT%H:%M:%SZ'), 'ENSPERT_RELPATH': _enspert_relpath, 'CALC_SCALE_EXEC': _calc_scale_exec, - 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z." + 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z." } ) @@ -110,10 +111,13 @@ def initialize(self: Task) -> None: os.symlink('../staticb', 'staticb') # hybrid EnVAR case - if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: - # stage ensemble membersfiles for use in hybrid background error - logger.debug(f"Stage ensemble members for the hybrid background error") - mdau.stage_ens_mem(self.task_config) + if self.task_config.DOHYBVAR_OCN == "YES" or self.task_config.NMEM_ENS >= 2: + # stage the ensemble weights + logger.debug(f"Stage ensemble weights for the hybrid background error") + FileHandler({'copy': [[os.path.join(self.task_config.COMIN_OCEAN_BMATRIX, f'{self.task_config.APREFIX}ocean.ens_weights.nc'), + os.path.join(self.task_config.DATA, 'ocean.ens_weights.nc')], + [os.path.join(self.task_config.COMIN_ICE_BMATRIX, f'{self.task_config.APREFIX}ice.ens_weights.nc'), + os.path.join(self.task_config.DATA, 'ice.ens_weights.nc')]]}).sync() # prepare the yaml configuration to run the SOCA variational application self._prep_variational_yaml() @@ -137,8 +141,8 @@ def _fetch_observations(self: Task) -> None: obs_files = [] for ob in obs_list_config['observations']['observers']: - logger.info(f"******** {self.task_config.OPREFIX}{ob['obs space']['name'].lower()}.{to_YMD(self.task_config.PDY)}{self.task_config.cyc}.nc4") - obs_files.append(f"{self.task_config.OPREFIX}{ob['obs space']['name'].lower()}.{to_YMD(self.task_config.PDY)}{self.task_config.cyc}.nc4") + logger.info(f"******** {self.task_config.OPREFIX}{ob['obs 
space']['name'].lower()}.{to_YMD(self.task_config.PDY)}{self.task_config.cyc:02d}.nc4") + obs_files.append(f"{self.task_config.OPREFIX}{ob['obs space']['name'].lower()}.{to_YMD(self.task_config.PDY)}{self.task_config.cyc:02d}.nc4") obs_list = [] # copy obs from COM_OBS to DATA/obs @@ -202,7 +206,7 @@ def _prep_variational_yaml(self: Task) -> None: envconfig_jcb['PARMgfs'] = self.task_config.PARMgfs envconfig_jcb['NMEM_ENS'] = self.task_config.NMEM_ENS envconfig_jcb['berror_model'] = 'marine_background_error_static_diffusion' - if self.task_config.NMEM_ENS > 3: + if self.task_config.NMEM_ENS >= 3: envconfig_jcb['berror_model'] = 'marine_background_error_hybrid_diffusion_diffusion' envconfig_jcb['DATA'] = self.task_config.DATA envconfig_jcb['OPREFIX'] = self.task_config.OPREFIX @@ -210,7 +214,7 @@ def _prep_variational_yaml(self: Task) -> None: envconfig_jcb['cyc'] = os.getenv('cyc') envconfig_jcb['SOCA_NINNER'] = self.task_config.SOCA_NINNER envconfig_jcb['obs_list'] = ['adt_rads_all'] - envconfig_jcb['MOM6_LEVS'] = mdau.get_mom6_levels(str(self.task_config.OCNRES)) + envconfig_jcb['MOM6_LEVS'] = mdau.get_mom6_levels(str(self.task_config.OCNRES).zfill(3)) # Write obs_list_short save_as_yaml(parse_obs_list_file(self.task_config.MARINE_OBS_LIST_YAML), 'obs_list_short.yaml') @@ -220,12 +224,8 @@ def _prep_variational_yaml(self: Task) -> None: jcb_base_yaml = os.path.join(self.task_config.PARMsoca, 'marine-jcb-base.yaml') jcb_algo_yaml = os.path.join(self.task_config.PARMsoca, 'marine-jcb-3dfgat.yaml.j2') - jcb_base_config = YAMLFile(path=jcb_base_yaml) - jcb_base_config = Template.substitute_structure(jcb_base_config, TemplateConstants.DOUBLE_CURLY_BRACES, envconfig_jcb.get) - jcb_base_config = Template.substitute_structure(jcb_base_config, TemplateConstants.DOLLAR_PARENTHESES, envconfig_jcb.get) - jcb_algo_config = YAMLFile(path=jcb_algo_yaml) - jcb_algo_config = Template.substitute_structure(jcb_algo_config, TemplateConstants.DOUBLE_CURLY_BRACES, envconfig_jcb.get) - 
jcb_algo_config = Template.substitute_structure(jcb_algo_config, TemplateConstants.DOLLAR_PARENTHESES, envconfig_jcb.get) + jcb_base_config = parse_j2yaml(path=jcb_base_yaml, data=envconfig_jcb) + jcb_algo_config = parse_j2yaml(path=jcb_algo_yaml, data=envconfig_jcb) # Override base with the application specific config jcb_config = {**jcb_base_config, **jcb_algo_config} @@ -385,12 +385,10 @@ def list_all_files(dir_in, dir_out, wc='*', fh_list=[]): os.path.join(com_ocean_analysis, f'{RUN}.t{cyc}z.{domain}ana.nc')]) # Copy of the ssh diagnostics - ''' if nmem_ens > 2: for string in ['ssh_steric_stddev', 'ssh_unbal_stddev', 'ssh_total_stddev', 'steric_explained_variance']: - post_file_list.append([os.path.join(anl_dir, 'static_ens', f'ocn.{string}.incr.{bdate}.nc'), + post_file_list.append([os.path.join(anl_dir, 'staticb', f'ocn.{string}.incr.{bdate}.nc'), os.path.join(com_ocean_analysis, f'{RUN}.t{cyc}z.ocn.{string}.nc')]) - ''' # Copy DA grid (computed for the start of the window) post_file_list.append([os.path.join(anl_dir, 'soca_gridspec.nc'), @@ -460,7 +458,7 @@ def create_obs_space(data): # get the variable name, assume 1 variable per file nc = netCDF4.Dataset(obsfile, 'r') - variable = next(iter(nc.groups["ObsValue"].variables)) + variable = next(iter(nc.groups["ombg"].variables)) nc.close() # filling values for the templated yaml diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index a21699227b..8e2b84a673 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -63,9 +63,9 @@ def __init__(self, config): 'MARINE_WINDOW_END': _window_end, 'MARINE_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", 'ENSPERT_RELPATH': _enspert_relpath, - 'MOM6_LEVS': mdau.get_mom6_levels(str(self.task_config.OCNRES)), + 'CALC_SCALE_EXEC': _calc_scale_exec, 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", - 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z." 
+ 'MOM6_LEVS': mdau.get_mom6_levels(str(self.task_config.OCNRES)) } ) @@ -130,12 +130,12 @@ def initialize(self: Task) -> None: self.jedi_dict['soca_parameters_diffusion_vt'].initialize(self.task_config) self.jedi_dict['soca_setcorscales'].initialize(self.task_config) self.jedi_dict['soca_parameters_diffusion_hz'].initialize(self.task_config) - if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: + if self.task_config.DOHYBVAR_OCN == "YES" or self.task_config.NMEM_ENS >= 2: self.jedi_dict['soca_ensb'].initialize(self.task_config) self.jedi_dict['soca_ensweights'].initialize(self.task_config) # stage ensemble members for the hybrid background error - if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: + if self.task_config.DOHYBVAR_OCN == "YES" or self.task_config.NMEM_ENS >= 2: logger.debug(f"Stage ensemble members for the hybrid background error") mdau.stage_ens_mem(self.task_config) @@ -182,7 +182,7 @@ def execute(self) -> None: self.jedi_dict['soca_parameters_diffusion_vt'].execute() # hybrid EnVAR case - if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: + if self.task_config.DOHYBVAR_OCN == "YES" or self.task_config.NMEM_ENS >= 2: self.jedi_dict['soca_ensb'].execute() self.jedi_dict['soca_ensweights'].execute() @@ -221,11 +221,6 @@ def finalize(self: Task) -> None: f"{self.task_config.APREFIX}{diff_type}_ocean.nc") diffusion_coeff_list.append([src, dest]) - src = os.path.join(self.task_config.DATAstaticb, f"hz_ice.nc") - dest = os.path.join(self.task_config.COMOUT_ICE_BMATRIX, - f"{self.task_config.APREFIX}hz_ice.nc") - diffusion_coeff_list.append([src, dest]) - FileHandler({'copy': diffusion_coeff_list}).sync() # Copy diag B files to ROTDIR @@ -252,7 +247,7 @@ def finalize(self: Task) -> None: FileHandler({'copy': diagb_list}).sync() # Copy the ensemble perturbation diagnostics to the ROTDIR - if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: + if self.task_config.DOHYBVAR_OCN 
== "YES" or self.task_config.NMEM_ENS >= 2: window_middle_iso = self.task_config.MARINE_WINDOW_MIDDLE.strftime('%Y-%m-%dT%H:%M:%SZ') weight_list = [] src = os.path.join(self.task_config.DATA, f"ocn.ens_weights.incr.{window_middle_iso}.nc") diff --git a/ush/python/pygfs/task/marine_letkf.py b/ush/python/pygfs/task/marine_letkf.py index 54d40f8d66..98c4f29085 100644 --- a/ush/python/pygfs/task/marine_letkf.py +++ b/ush/python/pygfs/task/marine_letkf.py @@ -112,7 +112,7 @@ def initialize(self): # TODO(AFE) - this should be removed when the obs config yamls are jinjafied if 'distribution' not in ob['obs space']: ob['obs space']['distribution'] = {'name': 'Halo', 'halo size': self.task_config['DIST_HALO_SIZE']} - obs_filename = f"{self.task_config.RUN}.t{self.task_config.cyc}z.{obs_name}.{to_YMDH(self.task_config.current_cycle)}.nc4" + obs_filename = f"{self.task_config.OPREFIX}{obs_name}.{to_YMDH(self.task_config.current_cycle)}.nc4" obs_files.append((obs_filename, ob)) obs_files_to_copy = [] diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 2d16b6a59c..e11f708aa6 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -37,6 +37,7 @@ def _get_run_options(self, conf: Configuration) -> Dict[str, Any]: base = conf.parse_config('config.base', RUN=run) run_options[run]['do_hybvar'] = base.get('DOHYBVAR', False) + run_options[run]['do_hybvar_ocn'] = base.get('DOHYBVAR_OCN', False) run_options[run]['nens'] = base.get('NMEM_ENS', 0) if run_options[run]['do_hybvar']: run_options[run]['lobsdiag_forenkf'] = base.get('lobsdiag_forenkf', False) diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 535f5ce844..59b0951d44 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -672,7 +672,10 @@ def marinebmat(self): data = f'{ocean_hist_path}/gdas.ocean.t@Hz.inst.f009.nc' dep_dict = {'type': 'data', 'data': data, 'offset': 
f"-{timedelta_to_HMS(self._base['interval_gdas'])}"} deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) + if self.options['do_hybvar_ocn']: + dep_dict = {'type': 'metatask', 'name': 'enkfgdas_fcst', 'offset': f"-{timedelta_to_HMS(self._base['interval_gdas'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) resources = self.get_resource('marinebmat') task_name = f'{self.run}_marinebmat' @@ -770,7 +773,7 @@ def ocnanalecen(self): def marineanlchkpt(self): deps = [] - if self.options['do_hybvar']: + if self.options['do_hybvar_ocn']: dep_dict = {'type': 'task', 'name': f'{self.run}_ocnanalecen'} else: dep_dict = {'type': 'task', 'name': f'{self.run}_marineanlvar'} @@ -2756,6 +2759,9 @@ def efcs(self): deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.run}_esfc'} deps.append(rocoto.add_dependency(dep_dict)) + if self.options['do_hybvar_ocn']: + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf", "")}_ocnanalecen'} + deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) dep_dict = {'type': 'task', 'name': f'{self.run}_stage_ic'} dependencies.append(rocoto.add_dependency(dep_dict)) From 6c5b9bb4d51d5189a5ca7e78d42b4a316957eb4f Mon Sep 17 00:00:00 2001 From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> Date: Mon, 9 Dec 2024 14:33:58 -0500 Subject: [PATCH 3/3] Update HPC Tier Definitions (#3138) Updates the tier definitions for supported platforms. This also updates and reformats the README. ~Note that the README badges will display errors until this PR is merged.~ @TerrenceMcGuinness-NOAA updated the gist, so the badges are now showing up correctly in the README. 
Resolves #3135 --- .../workflows/{orion.yaml => hercules.yaml} | 20 +- .github/workflows/wcoss2.yaml | 81 ++++++++ README.md | 44 +++-- docs/source/components.rst | 1 + docs/source/hpc.rst | 174 ++++++++++++++++++ docs/source/index.rst | 1 + 6 files changed, 296 insertions(+), 25 deletions(-) rename .github/workflows/{orion.yaml => hercules.yaml} (75%) create mode 100644 .github/workflows/wcoss2.yaml diff --git a/.github/workflows/orion.yaml b/.github/workflows/hercules.yaml similarity index 75% rename from .github/workflows/orion.yaml rename to .github/workflows/hercules.yaml index aaf1e28370..a08ec867b6 100644 --- a/.github/workflows/orion.yaml +++ b/.github/workflows/hercules.yaml @@ -1,4 +1,4 @@ -name: Orion +name: Hercules on: pull_request_target: @@ -26,7 +26,7 @@ jobs: echo "labels=$LABELS" >> $GITHUB_OUTPUT passed: - if: contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed') && github.event.pull_request.merged + if: contains( needs.getlabels.outputs.labels, 'CI-Hercules-Passed') && github.event.pull_request.merged runs-on: ubuntu-22.04 needs: - getlabels @@ -38,13 +38,13 @@ jobs: forceUpdate: true auth: ${{ secrets.CLI_DYNAMIC_BADGES }} gistID: e35aa2904a54deae6bbb1fdc2d960c71 - filename: orion.json - label: orion + filename: hercules.json + label: hercules message: passing color: green failed: - if: contains( needs.getlabels.outputs.labels, 'CI-Orion-Failed') && github.event.pull_request.merged + if: contains( needs.getlabels.outputs.labels, 'CI-Hercules-Failed') && github.event.pull_request.merged runs-on: ubuntu-latest needs: - getlabels @@ -56,14 +56,14 @@ jobs: forceUpdate: true auth: ${{ secrets.CLI_DYNAMIC_BADGES }} gistID: e35aa2904a54deae6bbb1fdc2d960c71 - filename: orion.json - label: orion + filename: hercules.json + label: hercules message: failing color: red pending: - if: "!contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed') && !contains( needs.getlabels.outputs.labels, 'CI-Orion-Failed')" + if: "!contains( 
needs.getlabels.outputs.labels, 'CI-Hercules-Passed') && !contains( needs.getlabels.outputs.labels, 'CI-Hercules-Failed')" runs-on: ubuntu-latest needs: - getlabels @@ -75,7 +75,7 @@ jobs: forceUpdate: true auth: ${{ secrets.CLI_DYNAMIC_BADGES }} gistID: e35aa2904a54deae6bbb1fdc2d960c71 - filename: orion.json - label: orion + filename: hercules.json + label: hercules message: pending color: orange diff --git a/.github/workflows/wcoss2.yaml b/.github/workflows/wcoss2.yaml new file mode 100644 index 0000000000..489ae58406 --- /dev/null +++ b/.github/workflows/wcoss2.yaml @@ -0,0 +1,81 @@ +name: WCOSS2 + +on: + pull_request_target: + branches: + - develop + types: [closed] + +jobs: + + getlabels: + runs-on: ubuntu-22.04 + outputs: + labels: ${{ steps.id.outputs.labels }} + steps: + - name: Get Label Steps + id: id + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + OWNER: ${{ github.repository_owner }} + REPO_NAME: ${{ github.event.repository.name }} + PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + run: | + LABELS1="$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name')" + LABELS=$(echo "$LABELS1" | tr '\n' ' ') + echo "labels=$LABELS" >> $GITHUB_OUTPUT + + passed: + if: contains( needs.getlabels.outputs.labels, 'CI-Wcoss2-Passed') && github.event.pull_request.merged + runs-on: ubuntu-latest + needs: + - getlabels + + steps: + - name: Passed + uses: schneegans/dynamic-badges-action@v1.6.0 + with: + forceUpdate: true + auth: ${{ secrets.CLI_DYNAMIC_BADGES }} + gistID: e35aa2904a54deae6bbb1fdc2d960c71 + filename: wcoss2.json + label: wcoss2 + message: passing + color: green + + failed: + if: contains( needs.getlabels.outputs.labels, 'CI-Wcoss2-Failed') && github.event.pull_request.merged + runs-on: ubuntu-latest + needs: + - getlabels + + steps: + - name: Failed + uses: schneegans/dynamic-badges-action@v1.6.0 + with: + forceUpdate: true + auth: ${{ secrets.CLI_DYNAMIC_BADGES }} + gistID: e35aa2904a54deae6bbb1fdc2d960c71 + 
filename: wcoss2.json + label: wcoss2 + message: failing + color: red + + + pending: + if: "!contains( needs.getlabels.outputs.labels, 'CI-Wcoss2-Passed') && !contains( needs.getlabels.outputs.labels, 'CI-Wcoss2-Failed')" + runs-on: ubuntu-latest + needs: + - getlabels + + steps: + - name: Pending + uses: schneegans/dynamic-badges-action@v1.6.0 + with: + forceUpdate: true + auth: ${{ secrets.CLI_DYNAMIC_BADGES }} + gistID: e35aa2904a54deae6bbb1fdc2d960c71 + filename: wcoss2.json + label: wcoss2 + message: pending + color: orange diff --git a/README.md b/README.md index 289e74933b..3311f540a5 100644 --- a/README.md +++ b/README.md @@ -3,28 +3,42 @@ [![pynorms](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pynorms.yaml/badge.svg)](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pynorms.yaml) ![Custom badge](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/emcbot/e35aa2904a54deae6bbb1fdc2d960c71/raw/hera.json) -![Custom badge](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/emcbot/e35aa2904a54deae6bbb1fdc2d960c71/raw/orion.json) +![Custom badge](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/emcbot/e35aa2904a54deae6bbb1fdc2d960c71/raw/hercules.json) +![Custom badge](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/emcbot/e35aa2904a54deae6bbb1fdc2d960c71/raw/wcoss2.json) -# global-workflow -Global Workflow currently supporting the Global Forecast System (GFS) with the [UFS-weather-model](https://github.com/ufs-community/ufs-weather-model) and [GSI](https://github.com/NOAA-EMC/GSI)-based Data Assimilation System. +The Global Workflow supporting the Global Forecast System (GFS), the Global Ensemble Forecasting System (GEFS), and the Seasonal Forecast System (SFS) with the [UFS-weather-model](https://github.com/ufs-community/ufs-weather-model). 
Data assimilation, currently only available for the GFS, is provided by both the [GSI](https://github.com/NOAA-EMC/GSI)- and [GDASApp (JEDI)](https://github.com/NOAA-EMC/GDASApp)-based Data Assimilation systems. -The `global-workflow` depends on the following prerequisities to be available on the system: +In progress [documentation](https://global-workflow.readthedocs.io/en/latest/) is available. -* Workflow Engine - [Rocoto](https://github.com/christopherwharrop/rocoto) and [ecFlow](https://github.com/ecmwf/ecflow) (for NWS Operations) -* Compiler - Intel Compiler Suite -* Software - NCEPLIBS (various), ESMF, HDF5, NetCDF, and a host of other softwares (see module files under /modulefiles for additional details) +# Prerequisites -The `global-workflow` current supports the following tier-1 machines: +The Global Workflow depends on the following prerequisites to be available on the system: -* NOAA RDHPCS - Hera -* MSU HPC - Orion -* MSU HPC - Hercules -* NOAA's operational HPC - WCOSS2 +* Workflow Engines - [Rocoto](https://github.com/christopherwharrop/rocoto) and [ecFlow](https://github.com/ecmwf/ecflow) (for NWS Operations) +* Compiler - Intel classic compiler suite version 2021.x +* Software - NCEPLIBS (various), ESMF, HDF5, NetCDF, and a host of other software packages (see module files under /modulefiles for additional details). + - [Spack-stack](https://github.com/JCSDA/spack-stack) is recommended for the installation of dependent libraries. -Additionally, the following tier-2 machine is supported: +## Supported platforms -* SSEC at Univ. of Wisconsin - S4 (Note that S2S+ experiments are not fully supported) +The Global Workflow currently supports the following machines at the indicated tier. -Documentation (in progress) is available [here](https://global-workflow.readthedocs.io/en/latest/). + +| HPC | Tier | Notes | +| --------------------------------------- |:----:|:--------------------------------------------------------------------------:| +| WCOSS2
NCO | 1 | GEFS testing is not regularly performed. | +| Hera
NOAA RDHPCS | 1 | | +| Hercules
MSU | 1 | Currently does not support the TC Tracker. | +| Orion
MSU | 2 | The GSI runs very slowly on Orion and the TC tracker is not supported. | +| Gaea C5/C6
RDHPCS | 3 | Currently non-operational following an OS upgrade.
Supported by EPIC. | +| AWS, GCP, Azure
NOAA Parallel Works | 3 | Supported by EPIC. | +| Jet
RDHPCS | 3 | Supported by NESDIS. | +| S4
SSEC | 3 | Currently non-operational following an OS upgrade.
Supported by NESDIS. | + +**Tier Definitions** + +1. Fully supported by the EMC global workflow team. CI testing is regularly performed on these systems, the majority of the global workflow features are supported, and the team will address any platform-specific features, bugs, upgrades, and requests for data. +2. Supported by the global workflow team on an ad-hoc basis. CI tests are supported on these systems, but not regularly performed. +3. No official support by the global workflow team, but may be supported by other entities (e.g. EPIC). # Disclaimer diff --git a/docs/source/components.rst b/docs/source/components.rst index f5a60a96af..98803c96c7 100644 --- a/docs/source/components.rst +++ b/docs/source/components.rst @@ -12,6 +12,7 @@ The major components of the system are: * Forecast * Post-processing * Verification +* Archiving The Global Workflow repository contains the workflow and script layers. External components will be checked out as git submodules. All of the submodules of the system reside in their respective repositories on GitHub. diff --git a/docs/source/hpc.rst b/docs/source/hpc.rst index e83851b1a2..8337f61886 100644 --- a/docs/source/hpc.rst +++ b/docs/source/hpc.rst @@ -44,6 +44,180 @@ The following system software requirements are the minimum for any new or existi | | | WCOSS2 | +--------------+-------------+---------------------------------------+ +=========================== +Feature availability by HPC +=========================== + +The Global Workflow provides capabilities for deterministic and ensemble forecasts along with data assimilation on multiple platforms. However, not all features are currently supported on all platforms. The following table lists the features by platform and states their level of support. + +.. 
list-table:: Capabilities matrix by HPC + :header-rows: 1 + :align: center + + * - HPC + - Tier + - Coupled + GFS + - Coupled + GEFS + - GSI + DA + - GDASApp + DA + - Coupled + DA + - TC Tracker + - AWIPS + - MOS + - Fit2Obs + - METplus + Verification + - HPSS + Archiving + * - WCOSS2 + - 1 + - X + - X + - X + - X + - + - X + - X + - X + - X + - X + - X + * - Hera + - 1 + - X + - X + - X + - X + - X + - X + - X + - + - X + - X + - X + * - Hercules + - 1 + - X + - X + - X + - X + - X + - + - + - + - X + - X + - + * - Orion + - 2 + - X + - X + - X + - X + - + - X + - + - + - X + - X + - + * - Gaea C5 + - 3 + - X + - X + - X + - X + - + - + - + - + - + - + - X + * - Gaea C6 + - 3 + - X + - X + - X + - X + - + - + - + - + - + - + - X + * - AWS (PW) + - 3 + - X + - X + - X + - + - + - + - + - + - + - + - + * - GCP (PW) + - 3 + - X + - X + - + - + - + - + - + - + - + - + - + * - Azure + - 3 + - X + - X + - + - + - + - + - + - + - + - + - + * - Jet + - 3 + - X + - + - X + - + - + - X + - + - + - X + - X + - X + * - S4 + - 3 + - + - + - X + - + - + - X + - + - + - X + - X + - + ================================ Experiment troubleshooting help ================================ diff --git a/docs/source/index.rst b/docs/source/index.rst index e6513b743a..aefec10676 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -18,6 +18,7 @@ Code managers * Kate Friedman - @KateFriedman-NOAA / kate.friedman@noaa.gov * Walter Kolczynski - @WalterKolczynski-NOAA / walter.kolczynski@noaa.gov +* David Huber - @DavidHuber-NOAA / david.huber@noaa.gov ============= Announcements