From 2896bb8787698be28ad20f2298589e6e95eaf9fb Mon Sep 17 00:00:00 2001 From: Kate Friedman Date: Tue, 12 Dec 2023 09:13:06 -0500 Subject: [PATCH 1/7] Fix module load typo (#2145) Fix typo with loading the crtm module in the UPP hack for WCOSS2 Refs #2144 --- jobs/rocoto/upp.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/jobs/rocoto/upp.sh b/jobs/rocoto/upp.sh index 6ab243d58f..18d5c12cea 100755 --- a/jobs/rocoto/upp.sh +++ b/jobs/rocoto/upp.sh @@ -27,7 +27,7 @@ if [[ "${MACHINE_ID}" = "wcoss2" ]]; then module load wgrib2/2.0.8 export WGRIB2=wgrib2 module load python/3.8.6 - module laod crtm/2.4.0 # TODO: This is only needed when UPP_RUN=goes. Is there a better way to handle this? + module load crtm/2.4.0 # TODO: This is only needed when UPP_RUN=goes. Is there a better way to handle this? set_trace else . "${HOMEgfs}/ush/load_fv3gfs_modules.sh" From 24d898ec0ec0c7fcbda6966988f9a986f528ed14 Mon Sep 17 00:00:00 2001 From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> Date: Wed, 13 Dec 2023 09:32:24 -0500 Subject: [PATCH 2/7] Fix Hercules hostname (#2150) * Allow lower or upper hostname on hercules. * Add Hercules to issue template. --- .github/ISSUE_TEMPLATE/bug_report.yml | 1 + ush/detect_machine.sh | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index ade36811cc..216293781c 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -40,6 +40,7 @@ body: - WCOSS2 - Hera - Orion + - Hercules - Jet - Cloud validations: diff --git a/ush/detect_machine.sh b/ush/detect_machine.sh index afeb01830b..01ae66a02d 100755 --- a/ush/detect_machine.sh +++ b/ush/detect_machine.sh @@ -26,7 +26,7 @@ case $(hostname -f) in Orion-login-[1-4].HPC.MsState.Edu) MACHINE_ID=orion ;; ### orion1-4 - hercules-login-[1-4].hpc.msstate.edu) MACHINE_ID=hercules ;; ### hercules1-4 + [Hh]ercules-login-[1-4].[Hh][Pp][Cc].[Mm]s[Ss]tate.[Ee]du) MACHINE_ID=hercules ;; ### hercules1-4 cheyenne[1-6].cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1-6 cheyenne[1-6].ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1-6 From 1332188ce2409e012ec127d44880e49732f7f379 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Wed, 13 Dec 2023 09:32:57 -0500 Subject: [PATCH 3/7] Enable radar reflectivity output (#2143) Turns on radar reflectivity output from FV3. This eliminates missing values appearing in the grib2 files. --- parm/config/gefs/config.fcst | 2 +- parm/config/gfs/config.fcst | 2 +- parm/ufs/fv3/diag_table | 2 +- ush/parsing_namelists_FV3.sh | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst index e36e023652..4c8d3be99f 100644 --- a/parm/config/gefs/config.fcst +++ b/parm/config/gefs/config.fcst @@ -173,7 +173,7 @@ case ${imp_physics} in export random_clds=".false." export effr_in=".true." export ltaerosol=".false." - export lradar=".false." + export lradar=".true." export ttendlim="-999" export dt_inner=$((DELTIM/2)) export sedi_semi=.true. diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst index 3f63578d76..f367df1194 100644 --- a/parm/config/gfs/config.fcst +++ b/parm/config/gfs/config.fcst @@ -173,7 +173,7 @@ case ${imp_physics} in export random_clds=".false." export effr_in=".true." export ltaerosol=".false." - export lradar=".false." + export lradar=".true." 
export ttendlim="-999" export dt_inner=$((DELTIM/2)) export sedi_semi=.true. diff --git a/parm/ufs/fv3/diag_table b/parm/ufs/fv3/diag_table index 29ecfefb3c..b972b3470c 100644 --- a/parm/ufs/fv3/diag_table +++ b/parm/ufs/fv3/diag_table @@ -99,6 +99,7 @@ "gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 +"gfs_phys", "refl_10cm", "refl_10cm", "fv3_history", "all", .false., "none", 2 "gfs_phys", "cldfra", "cldfra", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "frzr", "frzr", "fv3_history2d", "all", .false., "none", 2 @@ -266,7 +267,6 @@ "gfs_sfc", "d_conv", "dconv", "fv3_history2d", "all", .false., "none", 2 "gfs_sfc", "qrain", "qrain", "fv3_history2d", "all", .false., "none", 2 - #============================================================================================= # #====> This file can be used with diag_manager/v2.0a (or higher) <==== diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh index 709a5741a0..0d759e7570 100755 --- a/ush/parsing_namelists_FV3.sh +++ b/ush/parsing_namelists_FV3.sh @@ -251,7 +251,7 @@ EOF cat >> input.nml << EOF iovr = ${iovr:-"3"} ltaerosol = ${ltaerosol:-".false."} - lradar = ${lradar:-".false."} + lradar = ${lradar:-".true."} ttendlim = ${ttendlim:-"-999"} dt_inner = ${dt_inner:-"${default_dt_inner}"} sedi_semi = ${sedi_semi:-".true."} From bb6151aa50c27253cb9b8f0a6601ddd58526b8b3 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Thu, 14 Dec 2023 05:38:29 -0500 Subject: [PATCH 4/7] Replace checkout script with submodules (#2142) Replaces the checkout script with git submodules. This change brings workflow in line with other repos and git standards. Due to these changes, the build system is updated slightly. First, the options that were formerly used with `checkout.sh` to reduce unneeded builds (`-g` for GSI and `-u` for UFSDA) are moved to `build_all.sh` instead. This is needed since build all will not be able to key off directories existing or not to determine which com- ponents to build. Second, the partial build system is removed. It was rarely, if ever, used, and parts of it were already ignored with the previous changes to base builds off whether directories exist. The manage externals file is also removed since that is also not being used. 
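For reference, a minimal sketch of the build invocations that take over the old `checkout.sh` options (flags per the updated usage in `sorc/build_all.sh`; assumes a fresh recursive clone as shown in the checkout examples below):

```
cd global-workflow/sorc
./build_all.sh          # forecast-only (coupled or uncoupled) components
./build_all.sh -g       # also build GSI for GSI-based cycling
./build_all.sh -gu      # also build GSI and GDASApp (UFS-DA) for coupled cycling
./link_workflow.sh      # link executables and other workflow artifacts
```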
Following these changes, clones/checkout will need to either be recursive, or `git submodule update` will need to be called separately to make sure components are properly checked out (non-exhaustive list): ``` ## Checkout develop (method 1) git clone --recursive https://github.com/NOAA-EMC/global-workflow destination ## Checkout develop (method 2) git clone --jobs 8 https://github.com/NOAA-EMC/global-workflow destination cd destination git submodule update --init --recursive --jobs 8 ## Checkout existing branch some_branch (method 1) git clone --recursive --jobs 8 -b some_branch https://github.com/NOAA-EMC/global-workflow destination ## Checkout existing branch some_branch (method 2) git clone https://github.com/NOAA-EMC/global-workflow destination cd destination git checkout --recurse-submodules some_branch ## Checkout existing branch some_branch (method 3) git clone https://github.com/NOAA-EMC/global-workflow destination cd destination git checkout some_branch git submodule update --init --recursive --jobs 8 ## Checkout new branch new_branch git clone https://github.com/NOAA-EMC/global-workflow destination cd destination git checkout -b new_branch git submodule update --init --recursive --jobs 8 ``` The git ignore list had to be updated to use explicit paths as the wildcard paths pick up the ones that are now submodules. UFS creates files during the build process that are not in its ignore list, so that sub- module ignores the "dirty" state to avoid confusing users running `git status`. CI and documentation are also updated to reflect the changes. Resolves #2141 --- .gitignore | 42 ++++++- .gitmodules | 28 +++++ Externals.cfg | 67 ----------- ci/scripts/clone-build_ci.sh | 22 +--- ci/scripts/driver_weekly.sh | 2 +- docs/source/clone.rst | 80 ++++++------- docs/source/components.rst | 13 +- docs/source/development.rst | 1 + docs/source/init.rst | 10 +- docs/source/run.rst | 2 +- parm/config/gfs/config.resources | 2 +- sorc/build_all.sh | 70 +++++------ sorc/checkout.sh | 184 ---------------------------- sorc/gdas.cd | 1 + sorc/gfs_build.cfg | 15 --- sorc/gfs_utils.fd | 1 + sorc/gsi_enkf.fd | 1 + sorc/gsi_monitor.fd | 1 + sorc/gsi_utils.fd | 1 + sorc/link_workflow.sh | 10 +- sorc/partial_build.sh | 199 ------------------------------- sorc/ufs_model.fd | 1 + sorc/ufs_utils.fd | 1 + sorc/verif-global.fd | 1 + sorc/wxflow | 1 + 25 files changed, 163 insertions(+), 593 deletions(-) create mode 100644 .gitmodules delete mode 100644 Externals.cfg delete mode 100755 sorc/checkout.sh create mode 160000 sorc/gdas.cd delete mode 100644 sorc/gfs_build.cfg create mode 160000 sorc/gfs_utils.fd create mode 160000 sorc/gsi_enkf.fd create mode 160000 sorc/gsi_monitor.fd create mode 160000 sorc/gsi_utils.fd delete mode 100755 sorc/partial_build.sh create mode 160000 sorc/ufs_model.fd create mode 160000 sorc/ufs_utils.fd create mode 160000 sorc/verif-global.fd create mode 160000 sorc/wxflow diff --git a/.gitignore b/.gitignore index f94c17ba35..047313a32f 100644 --- a/.gitignore +++ b/.gitignore @@ -31,6 +31,7 @@ fix/cice fix/cpl fix/datm fix/gldas +fix/gdas fix/gsi fix/lut fix/mom6 @@ -40,7 +41,6 @@ fix/sfc_climo fix/ugwd fix/verif fix/wave -fix/wafs # Ignore parm file symlinks #-------------------------- @@ -97,8 +97,42 @@ parm/wafs #-------------------------------------------- sorc/*log sorc/logs -sorc/*.cd -sorc/*.fd +sorc/calc_analysis.fd +sorc/calc_increment_ens.fd +sorc/calc_increment_ens_ncio.fd +sorc/emcsfc_ice_blend.fd +sorc/emcsfc_snow2mdl.fd +sorc/enkf.fd +sorc/enkf_chgres_recenter_nc.fd 
+sorc/fbwndgfs.fd +sorc/gaussian_sfcanl.fd +sorc/getsfcensmeanp.fd +sorc/getsigensmeanp_smooth.fd +sorc/getsigensstatp.fd +sorc/gfs_bufr.fd +sorc/global_cycle.fd +sorc/gsi.fd +sorc/interp_inc.fd +sorc/mkgfsawps.fd +sorc/overgridid.fd +sorc/oznmon_horiz.fd +sorc/oznmon_time.fd +sorc/radmon_angle.fd +sorc/radmon_bcoef.fd +sorc/radmon_bcor.fd +sorc/radmon_time.fd +sorc/rdbfmsua.fd +sorc/recentersigp.fd +sorc/reg2grb2.fd +sorc/supvit.fd +sorc/syndat_getjtbul.fd +sorc/syndat_maksynrc.fd +sorc/syndat_qctropcy.fd +sorc/tave.fd +sorc/tocsbufr.fd +sorc/upp.fd +sorc/vint.fd +sorc/webtitle.fd # Ignore scripts from externals #------------------------------ @@ -121,6 +155,7 @@ scripts/exgfs_atmos_wafs_grib2_0p25.sh ush/chgres_cube.sh ush/emcsfc_ice_blend.sh ush/emcsfc_snow.sh +ush/exglobal_prep_ocean_obs.py ush/fix_precip.sh ush/fv3gfs_driver_grid.sh ush/fv3gfs_filter_topo.sh @@ -150,7 +185,6 @@ versions/build.ver versions/run.ver # wxflow checkout and symlinks -sorc/wxflow ush/python/wxflow workflow/wxflow ci/scripts/wxflow diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000000..3eb26fb0fe --- /dev/null +++ b/.gitmodules @@ -0,0 +1,28 @@ +[submodule "sorc/ufs_model.fd"] + path = sorc/ufs_model.fd + url = https://github.com/ufs-community/ufs-weather-model + ignore = dirty +[submodule "sorc/wxflow"] + path = sorc/wxflow + url = https://github.com/NOAA-EMC/wxflow +[submodule "sorc/gfs_utils.fd"] + path = sorc/gfs_utils.fd + url = https://github.com/NOAA-EMC/gfs-utils +[submodule "sorc/ufs_utils.fd"] + path = sorc/ufs_utils.fd + url = https://github.com/ufs-community/UFS_UTILS.git +[submodule "sorc/verif-global.fd"] + path = sorc/verif-global.fd + url = https://github.com/NOAA-EMC/EMC_verif-global.git +[submodule "sorc/gsi_enkf.fd"] + path = sorc/gsi_enkf.fd + url = https://github.com/NOAA-EMC/GSI.git +[submodule "sorc/gdas.cd"] + path = sorc/gdas.cd + url = https://github.com/NOAA-EMC/GDASApp.git +[submodule "sorc/gsi_utils.fd"] + path = sorc/gsi_utils.fd + url = https://github.com/NOAA-EMC/GSI-Utils.git +[submodule "sorc/gsi_monitor.fd"] + path = sorc/gsi_monitor.fd + url = https://github.com/NOAA-EMC/GSI-Monitor.git diff --git a/Externals.cfg b/Externals.cfg deleted file mode 100644 index eba775275a..0000000000 --- a/Externals.cfg +++ /dev/null @@ -1,67 +0,0 @@ -# External sub-modules of global-workflow - -[wxflow] -tag = 528f5ab -local_path = sorc/wxflow -repo_url = https://github.com/NOAA-EMC/wxflow.git -protocol = git -required = True - -[UFS] -tag = 3ba8dff -local_path = sorc/ufs_model.fd -repo_url = https://github.com/ufs-community/ufs-weather-model.git -protocol = git -required = True - -[gfs-utils] -hash = a283262 -local_path = sorc/gfs_utils.fd -repo_url = https://github.com/NOAA-EMC/gfs-utils -protocol = git -required = True - -[UFS-Utils] -hash = 72a0471 -local_path = sorc/ufs_utils.fd -repo_url = https://github.com/ufs-community/UFS_UTILS.git -protocol = git -required = True - -[EMC_verif-global] -tag = c267780 -local_path = sorc/verif-global.fd -repo_url = https://github.com/NOAA-EMC/EMC_verif-global.git -protocol = git -required = True - -[GSI-EnKF] -hash = ca19008 -local_path = sorc/gsi_enkf.fd -repo_url = https://github.com/NOAA-EMC/GSI.git -protocol = git -required = False - -[GSI-Utils] -hash = 322cc7b -local_path = sorc/gsi_utils.fd -repo_url = https://github.com/NOAA-EMC/GSI-utils.git -protocol = git -required = False - -[GSI-Monitor] -hash = 45783e3 -local_path = sorc/gsi_monitor.fd -repo_url = https://github.com/NOAA-EMC/GSI-monitor.git -protocol = git 
-required = False - -[GDASApp] -hash = f44a6d5 -local_path = sorc/gdas.cd -repo_url = https://github.com/NOAA-EMC/GDASApp.git -protocol = git -required = False - -[externals_description] -schema_version = 1.0.0 diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh index 4af44507e9..798c98bf50 100755 --- a/ci/scripts/clone-build_ci.sh +++ b/ci/scripts/clone-build_ci.sh @@ -48,7 +48,7 @@ git clone "${REPO_URL}" cd global-workflow || exit 1 # checkout pull request -"${GH}" pr checkout "${PR}" --repo "${REPO_URL}" +"${GH}" pr checkout "${PR}" --repo "${REPO_URL}" --recurse-submodules HOMEgfs="${PWD}" source "${HOMEgfs}/ush/detect_machine.sh" @@ -67,30 +67,14 @@ source "${HOMEgfs}/ush/detect_machine.sh" commit=$(git log --pretty=format:'%h' -n 1) echo "${commit}" > "../commit" -# run checkout script +# build full cycle cd sorc || exit 1 set +e -./checkout.sh -c -g -u >> log.checkout 2>&1 -checkout_status=$? -DATE=$(date +'%D %r') -if [[ ${checkout_status} != 0 ]]; then - { - echo "Checkout: *** FAILED ***" - echo "Checkout: Failed at ${DATE}" - echo "Checkout: see output at ${PWD}/log.checkout" - } >> "${outfile}" - exit "${checkout_status}" -else - { - echo "Checkout: Completed at ${DATE}" - } >> "${outfile}" -fi -# build full cycle source "${HOMEgfs}/ush/module-setup.sh" export BUILD_JOBS=8 rm -rf log.build -./build_all.sh >> log.build 2>&1 +./build_all.sh -gu >> log.build 2>&1 build_status=$? DATE=$(date +'%D %r') diff --git a/ci/scripts/driver_weekly.sh b/ci/scripts/driver_weekly.sh index a52e5b1caa..88b027d100 100755 --- a/ci/scripts/driver_weekly.sh +++ b/ci/scripts/driver_weekly.sh @@ -66,7 +66,7 @@ echo "Creating new branch ${branch} from develop on ${MACHINE_ID} in ${develop_d rm -Rf "${develop_dir}" mkdir -p "${develop_dir}" cd "${develop_dir}" || exit 1 -git clone "${REPO_URL}" +git clone --recursive "${REPO_URL}" cd global-workflow || exit 1 git checkout -b "${branch}" diff --git a/docs/source/clone.rst b/docs/source/clone.rst index c098a34f7e..bad3f0e9f6 100644 --- a/docs/source/clone.rst +++ b/docs/source/clone.rst @@ -15,35 +15,34 @@ Clone the `global-workflow` and `cd` into the `sorc` directory: :: - git clone https://github.com/NOAA-EMC/global-workflow + git clone --recursive https://github.com/NOAA-EMC/global-workflow cd global-workflow/sorc -For forecast-only (coupled or uncoupled) checkout the components: +For forecast-only (coupled or uncoupled) build of the components: :: - ./checkout.sh + ./build_all.sh -For cycled (w/ data assimilation) use the `-g` option during checkout: +For cycled (w/ data assimilation) use the `-g` option during build: :: - ./checkout.sh -g + ./build_all.sh -g -For coupled cycling (include new UFSDA) use the `-gu` options during checkout: +For coupled cycling (include new UFSDA) use the `-gu` options during build: [Currently only available on Hera, Orion, and Hercules] :: - ./checkout.sh -gu + ./build_all.sh -gu Build workflow components and link workflow artifacts such as executables, etc. :: - ./build_all.sh ./link_workflow.sh @@ -73,7 +72,7 @@ You now have a cloned copy of the global-workflow git repository. To checkout a :: - git checkout BRANCH_NAME + git checkout --recurse-submodules BRANCH_NAME .. note:: Branch must already exist. 
If it does not you need to make a new branch using the ``-b`` flag: @@ -86,62 +85,55 @@ The ``checkout`` command will checkout BRANCH_NAME and switch your clone to that :: - git checkout my_branch + git checkout --recurse-submodules my_branch git branch * my_branch develop -********** -Components -********** +Using ``--recurse-submodules`` is important to ensure you are updating the component versions to match the branch. -Once you have cloned the workflow repository it's time to checkout/clone its components. The components will be checked out under the ``/sorc`` folder via a script called checkout.sh. Run the script with no arguments for forecast-only: +^^^^^^^^^^^^^^^^ +Build components +^^^^^^^^^^^^^^^^ -:: +Under the ``/sorc`` folder is a script to build all components called ``build_all.sh``. After checking out the branch you wish to use, run this script to build all components codes: - cd sorc - ./checkout.sh +:: -Or with the ``-g`` switch to include data assimilation (GSI) for cycling: + ./build_all.sh [-a UFS_app][-g][-h][-u][-v] + -a UFS_app: + Build a specific UFS app instead of the default + -g: + Build GSI + -h: + Print this help message and exit + -j: + Specify maximum number of build jobs (n) + -u: + Build UFS-DA + -v: + Execute all build scripts with -v option to turn on verbose where supported + +For forecast-only (coupled or uncoupled) build of the components: :: - cd sorc - ./checkout.sh -g + ./build_all.sh -Or also with the ``-u`` swtich to include coupled DA (via UFSDA): -[Currently only available on Hera, Orion, and Hercules] +For cycled (w/ data assimilation) use the `-g` option during build: :: - cd sorc - ./checkout.sh -gu - -Each component cloned via checkout.sh will have a log (``/sorc/logs/checkout-COMPONENT.log``). Check the screen output and logs for clone errors. + ./build_all.sh -g -^^^^^^^^^^^^^^^^ -Build components -^^^^^^^^^^^^^^^^ +For coupled cycling (include new UFSDA) use the `-gu` options during build: -Under the ``/sorc`` folder is a script to build all components called ``build_all.sh``. After running checkout.sh run this script to build all components codes: +[Currently only available on Hera, Orion, and Hercules] :: - ./build_all.sh [-a UFS_app][-c build_config][-h][-v] - -a UFS_app: - Build a specific UFS app instead of the default - -c build_config: - Selectively build based on the provided config instead of the default config - -h: - Print usage message and exit - -v: - Run all scripts in verbose mode - -A partial build option is also available via two methods: - - a) modify gfs_build.cfg config file to disable/enable particular builds and then rerun build_all.sh + ./build_all.sh -gu - b) run individual build scripts also available in ``/sorc`` folder for each component or group of codes ^^^^^^^^^^^^^^^ Link components diff --git a/docs/source/components.rst b/docs/source/components.rst index 4d2619e44e..98e76b467b 100644 --- a/docs/source/components.rst +++ b/docs/source/components.rst @@ -13,13 +13,13 @@ The major components of the system are: * Post-processing * Verification -The Global Workflow repository contains the workflow and script layers. After running the checkout script, the code and additional offline scripts for the analysis, forecast, and post-processing components will be present. Any non-workflow component is known as a sub-module. All of the sub-modules of the system reside in their respective repositories on GitHub. The global-workflow sub-modules are obtained by running the checkout script found under the /sorc folder. 
+The Global Workflow repository contains the workflow and script layers. External components will be checked out as git submodules. All of the submodules of the system reside in their respective repositories on GitHub. ====================== Component repositories ====================== -Components checked out via sorc/checkout.sh: +Components included as submodules: * **GFS UTILS** (https://github.com/ufs-community/gfs_utils): Utility codes needed by Global Workflow to run the GFS configuration * **UFS-Weather-Model** (https://github.com/ufs-community/ufs-weather-model): This is the core model used by the Global-Workflow to provide forecasts. The UFS-weather-model repository is an umbrella repository consisting of cooupled component earth systeme that are all checked out when we check out the code at the top level of the repoitory @@ -28,10 +28,11 @@ Components checked out via sorc/checkout.sh: * **GSI Monitor** (https://github.com/NOAA-EMC/GSI-Monitor): These tools monitor the GSI package's data assimilation, detecting and reporting missing data sources, low observation counts, and high penalty values * **GDAS** (https://github.com/NOAA-EMC/GDASApp): Jedi based Data Assimilation system. This system is currently being developed for marine Data Assimilation and in time will replace GSI for atmospheric data assimilation as well * **UFS UTILS** (https://github.com/ufs-community/UFS_UTILS): Utility codes needed for UFS-weather-model +* **wxflow** Collection of python utilities for weather workflows (https://github.com/NOAA-EMC/wxflow) * **Verif global** (https://github.com/NOAA-EMC/EMC_verif-global): Verification package to evaluate GFS parallels. It uses MET and METplus. At this moment the verification package is limited to providing atmospheric metrics only .. note:: - When running the system in forecast-only mode the Data Assimilation components are not needed and are hence not checked out. + When running the system in forecast-only mode the Data Assimilation components are not needed and are hence not built. ===================== External dependencies @@ -41,11 +42,11 @@ External dependencies Libraries ^^^^^^^^^ -All the libraries that are needed to run the end to end Global Workflow are built using a package manager. Currently these are served via HPC-STACK but will soon be available via SPACK-STACK. These libraries are already available on supported NOAA HPC platforms +All the libraries that are needed to run the end to end Global Workflow are built using a package manager. These are served via spack-stack. These libraries are already available on supported NOAA HPC platforms. 
-Find information on official installations of HPC-STACK here: +Find information on official installations of spack-stack here: -https://github.com/NOAA-EMC/hpc-stack/wiki/Official-Installations +https://github.com/JCSDA/spack-stack/wiki/Porting-spack-stack-to-a-new-system ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Observation data (OBSPROC/prep) diff --git a/docs/source/development.rst b/docs/source/development.rst index e95516bcca..4739d2b602 100644 --- a/docs/source/development.rst +++ b/docs/source/development.rst @@ -196,3 +196,4 @@ Moving forward you'll want to perform the "remote update" command regularly to u :: git remote update + diff --git a/docs/source/init.rst b/docs/source/init.rst index f9562a3a7d..65e400c68e 100644 --- a/docs/source/init.rst +++ b/docs/source/init.rst @@ -318,12 +318,12 @@ Manual Generation The following information is for users needing to generate cold-start initial conditions for a cycled experiment that will run at a different resolution or layer amount than the operational GFS (C768C384L127). -The ``chgres_cube`` code is available from the `UFS_UTILS repository `_ on GitHub and can be used to convert GFS ICs to a different resolution or number of layers. Users may clone the develop/HEAD branch or the same version used by global-workflow develop (found in ``sorc/checkout.sh``). The ``chgres_cube`` code/scripts currently support the following GFS inputs: +The ``chgres_cube`` code is available from the `UFS_UTILS repository `_ on GitHub and can be used to convert GFS ICs to a different resolution or number of layers. Users may clone the develop/HEAD branch or the same version used by global-workflow develop. The ``chgres_cube`` code/scripts currently support the following GFS inputs: -* pre-GFSv14 -* GFSv14 -* GFSv15 -* GFSv16 +* pre-GFSv14 +* GFSv14 +* GFSv15 +* GFSv16 Users can use the copy of UFS_UTILS that is already cloned and built within their global-workflow clone or clone/build it separately: diff --git a/docs/source/run.rst b/docs/source/run.rst index 0d38b8d6a4..817ed3ccfa 100644 --- a/docs/source/run.rst +++ b/docs/source/run.rst @@ -2,7 +2,7 @@ Run Global Workflow ################### -Here we will show how you can run an experiment using the Global Workflow. The Global workflow is regularly evolving and the underlying UFS-weather-model that it drives can run many different configurations. So this part of the document will be regularly updated. The workflow as it is configured today can be run as forecast only or cycled (forecast+Data Assimilation). Since cycled mode requires a number of Data Assimilation supporting repositories to be checked out, the instructions for the two modes from initial checkout stage will be slightly different. +Here we will show how you can run an experiment using the Global Workflow. The Global workflow is regularly evolving and the underlying UFS-weather-model that it drives can run many different configurations. So this part of the document will be regularly updated. The workflow as it is configured today can be run as forecast only or cycled (forecast+Data Assimilation). Since cycled mode requires a number of Data Assimilation supporting repositories to be checked out, the instructions for the two modes from initial build stage will be slightly different. .. 
toctree:: diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 695ad5fcc5..1fc0c606c1 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -730,7 +730,7 @@ elif [[ ${step} = "verfozn" ]]; then elif [[ ${step} = "verfrad" ]]; then - export wtime_verfrad="00:20:00" + export wtime_verfrad="00:40:00" export npe_verfrad=1 export nth_verfrad=1 export npe_node_verfrad=1 diff --git a/sorc/build_all.sh b/sorc/build_all.sh index 4ba0b92888..e65c50e8de 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -19,12 +19,14 @@ Builds all of the global-workflow components by calling the individual build Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-h][-j n][-v] -a UFS_app: Build a specific UFS app instead of the default - -c build_config: - Selectively build based on the provided config instead of the default config + -g: + Build GSI -h: - print this help message and exit + Print this help message and exit -j: Specify maximum number of build jobs (n) + -u: + Build UFS-DA -v: Execute all build scripts with -v option to turn on verbose where supported EOF @@ -35,17 +37,19 @@ script_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" &> /dev/null && pwd) cd "${script_dir}" || exit 1 _build_ufs_opt="" +_build_ufsda="NO" +_build_gsi="NO" _verbose_opt="" -_partial_opt="" _build_job_max=20 # Reset option counter in case this script is sourced OPTIND=1 -while getopts ":a:c:j:hv" option; do +while getopts ":a:ghj:uv" option; do case "${option}" in a) _build_ufs_opt+="-a ${OPTARG} ";; - c) _partial_opt+="-c ${OPTARG} ";; + g) _build_gsi="YES" ;; h) _usage;; j) _build_job_max="${OPTARG} ";; + u) _build_ufsda="YES" ;; v) _verbose_opt="-v";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" @@ -89,15 +93,6 @@ fi # TODO: Commented out until components aligned for build #source ../versions/build.ver -#------------------------------------ -# INCLUDE PARTIAL BUILD -#------------------------------------ -# Turn off some shellcheck warnings because we want to have -# variables with multiple arguments. 
-# shellcheck disable=SC2086,SC2248 -source ./partial_build.sh ${_verbose_opt} ${_partial_opt} -# shellcheck disable= - #------------------------------------ # Exception Handling Init #------------------------------------ @@ -116,45 +111,36 @@ declare -A build_opts # Mandatory builds, unless otherwise specified, for the UFS big_jobs=0 -if [[ ${Build_ufs_model} == 'true' ]]; then - build_jobs["ufs"]=8 - big_jobs=$((big_jobs+1)) - build_opts["ufs"]="${_verbose_opt} ${_build_ufs_opt}" -fi -# The UPP is hardcoded to use 6 cores -if [[ ${Build_upp} == 'true' ]]; then - build_jobs["upp"]=6 - build_opts["upp"]="" -fi -if [[ ${Build_ufs_utils} == 'true' ]]; then - build_jobs["ufs_utils"]=3 - build_opts["ufs_utils"]="${_verbose_opt}" -fi -if [[ ${Build_gfs_utils} == 'true' ]]; then - build_jobs["gfs_utils"]=1 - build_opts["gfs_utils"]="${_verbose_opt}" -fi -if [[ ${Build_ww3prepost} == "true" ]]; then - build_jobs["ww3prepost"]=3 - build_opts["ww3prepost"]="${_verbose_opt} ${_build_ufs_opt}" -fi +build_jobs["ufs"]=8 +big_jobs=$((big_jobs+1)) +build_opts["ufs"]="${_verbose_opt} ${_build_ufs_opt}" + +build_jobs["upp"]=6 # The UPP is hardcoded to use 6 cores +build_opts["upp"]="" + +build_jobs["ufs_utils"]=3 +build_opts["ufs_utils"]="${_verbose_opt}" + +build_jobs["gfs_utils"]=1 +build_opts["gfs_utils"]="${_verbose_opt}" + +build_jobs["ww3prepost"]=3 +build_opts["ww3prepost"]="${_verbose_opt} ${_build_ufs_opt}" # Optional DA builds -if [[ -d gdas.cd ]]; then +if [[ "${_build_ufsda}" == "YES" ]]; then build_jobs["gdas"]=8 big_jobs=$((big_jobs+1)) build_opts["gdas"]="${_verbose_opt}" fi -if [[ -d gsi_enkf.fd ]]; then +if [[ "${_build_gsi}" == "YES" ]]; then build_jobs["gsi_enkf"]=8 big_jobs=$((big_jobs+1)) build_opts["gsi_enkf"]="${_verbose_opt}" fi -if [[ -d gsi_utils.fd ]]; then +if [[ "${_build_gsi}" == "YES" || "${_build_ufsda}" == "YES" ]] ; then build_jobs["gsi_utils"]=2 build_opts["gsi_utils"]="${_verbose_opt}" -fi -if [[ -d gsi_monitor.fd ]]; then build_jobs["gsi_monitor"]=1 build_opts["gsi_monitor"]="${_verbose_opt}" fi diff --git a/sorc/checkout.sh b/sorc/checkout.sh deleted file mode 100755 index 25680df2c0..0000000000 --- a/sorc/checkout.sh +++ /dev/null @@ -1,184 +0,0 @@ -#! /usr/bin/env bash - -set +x -set -u - -function usage() { - cat << EOF -Clones and checks out external components necessary for - global workflow. If the directory already exists, skip - cloning and just check out the requested version (unless - -c option is used). 
- -Usage: ${BASH_SOURCE[0]} [-c][-h][-m ufs_hash] - -c: - Create a fresh clone (delete existing directories) - -h: - Print this help message and exit - -m ufs_hash: - Check out this UFS hash instead of the default - -g: - Check out GSI for GSI-based DA - -u: - Check out GDASApp for UFS-based DA -EOF - exit 1 -} - -function checkout() { - # - # Clone or fetch repo, then checkout specific hash and update submodules - # - # Environment variables: - # topdir [default: $(pwd)]: parent directory to your checkout - # logdir [default: $(pwd)]: where you want logfiles written - # CLEAN [default: NO]: whether to delete existing directories and create a fresh clone - # - # Usage: checkout - # - # Arguments - # dir: Directory for the clone - # remote: URL of the remote repository - # version: Commit to check out; should always be a speciifc commit (hash or tag), not a branch - # - # Returns - # Exit code of last failed command, or 0 if successful - # - - dir="$1" - remote="$2" - version="$3" - cpus="${4:-1}" # Default 1 thread - recursive=${5:-"YES"} - - name=$(echo "${dir}" | cut -d '.' -f 1) - echo "Performing checkout of ${name}" - - logfile="${logdir:-$(pwd)}/checkout_${name}.log" - - if [[ -f "${logfile}" ]]; then - rm "${logfile}" - fi - - cd "${topdir}" || exit 1 - if [[ -d "${dir}" && ${CLEAN} == "YES" ]]; then - echo "|-- Removing existing clone in ${dir}" - rm -Rf "${dir}" - fi - if [[ ! -d "${dir}" ]]; then - echo "|-- Cloning from ${remote} into ${dir}" - git clone "${remote}" "${dir}" >> "${logfile}" 2>&1 - status=$? - if ((status > 0)); then - echo " WARNING: Error while cloning ${name}" - echo - return "${status}" - fi - cd "${dir}" || exit 1 - else - # Fetch any updates from server - cd "${dir}" || exit 1 - echo "|-- Fetching updates from ${remote}" - git fetch - fi - echo "|-- Checking out ${version}" - git checkout "${version}" >> "${logfile}" 2>&1 - status=$? - if ((status > 0)); then - echo " WARNING: Error while checking out ${version} in ${name}" - echo - return "${status}" - fi - if [[ "${recursive}" == "YES" ]]; then - echo "|-- Updating submodules (if any)" - git submodule update --init --recursive -j "${cpus}" >> "${logfile}" 2>&1 - status=$? - if ((status > 0)); then - echo " WARNING: Error while updating submodules of ${name}" - echo - return "${status}" - fi - fi - echo - return 0 -} - -# Set defaults for variables toggled by options -export CLEAN="NO" -checkout_gsi="NO" -checkout_gdas="NO" - -# Parse command line arguments -while getopts ":chgum:o" option; do - case ${option} in - c) - echo "Received -c flag, will delete any existing directories and start clean" - export CLEAN="YES" - ;; - g) - echo "Received -g flag for optional checkout of GSI-based DA" - checkout_gsi="YES" - ;; - h) usage;; - u) - echo "Received -u flag for optional checkout of UFS-based DA" - checkout_gdas="YES" - ;; - m) - echo "Received -m flag with argument, will check out ufs-weather-model hash ${OPTARG} instead of default" - ufs_model_hash=${OPTARG} - ;; - :) - echo "option -${OPTARG} needs an argument" - usage - ;; - *) - echo "invalid option -${OPTARG}, exiting..." 
- usage - ;; - esac -done -shift $((OPTIND-1)) - -topdir=$(cd "$(dirname "${BASH_SOURCE[0]}")" &> /dev/null && pwd) -export topdir -export logdir="${topdir}/logs" -mkdir -p "${logdir}" - -# Setup lmod environment -source "${topdir}/../workflow/gw_setup.sh" - -# The checkout version should always be a speciifc commit (hash or tag), not a branch -errs=0 -# Checkout UFS submodules in parallel -checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-3ba8dff}" "8" & - -# Run all other checkouts simultaneously with just 1 core each to handle submodules. -checkout "wxflow" "https://github.com/NOAA-EMC/wxflow" "528f5ab" & -checkout "gfs_utils.fd" "https://github.com/NOAA-EMC/gfs-utils" "427d467" & -checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "892b693" & -checkout "verif-global.fd" "https://github.com/NOAA-EMC/EMC_verif-global.git" "c267780" & - -if [[ ${checkout_gsi} == "YES" ]]; then - checkout "gsi_enkf.fd" "https://github.com/NOAA-EMC/GSI.git" "c94bc72" "1" "NO" & -fi - -if [[ ${checkout_gdas} == "YES" ]]; then - checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "f44a6d5" & -fi - -if [[ ${checkout_gsi} == "YES" || ${checkout_gdas} == "YES" ]]; then - checkout "gsi_utils.fd" "https://github.com/NOAA-EMC/GSI-Utils.git" "f371890" & - checkout "gsi_monitor.fd" "https://github.com/NOAA-EMC/GSI-Monitor.git" "ae256c0" & -fi - -# Go through each PID and verify no errors were reported. -for checkout_pid in $(jobs -p); do - wait "${checkout_pid}" || errs=$((errs + $?)) -done - -if (( errs > 0 )); then - echo "WARNING: One or more errors encountered during checkout process, please check logs before building" -fi -echo -exit "${errs}" diff --git a/sorc/gdas.cd b/sorc/gdas.cd new file mode 160000 index 0000000000..f44a6d500d --- /dev/null +++ b/sorc/gdas.cd @@ -0,0 +1 @@ +Subproject commit f44a6d500dda2aba491e4fa12c0bee428ddb7b80 diff --git a/sorc/gfs_build.cfg b/sorc/gfs_build.cfg deleted file mode 100644 index 8c171072d0..0000000000 --- a/sorc/gfs_build.cfg +++ /dev/null @@ -1,15 +0,0 @@ -# -# ***** configuration of global-workflow build ***** - - Building ufs_model (ufs_model) ........................ yes - Building ww3prepost (ww3prepost) ...................... yes - Building gsi_enkf (gsi_enkf) .......................... yes - Building gsi_utils (gsi_utils) ........................ yes - Building gsi_monitor (gsi_monitor) .................... yes - Building gdas (gdas) .................................. yes - Building UPP (upp) .................................... yes - Building ufs_utils (ufs_utils) ........................ yes - Building gfs_utils (gfs_utils) ........................ 
yes - -# -- END -- - diff --git a/sorc/gfs_utils.fd b/sorc/gfs_utils.fd new file mode 160000 index 0000000000..427d4678b8 --- /dev/null +++ b/sorc/gfs_utils.fd @@ -0,0 +1 @@ +Subproject commit 427d4678b80f88723528d5f5ff07b6c90df9a977 diff --git a/sorc/gsi_enkf.fd b/sorc/gsi_enkf.fd new file mode 160000 index 0000000000..c94bc72ff4 --- /dev/null +++ b/sorc/gsi_enkf.fd @@ -0,0 +1 @@ +Subproject commit c94bc72ff410b48c325abbfe92c9fcb601d89aed diff --git a/sorc/gsi_monitor.fd b/sorc/gsi_monitor.fd new file mode 160000 index 0000000000..ae256c0d69 --- /dev/null +++ b/sorc/gsi_monitor.fd @@ -0,0 +1 @@ +Subproject commit ae256c0d69df3232ee9dd3e81b176bf2c3cda312 diff --git a/sorc/gsi_utils.fd b/sorc/gsi_utils.fd new file mode 160000 index 0000000000..f371890b9f --- /dev/null +++ b/sorc/gsi_utils.fd @@ -0,0 +1 @@ +Subproject commit f371890b9fcb42312da5f6228d87b5a4829e7e3a diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 581c50e704..1bdb4dd492 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -199,7 +199,7 @@ fi #------------------------------ #--add GDASApp files #------------------------------ -if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then +if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then cd "${HOMEgfs}/ush" || exit 1 ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/ufsda" . ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/jediinc2fv3.py" . @@ -261,7 +261,7 @@ for ufs_utilsexe in emcsfc_ice_blend emcsfc_snow2mdl global_cycle; do done # GSI -if [[ -d "${HOMEgfs}/sorc/gsi_enkf.fd" ]]; then +if [[ -d "${HOMEgfs}/sorc/gsi_enkf.fd/install" ]]; then for gsiexe in enkf.x gsi.x; do [[ -s "${gsiexe}" ]] && rm -f "${gsiexe}" ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_enkf.fd/install/bin/${gsiexe}" . @@ -269,7 +269,7 @@ if [[ -d "${HOMEgfs}/sorc/gsi_enkf.fd" ]]; then fi # GSI Utils -if [[ -d "${HOMEgfs}/sorc/gsi_utils.fd" ]]; then +if [[ -d "${HOMEgfs}/sorc/gsi_utils.fd/install" ]]; then for exe in calc_analysis.x calc_increment_ens_ncio.x calc_increment_ens.x \ getsfcensmeanp.x getsigensmeanp_smooth.x getsigensstatp.x \ interp_inc.x recentersigp.x @@ -280,7 +280,7 @@ if [[ -d "${HOMEgfs}/sorc/gsi_utils.fd" ]]; then fi # GSI Monitor -if [[ -d "${HOMEgfs}/sorc/gsi_monitor.fd" ]]; then +if [[ -d "${HOMEgfs}/sorc/gsi_monitor.fd/install" ]]; then for exe in oznmon_horiz.x oznmon_time.x radmon_angle.x \ radmon_bcoef.x radmon_bcor.x radmon_time.x do @@ -290,7 +290,7 @@ if [[ -d "${HOMEgfs}/sorc/gsi_monitor.fd" ]]; then fi # GDASApp -if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then +if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then declare -a JEDI_EXE=("fv3jedi_addincrement.x" \ "fv3jedi_diffstates.x" \ "fv3jedi_ensvariance.x" \ diff --git a/sorc/partial_build.sh b/sorc/partial_build.sh deleted file mode 100755 index 34b8b557ce..0000000000 --- a/sorc/partial_build.sh +++ /dev/null @@ -1,199 +0,0 @@ -#! 
/usr/bin/env bash -# -# define the array of the name of build program -# -declare -a Build_prg=("Build_ufs_model" \ - "Build_ww3prepost" \ - "Build_gsi_enkf" \ - "Build_gsi_utils" \ - "Build_gsi_monitor" \ - "Build_gdas" \ - "Build_upp" \ - "Build_ufs_utils" \ - "Build_gfs_utils") - -# -# function parse_cfg: read config file and retrieve the values -# -parse_cfg() { - declare -i n - declare -i num_args - declare -i total_args - declare -a all_prg - total_args=$# - num_args=$1 - (( num_args == 0 )) && return 0 - config=$2 - [[ ${config,,} == "--verbose" ]] && config=$3 - all_prg=() - for (( n = num_args + 2; n <= total_args; n++ )); do - all_prg+=( "${!n}" ) - done - - if [[ ${config^^} == ALL ]]; then - # - # set all values to true - # - for var in "${Build_prg[@]}"; do - eval "${var}=true" - done - elif [[ ${config} == config=* ]]; then - # - # process config file - # - cfg_file=${config#config=} - ${verbose} && echo "INFO: settings in config file: ${cfg_file}" - while read -r cline; do - # remove leading white space - clean_line="${cline#"${cline%%[![:space:]]*}"}" - { [[ -z "${clean_line}" ]] || [[ "${clean_line:0:1}" == "#" ]]; } || { - ${verbose} && echo "${clean_line}" - first9=${clean_line:0:9} - [[ ${first9,,} == "building " ]] && { - # No shellcheck, this can't be replaced by a native bash substitute - # because it uses a regex - # shellcheck disable=SC2001 - short_prg=$(sed -e 's/.*(\(.*\)).*/\1/' <<< "${clean_line}") - # shellcheck disable= - # remove trailing white space - clean_line="${cline%"${cline##*[![:space:]]}"}" - build_action=true - last5=${clean_line: -5} - [[ ${last5,,} == ". yes" ]] && build_action=true - last4=${clean_line: -4} - [[ ${last4,,} == ". no" ]] && build_action=false - found=false - for prg in "${all_prg[@]}"; do - [[ ${prg} == "Build_${short_prg}" ]] && { - found=true - eval "${prg}=${build_action}" - break - } - done - ${found} || { - echo "*** Unrecognized line in config file \"${cfg_file}\":" 2>&1 - echo "${cline}" 2>&1 - exit 3 - } - } - } - done < "${cfg_file}" - elif [[ ${config} == select=* ]]; then - # - # set all values to (default) false - # - for var in "${Build_prg[@]}"; do - eval "${var}=false" - done - # - # read command line partial build setting - # - del="" - sel_prg=${config#select=} - for separator in " " "," ";" ":" "/" "|"; do - [[ "${sel_prg/${separator}}" == "${sel_prg}" ]] || { - del=${separator} - sel_prg=${sel_prg//${del}/ } - } - done - if [[ ${del} == "" ]]; then - { - short_prg=${sel_prg} - found=false - for prg in "${all_prg[@]}"; do - [[ ${prg} == "Build_${short_prg}" ]] && { - found=true - eval "${prg}=true" - break - } - done - ${found} || { - echo "*** Unrecognized program name \"${short_prg}\" in command line" 2>&1 - exit 4 - } - } || { - for short_prg in ${sel_prg}; do - found=false - for prg in "${all_prg[@]}"; do - [[ ${prg} == "Build_${short_prg}" ]] && { - found=true - eval "${prg}=true" - break - } - done - ${found} || { - echo "*** Unrecognized program name \"${short_prg}\" in command line" 2>&1 - exit 5 - } - done - } - fi - else - echo "*** Unrecognized command line option \"${config}\"" 2>&1 - exit 6 - fi -} - - -usage() { - cat << EOF 2>&1 -Usage: ${BASH_SOURCE[0]} [-c config_file][-h][-v] - -h: - Print this help message and exit - -v: - Turn on verbose mode - -c config_file: - Override default config file to determine whether to build each program [default: gfs_build.cfg] -EOF -} - - -# -# read command line arguments; processing config file -# -declare -a parse_argv=() -verbose=false 
-config_file="gfs_build.cfg" -# Reset option counter for when this script is sourced -OPTIND=1 -while getopts ":c:h:v" option; do - case "${option}" in - c) config_file="${OPTARG}";; - h) usage;; - v) - verbose=true - parse_argv+=( "--verbose" ) - ;; - :) - echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" - usage - ;; - *) - echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" - usage - ;; - esac -done - -shift $((OPTIND-1)) - -parse_argv+=( "config=${config_file}" ) - -# -# call arguments retriever/config parser -# -parse_cfg ${#parse_argv[@]} "${parse_argv[@]}" "${Build_prg[@]}" - -# -# print values of build array -# -${verbose} && { - echo "INFO: partial build settings:" - for var in "${Build_prg[@]}"; do - echo -n " ${var}: " - "${!var}" && echo True || echo False - done -} - -echo "=== end of partial build setting ===" > /dev/null - diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd new file mode 160000 index 0000000000..3ba8dff29a --- /dev/null +++ b/sorc/ufs_model.fd @@ -0,0 +1 @@ +Subproject commit 3ba8dff29a7395445ce5da8c9b48cfe0ff8a668a diff --git a/sorc/ufs_utils.fd b/sorc/ufs_utils.fd new file mode 160000 index 0000000000..892b693ba4 --- /dev/null +++ b/sorc/ufs_utils.fd @@ -0,0 +1 @@ +Subproject commit 892b693ba49b37c23f08cc8e18550ba72e108762 diff --git a/sorc/verif-global.fd b/sorc/verif-global.fd new file mode 160000 index 0000000000..c267780a12 --- /dev/null +++ b/sorc/verif-global.fd @@ -0,0 +1 @@ +Subproject commit c267780a1255fa7db052c745cf9c78b7dc6a2695 diff --git a/sorc/wxflow b/sorc/wxflow new file mode 160000 index 0000000000..528f5abb49 --- /dev/null +++ b/sorc/wxflow @@ -0,0 +1 @@ +Subproject commit 528f5abb49e80751f83ebd6eb0a87bc70012bb24 From f3f3e05e6325e6c2e8ddab186f1ed2f985fec49a Mon Sep 17 00:00:00 2001 From: Kate Friedman Date: Thu, 14 Dec 2023 12:06:23 -0500 Subject: [PATCH 5/7] Add MOS jobs to rocoto mesh (#2148) Create separate jobs for GFS MOS - extracted from now-retired vrfy job Refs #2068 --- env/WCOSS2.env | 8 + jobs/rocoto/mos_ext_grd_fcst.sh | 25 +++ jobs/rocoto/mos_ext_grd_prdgen.sh | 25 +++ jobs/rocoto/mos_ext_grd_prep.sh | 25 +++ jobs/rocoto/mos_ext_stn_fcst.sh | 25 +++ jobs/rocoto/mos_ext_stn_prdgen.sh | 25 +++ jobs/rocoto/mos_ext_stn_prep.sh | 25 +++ jobs/rocoto/mos_grd_fcst.sh | 25 +++ jobs/rocoto/mos_grd_prdgen.sh | 25 +++ jobs/rocoto/mos_grd_prep.sh | 25 +++ jobs/rocoto/mos_stn_fcst.sh | 25 +++ jobs/rocoto/mos_stn_prdgen.sh | 25 +++ jobs/rocoto/mos_stn_prep.sh | 25 +++ jobs/rocoto/mos_wx_ext_prdgen.sh | 25 +++ jobs/rocoto/mos_wx_prdgen.sh | 25 +++ modulefiles/module_base.wcoss2.lua | 3 + parm/config/gfs/config.base.emc.dyn | 1 + parm/config/gfs/config.mos | 9 + parm/config/gfs/config.mos_ext_grd_fcst | 12 ++ parm/config/gfs/config.mos_ext_grd_prdgen | 12 ++ parm/config/gfs/config.mos_ext_grd_prep | 12 ++ parm/config/gfs/config.mos_ext_stn_fcst | 12 ++ parm/config/gfs/config.mos_ext_stn_prdgen | 12 ++ parm/config/gfs/config.mos_ext_stn_prep | 12 ++ parm/config/gfs/config.mos_grd_fcst | 12 ++ parm/config/gfs/config.mos_grd_prdgen | 12 ++ parm/config/gfs/config.mos_grd_prep | 12 ++ parm/config/gfs/config.mos_stn_fcst | 12 ++ parm/config/gfs/config.mos_stn_prdgen | 12 ++ parm/config/gfs/config.mos_stn_prep | 12 ++ parm/config/gfs/config.mos_wx_ext_prdgen | 12 ++ parm/config/gfs/config.mos_wx_prdgen | 12 ++ parm/config/gfs/config.resources | 147 ++++++++++++++++ versions/run.wcoss2.ver | 2 + workflow/applications/applications.py | 1 + workflow/applications/gfs_cycled.py | 12 ++ workflow/applications/gfs_forecast_only.py | 12 ++ 
workflow/rocoto/gfs_tasks.py | 190 +++++++++++++++++++++ workflow/rocoto/tasks.py | 8 +- 39 files changed, 910 insertions(+), 1 deletion(-) create mode 100755 jobs/rocoto/mos_ext_grd_fcst.sh create mode 100755 jobs/rocoto/mos_ext_grd_prdgen.sh create mode 100755 jobs/rocoto/mos_ext_grd_prep.sh create mode 100755 jobs/rocoto/mos_ext_stn_fcst.sh create mode 100755 jobs/rocoto/mos_ext_stn_prdgen.sh create mode 100755 jobs/rocoto/mos_ext_stn_prep.sh create mode 100755 jobs/rocoto/mos_grd_fcst.sh create mode 100755 jobs/rocoto/mos_grd_prdgen.sh create mode 100755 jobs/rocoto/mos_grd_prep.sh create mode 100755 jobs/rocoto/mos_stn_fcst.sh create mode 100755 jobs/rocoto/mos_stn_prdgen.sh create mode 100755 jobs/rocoto/mos_stn_prep.sh create mode 100755 jobs/rocoto/mos_wx_ext_prdgen.sh create mode 100755 jobs/rocoto/mos_wx_prdgen.sh create mode 100644 parm/config/gfs/config.mos create mode 100644 parm/config/gfs/config.mos_ext_grd_fcst create mode 100644 parm/config/gfs/config.mos_ext_grd_prdgen create mode 100644 parm/config/gfs/config.mos_ext_grd_prep create mode 100644 parm/config/gfs/config.mos_ext_stn_fcst create mode 100644 parm/config/gfs/config.mos_ext_stn_prdgen create mode 100644 parm/config/gfs/config.mos_ext_stn_prep create mode 100644 parm/config/gfs/config.mos_grd_fcst create mode 100644 parm/config/gfs/config.mos_grd_prdgen create mode 100644 parm/config/gfs/config.mos_grd_prep create mode 100644 parm/config/gfs/config.mos_stn_fcst create mode 100644 parm/config/gfs/config.mos_stn_prdgen create mode 100644 parm/config/gfs/config.mos_stn_prep create mode 100644 parm/config/gfs/config.mos_wx_ext_prdgen create mode 100644 parm/config/gfs/config.mos_wx_prdgen diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 068b69fd7b..a4fe81060d 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -283,4 +283,12 @@ elif [[ "${step}" = "waveawipsbulls" ]]; then unset PERL5LIB +elif [[ "${step:0:3}" = "mos" ]]; then + + export FORT_BUFFERED=TRUE + + if [[ "${step}" = "mos_stn_prep" ]]; then + export OMP_PROC_BIND=true + fi + fi diff --git a/jobs/rocoto/mos_ext_grd_fcst.sh b/jobs/rocoto/mos_ext_grd_fcst.sh new file mode 100755 index 0000000000..ce37711907 --- /dev/null +++ b/jobs/rocoto/mos_ext_grd_fcst.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_ext_grd_fcst" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_ext_grd_fcst" -c "base mos_ext_grd_fcst" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_EXT_GRD_FORECAST" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_ext_grd_prdgen.sh b/jobs/rocoto/mos_ext_grd_prdgen.sh new file mode 100755 index 0000000000..fb641e04f0 --- /dev/null +++ b/jobs/rocoto/mos_ext_grd_prdgen.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? 
+(( status != 0 )) && exit "${status}" + +export job="mos_ext_grd_prdgen" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_ext_grd_prdgen" -c "base mos_ext_grd_prdgen" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_EXT_GRD_PRDGEN" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_ext_grd_prep.sh b/jobs/rocoto/mos_ext_grd_prep.sh new file mode 100755 index 0000000000..defe9222b6 --- /dev/null +++ b/jobs/rocoto/mos_ext_grd_prep.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_ext_grd_prep" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_ext_grd_prep" -c "base mos_ext_grd_prep" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_EXT_GRD_PREP" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_ext_stn_fcst.sh b/jobs/rocoto/mos_ext_stn_fcst.sh new file mode 100755 index 0000000000..85cde49192 --- /dev/null +++ b/jobs/rocoto/mos_ext_stn_fcst.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_ext_stn_fcst" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_ext_stn_fcst" -c "base mos_ext_stn_fcst" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_EXT_STN_FORECAST" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_ext_stn_prdgen.sh b/jobs/rocoto/mos_ext_stn_prdgen.sh new file mode 100755 index 0000000000..17709d5ffb --- /dev/null +++ b/jobs/rocoto/mos_ext_stn_prdgen.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_ext_stn_prdgen" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_ext_stn_prdgen" -c "base mos_ext_stn_prdgen" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_EXT_STN_PRDGEN" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_ext_stn_prep.sh b/jobs/rocoto/mos_ext_stn_prep.sh new file mode 100755 index 0000000000..9c65761a0d --- /dev/null +++ b/jobs/rocoto/mos_ext_stn_prep.sh @@ -0,0 +1,25 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_ext_stn_prep" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_ext_stn_prep" -c "base mos_ext_stn_prep" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_EXT_STN_PREP" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_grd_fcst.sh b/jobs/rocoto/mos_grd_fcst.sh new file mode 100755 index 0000000000..42832d5f14 --- /dev/null +++ b/jobs/rocoto/mos_grd_fcst.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_grd_fcst" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_grd_fcst" -c "base mos_grd_fcst" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_GRD_FORECAST" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_grd_prdgen.sh b/jobs/rocoto/mos_grd_prdgen.sh new file mode 100755 index 0000000000..c60b2e8f39 --- /dev/null +++ b/jobs/rocoto/mos_grd_prdgen.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_grd_prdgen" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_grd_prdgen" -c "base mos_grd_prdgen" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_GRD_PRDGEN" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_grd_prep.sh b/jobs/rocoto/mos_grd_prep.sh new file mode 100755 index 0000000000..3276ebf87d --- /dev/null +++ b/jobs/rocoto/mos_grd_prep.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_grd_prep" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_grd_prep" -c "base mos_grd_prep" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_GRD_PREP" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_stn_fcst.sh b/jobs/rocoto/mos_stn_fcst.sh new file mode 100755 index 0000000000..0024ed24b7 --- /dev/null +++ b/jobs/rocoto/mos_stn_fcst.sh @@ -0,0 +1,25 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_stn_fcst" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_stn_fcst" -c "base mos_stn_fcst" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_STN_FORECAST" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_stn_prdgen.sh b/jobs/rocoto/mos_stn_prdgen.sh new file mode 100755 index 0000000000..01ada3f9d5 --- /dev/null +++ b/jobs/rocoto/mos_stn_prdgen.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_stn_prdgen" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_stn_prdgen" -c "base mos_stn_prdgen" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_STN_PRDGEN" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_stn_prep.sh b/jobs/rocoto/mos_stn_prep.sh new file mode 100755 index 0000000000..21b19c6305 --- /dev/null +++ b/jobs/rocoto/mos_stn_prep.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_stn_prep" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_stn_prep" -c "base mos_stn_prep" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_STN_PREP" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_wx_ext_prdgen.sh b/jobs/rocoto/mos_wx_ext_prdgen.sh new file mode 100755 index 0000000000..b1c81e8a3b --- /dev/null +++ b/jobs/rocoto/mos_wx_ext_prdgen.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_wx_ext_prdgen" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_wx_ext_prdgen" -c "base mos_wx_ext_prdgen" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_WX_EXT_PRDGEN" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_wx_prdgen.sh b/jobs/rocoto/mos_wx_prdgen.sh new file mode 100755 index 0000000000..345682b03e --- /dev/null +++ b/jobs/rocoto/mos_wx_prdgen.sh @@ -0,0 +1,25 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_wx_prdgen" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_wx_prdgen" -c "base mos_wx_prdgen" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_WX_PRDGEN" +status=$? + +exit "${status}" diff --git a/modulefiles/module_base.wcoss2.lua b/modulefiles/module_base.wcoss2.lua index aedecf7040..57d3bf51dd 100644 --- a/modulefiles/module_base.wcoss2.lua +++ b/modulefiles/module_base.wcoss2.lua @@ -38,4 +38,7 @@ load(pathJoin("prepobs", os.getenv("prepobs_run_ver"))) prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/Fit2Obs/v" .. os.getenv("fit2obs_ver"), "modulefiles")) load(pathJoin("fit2obs", os.getenv("fit2obs_ver"))) +append_path("MODULEPATH", pathJoin("/apps/ops/prod/nco/models/modulefiles")) +load(pathJoin("mos_shared", os.getenv("mos_shared_ver"))) + whatis("Description: GFS run environment") diff --git a/parm/config/gfs/config.base.emc.dyn b/parm/config/gfs/config.base.emc.dyn index 4451c049b0..ca26a69ec0 100644 --- a/parm/config/gfs/config.base.emc.dyn +++ b/parm/config/gfs/config.base.emc.dyn @@ -64,6 +64,7 @@ export DO_GENESIS_FSU="NO" # Cyclone genesis verification (FSU) export DO_VERFOZN="YES" # Ozone data assimilation monitoring export DO_VERFRAD="YES" # Radiance data assimilation monitoring export DO_VMINMON="YES" # GSI minimization monitoring +export DO_MOS="NO" # GFS Model Output Statistics - Only supported on WCOSS2 # NO for retrospective parallel; YES for real-time parallel # arch.sh uses REALTIME for MOS. Need to set REALTIME=YES diff --git a/parm/config/gfs/config.mos b/parm/config/gfs/config.mos new file mode 100644 index 0000000000..a74c7e7d21 --- /dev/null +++ b/parm/config/gfs/config.mos @@ -0,0 +1,9 @@ +#! /usr/bin/env bash + +########## config.mos ########## +echo "BEGIN: config.mos" + +# MOS package location +export HOMEgfs_mos=/lfs/h1/ops/prod/packages/gfs_mos.v${mos_ver} + +echo "END: config.mos" diff --git a/parm/config/gfs/config.mos_ext_grd_fcst b/parm/config/gfs/config.mos_ext_grd_fcst new file mode 100644 index 0000000000..db94af945f --- /dev/null +++ b/parm/config/gfs/config.mos_ext_grd_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_grd_fcst ########## +echo "BEGIN: config.mos_ext_grd_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_grd_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_grd_fcst" diff --git a/parm/config/gfs/config.mos_ext_grd_prdgen b/parm/config/gfs/config.mos_ext_grd_prdgen new file mode 100644 index 0000000000..ade31b0c1a --- /dev/null +++ b/parm/config/gfs/config.mos_ext_grd_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_grd_prdgen ########## +echo "BEGIN: config.mos_ext_grd_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_grd_prdgen + +# Get MOS settings +. 
"${EXPDIR}/config.mos" + +echo "END: config.mos_ext_grd_prdgen" diff --git a/parm/config/gfs/config.mos_ext_grd_prep b/parm/config/gfs/config.mos_ext_grd_prep new file mode 100644 index 0000000000..0ba14e2573 --- /dev/null +++ b/parm/config/gfs/config.mos_ext_grd_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_grd_prep ########## +echo "BEGIN: config.mos_ext_grd_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_grd_prep + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_grd_prep" diff --git a/parm/config/gfs/config.mos_ext_stn_fcst b/parm/config/gfs/config.mos_ext_stn_fcst new file mode 100644 index 0000000000..5b26d196f9 --- /dev/null +++ b/parm/config/gfs/config.mos_ext_stn_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_stn_fcst ########## +echo "BEGIN: config.mos_ext_stn_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_stn_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_stn_fcst" diff --git a/parm/config/gfs/config.mos_ext_stn_prdgen b/parm/config/gfs/config.mos_ext_stn_prdgen new file mode 100644 index 0000000000..9f63eb56fd --- /dev/null +++ b/parm/config/gfs/config.mos_ext_stn_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_stn_prdgen ########## +echo "BEGIN: config.mos_ext_stn_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_stn_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_stn_prdgen" diff --git a/parm/config/gfs/config.mos_ext_stn_prep b/parm/config/gfs/config.mos_ext_stn_prep new file mode 100644 index 0000000000..c443503f11 --- /dev/null +++ b/parm/config/gfs/config.mos_ext_stn_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_stn_prep ########## +echo "BEGIN: config.mos_ext_stn_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_stn_prep + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_stn_prep" diff --git a/parm/config/gfs/config.mos_grd_fcst b/parm/config/gfs/config.mos_grd_fcst new file mode 100644 index 0000000000..bd0d50a04d --- /dev/null +++ b/parm/config/gfs/config.mos_grd_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_grd_fcst ########## +echo "BEGIN: config.mos_grd_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_grd_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_grd_fcst" diff --git a/parm/config/gfs/config.mos_grd_prdgen b/parm/config/gfs/config.mos_grd_prdgen new file mode 100644 index 0000000000..dd9ce8bcd8 --- /dev/null +++ b/parm/config/gfs/config.mos_grd_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_grd_prdgen ########## +echo "BEGIN: config.mos_grd_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_grd_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_grd_prdgen" diff --git a/parm/config/gfs/config.mos_grd_prep b/parm/config/gfs/config.mos_grd_prep new file mode 100644 index 0000000000..8a3d334d0d --- /dev/null +++ b/parm/config/gfs/config.mos_grd_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_grd_prep ########## +echo "BEGIN: config.mos_grd_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_grd_prep + +# Get MOS settings +. 
"${EXPDIR}/config.mos" + +echo "END: config.mos_grd_prep" diff --git a/parm/config/gfs/config.mos_stn_fcst b/parm/config/gfs/config.mos_stn_fcst new file mode 100644 index 0000000000..7cb266ea3a --- /dev/null +++ b/parm/config/gfs/config.mos_stn_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_stn_fcst ########## +echo "BEGIN: config.mos_stn_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_stn_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_stn_fcst" diff --git a/parm/config/gfs/config.mos_stn_prdgen b/parm/config/gfs/config.mos_stn_prdgen new file mode 100644 index 0000000000..f92edbd0fd --- /dev/null +++ b/parm/config/gfs/config.mos_stn_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_stn_prdgen ########## +echo "BEGIN: config.mos_stn_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_stn_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_stn_prdgen" diff --git a/parm/config/gfs/config.mos_stn_prep b/parm/config/gfs/config.mos_stn_prep new file mode 100644 index 0000000000..b236f42879 --- /dev/null +++ b/parm/config/gfs/config.mos_stn_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_stn_prep ########## +echo "BEGIN: config.mos_stn_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_stn_prep + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_stn_prep" diff --git a/parm/config/gfs/config.mos_wx_ext_prdgen b/parm/config/gfs/config.mos_wx_ext_prdgen new file mode 100644 index 0000000000..054cb950ad --- /dev/null +++ b/parm/config/gfs/config.mos_wx_ext_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_wx_ext_prdgen ########## +echo "BEGIN: config.mos_wx_ext_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_wx_ext_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_wx_ext_prdgen" diff --git a/parm/config/gfs/config.mos_wx_prdgen b/parm/config/gfs/config.mos_wx_prdgen new file mode 100644 index 0000000000..d4481b65fc --- /dev/null +++ b/parm/config/gfs/config.mos_wx_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_wx_prdgen ########## +echo "BEGIN: config.mos_wx_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_wx_prdgen + +# Get MOS settings +. 
"${EXPDIR}/config.mos" + +echo "END: config.mos_wx_prdgen" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 1fc0c606c1..b3319ecc1b 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -1030,6 +1030,153 @@ elif [[ ${step} = "gempak" ]]; then export memory_gempak="4GB" export memory_gempak_gfs="2GB" +elif [[ ${step} = "mos_stn_prep" ]]; then + + export wtime_mos_stn_prep="00:10:00" + export npe_mos_stn_prep=3 + export npe_node_mos_stn_prep=3 + export nth_mos_stn_prep=1 + export memory_mos_stn_prep="5GB" + export NTASK="${npe_mos_stn_prep}" + export PTILE="${npe_node_mos_stn_prep}" + +elif [[ ${step} = "mos_grd_prep" ]]; then + + export wtime_mos_grd_prep="00:10:00" + export npe_mos_grd_prep=4 + export npe_node_mos_grd_prep=4 + export nth_mos_grd_prep=1 + export memory_mos_grd_prep="16GB" + export NTASK="${npe_mos_grd_prep}" + export PTILE="${npe_node_mos_grd_prep}" + +elif [[ ${step} = "mos_ext_stn_prep" ]]; then + + export wtime_mos_ext_stn_prep="00:15:00" + export npe_mos_ext_stn_prep=2 + export npe_node_mos_ext_stn_prep=2 + export nth_mos_ext_stn_prep=1 + export memory_mos_ext_stn_prep="5GB" + export NTASK="${npe_mos_ext_stn_prep}" + export PTILE="${npe_node_mos_ext_stn_prep}" + +elif [[ ${step} = "mos_ext_grd_prep" ]]; then + + export wtime_mos_ext_grd_prep="00:10:00" + export npe_mos_ext_grd_prep=7 + export npe_node_mos_ext_grd_prep=7 + export nth_mos_ext_grd_prep=1 + export memory_mos_ext_grd_prep="3GB" + export NTASK="${npe_mos_ext_grd_prep}" + export PTILE="${npe_node_mos_ext_grd_prep}" + +elif [[ ${step} = "mos_stn_fcst" ]]; then + + export wtime_mos_stn_fcst="00:10:00" + export npe_mos_stn_fcst=5 + export npe_node_mos_stn_fcst=5 + export nth_mos_stn_fcst=1 + export memory_mos_stn_fcst="40GB" + export NTASK="${npe_mos_stn_fcst}" + export PTILE="${npe_node_mos_stn_fcst}" + +elif [[ ${step} = "mos_grd_fcst" ]]; then + + export wtime_mos_grd_fcst="00:10:00" + export npe_mos_grd_fcst=7 + export npe_node_mos_grd_fcst=7 + export nth_mos_grd_fcst=1 + export memory_mos_grd_fcst="50GB" + export NTASK="${npe_mos_grd_fcst}" + export PTILE="${npe_node_mos_grd_fcst}" + +elif [[ ${step} = "mos_ext_stn_fcst" ]]; then + + export wtime_mos_ext_stn_fcst="00:20:00" + export npe_mos_ext_stn_fcst=3 + export npe_node_mos_ext_stn_fcst=3 + export nth_mos_ext_stn_fcst=1 + export memory_mos_ext_stn_fcst="50GB" + export NTASK="${npe_mos_ext_stn_fcst}" + export PTILE="${npe_node_mos_ext_stn_fcst}" + export prepost=True + +elif [[ ${step} = "mos_ext_grd_fcst" ]]; then + + export wtime_mos_ext_grd_fcst="00:10:00" + export npe_mos_ext_grd_fcst=7 + export npe_node_mos_ext_grd_fcst=7 + export nth_mos_ext_grd_fcst=1 + export memory_mos_ext_grd_fcst="50GB" + export NTASK="${npe_mos_ext_grd_fcst}" + export PTILE="${npe_node_mos_ext_grd_fcst}" + +elif [[ ${step} = "mos_stn_prdgen" ]]; then + + export wtime_mos_stn_prdgen="00:10:00" + export npe_mos_stn_prdgen=1 + export npe_node_mos_stn_prdgen=1 + export nth_mos_stn_prdgen=1 + export memory_mos_stn_prdgen="15GB" + export NTASK="${npe_mos_stn_prdgen}" + export PTILE="${npe_node_mos_stn_prdgen}" + export prepost=True + +elif [[ ${step} = "mos_grd_prdgen" ]]; then + + export wtime_mos_grd_prdgen="00:40:00" + export npe_mos_grd_prdgen=72 + export npe_node_mos_grd_prdgen=18 + export nth_mos_grd_prdgen=4 + export memory_mos_grd_prdgen="20GB" + export NTASK="${npe_mos_grd_prdgen}" + export PTILE="${npe_node_mos_grd_prdgen}" + export OMP_NUM_THREADS="${nth_mos_grd_prdgen}" + +elif [[ ${step} = 
"mos_ext_stn_prdgen" ]]; then + + export wtime_mos_ext_stn_prdgen="00:10:00" + export npe_mos_ext_stn_prdgen=1 + export npe_node_mos_ext_stn_prdgen=1 + export nth_mos_ext_stn_prdgen=1 + export memory_mos_ext_stn_prdgen="15GB" + export NTASK="${npe_mos_ext_stn_prdgen}" + export PTILE="${npe_node_mos_ext_stn_prdgen}" + export prepost=True + +elif [[ ${step} = "mos_ext_grd_prdgen" ]]; then + + export wtime_mos_ext_grd_prdgen="00:30:00" + export npe_mos_ext_grd_prdgen=96 + export npe_node_mos_ext_grd_prdgen=6 + export nth_mos_ext_grd_prdgen=16 + export memory_mos_ext_grd_prdgen="30GB" + export NTASK="${npe_mos_ext_grd_prdgen}" + export PTILE="${npe_node_mos_ext_grd_prdgen}" + export OMP_NUM_THREADS="${nth_mos_ext_grd_prdgen}" + +elif [[ ${step} = "mos_wx_prdgen" ]]; then + + export wtime_mos_wx_prdgen="00:10:00" + export npe_mos_wx_prdgen=4 + export npe_node_mos_wx_prdgen=2 + export nth_mos_wx_prdgen=2 + export memory_mos_wx_prdgen="10GB" + export NTASK="${npe_mos_wx_prdgen}" + export PTILE="${npe_node_mos_wx_prdgen}" + export OMP_NUM_THREADS="${nth_mos_wx_prdgen}" + +elif [[ ${step} = "mos_wx_ext_prdgen" ]]; then + + export wtime_mos_wx_ext_prdgen="00:10:00" + export npe_mos_wx_ext_prdgen=4 + export npe_node_mos_wx_ext_prdgen=2 + export nth_mos_wx_ext_prdgen=2 + export memory_mos_wx_ext_prdgen="10GB" + export NTASK="${npe_mos_wx_ext_prdgen}" + export PTILE="${npe_node_mos_wx_ext_prdgen}" + export OMP_NUM_THREADS="${nth_mos_wx_ext_prdgen}" + else echo "Invalid step = ${step}, ABORT!" diff --git a/versions/run.wcoss2.ver b/versions/run.wcoss2.ver index f61122337e..a188cdea74 100644 --- a/versions/run.wcoss2.ver +++ b/versions/run.wcoss2.ver @@ -47,3 +47,5 @@ export prepobs_run_ver=1.0.1 export ens_tracker_ver=feature-GFSv17_com_reorg export fit2obs_ver=1.0.0 +export mos_ver=5.4.3 +export mos_shared_ver=2.7.2 diff --git a/workflow/applications/applications.py b/workflow/applications/applications.py index 441dbe4c19..766d4aa508 100644 --- a/workflow/applications/applications.py +++ b/workflow/applications/applications.py @@ -62,6 +62,7 @@ def __init__(self, conf: Configuration) -> None: self.do_genesis_fsu = _base.get('DO_GENESIS_FSU', False) self.do_metp = _base.get('DO_METP', False) self.do_upp = not _base.get('WRITE_DOPOST', True) + self.do_mos = _base.get('DO_MOS', False) self.do_hpssarch = _base.get('HPSSARCH', False) diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 63332c0cf6..a3caf8685d 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -104,6 +104,12 @@ def _get_app_configs(self): if self.do_jedilandda: configs += ['preplandobs', 'landanl'] + if self.do_mos: + configs += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', + 'mos_stn_fcst', 'mos_grd_fcst', 'mos_ext_stn_fcst', 'mos_ext_grd_fcst', + 'mos_stn_prdgen', 'mos_grd_prdgen', 'mos_ext_stn_prdgen', 'mos_ext_grd_prdgen', + 'mos_wx_prdgen', 'mos_wx_ext_prdgen'] + return configs @staticmethod @@ -238,6 +244,12 @@ def get_task_names(self): if self.do_awips: gfs_tasks += ['awips_20km_1p0deg', 'awips_g2', 'fbwinds'] + if self.do_mos: + gfs_tasks += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', + 'mos_stn_fcst', 'mos_grd_fcst', 'mos_ext_stn_fcst', 'mos_ext_grd_fcst', + 'mos_stn_prdgen', 'mos_grd_prdgen', 'mos_ext_stn_prdgen', 'mos_ext_grd_prdgen', + 'mos_wx_prdgen', 'mos_wx_ext_prdgen'] + gfs_tasks += gdas_gfs_common_cleanup_tasks tasks = dict() diff --git a/workflow/applications/gfs_forecast_only.py 
b/workflow/applications/gfs_forecast_only.py index 701ec12a04..ba1d1af0e1 100644 --- a/workflow/applications/gfs_forecast_only.py +++ b/workflow/applications/gfs_forecast_only.py @@ -60,6 +60,12 @@ def _get_app_configs(self): if self.do_awips: configs += ['waveawipsbulls', 'waveawipsgridded'] + if self.do_mos: + configs += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', + 'mos_stn_fcst', 'mos_grd_fcst', 'mos_ext_stn_fcst', 'mos_ext_grd_fcst', + 'mos_stn_prdgen', 'mos_grd_prdgen', 'mos_ext_stn_prdgen', 'mos_ext_grd_prdgen', + 'mos_wx_prdgen', 'mos_wx_ext_prdgen'] + return configs @staticmethod @@ -129,6 +135,12 @@ def get_task_names(self): if self.do_awips: tasks += ['waveawipsbulls', 'waveawipsgridded'] + if self.do_mos: + tasks += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', + 'mos_stn_fcst', 'mos_grd_fcst', 'mos_ext_stn_fcst', 'mos_ext_grd_fcst', + 'mos_stn_prdgen', 'mos_grd_prdgen', 'mos_ext_stn_prdgen', 'mos_ext_grd_prdgen', + 'mos_wx_prdgen', 'mos_wx_ext_prdgen'] + tasks += ['arch', 'cleanup'] # arch and cleanup **must** be the last tasks return {f"{self._base['CDUMP']}": tasks} diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 047c174cdb..d4ecf8e4d5 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -1091,6 +1091,187 @@ def metp(self): return task + def mos_stn_prep(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('mos_stn_prep') + task = create_wf_task('mos_stn_prep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_grd_prep(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('mos_grd_prep') + task = create_wf_task('mos_grd_prep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_ext_stn_prep(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('mos_ext_stn_prep') + task = create_wf_task('mos_ext_stn_prep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_ext_grd_prep(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('mos_ext_grd_prep') + task = create_wf_task('mos_ext_grd_prep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_stn_fcst(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('mos_stn_fcst') + task = create_wf_task('mos_stn_fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_grd_fcst(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = 
{'type': 'task', 'name': f'{self.cdump}mos_grd_prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('mos_grd_fcst') + task = create_wf_task('mos_grd_fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_ext_stn_fcst(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_stn_prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_prdgen'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('mos_ext_stn_fcst') + task = create_wf_task('mos_ext_stn_fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_ext_grd_fcst(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_stn_prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_grd_prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_grd_fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('mos_ext_grd_fcst') + task = create_wf_task('mos_ext_grd_fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_stn_prdgen(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('mos_stn_prdgen') + task = create_wf_task('mos_stn_prdgen', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_grd_prdgen(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_grd_fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_prdgen'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('mos_grd_prdgen') + task = create_wf_task('mos_grd_prdgen', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_ext_stn_prdgen(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_stn_fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_prdgen'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('mos_ext_stn_prdgen') + task = create_wf_task('mos_ext_stn_prdgen', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_ext_grd_prdgen(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_grd_fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_grd_prdgen'} + 
deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_stn_prdgen'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('mos_ext_grd_prdgen') + task = create_wf_task('mos_ext_grd_prdgen', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_wx_prdgen(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_grd_prdgen'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('mos_wx_prdgen') + task = create_wf_task('mos_wx_prdgen', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_wx_ext_prdgen(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_grd_prdgen'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_wx_prdgen'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('mos_wx_ext_prdgen') + task = create_wf_task('mos_wx_ext_prdgen', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + def arch(self): deps = [] dependencies = [] @@ -1145,6 +1326,15 @@ def arch(self): if self.app_config.mode in ['forecast-only']: # TODO: fix ocnpost to run in cycled mode dep_dict = {'type': 'metatask', 'name': f'{self.cdump}ocnpost'} deps.append(rocoto.add_dependency(dep_dict)) + # MOS job dependencies + if self.cdump in ['gfs'] and self.app_config.do_mos: + mos_jobs = ["stn_prep", "grd_prep", "ext_stn_prep", "ext_grd_prep", + "stn_fcst", "grd_fcst", "ext_stn_fcst", "ext_grd_fcst", + "stn_prdgen", "grd_prdgen", "ext_stn_prdgen", "ext_grd_prdgen", + "wx_prdgen", "wx_ext_prdgen"] + for job in mos_jobs: + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_{job}'} + deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps + dependencies) diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index 06000338be..7f5725783e 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -29,7 +29,10 @@ class Tasks: 'gempak', 'gempakmeta', 'gempakmetancdc', 'gempakncdcupapgif', 'gempakpgrb2spec', 'npoess_pgrb2_0p5deg' 'waveawipsbulls', 'waveawipsgridded', 'wavegempak', 'waveinit', 'wavepostbndpnt', 'wavepostbndpntbll', 'wavepostpnt', 'wavepostsbs', 'waveprep', - 'npoess'] + 'npoess', + 'mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', + 'mos_stn_fcst', 'mos_grd_fcst', 'mos_ext_stn_fcst', 'mos_ext_grd_fcst', + 'mos_stn_prdgen', 'mos_grd_prdgen', 'mos_ext_stn_prdgen', 'mos_ext_grd_prdgen', 'mos_wx_prdgen', 'mos_wx_ext_prdgen'] def __init__(self, app_config: AppConfig, cdump: str) -> None: @@ -149,6 +152,9 @@ def get_resource(self, task_name): threads = task_config[f'nth_{task_name}_gfs'] memory = task_config.get(f'memory_{task_name}', None) + if scheduler in ['pbspro']: + if task_config.get('prepost', False): + memory += ':prepost=true' native = None if scheduler in ['pbspro']: From c5ca82f04a2efae78389dec1c46b94ef4fb5bbe5 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Thu, 14 Dec 2023 16:05:33 -0500 Subject: [PATCH 6/7] Correct fbwind (#2161) --- 
jobs/rocoto/{fbwinds.sh => fbwind.sh} | 4 ++-- workflow/applications/gfs_cycled.py | 2 +- workflow/applications/gfs_forecast_only.py | 2 +- workflow/rocoto/gfs_tasks.py | 4 ++-- workflow/rocoto/tasks.py | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) rename jobs/rocoto/{fbwinds.sh => fbwind.sh} (83%) diff --git a/jobs/rocoto/fbwinds.sh b/jobs/rocoto/fbwind.sh similarity index 83% rename from jobs/rocoto/fbwinds.sh rename to jobs/rocoto/fbwind.sh index 078e71586f..fdf14f5473 100755 --- a/jobs/rocoto/fbwinds.sh +++ b/jobs/rocoto/fbwind.sh @@ -7,11 +7,11 @@ source "${HOMEgfs}/ush/preamble.sh" status=$? if (( status != 0 )); then exit "${status}"; fi -export job="fbwinds" +export job="fbwind" export jobid="${job}.$$" # Execute the JJOB -"${HOMEgfs}/jobs/JGFS_ATMOS_FBWINDS" +"${HOMEgfs}/jobs/JGFS_ATMOS_FBWIND" status=$? exit "${status}" diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index a3caf8685d..29c6b18f43 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -242,7 +242,7 @@ def get_task_names(self): gfs_tasks += ['gempakpgrb2spec'] if self.do_awips: - gfs_tasks += ['awips_20km_1p0deg', 'awips_g2', 'fbwinds'] + gfs_tasks += ['awips_20km_1p0deg', 'awips_g2', 'fbwind'] if self.do_mos: gfs_tasks += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', diff --git a/workflow/applications/gfs_forecast_only.py b/workflow/applications/gfs_forecast_only.py index ba1d1af0e1..564fd382b9 100644 --- a/workflow/applications/gfs_forecast_only.py +++ b/workflow/applications/gfs_forecast_only.py @@ -121,7 +121,7 @@ def get_task_names(self): tasks += ['gempak', 'gempakmeta', 'gempakncdcupapgif', 'gempakpgrb2spec'] if self.do_awips: - tasks += ['awips_20km_1p0deg', 'awips_g2', 'fbwinds'] + tasks += ['awips_20km_1p0deg', 'awips_g2', 'fbwind'] if self.do_ocean or self.do_ice: tasks += ['ocnpost'] diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index d4ecf8e4d5..2e58e63184 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -812,7 +812,7 @@ def postsnd(self): return task - def fbwinds(self): + def fbwind(self): atmos_prod_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_GRIB_GRID_TMPL"], {'RUN': self.cdump, 'GRID': '0p25'}) deps = [] @@ -834,7 +834,7 @@ def fbwinds(self): # prematurely starting with partial files. Unfortunately, the # ability to "group" post would make this more convoluted than # it should be and not worth the complexity. 
- task = create_wf_task('fbwinds', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + task = create_wf_task('fbwind', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) return task diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index 7f5725783e..cfa11059e3 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -25,7 +25,7 @@ class Tasks: 'verfozn', 'verfrad', 'vminmon', 'metp', 'tracker', 'genesis', 'genesis_fsu', - 'postsnd', 'awips_g2', 'awips_20km_1p0deg', 'fbwinds', + 'postsnd', 'awips_g2', 'awips_20km_1p0deg', 'fbwind', 'gempak', 'gempakmeta', 'gempakmetancdc', 'gempakncdcupapgif', 'gempakpgrb2spec', 'npoess_pgrb2_0p5deg' 'waveawipsbulls', 'waveawipsgridded', 'wavegempak', 'waveinit', 'wavepostbndpnt', 'wavepostbndpntbll', 'wavepostpnt', 'wavepostsbs', 'waveprep', From 9505cb4abc6ca2643a8ab254d14ca2000747fa44 Mon Sep 17 00:00:00 2001 From: TerrenceMcGuinness-NOAA Date: Thu, 14 Dec 2023 16:15:31 -0500 Subject: [PATCH 7/7] CI Updates to support Hercules (#2155) If merged, this PR will update the CI scripts so they will run on Hercules for forecast only NOTE: This is a stop gap to issue #2131 until its block on #1588 is removed. Resolves #2154 --- ci/cases/pr/C96C48_hybatmDA.yaml | 3 +++ ci/cases/pr/C96_atm3DVar.yaml | 3 +++ ci/platforms/config.hercules | 8 ++++++++ ci/platforms/config.orion | 2 +- parm/config/gfs/config.ocnpost | 4 ++-- 5 files changed, 17 insertions(+), 3 deletions(-) create mode 100644 ci/platforms/config.hercules diff --git a/ci/cases/pr/C96C48_hybatmDA.yaml b/ci/cases/pr/C96C48_hybatmDA.yaml index c3aa6e8892..1f3e973ae7 100644 --- a/ci/cases/pr/C96C48_hybatmDA.yaml +++ b/ci/cases/pr/C96C48_hybatmDA.yaml @@ -16,3 +16,6 @@ arguments: gfs_cyc: 1 start: cold yaml: {{ HOMEgfs }}/ci/platforms/gfs_defaults_ci.yaml + +skip_ci_on_hosts: + - hercules diff --git a/ci/cases/pr/C96_atm3DVar.yaml b/ci/cases/pr/C96_atm3DVar.yaml index 5215cb0d90..360e81e9d7 100644 --- a/ci/cases/pr/C96_atm3DVar.yaml +++ b/ci/cases/pr/C96_atm3DVar.yaml @@ -15,3 +15,6 @@ arguments: gfs_cyc: 1 start: cold yaml: {{ HOMEgfs }}/ci/platforms/gfs_defaults_ci.yaml + +skip_ci_on_hosts: + - hercules diff --git a/ci/platforms/config.hercules b/ci/platforms/config.hercules new file mode 100644 index 0000000000..e5a638a827 --- /dev/null +++ b/ci/platforms/config.hercules @@ -0,0 +1,8 @@ +#!/usr/bin/bash + +export GFS_CI_ROOT=/work2/noaa/stmp/GFS_CI_ROOT/HERCULES +export ICSDIR_ROOT=/work/noaa/global/glopara/data/ICSDIR +export STMP="/work2/noaa/stmp/${USER}" +export SLURM_ACCOUNT=nems +export max_concurrent_cases=5 +export max_concurrent_pr=4 diff --git a/ci/platforms/config.orion b/ci/platforms/config.orion index 3e87ef97a1..3ddd05c034 100644 --- a/ci/platforms/config.orion +++ b/ci/platforms/config.orion @@ -1,6 +1,6 @@ #!/usr/bin/bash -export GFS_CI_ROOT=/work2/noaa/stmp/GFS_CI_ROOT +export GFS_CI_ROOT=/work2/noaa/stmp/GFS_CI_ROOT/ORION export ICSDIR_ROOT=/work/noaa/global/glopara/data/ICSDIR export STMP="/work2/noaa/stmp/${USER}" export SLURM_ACCOUNT=nems diff --git a/parm/config/gfs/config.ocnpost b/parm/config/gfs/config.ocnpost index 2505431401..851c476e6c 100644 --- a/parm/config/gfs/config.ocnpost +++ b/parm/config/gfs/config.ocnpost @@ -17,8 +17,8 @@ case "${OCNRES}" in *) export MAKE_OCN_GRIB="NO";; esac -if [[ "${machine}" = "WCOSS2" ]]; then - #Currently the conversion to netcdf uses NCL which is not on WCOSS2 +if [[ "${machine}" = "WCOSS2" ]] || [[ "${machine}" = "HERCULES" ]]; then + #Currently the conversion to 
netcdf uses NCL which is not on WCOSS2 or HERCULES #This should be removed when this is updated export MAKE_OCN_GRIB="NO" fi
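
For reviewers who want to exercise the new MOS jobs, the only gate added by this series is the DO_MOS switch in config.base.emc.dyn; the sketch below shows how an existing experiment could flip it. It is a sketch only, not part of the patch: it assumes a WCOSS2 experiment directory in ${EXPDIR} whose copy of the base config is named config.base, that the operational gfs_mos package referenced by config.mos is installed, and that the Rocoto XML is regenerated afterwards so the new mos_* tasks are picked up.

#!/usr/bin/env bash
# Sketch: enable the MOS suite for an existing WCOSS2 experiment.
# ${EXPDIR} and the config.base filename are assumptions, not defined by this patch.
set -eu

EXPDIR=${EXPDIR:?set EXPDIR to the experiment directory}

# DO_MOS defaults to "NO"; the mos_* tasks are added to the task list only
# when it is "YES" (see the do_mos checks in workflow/applications/*.py).
sed -i 's/^export DO_MOS=.*/export DO_MOS="YES"  # GFS Model Output Statistics - Only supported on WCOSS2/' "${EXPDIR}/config.base"

# Confirm the change took effect.
grep '^export DO_MOS=' "${EXPDIR}/config.base"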