diff --git a/.github/workflows/pw_aws_ci.yaml b/.github/workflows/pw_aws_ci.yaml index 245e219dd44..f398ca4baf9 100644 --- a/.github/workflows/pw_aws_ci.yaml +++ b/.github/workflows/pw_aws_ci.yaml @@ -15,7 +15,7 @@ on: workflow_dispatch: inputs: pr_number: - description: 'Pull Request Number (use 0 for non-PR)' + description: 'PR Number (use 0 for non-PR)' required: true default: '0' os: @@ -31,24 +31,60 @@ env: MACHINE_ID: noaacloud jobs: + + run-start-clusters: + runs-on: ubuntu-latest + env: + PW_PLATFORM_HOST: noaa.parallel.works + steps: + - name: Checkout pw-cluster-automation repository + uses: actions/checkout@v4 + with: + repository: TerrenceMcGuinness-NOAA/pw-cluster-automation + path: pw-cluster-automation + ref: pw_cluster_noaa + + - name: Run startClusters + run: | + mkdir -p ~/.ssh + echo "${{ secrets.ID_RSA_AWS }}" > ~/.ssh/id_rsa + echo "${{ secrets.PW_API_KEY }}" > ~/.ssh/pw_api.key + chmod 700 ~/.ssh + chmod 600 ~/.ssh/id_rsa + chmod 600 ~/.ssh/pw_api.key + if [ "${{ github.event.inputs.os }}" == "rocky" ]; then + clustername="globalworkflowciplatformrocky8" + elif [ "${{ github.event.inputs.os }}" == "centos" ]; then + clustername="awsemctmcgc7i48xlargeciplatform" + fi + python3 pw-cluster-automation/startClusters.py $clustername + fetch-branch: + needs: run-start-clusters runs-on: ubuntu-latest env: GH_TOKEN: ${{ secrets.GITHUBTOKEN }} outputs: branch: ${{ steps.get-branch.outputs.branch }} + repo: ${{ steps.get-branch.outputs.repo }} steps: - - name: Fetch branch name for PR + - name: Fetch branch name and repo for PR id: get-branch run: | pr_number=${{ github.event.inputs.pr_number }} - repo=${{ github.repository }} if [ "$pr_number" -eq "0" ]; then branch=${{ github.event.inputs.ref }} + repo=${{ github.repository }} else branch=$(gh pr view $pr_number --repo $repo --json headRefName --jq '.headRefName') + repo_owner=$(gh pr view $pr_number --repo $repo --json headRepositoryOwner --jq '.headRepositoryOwner.login') + repo_name=$(gh pr view $pr_number --repo $repo --json headRepository --jq '.headRepository.name') + repo="$repo_owner/$repo_name" fi - echo "::set-output name=branch::$branch" + { + echo "branch=$branch" + echo "repo=$repo" + } >> $GITHUB_OUTPUT checkout: needs: fetch-branch @@ -64,6 +100,7 @@ jobs: with: path: ${{ github.run_id }}/HOMEgfs submodules: 'recursive' + repository: ${{ needs.fetch-branch.outputs.repo }} ref: ${{ needs.fetch-branch.outputs.branch }} build-link: diff --git a/.gitignore b/.gitignore index 8fc6d0b20bc..4ec62993d3b 100644 --- a/.gitignore +++ b/.gitignore @@ -171,11 +171,6 @@ ush/bufr2ioda_insitu* versions/build.ver versions/run.ver -# wxflow checkout and symlinks -ush/python/wxflow -workflow/wxflow -ci/scripts/wxflow - # jcb checkout and symlinks ush/python/jcb workflow/jcb diff --git a/ci/cases/pr/C48mx500_hybAOWCDA.yaml b/ci/cases/pr/C48mx500_hybAOWCDA.yaml new file mode 100644 index 00000000000..036aa8ca603 --- /dev/null +++ b/ci/cases/pr/C48mx500_hybAOWCDA.yaml @@ -0,0 +1,26 @@ +experiment: + system: gfs + mode: cycled + +arguments: + pslot: {{ 'pslot' | getenv }} + app: S2S + resdetatmos: 48 + resdetocean: 5.0 + resensatmos: 48 + comroot: {{ 'RUNTESTS' | getenv }}/COMROOT + expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR + icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C48mx500/20240610 + idate: 2021032412 + edate: 2021032418 + nens: 3 + interval: 0 + start: warm + yaml: {{ HOMEgfs }}/ci/cases/yamls/soca_gfs_defaults_ci.yaml + +skip_ci_on_hosts: + - wcoss2 + - orion + - hercules + - hera + - gaea diff --git a/ci/scripts/wxflow 
b/ci/scripts/wxflow new file mode 120000 index 00000000000..9dbee42bc86 --- /dev/null +++ b/ci/scripts/wxflow @@ -0,0 +1 @@ +../../sorc/wxflow/src/wxflow \ No newline at end of file diff --git a/env/HERA.env b/env/HERA.env index 09743967b55..259461b1ace 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -9,7 +9,7 @@ fi step=$1 -export launcher="srun -l --export=ALL" +export launcher="srun -l --export=ALL --hint=nomultithread" export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out" #export POSTAMBLE_CMD='report-mem' @@ -50,7 +50,7 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then export POE="NO" export BACK="NO" export sys_tp="HERA" - export launcher_PREP="srun" + export launcher_PREP="srun --hint=nomultithread" elif [[ "${step}" = "prepsnowobs" ]]; then @@ -153,10 +153,10 @@ elif [[ "${step}" = "ocnanalecen" ]]; then export NTHREADS_OCNANALECEN=${NTHREADSmax} export APRUN_OCNANALECEN="${APRUN_default} --cpus-per-task=${NTHREADS_OCNANALECEN}" -elif [[ "${step}" = "marineanalletkf" ]]; then +elif [[ "${step}" = "marineanlletkf" ]]; then - export NTHREADS_MARINEANALLETKF=${NTHREADSmax} - export APRUN_MARINEANALLETKF="${APRUN_default} --cpus-per-task=${NTHREADS_MARINEANALLETKF}" + export NTHREADS_MARINEANLLETKF=${NTHREADSmax} + export APRUN_MARINEANLLETKF=${APRUN_default} elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then diff --git a/env/HERCULES.env b/env/HERCULES.env index 9ec112c6992..bed1d112811 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -9,7 +9,7 @@ fi step=$1 -export launcher="srun -l --export=ALL" +export launcher="srun -l --export=ALL --hint=nomultithread" export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out" # Configure MPI environment @@ -48,7 +48,7 @@ case ${step} in export POE="NO" export BACK=${BACK:-"YES"} export sys_tp="HERCULES" - export launcher_PREP="srun" + export launcher_PREP="srun --hint=nomultithread" ;; "prepsnowobs") diff --git a/env/ORION.env b/env/ORION.env index 3b8053d060f..06ae2c1a63a 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -9,7 +9,7 @@ fi step=$1 -export launcher="srun -l --export=ALL" +export launcher="srun -l --export=ALL --hint=nomultithread" export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out" # Configure MPI environment @@ -45,7 +45,7 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then export POE="NO" export BACK=${BACK:-"YES"} export sys_tp="ORION" - export launcher_PREP="srun" + export launcher_PREP="srun --hint=nomultithread" elif [[ "${step}" = "prepsnowobs" ]]; then @@ -149,10 +149,10 @@ elif [[ "${step}" = "ocnanalecen" ]]; then export NTHREADS_OCNANALECEN=${NTHREADSmax} export APRUN_OCNANALECEN="${APRUN_default} --cpus-per-task=${NTHREADS_OCNANALECEN}" -elif [[ "${step}" = "marineanalletkf" ]]; then +elif [[ "${step}" = "marineanlletkf" ]]; then - export NTHREADS_MARINEANALLETKF=${NTHREADSmax} - export APRUN_MARINEANALLETKF="${APRUN_default} --cpus-per-task=${NTHREADS_MARINEANALLETKF}" + export NTHREADS_MARINEANLLETKF=${NTHREADSmax} + export APRUN_MARINEANLLETKF="${APRUN_default}" elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then diff --git a/env/WCOSS2.env b/env/WCOSS2.env index cea24fb26bd..c67c16f929d 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -126,10 +126,10 @@ elif [[ "${step}" = "ocnanalecen" ]]; then export NTHREADS_OCNANALECEN=${NTHREADSmax} export APRUN_OCNANALECEN="${APRUN_default} --cpus-per-task=${NTHREADS_OCNANALECEN}" -elif [[ "${step}" = "marineanalletkf" ]]; then +elif [[ "${step}" = "marineanlletkf" ]]; then - export 
NTHREADS_MARINEANALLETKF=${NTHREADSmax} - export APRUN_MARINEANALLETKF="${APRUN_default} --cpus-per-task=${NTHREADS_MARINEANALLETKF}" + export NTHREADS_MARINEANLLETKF=${NTHREADSmax} + export APRUN_MARINEANLLETKF="${APRUN_default}" elif [[ "${step}" = "atmanlfv3inc" ]]; then diff --git a/jobs/JGDAS_ATMOS_ANALYSIS_DIAG b/jobs/JGDAS_ATMOS_ANALYSIS_DIAG index a1e0c9f1d57..c47bd4a47b9 100755 --- a/jobs/JGDAS_ATMOS_ANALYSIS_DIAG +++ b/jobs/JGDAS_ATMOS_ANALYSIS_DIAG @@ -27,8 +27,9 @@ export OPREFIX="${RUN/enkf}.t${cyc}z." export GPREFIX="${GDUMP}.t${gcyc}z." export APREFIX="${RUN}.t${cyc}z." -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS -mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}" +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COMOUT_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL +mkdir -m 775 -p "${COMOUT_ATMOS_ANALYSIS}" ############################################################### # Run relevant script diff --git a/jobs/JGDAS_ENKF_DIAG b/jobs/JGDAS_ENKF_DIAG index cc8c933cc81..3daa8bfb733 100755 --- a/jobs/JGDAS_ENKF_DIAG +++ b/jobs/JGDAS_ENKF_DIAG @@ -30,56 +30,58 @@ export APREFIX="${RUN}.t${cyc}z." export GPREFIX="${GDUMP_ENS}.t${gcyc}z." GPREFIX_DET="${GDUMP}.t${gcyc}z." -RUN=${RUN/enkf} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS -MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS +RUN=${RUN/enkf} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COMIN_OBS:COM_OBS_TMPL +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COMOUT_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ - COM_OBS_PREV:COM_OBS_TMPL \ - COM_ATMOS_ANALYSIS_DET_PREV:COM_ATMOS_ANALYSIS_TMPL + COMIN_OBS_PREV:COM_OBS_TMPL \ + COMIN_ATMOS_ANALYSIS_DET_PREV:COM_ATMOS_ANALYSIS_TMPL MEMDIR="ensstat" RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ - COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL + COMIN_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL -export ATMGES_ENSMEAN="${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf006.ensmean.nc" +export ATMGES_ENSMEAN="${COMIN_ATMOS_HISTORY_PREV}/${GPREFIX}atmf006.ensmean.nc" if [ ! -f ${ATMGES_ENSMEAN} ]; then echo "FATAL ERROR: FILE MISSING: ATMGES_ENSMEAN = ${ATMGES_ENSMEAN}" exit 1 fi # Link observational data -export PREPQC="${COM_OBS}/${OPREFIX}prepbufr" +export PREPQC="${COMIN_OBS}/${OPREFIX}prepbufr" if [[ ! 
-f ${PREPQC} ]]; then echo "WARNING: Global PREPBUFR FILE ${PREPQC} MISSING" fi -export TCVITL="${COM_OBS}/${OPREFIX}syndata.tcvitals.tm00" +export TCVITL="${COMIN_OBS}/${OPREFIX}syndata.tcvitals.tm00" if [[ ${DONST} = "YES" ]]; then - export NSSTBF="${COM_OBS}/${OPREFIX}nsstbufr" + export NSSTBF="${COMIN_OBS}/${OPREFIX}nsstbufr" fi -export PREPQCPF="${COM_OBS}/${OPREFIX}prepbufr.acft_profiles" +export PREPQCPF="${COMIN_OBS}/${OPREFIX}prepbufr.acft_profiles" # Guess Bias correction coefficients related to control -export GBIAS=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias -export GBIASPC=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias_pc -export GBIASAIR=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias_air -export GRADSTAT=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}radstat +export GBIAS=${COMIN_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias +export GBIASPC=${COMIN_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias_pc +export GBIASAIR=${COMIN_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias_air +export GRADSTAT=${COMIN_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}radstat # Bias correction coefficients related to ensemble mean -export ABIAS="${COM_ATMOS_ANALYSIS}/${APREFIX}abias.ensmean" -export ABIASPC="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_pc.ensmean" -export ABIASAIR="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_air.ensmean" -export ABIASe="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_int.ensmean" +export ABIAS="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}abias.ensmean" +export ABIASPC="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}abias_pc.ensmean" +export ABIASAIR="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}abias_air.ensmean" +export ABIASe="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}abias_int.ensmean" # Diagnostics related to ensemble mean -export GSISTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}gsistat.ensmean" -export CNVSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}cnvstat.ensmean" -export OZNSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}oznstat.ensmean" -export RADSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}radstat.ensmean" +export GSISTAT="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}gsistat.ensmean" +export CNVSTAT="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}cnvstat.ensmean" +export OZNSTAT="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}oznstat.ensmean" +export RADSTAT="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}radstat.ensmean" # Select observations based on ensemble mean export RUN_SELECT="YES" export USE_SELECT="NO" -export SELECT_OBS="${COM_ATMOS_ANALYSIS}/${APREFIX}obsinput.ensmean" +export SELECT_OBS="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}obsinput.ensmean" export DIAG_SUFFIX="_ensmean" export DIAG_COMPRESS="NO" diff --git a/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF b/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF index 38dc3049f90..2a88f89eab7 100755 --- a/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF +++ b/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF @@ -1,6 +1,13 @@ #!/bin/bash source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanalletkf" -c "base ocnanal marineanalletkf" + +export DATAjob="${DATAROOT}/${RUN}marineanalysis.${PDY:-}${cyc}" +export DATA="${DATAjob}/${jobid}" +# Create the directory to hold ensemble perturbations +export DATAens="${DATAjob}/ensdata" +if [[ ! 
-d "${DATAens}" ]]; then mkdir -p "${DATAens}"; fi + +source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanlletkf" -c "base marineanl marineanlletkf" ############################################## # Set variables used in the script @@ -11,12 +18,18 @@ GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") gPDY=${GDATE:0:8} gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" +export OPREFIX="${RUN}.t${cyc}z." -YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ - COMIN_OCEAN_HISTORY_PREV:COM_OCEAN_HISTORY_TMPL \ - COMIN_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL +RUN="${GDUMP}" YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ + COMIN_OCEAN_HISTORY_PREV:COM_OCEAN_HISTORY_TMPL \ + COMIN_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMIN_OBS:COM_OBS_TMPL +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COMIN_OBS:COM_OBS_TMPL \ + COMOUT_OCEAN_LETKF:COM_OCEAN_LETKF_TMPL \ + COMOUT_ICE_LETKF:COM_ICE_LETKF_TMPL ############################################## # Begin JOB SPECIFIC work @@ -25,7 +38,7 @@ YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMIN_OBS:COM_OBS_TMPL ############################################################### # Run relevant script -EXSCRIPT=${GDASOCNLETKFPY:-${HOMEgfs}/scripts/exgdas_global_marine_analysis_letkf.py} +EXSCRIPT=${GDASOCNLETKFPY:-${HOMEgfs}/scripts/exglobal_marine_analysis_letkf.py} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/rocoto/marineanalletkf.sh b/jobs/rocoto/marineanlletkf.sh similarity index 95% rename from jobs/rocoto/marineanalletkf.sh rename to jobs/rocoto/marineanlletkf.sh index f2bfb9f70c3..d4333461f3c 100755 --- a/jobs/rocoto/marineanalletkf.sh +++ b/jobs/rocoto/marineanlletkf.sh @@ -8,7 +8,7 @@ source "${HOMEgfs}/ush/preamble.sh" status=$? [[ ${status} -ne 0 ]] && exit "${status}" -export job="marineanalletkf" +export job="marineanlletkf" export jobid="${job}.$$" ############################################################### diff --git a/parm/config/gefs/config.efcs b/parm/config/gefs/config.efcs index 9bd55afa54e..00861214502 100644 --- a/parm/config/gefs/config.efcs +++ b/parm/config/gefs/config.efcs @@ -26,18 +26,6 @@ source "${EXPDIR}/config.ufs" ${string} # Get task specific resources source "${EXPDIR}/config.resources" efcs -# nggps_diag_nml -export FHOUT=${FHOUT_ENKF:-3} -if [[ "${RUN}" == "enkfgfs" ]]; then - export FHOUT=${FHOUT_ENKF_GFS:-${FHOUT}} -fi - -# model_configure -export FHMAX=${FHMAX_ENKF:-9} -if [[ "${RUN}" == "enkfgfs" ]]; then - export FHMAX=${FHMAX_ENKF_GFS:-${FHMAX}} -fi - # Stochastic physics parameters (only for ensemble forecasts) export DO_SKEB="YES" export SKEB="0.8,-999,-999,-999,-999" @@ -74,6 +62,6 @@ if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then else export ODA_INCUPD="False" fi -export restart_interval="${restart_interval_enkfgfs:-12}" +export restart_interval="${restart_interval_gfs:-12}" echo "END: config.efcs" diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst index efdedb24f41..b2a9c10afe3 100644 --- a/parm/config/gefs/config.fcst +++ b/parm/config/gefs/config.fcst @@ -35,7 +35,8 @@ IFS=', ' read -ra segments <<< "${FCST_SEGMENTS}" # Determine MIN and MAX based on the forecast segment export FHMIN=${segments[${FCST_SEGMENT}]} export FHMAX=${segments[${FCST_SEGMENT}+1]} -# Cap other FHMAX variables at FHMAX for the segment +# Cap other FH variables at FHMAX for the segment +export FHMIN_WAV=$(( FHMIN > FHMIN_WAV ? FHMIN : FHMIN_WAV )) export FHMAX_HF=$(( FHMAX_HF_GFS > FHMAX ? 
FHMAX : FHMAX_HF_GFS )) export FHMAX_WAV=$(( FHMAX_WAV > FHMAX ? FHMAX : FHMAX_WAV )) # shellcheck disable=SC2153 diff --git a/parm/config/gfs/config.com b/parm/config/gfs/config.com index 61d592561d7..d949edb33a6 100644 --- a/parm/config/gfs/config.com +++ b/parm/config/gfs/config.com @@ -82,12 +82,14 @@ declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model/ocean/history' declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model/ocean/restart' declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model/ocean/input' declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_LETKF_TMPL=${COM_BASE}'/analysis/ocean/letkf' declare -rx COM_OCEAN_BMATRIX_TMPL=${COM_BASE}'/bmatrix/ocean' declare -rx COM_OCEAN_NETCDF_TMPL=${COM_BASE}'/products/ocean/netcdf' declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2' declare -rx COM_OCEAN_GRIB_GRID_TMPL=${COM_OCEAN_GRIB_TMPL}'/${GRID}' declare -rx COM_ICE_ANALYSIS_TMPL=${COM_BASE}'/analysis/ice' +declare -rx COM_ICE_LETKF_TMPL=${COM_BASE}'/analysis/ice/letkf' declare -rx COM_ICE_BMATRIX_TMPL=${COM_BASE}'/bmatrix/ice' declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model/ice/input' declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model/ice/history' diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst index da336ff73bd..571e6cafb5f 100644 --- a/parm/config/gfs/config.fcst +++ b/parm/config/gfs/config.fcst @@ -38,7 +38,8 @@ case ${RUN} in # Determine MIN and MAX based on the forecast segment export FHMIN=${segments[${FCST_SEGMENT}]} export FHMAX=${segments[${FCST_SEGMENT}+1]} - # Cap other FHMAX variables at FHMAX for the segment + # Cap other FH variables at FHMAX for the segment + export FHMIN_WAV=$(( FHMIN > FHMIN_WAV ? FHMIN : FHMIN_WAV )) export FHMAX_HF=$(( FHMAX_HF_GFS > FHMAX ? FHMAX : FHMAX_HF_GFS )) export FHMAX_WAV=$(( FHMAX_WAV > FHMAX ? FHMAX : FHMAX_WAV )) # shellcheck disable=SC2153 diff --git a/parm/config/gfs/config.marineanalletkf b/parm/config/gfs/config.marineanalletkf deleted file mode 100644 index fde3433a13d..00000000000 --- a/parm/config/gfs/config.marineanalletkf +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -########## config.marineanalletkf ########## -# Ocn Analysis specific - -echo "BEGIN: config.marineanalletkf" - -# Get task specific resources -. "${EXPDIR}/config.resources" marineanalletkf - -export MARINE_LETKF_EXEC="${JEDI_BIN}/gdas.x" -export MARINE_LETKF_YAML_TMPL="${PARMgfs}/gdas/soca/letkf/letkf.yaml.j2" -export MARINE_LETKF_STAGE_YAML_TMPL="${PARMgfs}/gdas/soca/letkf/letkf_stage.yaml.j2" - -export GRIDGEN_EXEC="${JEDI_BIN}/gdas_soca_gridgen.x" -export GRIDGEN_YAML="${PARMgfs}/gdas/soca/gridgen/gridgen.yaml" - -echo "END: config.marineanalletkf" diff --git a/parm/config/gfs/config.marineanlletkf b/parm/config/gfs/config.marineanlletkf new file mode 100644 index 00000000000..8b84af4eaa5 --- /dev/null +++ b/parm/config/gfs/config.marineanlletkf @@ -0,0 +1,20 @@ +#!/bin/bash + +########## config.marineanlletkf ########## +# Ocn Analysis specific + +echo "BEGIN: config.marineanlletkf" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" marineanlletkf + +export MARINE_LETKF_EXEC="${EXECgfs}/gdas.x" +export MARINE_LETKF_YAML_TMPL="${PARMgfs}/gdas/soca/letkf/letkf.yaml.j2" +export MARINE_LETKF_STAGE_YAML_TMPL="${PARMgfs}/gdas/soca/letkf/letkf_stage.yaml.j2" +export MARINE_LETKF_SAVE_YAML_TMPL="${PARMgfs}/gdas/soca/letkf/letkf_save.yaml.j2" + +export GRIDGEN_EXEC="${EXECgfs}/gdas_soca_gridgen.x" +export GRIDGEN_YAML="${PARMgfs}/gdas/soca/gridgen/gridgen.yaml" +export DIST_HALO_SIZE=500000 + +echo "END: config.marineanlletkf" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 79dbb487db3..14e6f0d7fbe 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -601,7 +601,7 @@ case ${step} in tasks_per_node=$(( max_tasks_per_node / threads_per_task )) ;; - "marineanalletkf") + "marineanlletkf") ntasks=16 case ${OCNRES} in "025") diff --git a/parm/post/oceanice_products_gefs.yaml b/parm/post/oceanice_products_gefs.yaml index fea88df2bb9..f961fab83f0 100644 --- a/parm/post/oceanice_products_gefs.yaml +++ b/parm/post/oceanice_products_gefs.yaml @@ -33,7 +33,7 @@ ocean: {% elif model_grid == 'mx500' %} ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 226, 241, 267] {% endif %} - subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible', 'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy', 'temp', 'so', 'uo', 'vo'] + subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible', 'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy', 'temp', 'tob', 'so', 'uo', 'vo'] data_in: copy: - ["{{ COM_OCEAN_HISTORY }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ocean.nc"] diff --git a/parm/product/gefs.0p25.fFFF.paramlist.a.txt b/parm/product/gefs.0p25.fFFF.paramlist.a.txt index 303752ac17f..4bb87c32ff1 100644 --- a/parm/product/gefs.0p25.fFFF.paramlist.a.txt +++ b/parm/product/gefs.0p25.fFFF.paramlist.a.txt @@ -19,6 +19,7 @@ :CIN:180-0 mb above ground: :CIN:surface: :HLCY:3000-0 m above ground: +:PEVPR:surface: :TCDC:entire atmosphere (considered as a single layer): :WEASD:surface: :SNOD:surface: diff --git a/parm/product/gefs.0p25.fFFF.paramlist.b.txt b/parm/product/gefs.0p25.fFFF.paramlist.b.txt index ccad9da4d04..5c406ce34d8 100644 --- a/parm/product/gefs.0p25.fFFF.paramlist.b.txt +++ b/parm/product/gefs.0p25.fFFF.paramlist.b.txt @@ -151,7 +151,6 @@ :O3MR:5 mb: :O3MR:70 mb: :O3MR:7 mb: -:PEVPR:surface: :PLI:30-0 mb above ground: :PLPL:255-0 mb above ground: :POT:0.995 sigma level: diff --git a/parm/stage/ocean_ens_perturbations.yaml.j2 b/parm/stage/ocean_ens_perturbations.yaml.j2 index fede3816a70..586b9f66cb2 100644 --- a/parm/stage/ocean_ens_perturbations.yaml.j2 +++ b/parm/stage/ocean_ens_perturbations.yaml.j2 @@ -9,5 +9,5 @@ ocean_ens_perturbation: {% for mem in range(first_mem + 1, last_mem + 1) %} {% set imem = mem - first_mem %} {% set COMOUT_OCEAN_ANALYSIS_MEM = COMOUT_OCEAN_ANALYSIS_MEM_list[imem] %} - - ["{{ ICSDIR }}/{{ COMOUT_OCEAN_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.mom6_perturbation.nc", "{{ COMOUT_OCEAN_ANALYSIS_MEM }}/mom6_increment.nc"] + - ["{{ ICSDIR }}/{{ COMOUT_OCEAN_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.mom6_perturbation.nc", "{{ COMOUT_OCEAN_ANALYSIS_MEM }}/{{ RUN }}.t{{ current_cycle_HH }}z.ocninc.nc"] {% endfor %} # mem loop diff --git a/parm/stage/wave.yaml.j2 b/parm/stage/wave.yaml.j2 index 
d610430bc79..2788a24343c 100644 --- a/parm/stage/wave.yaml.j2 +++ b/parm/stage/wave.yaml.j2 @@ -9,5 +9,9 @@ wave: {% for mem in range(first_mem, last_mem + 1) %} {% set imem = mem - first_mem %} {% set COMOUT_WAVE_RESTART_PREV_MEM = COMOUT_WAVE_RESTART_PREV_MEM_list[imem] %} - - ["{{ ICSDIR }}/{{ COMOUT_WAVE_RESTART_PREV_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.restart.{{ waveGRD }}", "{{ COMOUT_WAVE_RESTART_PREV_MEM }}"] + {% if path_exists(ICSDIR ~ "/" ~ COMOUT_WAVE_RESTART_PREV_MEM | relpath(ROTDIR) ~ "/" ~ m_prefix ~ ".restart." ~ waveGRD) %} + - ["{{ ICSDIR }}/{{ COMOUT_WAVE_RESTART_PREV_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.restart.{{ waveGRD }}", "{{ COMOUT_WAVE_RESTART_PREV_MEM }}/{{ m_prefix }}.restart.ww3"] + {% else %} + - ["{{ ICSDIR }}/{{ COMOUT_WAVE_RESTART_PREV_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.restart.ww3", "{{ COMOUT_WAVE_RESTART_PREV_MEM }}"] + {% endif %} {% endfor %} # mem loop diff --git a/scripts/exglobal_diag.sh b/scripts/exglobal_diag.sh index ed9bef05dfa..46a6e9863c2 100755 --- a/scripts/exglobal_diag.sh +++ b/scripts/exglobal_diag.sh @@ -49,10 +49,10 @@ SENDDBN=${SENDDBN:-"NO"} # Analysis files export APREFIX=${APREFIX:-""} -RADSTAT=${RADSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}radstat} -PCPSTAT=${PCPSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}pcpstat} -CNVSTAT=${CNVSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}cnvstat} -OZNSTAT=${OZNSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}oznstat} +RADSTAT=${RADSTAT:-${COMOUT_ATMOS_ANALYSIS}/${APREFIX}radstat} +PCPSTAT=${PCPSTAT:-${COMOUT_ATMOS_ANALYSIS}/${APREFIX}pcpstat} +CNVSTAT=${CNVSTAT:-${COMOUT_ATMOS_ANALYSIS}/${APREFIX}cnvstat} +OZNSTAT=${OZNSTAT:-${COMOUT_ATMOS_ANALYSIS}/${APREFIX}oznstat} # Remove stat file if file already exists [[ -s $RADSTAT ]] && rm -f $RADSTAT @@ -74,7 +74,7 @@ nm="" if [ $CFP_MP = "YES" ]; then nm=0 fi -DIAG_DIR=${DIAG_DIR:-${COM_ATMOS_ANALYSIS}/gsidiags} +DIAG_DIR=${DIAG_DIR:-${COMOUT_ATMOS_ANALYSIS}/gsidiags} REMOVE_DIAG_DIR=${REMOVE_DIAG_DIR:-"NO"} # Set script / GSI control parameters diff --git a/scripts/exgdas_global_marine_analysis_letkf.py b/scripts/exglobal_marine_analysis_letkf.py similarity index 100% rename from scripts/exgdas_global_marine_analysis_letkf.py rename to scripts/exglobal_marine_analysis_letkf.py diff --git a/sorc/gfs_utils.fd b/sorc/gfs_utils.fd index a00cc0949e2..856a42076a6 160000 --- a/sorc/gfs_utils.fd +++ b/sorc/gfs_utils.fd @@ -1 +1 @@ -Subproject commit a00cc0949e2f901e73b58d54834517743916c69a +Subproject commit 856a42076a65256aaae9b29f4891532cb4a3fbca diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 870ddc5eba9..3d81f7b7d4b 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -86,15 +86,6 @@ esac # Source fix version file source "${HOMEgfs}/versions/fix.ver" -# Link python pacakges in ush/python -# TODO: This will be unnecessary when these are part of the virtualenv -packages=("wxflow") -for package in "${packages[@]}"; do - cd "${HOMEgfs}/ush/python" || exit 1 - [[ -s "${package}" ]] && rm -f "${package}" - ${LINK} "${HOMEgfs}/sorc/${package}/src/${package}" . -done - # Link GDASapp python packages in ush/python packages=("jcb") for package in "${packages[@]}"; do @@ -103,15 +94,6 @@ for package in "${packages[@]}"; do ${LINK} "${HOMEgfs}/sorc/gdas.cd/sorc/${package}/src/${package}" . done -# Link wxflow in workflow and ci/scripts -# TODO: This will be unnecessary when wxflow is part of the virtualenv -cd "${HOMEgfs}/workflow" || exit 1 -[[ -s "wxflow" ]] && rm -f wxflow -${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" . 
-cd "${HOMEgfs}/ci/scripts" || exit 1 -[[ -s "wxflow" ]] && rm -f wxflow -${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" . - # Link fix directories if [[ -n "${FIX_DIR}" ]]; then if [[ ! -d "${HOMEgfs}/fix" ]]; then mkdir "${HOMEgfs}/fix" || exit 1; fi diff --git a/ush/forecast_det.sh b/ush/forecast_det.sh index 603447f6121..72064ac7f5e 100755 --- a/ush/forecast_det.sh +++ b/ush/forecast_det.sh @@ -93,12 +93,9 @@ UFS_det(){ # Check for WW3 restart availability if [[ "${cplwav}" == ".true." ]]; then - local ww3_grid - for ww3_grid in ${waveGRD} ; do - if [[ ! -f "${DATArestart}/WW3_RESTART/${rdate:0:8}.${rdate:8:2}0000.restart.${ww3_grid}" ]]; then - ww3_rst_ok="NO" - fi - done + if [[ ! -f "${DATArestart}/WW3_RESTART/${rdate:0:8}.${rdate:8:2}0000.restart.ww3" ]]; then + ww3_rst_ok="NO" + fi fi # Collective check diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 58755d41d91..25b2e28d754 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -326,7 +326,7 @@ FV3_out() { WW3_postdet() { echo "SUB ${FUNCNAME[0]}: Linking input data for WW3" - local ww3_grid + local ww3_grid first_ww3_restart_out ww3_restart_file # Copy initial condition files: if [[ "${warm_start}" == ".true." ]]; then local restart_date restart_dir @@ -338,29 +338,35 @@ WW3_postdet() { restart_dir="${COMIN_WAVE_RESTART_PREV}" fi echo "Copying WW3 restarts for 'RUN=${RUN}' at '${restart_date}' from '${restart_dir}'" - local ww3_restart_file - for ww3_grid in ${waveGRD} ; do - ww3_restart_file="${restart_dir}/${restart_date:0:8}.${restart_date:8:2}0000.restart.${ww3_grid}" - if [[ ! -f "${ww3_restart_file}" ]]; then - echo "WARNING: WW3 restart file '${ww3_restart_file}' not found for warm_start='${warm_start}', will start from rest!" - if [[ "${RERUN}" == "YES" ]]; then - # In the case of a RERUN, the WW3 restart file is required - echo "FATAL ERROR: WW3 restart file '${ww3_restart_file}' not found for RERUN='${RERUN}', ABORT!" - exit 1 - fi - fi - if [[ "${waveMULTIGRID}" == ".true." ]]; then - ${NCP} "${ww3_restart_file}" "${DATA}/restart.${ww3_grid}" \ - || ( echo "FATAL ERROR: Unable to copy WW3 IC, ABORT!"; exit 1 ) + ww3_restart_file="${restart_dir}/${restart_date:0:8}.${restart_date:8:2}0000.restart.ww3" + if [[ -f "${ww3_restart_file}" ]]; then + ${NCP} "${ww3_restart_file}" "${DATA}/restart.ww3" \ + || ( echo "FATAL ERROR: Unable to copy WW3 IC, ABORT!"; exit 1 ) + else + if [[ "${RERUN}" == "YES" ]]; then + # In the case of a RERUN, the WW3 restart file is required + echo "FATAL ERROR: WW3 restart file '${ww3_restart_file}' not found for RERUN='${RERUN}', ABORT!" + exit 1 else - ${NCP} "${ww3_restart_file}" "${DATA}/restart.ww3" \ - || ( echo "FATAL ERROR: Unable to copy WW3 IC, ABORT!"; exit 1 ) + echo "WARNING: WW3 restart file '${ww3_restart_file}' not found for warm_start='${warm_start}', will start from rest!" fi - done + fi + + first_ww3_restart_out=$(date --utc -d "${restart_date:0:8} ${restart_date:8:2} + ${restart_interval} hours" +%Y%m%d%H) else # cold start echo "WW3 will start from rest!" + first_ww3_restart_out="${model_start_date_current_cycle}" fi # [[ "${warm_start}" == ".true." 
]] + # Link restart files + local ww3_restart_file + # Use restart_date if it was determined above, otherwise use initialization date + for (( vdate = first_ww3_restart_out; vdate <= forecast_end_cycle; + vdate = $(date --utc -d "${vdate:0:8} ${vdate:8:2} + ${restart_interval} hours" +%Y%m%d%H) )); do + ww3_restart_file="${vdate:0:8}.${vdate:8:2}0000.restart.ww3" + ${NLN} "${DATArestart}/WW3_RESTART/${ww3_restart_file}" "${ww3_restart_file}" + done + # Link output files local wavprfx="${RUN}wave${WAV_MEMBER:-}" if [[ "${waveMULTIGRID}" == ".true." ]]; then @@ -460,12 +466,13 @@ MOM6_postdet() { fi # GEFS perturbations - # TODO if [[ $RUN} == "gefs" ]] block maybe be needed - # to ensure it does not interfere with the GFS when ensemble is updated in the GFS - if (( MEMBER > 0 )) && [[ "${ODA_INCUPD:-False}" == "True" ]]; then - ${NCP} "${COMIN_OCEAN_ANALYSIS}/mom6_increment.nc" "${DATA}/INPUT/mom6_increment.nc" \ - || ( echo "FATAL ERROR: Unable to copy ensemble MOM6 increment, ABORT!"; exit 1 ) - fi + if [[ "${RUN}" == "gefs" ]]; then + # to ensure it does not interfere with the GFS + if (( MEMBER > 0 )) && [[ "${ODA_INCUPD:-False}" == "True" ]]; then + ${NCP} "${COMIN_OCEAN_ANALYSIS}/${RUN}.t${cyc}z.ocninc.nc" "${DATA}/INPUT/mom6_increment.nc" \ + || ( echo "FATAL ERROR: Unable to copy ensemble MOM6 increment, ABORT!"; exit 1 ) + fi + fi # if [[ "${RUN}" == "gefs" ]]; then fi # if [[ "${RERUN}" == "NO" ]]; then # Link output files diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index 5aa9dc9ac7a..d359a86622c 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -556,10 +556,10 @@ WW3_predet(){ echo "SUB ${FUNCNAME[0]}: WW3 before run type determination" if [[ ! -d "${COMOUT_WAVE_HISTORY}" ]]; then mkdir -p "${COMOUT_WAVE_HISTORY}"; fi - if [[ ! -d "${COMOUT_WAVE_RESTART}" ]]; then mkdir -p "${COMOUT_WAVE_RESTART}" ; fi + if [[ ! -d "${COMOUT_WAVE_RESTART}" ]]; then mkdir -p "${COMOUT_WAVE_RESTART}"; fi - if [[ ! -d "${DATArestart}/WAVE_RESTART" ]]; then mkdir -p "${DATArestart}/WAVE_RESTART"; fi - ${NLN} "${DATArestart}/WAVE_RESTART" "${DATA}/restart_wave" + if [[ ! 
-d "${DATArestart}/WW3_RESTART" ]]; then mkdir -p "${DATArestart}/WW3_RESTART"; fi + # Wave restarts are linked in postdet to only create links for files that will be created # Files from wave prep and wave init jobs # Copy mod_def files for wave grids diff --git a/ush/python/pygfs/task/archive.py b/ush/python/pygfs/task/archive.py index d138474e9af..108cd2ed275 100644 --- a/ush/python/pygfs/task/archive.py +++ b/ush/python/pygfs/task/archive.py @@ -88,11 +88,6 @@ def configure(self, arch_dict: Dict[str, Any]) -> (Dict[str, Any], List[Dict[str if not os.path.isdir(arch_dict.ROTDIR): raise FileNotFoundError(f"FATAL ERROR: The ROTDIR ({arch_dict.ROTDIR}) does not exist!") - if arch_dict.RUN in ["gdas", "gfs"]: - - # Copy the cyclone track files and rename the experiments - Archive._rename_cyclone_expt(arch_dict) - # Collect datasets that need to be archived # Each dataset represents one tarball @@ -371,14 +366,14 @@ def _rename_cyclone_expt(arch_dict) -> None: if run == "gfs": in_track_file = (track_dir_in + "/avno.t" + - cycle_HH + "z.cycle.trackatcfunix") + cycle_HH + "z.cyclone.trackatcfunix") in_track_p_file = (track_dir_in + "/avnop.t" + - cycle_HH + "z.cycle.trackatcfunixp") + cycle_HH + "z.cyclone.trackatcfunix") elif run == "gdas": in_track_file = (track_dir_in + "/gdas.t" + - cycle_HH + "z.cycle.trackatcfunix") + cycle_HH + "z.cyclone.trackatcfunix") in_track_p_file = (track_dir_in + "/gdasp.t" + - cycle_HH + "z.cycle.trackatcfunixp") + cycle_HH + "z.cyclone.trackatcfunix") if not os.path.isfile(in_track_file): # Do not attempt to archive the outputs @@ -416,7 +411,7 @@ def replace_string_from_to_file(filename_in, filename_out, search_str, replace_s with open("/tmp/track_file", "w") as new_file: new_file.writelines(out_lines) - shutil.move("tmp/track_file", filename_out) + shutil.move("/tmp/track_file", filename_out) replace_string_from_to_file(in_track_file, out_track_file, "AVNO", pslot4) replace_string_from_to_file(in_track_p_file, out_track_p_file, "AVNO", pslot4) diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index 93329f05ac0..a4a5b4f1447 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -318,7 +318,7 @@ def finalize(self: Task) -> None: FileHandler({'copy': diagb_list}).sync() # Copy the ensemble perturbation diagnostics to the ROTDIR - if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 3: + if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: window_middle_iso = self.task_config.MARINE_WINDOW_MIDDLE.strftime('%Y-%m-%dT%H:%M:%SZ') weight_list = [] src = os.path.join(self.task_config.DATA, f"ocn.ens_weights.incr.{window_middle_iso}.nc") diff --git a/ush/python/pygfs/task/marine_letkf.py b/ush/python/pygfs/task/marine_letkf.py index 36c26d594b3..54d40f8d665 100644 --- a/ush/python/pygfs/task/marine_letkf.py +++ b/ush/python/pygfs/task/marine_letkf.py @@ -1,11 +1,13 @@ #!/usr/bin/env python3 import f90nml +import pygfs.utils.marine_da_utils as mdau from logging import getLogger import os from pygfs.task.analysis import Analysis from typing import Dict from wxflow import (AttrDict, + Executable, FileHandler, logit, parse_j2yaml, @@ -41,6 +43,8 @@ def __init__(self, config: Dict) -> None: 'soca', 'localensembleda', _letkf_yaml_file] + # compute the relative path from self.task_config.DATA to self.task_config.DATAenspert + _enspert_relpath = os.path.relpath(self.task_config.DATAens, self.task_config.DATA) self.task_config.WINDOW_MIDDLE = 
self.task_config.current_cycle self.task_config.WINDOW_BEGIN = self.task_config.current_cycle - _half_assim_freq @@ -49,6 +53,7 @@ def __init__(self, config: Dict) -> None: self.task_config.mom_input_nml_tmpl = os.path.join(self.task_config.DATA, 'mom_input.nml.tmpl') self.task_config.mom_input_nml = os.path.join(self.task_config.DATA, 'mom_input.nml') self.task_config.obs_dir = os.path.join(self.task_config.DATA, 'obs') + self.task_config.ENSPERT_RELPATH = _enspert_relpath @logit(logger) def initialize(self): @@ -64,26 +69,50 @@ def initialize(self): logger.info("initialize") # make directories and stage ensemble background files - ensbkgconf = AttrDict() - keys = ['previous_cycle', 'current_cycle', 'DATA', 'NMEM_ENS', - 'PARMgfs', 'ROTDIR', 'COM_OCEAN_HISTORY_TMPL', 'COM_ICE_HISTORY_TMPL'] - for key in keys: - ensbkgconf[key] = self.task_config[key] - ensbkgconf.RUN = 'enkfgdas' - soca_ens_bkg_stage_list = parse_j2yaml(self.task_config.SOCA_ENS_BKG_STAGE_YAML_TMPL, ensbkgconf) - FileHandler(soca_ens_bkg_stage_list).sync() soca_fix_stage_list = parse_j2yaml(self.task_config.SOCA_FIX_YAML_TMPL, self.task_config) FileHandler(soca_fix_stage_list).sync() - letkf_stage_list = parse_j2yaml(self.task_config.MARINE_LETKF_STAGE_YAML_TMPL, self.task_config) + stageconf = AttrDict() + keys = ['current_cycle', + 'previous_cycle', + 'COM_ICE_LETKF_TMPL', + 'COM_OCEAN_LETKF_TMPL', + 'COM_ICE_HISTORY_TMPL', + 'COM_OCEAN_HISTORY_TMPL', + 'COMIN_OCEAN_HISTORY_PREV', + 'COMIN_ICE_HISTORY_PREV', + 'COMOUT_ICE_LETKF', + 'COMOUT_OCEAN_LETKF', + 'DATA', + 'ENSPERT_RELPATH', + 'GDUMP_ENS', + 'NMEM_ENS', + 'OPREFIX', + 'PARMgfs', + 'ROTDIR', + 'RUN', + 'WINDOW_BEGIN', + 'WINDOW_MIDDLE'] + for key in keys: + stageconf[key] = self.task_config[key] + + # stage ensemble background files + soca_ens_bkg_stage_list = parse_j2yaml(self.task_config.MARINE_ENSDA_STAGE_BKG_YAML_TMPL, stageconf) + FileHandler(soca_ens_bkg_stage_list).sync() + + # stage letkf-specific files + letkf_stage_list = parse_j2yaml(self.task_config.MARINE_LETKF_STAGE_YAML_TMPL, stageconf) FileHandler(letkf_stage_list).sync() - obs_list = parse_j2yaml(self.task_config.OBS_YAML, self.task_config) + obs_list = parse_j2yaml(self.task_config.MARINE_OBS_LIST_YAML, self.task_config) # get the list of observations obs_files = [] for ob in obs_list['observers']: obs_name = ob['obs space']['name'].lower() - obs_filename = f"{self.task_config.RUN}.t{self.task_config.cyc}z.{obs_name}.{to_YMDH(self.task_config.current_cycle)}.nc" + # TODO(AFE) - this should be removed when the obs config yamls are jinjafied + if 'distribution' not in ob['obs space']: + ob['obs space']['distribution'] = {'name': 'Halo', 'halo size': self.task_config['DIST_HALO_SIZE']} + obs_filename = f"{self.task_config.RUN}.t{self.task_config.cyc}z.{obs_name}.{to_YMDH(self.task_config.current_cycle)}.nc4" obs_files.append((obs_filename, ob)) obs_files_to_copy = [] @@ -102,12 +131,7 @@ def initialize(self): FileHandler({'copy': obs_files_to_copy}).sync() # make the letkf.yaml - letkfconf = AttrDict() - keys = ['WINDOW_BEGIN', 'WINDOW_MIDDLE', 'RUN', 'gcyc', 'NMEM_ENS'] - for key in keys: - letkfconf[key] = self.task_config[key] - letkfconf.RUN = 'enkfgdas' - letkf_yaml = parse_j2yaml(self.task_config.MARINE_LETKF_YAML_TMPL, letkfconf) + letkf_yaml = parse_j2yaml(self.task_config.MARINE_LETKF_YAML_TMPL, stageconf) letkf_yaml.observations.observers = obs_to_use letkf_yaml.save(self.task_config.letkf_yaml_file) @@ -133,6 +157,18 @@ def run(self): logger.info("run") + exec_cmd_gridgen = 
Executable(self.task_config.APRUN_MARINEANLLETKF) + exec_cmd_gridgen.add_default_arg(self.task_config.GRIDGEN_EXEC) + exec_cmd_gridgen.add_default_arg(self.task_config.GRIDGEN_YAML) + + mdau.run(exec_cmd_gridgen) + + exec_cmd_letkf = Executable(self.task_config.APRUN_MARINEANLLETKF) + for letkf_exec_arg in self.task_config.letkf_exec_args: + exec_cmd_letkf.add_default_arg(letkf_exec_arg) + + mdau.run(exec_cmd_letkf) + @logit(logger) def finalize(self): """Method finalize for ocean and sea ice LETKF task @@ -145,3 +181,11 @@ def finalize(self): """ logger.info("finalize") + + letkfsaveconf = AttrDict() + keys = ['current_cycle', 'DATA', 'NMEM_ENS', 'WINDOW_BEGIN', 'GDUMP_ENS', + 'PARMgfs', 'ROTDIR', 'COM_OCEAN_LETKF_TMPL', 'COM_ICE_LETKF_TMPL'] + for key in keys: + letkfsaveconf[key] = self.task_config[key] + letkf_save_list = parse_j2yaml(self.task_config.MARINE_LETKF_SAVE_YAML_TMPL, letkfsaveconf) + FileHandler(letkf_save_list).sync() diff --git a/ush/python/wxflow b/ush/python/wxflow new file mode 120000 index 00000000000..9dbee42bc86 --- /dev/null +++ b/ush/python/wxflow @@ -0,0 +1 @@ +../../sorc/wxflow/src/wxflow \ No newline at end of file diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index da78166ede5..f92bf95fbaf 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -46,7 +46,7 @@ def _get_app_configs(self): if self.do_jediocnvar: configs += ['prepoceanobs', 'marineanlinit', 'marinebmat', 'marineanlvar'] if self.do_hybvar: - configs += ['ocnanalecen'] + configs += ['marineanlletkf', 'ocnanalecen'] configs += ['marineanlchkpt', 'marineanlfinal'] if self.do_vrfy_oceanda: configs += ['ocnanalvrfy'] @@ -148,7 +148,7 @@ def get_task_names(self): if self.do_jediocnvar: gdas_gfs_common_tasks_before_fcst += ['prepoceanobs', 'marineanlinit', 'marinebmat', 'marineanlvar'] if self.do_hybvar: - gdas_gfs_common_tasks_before_fcst += ['ocnanalecen'] + gdas_gfs_common_tasks_before_fcst += ['marineanlletkf', 'ocnanalecen'] gdas_gfs_common_tasks_before_fcst += ['marineanlchkpt', 'marineanlfinal'] if self.do_vrfy_oceanda: gdas_gfs_common_tasks_before_fcst += ['ocnanalvrfy'] diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 82dfb9f1d48..d3bb68a6b88 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -664,6 +664,32 @@ def prepoceanobs(self): return task + def marineanlletkf(self): + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'enkfgdas_fcst', 'offset': f"-{timedelta_to_HMS(self._base['interval_gdas'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.run}_prepoceanobs'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('marineanlletkf') + task_name = f'{self.run}_marineanlletkf' + task_dict = {'task_name': task_name, + 'resources': resources, + 'dependency': dependencies, + 'envars': self.envars, + 'cycledef': self.run.replace('enkf', ''), + 'command': f'{self.HOMEgfs}/jobs/rocoto/marineanlletkf.sh', + 'job_name': f'{self.pslot}_{task_name}_@H', + 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', + 'maxtries': '&MAXTRIES;' + } + + task = rocoto.create_task(task_dict) + + return task + def marinebmat(self): ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"], {'RUN': 'gdas'}) diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index 
92ceea73aa9..b989def13ff 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -15,7 +15,7 @@ class Tasks: 'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch', "cleanup", 'prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal', 'prepoceanobs', - 'marineanlinit', 'marinebmat', 'marineanlvar', 'ocnanalecen', 'marineanlchkpt', 'marineanlfinal', 'ocnanalvrfy', + 'marineanlinit', 'marineanlletkf', 'marinebmat', 'marineanlvar', 'ocnanalecen', 'marineanlchkpt', 'marineanlfinal', 'ocnanalvrfy', 'earc', 'ecen', 'echgres', 'ediag', 'efcs', 'eobs', 'eomg', 'epos', 'esfc', 'eupd', 'atmensanlinit', 'atmensanlobs', 'atmensanlsol', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal', diff --git a/workflow/wxflow b/workflow/wxflow new file mode 120000 index 00000000000..7ea96a12bfc --- /dev/null +++ b/workflow/wxflow @@ -0,0 +1 @@ +../sorc/wxflow/src/wxflow \ No newline at end of file
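
For reference, the fetch-branch job in .github/workflows/pw_aws_ci.yaml at the top of this patch moves from the deprecated ::set-output workflow command to appending key=value pairs to the file GitHub Actions exposes as $GITHUB_OUTPUT. A minimal shell sketch of that mechanism follows; the values are illustrative only, and the /dev/stdout fallback is just an assumption so the snippet can run outside of Actions.

#!/usr/bin/env bash
# Sketch of publishing step outputs via $GITHUB_OUTPUT (illustrative values).
branch="feature/my-branch"
repo="owner/repo"

# Deprecated form removed by the patch:
#   echo "::set-output name=branch::${branch}"

# Current form: append key=value lines to the $GITHUB_OUTPUT file; grouping the
# echoes keeps the redirection in one place, as the patch does.
{
  echo "branch=${branch}"
  echo "repo=${repo}"
} >> "${GITHUB_OUTPUT:-/dev/stdout}"   # /dev/stdout fallback for local testing only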
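
The reworked WW3_postdet() in ush/forecast_postdet.sh replaces the per-grid restart copy loop with a single restart.ww3 copy plus a loop that pre-links every restart file the model will write, stepping from the first restart time to the end of the forecast in restart_interval-hour increments. A small standalone sketch of that date-stepping logic, assuming GNU date and purely illustrative values:

#!/usr/bin/env bash
# Sketch of the restart-time loop used to link WW3 restart outputs.
restart_date="2021032412"        # first restart to be written (YYYYMMDDHH), illustrative
forecast_end_cycle="2021032512"  # end of the forecast segment (YYYYMMDDHH), illustrative
restart_interval=6               # hours between WW3 restart writes, illustrative

vdate="${restart_date}"
while (( vdate <= forecast_end_cycle )); do
  # The real script links ${DATArestart}/WW3_RESTART/<name> into ${DATA}; here we only print the name.
  echo "${vdate:0:8}.${vdate:8:2}0000.restart.ww3"
  vdate=$(date --utc -d "${vdate:0:8} ${vdate:8:2} + ${restart_interval} hours" +%Y%m%d%H)
done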