From e514b926561bfc8fa3de741876505aff74255c95 Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com> Date: Fri, 15 Nov 2024 14:56:27 -0500 Subject: [PATCH 1/2] Update JEDI hashes to include Model Variable Renaming Sprint (#1355) --- ci/driver.sh | 139 ++++++---- ci/gw_driver.sh | 144 ++++++---- ci/run_ci.sh | 4 +- ci/run_gw_ci.sh | 4 +- parm/io/fv3jedi_fieldmetadata_fv3inc.yaml | 2 +- parm/io/fv3jedi_fieldmetadata_history.yaml | 12 +- parm/io/fv3jedi_fieldmetadata_restart.yaml | 2 +- parm/jcb-gdas | 2 +- parm/soca/fields_metadata.yaml | 256 +++++++++++++----- parm/soca/letkf/letkf.yaml.j2 | 2 +- parm/soca/marine-jcb-base.yaml | 2 +- .../soca/obs/config/insitu_profile_bathy.yaml | 7 + .../soca/obs/config/insitu_profile_tesac.yaml | 7 + .../soca/obs/config/insitu_surface_trkob.yaml | 7 + parm/soca/obs/obs_list.yaml | 10 +- sorc/fv3-jedi | 2 +- sorc/ioda | 2 +- sorc/iodaconv | 2 +- sorc/oops | 2 +- sorc/saber | 2 +- sorc/soca | 2 +- sorc/ufo | 2 +- sorc/vader | 2 +- .../testinput/gdasapp_fv3jedi_fv3inc.yaml | 2 +- utils/soca/gdas_soca_diagb.h | 21 +- 25 files changed, 432 insertions(+), 207 deletions(-) diff --git a/ci/driver.sh b/ci/driver.sh index ce6372c59..933b9223b 100755 --- a/ci/driver.sh +++ b/ci/driver.sh @@ -1,6 +1,9 @@ #!/bin/bash --login +echo "Start at $(date)" + my_dir="$( cd "$( dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd )" +echo "Set my_dir ${my_dir}" # ============================================================================== usage() { @@ -51,6 +54,14 @@ esac cd $GDAS_CI_ROOT/repo CI_LABEL="${GDAS_CI_HOST}-RT" gh pr list --label "$CI_LABEL" --state "open" | awk '{print $1;}' > $GDAS_CI_ROOT/open_pr_list + +open_pr=`cat $GDAS_CI_ROOT/open_pr_list | wc -l` +if (( $open_pr == 0 )); then + echo "No open PRs with ${CI_LABEL}, exit." + echo "Finish at $(date)" + exit +fi + open_pr_list=$(cat $GDAS_CI_ROOT/open_pr_list) # ============================================================================== @@ -58,72 +69,86 @@ open_pr_list=$(cat $GDAS_CI_ROOT/open_pr_list) repo_url="https://github.com/NOAA-EMC/GDASApp.git" # loop through all open PRs for pr in $open_pr_list; do - gh pr edit $pr --remove-label $CI_LABEL --add-label ${CI_LABEL}-Running - echo "Processing Pull Request #${pr}" + echo " " + echo "Start processing Pull Request #${pr} at $(date)" # get the branch name used for the PR gdasapp_branch=$(gh pr view $pr --json headRefName -q ".headRefName") - - # get the fork information - pr_details=$(gh pr view $pr --repo ${repo_url} --json headRepository,headRepositoryOwner,headRefName) - # extract the necessary info - fork_owner=$(gh pr view $pr --repo ${repo_url} --json headRepositoryOwner --jq '.headRepositoryOwner.login') - fork_name=$(gh pr view $pr --repo ${repo_url} --json headRepository --jq '.headRepository.name') - - # construct the fork URL - gdasapp_url="https://github.com/$fork_owner/${fork_name}.git" + # get additional branch information + branch_owner=$(gh pr view $pr --repo ${repo_url} --json headRepositoryOwner --jq '.headRepositoryOwner.login') + branch_name=$(gh pr view $pr --repo ${repo_url} --json headRepository --jq '.headRepository.name') + pr_assignees=$(gh pr view $pr --repo ${repo_url} --json assignees --jq '.assignees[].login') + + # check if any assignee is authorized to run CI + authorized_by="" + for str in ${pr_assignees[@]}; do + grep $str /scratch1/NCEPDEV/da/role.jedipara/CI/GDASApp/authorized_users + rc=$? 
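+    # grep exits 0 when the assignee is listed in authorized_users;
+    # rc carries that status into the authorization check below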
+ if (( rc == 0 )); then + authorized_by=${str} + echo "FOUND MATCH $str, rc $rc" + break + fi + done + + # Authorized to run CI + if (( rc == 0 )); then + echo "Run CI" + + # update PR label + gh pr edit $pr --remove-label $CI_LABEL --add-label ${CI_LABEL}-Running + + # construct the fork URL + gdasapp_url="https://github.com/$branch_owner/${branch_name}.git" - echo "Fork URL: $gdasapp_url" - echo "Branch Name: $gdasapp_branch" - - # create PR specific directory - if [ -d $GDAS_CI_ROOT/PR/$pr ]; then - rm -rf $GDAS_CI_ROOT/PR/$pr - fi - mkdir -p $GDAS_CI_ROOT/PR/$pr - cd $GDAS_CI_ROOT/PR/$pr - - # clone copy of repo - git clone --recursive --jobs 8 --branch $gdasapp_branch $gdasapp_url - cd GDASApp - - # checkout pull request - git pull - gh pr checkout $pr - git submodule update --init --recursive - - # get commit hash - commit=$(git log --pretty=format:'%h' -n 1) - echo "$commit" > $GDAS_CI_ROOT/PR/$pr/commit - - # load modules - case ${TARGET} in - hera | orion) - echo "Loading modules on $TARGET" - module purge - module use $GDAS_CI_ROOT/PR/$pr/GDASApp/modulefiles - module load GDAS/$TARGET - module list - ;; - *) - echo "Unsupported platform. Exiting with error." - exit 1 - ;; - esac - - # run build and testing command - $my_dir/run_ci.sh -d $GDAS_CI_ROOT/PR/$pr/GDASApp -o $GDAS_CI_ROOT/PR/$pr/output_${commit} - ci_status=$? - gh pr comment $pr --repo ${repo_url} --body-file $GDAS_CI_ROOT/PR/$pr/output_${commit} - if [ $ci_status -eq 0 ]; then - gh pr edit $pr --repo ${repo_url} --remove-label ${CI_LABEL}-Running --add-label ${CI_LABEL}-Passed + echo "GDASApp URL: $gdasapp_url" + echo "GDASApp branch Name: $gdasapp_branch" + echo "CI authorized by $authorized_by at $(date)" + + # create PR specific directory + if [ -d $GDAS_CI_ROOT/PR/$pr ]; then + rm -rf $GDAS_CI_ROOT/PR/$pr + fi + mkdir -p $GDAS_CI_ROOT/PR/$pr + cd $GDAS_CI_ROOT/PR/$pr + pwd + + # clone copy of repo + git clone --recursive --jobs 8 --branch $gdasapp_branch $gdasapp_url + cd GDASApp + pwd + + # checkout GDASApp pull request + git pull + gh pr checkout $pr + git submodule update --init --recursive + + # get commit hash + commit=$(git log --pretty=format:'%h' -n 1) + echo "$commit" > $GDAS_CI_ROOT/PR/$pr/commit + + # run build and testing command + echo "Execute $my_dir/run_ci.sh for $GDAS_CI_ROOT/PR/$pr/GDASApp at $(date)" + $my_dir/run_ci.sh -d $GDAS_CI_ROOT/PR/$pr/GDASApp -o $GDAS_CI_ROOT/PR/$pr/output_${commit} + ci_status=$? 
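+      # ci_status is run_ci.sh's exit code: 0 marks the PR -Passed below,
+      # non-zero marks it -Failed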
+ echo "After run_ci.sh with ci_status ${ci_status} at $(date)" + gh pr comment $pr --repo ${repo_url} --body-file $GDAS_CI_ROOT/PR/$pr/output_${commit} + if [ $ci_status -eq 0 ]; then + gh pr edit $pr --repo ${repo_url} --remove-label ${CI_LABEL}-Running --add-label ${CI_LABEL}-Passed + else + gh pr edit $pr --repo ${repo_url} --remove-label ${CI_LABEL}-Running --add-label ${CI_LABEL}-Failed + fi + + # Not authorized to run CI else - gh pr edit $pr --repo ${repo_url} --remove-label ${CI_LABEL}-Running --add-label ${CI_LABEL}-Failed + echo "Do NOT run CI" fi + + echo "Finish processing Pull Request #{pr} at $(date)" done # ============================================================================== # scrub working directory for older files find $GDAS_CI_ROOT/PR/* -maxdepth 1 -mtime +3 -exec rm -rf {} \; - +echo "Finish at $(date)" diff --git a/ci/gw_driver.sh b/ci/gw_driver.sh index e85684f84..c40ff4026 100755 --- a/ci/gw_driver.sh +++ b/ci/gw_driver.sh @@ -1,6 +1,9 @@ #!/bin/bash --login +echo "Start at $(date)" + my_dir="$( cd "$( dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd )" +echo "Set my_dir ${my_dir}" # ============================================================================== usage() { @@ -32,7 +35,7 @@ done case ${TARGET} in hera | orion) - echo "Running automated testing with workflow on $TARGET" + echo "Running Automated GW Testing on $TARGET" source $MODULESHOME/init/sh source $my_dir/${TARGET}.sh module purge @@ -51,69 +54,116 @@ esac cd $GDAS_CI_ROOT/repo CI_LABEL="${GDAS_CI_HOST}-GW-RT" gh pr list --label "$CI_LABEL" --state "open" | awk '{print $1;}' > $GDAS_CI_ROOT/open_pr_list_gw + +open_pr=`cat $GDAS_CI_ROOT/open_pr_list_gw | wc -l` +if (( $open_pr == 0 )); then + echo "No open PRs with ${CI_LABEL}, exit." + echo "Finish at $(date)" + exit +fi + open_pr_list=$(cat $GDAS_CI_ROOT/open_pr_list_gw) # ============================================================================== # clone, checkout, build, test, etc. 
repo_url="https://github.com/NOAA-EMC/GDASApp.git" workflow_url="https://github.com/NOAA-EMC/global-workflow.git" +workflow_branch="develop" # loop through all open PRs for pr in $open_pr_list; do - gh pr edit $pr --remove-label $CI_LABEL --add-label ${CI_LABEL}-Running - echo "Processing Pull Request #${pr}" + echo " " + echo "Start processing Pull Request #${pr} at $(date)" # get the branch name used for the PR gdasapp_branch=$(gh pr view $pr --json headRefName -q ".headRefName") - # check for a companion PR in the global-workflow - companion_pr_exists=$(gh pr list --repo ${workflow_url} --head ${gdasapp_branch} --state open) - if [ -n "$companion_pr_exists" ]; then - # get the PR number - companion_pr=$(echo "$companion_pr_exists" | awk '{print $1;}') - - # extract the necessary info - fork_owner=$(gh pr view $companion_pr --repo $workflow_url --json headRepositoryOwner --jq '.headRepositoryOwner.login') - fork_name=$(gh pr view $companion_pr --repo $workflow_url --json headRepository --jq '.headRepository.name') - - # Construct the fork URL - workflow_url="https://github.com/$fork_owner/$fork_name.git" - - echo "Fork URL: $workflow_url" - echo "Branch Name: $gdasapp_branch" - fi - - # create PR specific directory - if [ -d $GDAS_CI_ROOT/workflow/PR/$pr ]; then - rm -rf $GDAS_CI_ROOT/workflow/PR/$pr - fi - mkdir -p $GDAS_CI_ROOT/workflow/PR/$pr - cd $GDAS_CI_ROOT/workflow/PR/$pr + # get additional branch information + branch_owner=$(gh pr view $pr --repo ${repo_url} --json headRepositoryOwner --jq '.headRepositoryOwner.login') + branch_name=$(gh pr view $pr --repo ${repo_url} --json headRepository --jq '.headRepository.name') + pr_assignees=$(gh pr view $pr --repo ${repo_url} --json assignees --jq '.assignees[].login') + + # check if any assignee is authorized to run CI + authorized_by="" + for str in ${pr_assignees[@]}; do + grep $str /scratch1/NCEPDEV/da/role.jedipara/CI/GDASApp/authorized_users + rc=$? + if (( rc == 0 )); then + authorized_by=${str} + echo "FOUND MATCH $str, rc $rc" + break + fi + done + + # Authorized to run CI + if (( rc == 0 )); then + echo "Run CI" + + # update PR label + gh pr edit $pr --remove-label $CI_LABEL --add-label ${CI_LABEL}-Running + + # check for a companion PR in the global-workflow + companion_pr_exists=$(gh pr list --repo ${workflow_url} --head ${gdasapp_branch} --state open) + if [ -n "$companion_pr_exists" ]; then + # get the PR number + companion_pr=$(echo "$companion_pr_exists" | awk '{print $1;}') + + # extract the necessary info + branch_owner=$(gh pr view $companion_pr --repo $workflow_url --json headRepositoryOwner --jq '.headRepositoryOwner.login') + branch_name=$(gh pr view $companion_pr --repo $workflow_url --json headRepository --jq '.headRepository.name') + + # Construct fork URL. 
Update workflow branch name
+      workflow_url="https://github.com/$branch_owner/$branch_name.git"
+      workflow_branch=$gdasapp_branch
+
+    fi
+
+    echo "Workflow URL: $workflow_url"
+    echo "Workflow branch name: $workflow_branch"
+    echo "GDASApp branch name: $gdasapp_branch"
+    echo "CI authorized by $authorized_by at $(date)"
+
+    # create PR specific directory
+    if [ -d $GDAS_CI_ROOT/workflow/PR/$pr ]; then
+      rm -rf $GDAS_CI_ROOT/workflow/PR/$pr
+    fi
+    mkdir -p $GDAS_CI_ROOT/workflow/PR/$pr
+    cd $GDAS_CI_ROOT/workflow/PR/$pr
+    pwd
-  # clone global workflow develop branch
-  git clone --recursive --jobs 8 --branch dev/gdasapp $workflow_url
-
-  # checkout pull request
-  cd $GDAS_CI_ROOT/workflow/PR/$pr/global-workflow/sorc/gdas.cd
-  git checkout develop
-  git pull
-  gh pr checkout $pr
-  git submodule update --init --recursive
-
-  # get commit hash
-  commit=$(git log --pretty=format:'%h' -n 1)
-  echo "$commit" > $GDAS_CI_ROOT/workflow/PR/$pr/commit
-
-  $my_dir/run_gw_ci.sh -d $GDAS_CI_ROOT/workflow/PR/$pr/global-workflow -o $GDAS_CI_ROOT/workflow/PR/$pr/output_${commit}
-  ci_status=$?
-  gh pr comment $pr --body-file $GDAS_CI_ROOT/workflow/PR/$pr/output_${commit}
-  if [ $ci_status -eq 0 ]; then
-    gh pr edit $pr --remove-label ${CI_LABEL}-Running --add-label ${CI_LABEL}-Passed
+    # clone global workflow develop branch
+    git clone --recursive --jobs 8 --branch $workflow_branch $workflow_url
+
+    # checkout GDASApp pull request
+    cd $GDAS_CI_ROOT/workflow/PR/$pr/global-workflow/sorc/gdas.cd
+    git pull
+    gh pr checkout $pr
+    git submodule update --init --recursive
+
+    # get commit hash
+    commit=$(git log --pretty=format:'%h' -n 1)
+    echo "$commit" > $GDAS_CI_ROOT/workflow/PR/$pr/commit
+
+    # run build and testing command
+    echo "Execute $my_dir/run_gw_ci.sh for $GDAS_CI_ROOT/workflow/PR/$pr/global-workflow at $(date)"
+    $my_dir/run_gw_ci.sh -d $GDAS_CI_ROOT/workflow/PR/$pr/global-workflow -o $GDAS_CI_ROOT/workflow/PR/$pr/output_${commit}
+    ci_status=$?
+    echo "After run_gw_ci.sh with ci_status ${ci_status} at $(date)"
+    gh pr comment $pr --body-file $GDAS_CI_ROOT/workflow/PR/$pr/output_${commit}
+    if [ $ci_status -eq 0 ]; then
+      gh pr edit $pr --remove-label ${CI_LABEL}-Running --add-label ${CI_LABEL}-Passed
+    else
+      gh pr edit $pr --remove-label ${CI_LABEL}-Running --add-label ${CI_LABEL}-Failed
+    fi
+
+  # Not authorized to run CI
  else
-    gh pr edit $pr --remove-label ${CI_LABEL}-Running --add-label ${CI_LABEL}-Failed
+    echo "Do NOT run CI"
  fi
+
+  echo "Finish processing Pull Request #${pr} at $(date)"
done

# ==============================================================================
# scrub working directory for older files
find $GDAS_CI_ROOT/workflow/PR/* -maxdepth 1 -mtime +3 -exec rm -rf {} \;
-
+echo "Finish at $(date)"
diff --git a/ci/run_ci.sh b/ci/run_ci.sh
index b62f78a88..1b5f27231 100755
--- a/ci/run_ci.sh
+++ b/ci/run_ci.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-#set -eu
+set -u
# ==============================================================================
usage() {
@@ -61,7 +61,7 @@ module use $GDAS_MODULE_USE
module load GDAS/$TARGET
echo "---------------------------------------------------" >> $outfile
rm -rf log.ctest
-ctest -E "manual" -R gdasapp --output-on-failure &>> log.ctest
+ctest -R gdasapp --output-on-failure &>> log.ctest
ctest_status=$?
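+# ctest exits non-zero when any selected test fails; the pass/fail summary is
+# scraped from log.ctest just below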
npassed=$(cat log.ctest | grep "tests passed") if [ $ctest_status -eq 0 ]; then diff --git a/ci/run_gw_ci.sh b/ci/run_gw_ci.sh index 59758e37f..ba1874107 100755 --- a/ci/run_gw_ci.sh +++ b/ci/run_gw_ci.sh @@ -1,5 +1,5 @@ #!/bin/bash -#set -eu +set -u # ============================================================================== usage() { @@ -31,7 +31,7 @@ done # ============================================================================== # start output file -echo "Automated Global-Workflow GDASApp Testing Results:" > $outfile +echo "Automated GW GDASApp Testing Results:" > $outfile echo "Machine: ${TARGET}" >> $outfile echo '```' >> $outfile echo "Start: $(date) on $(hostname)" >> $outfile diff --git a/parm/io/fv3jedi_fieldmetadata_fv3inc.yaml b/parm/io/fv3jedi_fieldmetadata_fv3inc.yaml index 2f8acb839..4750967b1 100644 --- a/parm/io/fv3jedi_fieldmetadata_fv3inc.yaml +++ b/parm/io/fv3jedi_fieldmetadata_fv3inc.yaml @@ -9,7 +9,7 @@ field metadata: - long name: air_temperature io name: T_inc -- long name: specific_humidity +- long name: water_vapor_mixing_ratio_wrt_moist_air io name: sphum_inc - long name: cloud_liquid_water diff --git a/parm/io/fv3jedi_fieldmetadata_history.yaml b/parm/io/fv3jedi_fieldmetadata_history.yaml index a8532c32c..2e59dccb1 100644 --- a/parm/io/fv3jedi_fieldmetadata_history.yaml +++ b/parm/io/fv3jedi_fieldmetadata_history.yaml @@ -6,7 +6,7 @@ field metadata: - long name: northward_wind io name: vgrd -- long name: specific_humidity +- long name: water_vapor_mixing_ratio_wrt_moist_air io name: spfh - long name: air_temperature @@ -45,11 +45,11 @@ field metadata: - long name: upward_air_velocity io name: dzdt -- long name: surface_pressure +- long name: air_pressure_at_surface io name: pressfc io file: atmosphere -- long name: surface_geopotential_height +- long name: geopotential_height_at_surface io name: hgtsfc - long name: u_component_of_native_D_grid_wind @@ -74,7 +74,7 @@ field metadata: - long name: sheleg io name: weasd -- long name: sea_surface_temperature +- long name: skin_temperature_at_surface io name: tmpsfc - long name: stype @@ -86,8 +86,8 @@ field metadata: - long name: totalSnowDepthMeters io name: snod -- long name: surface_eastward_wind +- long name: eastward_wind_at_surface io name: ugrd_hyblev1 -- long name: surface_northward_wind +- long name: northward_wind_at_surface io name: vgrd_hyblev1 diff --git a/parm/io/fv3jedi_fieldmetadata_restart.yaml b/parm/io/fv3jedi_fieldmetadata_restart.yaml index d4a4a3a09..ccba447dc 100644 --- a/parm/io/fv3jedi_fieldmetadata_restart.yaml +++ b/parm/io/fv3jedi_fieldmetadata_restart.yaml @@ -10,7 +10,7 @@ field metadata: io name: T - long name: air_pressure_thickness - io name: DELP + io name: delp - long name: layer_thickness io name: DZ diff --git a/parm/jcb-gdas b/parm/jcb-gdas index b8e995a4c..c41e7d7ac 160000 --- a/parm/jcb-gdas +++ b/parm/jcb-gdas @@ -1 +1 @@ -Subproject commit b8e995a4cbf01fa4a662c3da3e7d818f8457ec4e +Subproject commit c41e7d7aca3f4053b177709adaa66488f3643980 diff --git a/parm/soca/fields_metadata.yaml b/parm/soca/fields_metadata.yaml index 586d8557e..444bd8418 100644 --- a/parm/soca/fields_metadata.yaml +++ b/parm/soca/fields_metadata.yaml @@ -1,103 +1,150 @@ # -------------------------------------------------------------------------------------------------- # Field metadata for SOCA. 
Each field can contain the following information: # -# name: Internal name used by soca code and config files +# name: name used by soca and by the rest of JEDI +# name surface: JEDI variable name for 2D surface of a 3D field (Default: ) # grid: "h", "u", or "v" (Default: h) # masked: use land mask if true (Default: true) # levels: "1" or "full_ocn" (Default: 1) -# getval_name: variable name expected by GetValues (Default: ) -# getval_name_surface: GetValues variable name for 2D surface of a 3D field (Default: ) -# io_file: The restart file domain "ocn", "sfc", or "ice" (Default: ) -# io_name: The variable name used in the restart IO (Default: ) +# io file: The restart file domain "ocn", "sfc", or "ice" (Default: ) +# io name: The variable name used in the restart IO (Default: ) +# constant value: Used for "dummy" fields. Sets the entire field to the given constant globally +# This parameter cannot be used with io_file/io_name +# fill value: If the field is masked, this value will be used for the masked areas. +# (Default: 0.0) +# categories: Number of categories for a field with a category dimension (Default: -1) +# If > 0, then the fields "name", and "io name" can use the +# placeholder "" which will be replaced with the category number # -------------------------------------------------------------------------------------------------- # -------------------------------------------------------------------------------------------------- # Ocean state variables # -------------------------------------------------------------------------------------------------- -- name: tocn +- name: sea_water_potential_temperature + name surface: sea_surface_temperature levels: full_ocn - getval name: sea_water_potential_temperature - getval name surface: sea_surface_temperature io file: ocn io name: Temp - fill value: 0.0 -- name: socn +- name: sea_water_salinity + name surface: sea_surface_salinity levels: full_ocn - getval name: sea_water_salinity - getval name surface: sea_surface_salinity io file: ocn io name: Salt property: positive_definite - fill value: 0.0 -- name: uocn +- name: eastward_sea_water_velocity + name surface: surface_eastward_sea_water_velocity grid: u levels: full_ocn - getval name: eastward_sea_water_velocity - getval name surface: surface_eastward_sea_water_velocity io file: ocn io name: u - fill value: 0.0 -- name: vocn +- name: northward_sea_water_velocity + name surface: surface_northward_sea_water_velocity grid: v levels: full_ocn - getval name: northward_sea_water_velocity - getval name surface: surface_northward_sea_water_velocity io file: ocn io name: v - fill value: 0.0 -- name: hocn +- name: sea_water_cell_thickness levels: full_ocn - getval name: sea_water_cell_thickness io file: ocn io name: h - fill value: 0.001 vert interp: false -- name: ssh - getval name: sea_surface_height_above_geoid +- name: sea_surface_height_above_geoid io file: ocn io name: ave_ssh - fill value: 0.0 - name: mom6_mld io file: ocn io name: MLD fill value: 0.0 + +# -------------------------------------------------------------------------------------------------- +# ice state variables with no categories +# -------------------------------------------------------------------------------------------------- +- name: sea_ice_thickness + io file: ice + io name: hi_h #note, was hicen + property: positive_definite + +- name: sea_ice_area_fraction + io file: ice + io name: aice_h #note, was aicen + +- name: sea_ice_snow_thickness + io file: ice + io name: hs_h #note, was hsnon + property: positive_definite 
+ +- name: snow_ice_surface_temperature + io file: ice + io name: Tsfc_h + +- name: air_temperature + io file: ice + io name: Tair_h + +- name: bulk_ice_salinity + io file: ice + io name: sice_h # -------------------------------------------------------------------------------------------------- -# ice state variables +# ice state variables with category dimension # -------------------------------------------------------------------------------------------------- -- name: hicen - getval name: sea_ice_category_thickness +- name: sea_ice_category_area_fraction + categories: 5 io file: ice - io name: hi_h + io sup name: aicen_h + io name: aice_h property: positive_definite - fill value: 0.0 -- name: cicen - getval name: sea_ice_category_area_fraction - getval name surface: sea_ice_area_fraction # note: not accurate, should be "sum" not "surface" +- name: sea_ice_category_volume + categories: 5 io file: ice - io name: aice_h - fill value: 0.0 + io sup name: vicen_h + io name: vice_h + property: positive_definite -- name: hsnon - getval name: sea_ice_category_snow_thickness +- name: sea_ice_snow_category_volume + categories: 5 io file: ice - io name: hs_h + io sup name: vsnon_h + io name: vsno_h property: positive_definite - fill value: 0.0 + +# -------------------------------------------------------------------------------------------------- +# Thermodynamic ice state variables with category and level dimension +# -------------------------------------------------------------------------------------------------- +- name: sea_ice_category_temperature + categories: 5 + levels: 7 + io file: ice + io sup name: Tinz_h + io name: tiz_h + +- name: sea_ice_category_salinity + categories: 5 + levels: 7 + io file: ice + io sup name: Sinz_h + io name: siz_h + +- name: sea_ice_snow_category_temperature + categories: 5 + levels: 1 + io file: ice + io sup name: Tsnz_h + io name: tsz_h + # -------------------------------------------------------------------------------------------------- # wave state variables # -------------------------------------------------------------------------------------------------- -- name: swh - getval name: sea_surface_wave_significant_height +- name: sea_surface_wave_significant_height io file: wav io name: hs property: positive_definite @@ -105,33 +152,28 @@ # -------------------------------------------------------------------------------------------------- # sea surface variables # -------------------------------------------------------------------------------------------------- -- name: sw +- name: net_downwelling_shortwave_radiation masked: false - getval name: net_downwelling_shortwave_radiation io file: sfc io name: sw_rad -- name: lw +- name: net_downwelling_longwave_radiation masked: false - getval name: net_downwelling_longwave_radiation io file: sfc io name: lw_rad -- name: lhf +- name: upward_latent_heat_flux_in_air masked: false - getval name: upward_latent_heat_flux_in_air io file: sfc io name: latent_heat -- name: shf +- name: upward_sensible_heat_flux_in_air masked: false - getval name: upward_sensible_heat_flux_in_air io file: sfc io name: sens_heat -- name: us +- name: friction_velocity_over_water masked: false - getval name: friction_velocity_over_water io file: sfc io name: fric_vel @@ -139,18 +181,16 @@ # -------------------------------------------------------------------------------------------------- # BGC # -------------------------------------------------------------------------------------------------- -- name: chl +- name: 
mass_concentration_of_chlorophyll_in_sea_water + name surface: sea_surface_chlorophyll levels: full_ocn - getval name: mass_concentration_of_chlorophyll_in_sea_water - getval name surface: sea_surface_chlorophyll io file: ocn io name: chl property: positive_definite -- name: biop +- name: molar_concentration_of_biomass_in_sea_water_in_p_units + name surface: sea_surface_biomass_in_p_units levels: full_ocn - getval name: molar_concentration_of_biomass_in_sea_water_in_p_units - getval name surface: sea_surface_biomass_in_p_units io file: ocn io name: biomass_p property: positive_definite @@ -161,20 +201,106 @@ - name: distance_from_coast masked: false -- name: layer_depth - levels: full_ocn - vert interp: false - - name: mesoscale_representation_error -- name: mld +- name: ocean_mixed_layer_thickness - name: sea_floor_depth_below_sea_surface - name: sea_area_fraction masked: false -- name: surface_temperature_where_sea +- name: skin_temperature_at_surface_where_sea - name: sea_water_depth levels: full_ocn + +- name: latitude +- name: longitude + +# -------------------------------------------------------------------------------------------------- +# variables that VADER should be responsible for +# -------------------------------------------------------------------------------------------------- +- name: sea_water_temperature + levels: full_ocn + +# -------------------------------------------------------------------------------------------------- +- name: dummy_atm1 + constant value: 5.0 + +- name: ozone_thickness + constant value: 275 #The average amount of ozone in the atm. is 300 Dobson Units + +- name: water_vapor #g/cm^2 + constant value: 1.2 + +- name: wind_speed_at_surface + constant value: 6 + +- name: air_pressure_at_surface + constant value: 999 + +- name: relative_humidity + constant value: 89 + +- name: cloud_liquid_water_path + constant value: 163 + +- name: cloud_area_fraction_in_atmosphere_layer + constant value: 80 + +- name: aerosol_optical_thickness + constant value: 0.16 + +- name: single_scattering_albedo + constant value: 0.71 + +- name: asymmetry_parameter + constant value: 0.97 + +#---------------------ocean bio +- name: Carbon_nitrogen_detritus_concentration + levels: full_ocn + io file: bio + io name: CDET + property: positive_definite + +- name: Particulate_inorganic_carbon + levels: full_ocn + io file: bio + io name: PIC + +- name: colored_dissolved_organic_carbon + levels: full_ocn + io file: bio + io name: CDC + +- name: diatom_concentration + levels: full_ocn + io file: bio + io name: DIATOM + +- name: chlorophyte_concentration + levels: full_ocn + io file: bio + io name: CHLORO + +- name: cyano-bacteria_concentration + levels: full_ocn + io file: bio + io name: CYANO + +- name: coccolithophore_concentration + levels: full_ocn + io file: bio + io name: COCCO + +- name: dinoflagellate_concentration + levels: full_ocn + io file: bio + io name: DINO + +- name: phaeocystis_concentration + levels: full_ocn + io file: bio + io name: PHAEO diff --git a/parm/soca/letkf/letkf.yaml.j2 b/parm/soca/letkf/letkf.yaml.j2 index 1d5e93d7f..ef60c6ec9 100644 --- a/parm/soca/letkf/letkf.yaml.j2 +++ b/parm/soca/letkf/letkf.yaml.j2 @@ -17,7 +17,7 @@ background: ice_filename: "ice.%mem%.nc" read_from_file: 1 basename: {{ ENSPERT_RELPATH }}/ens/ - state variables: [socn, tocn, ssh, hocn, uocn, vocn, cicen] + state variables: [sea_water_salinity, sea_water_potential_temperature, sea_surface_height_above_geoid, sea_water_cell_thickness, eastward_sea_water_velocity, 
northward_sea_water_velocity, sea_ice_area_fraction] pattern: '%mem%' nmembers: {{ NMEM_ENS }} diff --git a/parm/soca/marine-jcb-base.yaml b/parm/soca/marine-jcb-base.yaml index d07edcd8c..3a9d40223 100644 --- a/parm/soca/marine-jcb-base.yaml +++ b/parm/soca/marine-jcb-base.yaml @@ -24,7 +24,7 @@ minimizer: RPCG final_diagnostics_departures: oman final_prints_frequency: PT3H number_of_outer_loops: 1 -analysis_variables: [cicen, hicen, hsnon, socn, tocn, uocn, vocn, ssh] +analysis_variables: [sea_ice_area_fraction, sea_ice_thickness, sea_ice_snow_thickness, sea_water_salinity, sea_water_potential_temperature, eastward_sea_water_velocity, northward_sea_water_velocity, sea_surface_height_above_geoid] # Model things diff --git a/parm/soca/obs/config/insitu_profile_bathy.yaml b/parm/soca/obs/config/insitu_profile_bathy.yaml index 0dc2db0aa..d78cacdb6 100644 --- a/parm/soca/obs/config/insitu_profile_bathy.yaml +++ b/parm/soca/obs/config/insitu_profile_bathy.yaml @@ -26,3 +26,10 @@ obs filters: - ObsError/waterTemperature coefs: - 1000.0 +obs localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 diff --git a/parm/soca/obs/config/insitu_profile_tesac.yaml b/parm/soca/obs/config/insitu_profile_tesac.yaml index 5c966f88a..b2cf1769d 100644 --- a/parm/soca/obs/config/insitu_profile_tesac.yaml +++ b/parm/soca/obs/config/insitu_profile_tesac.yaml @@ -29,3 +29,10 @@ obs filters: - ObsError/waterTemperature coefs: - 1000.0 +obs localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 diff --git a/parm/soca/obs/config/insitu_surface_trkob.yaml b/parm/soca/obs/config/insitu_surface_trkob.yaml index 3b058e527..5846b83d6 100644 --- a/parm/soca/obs/config/insitu_surface_trkob.yaml +++ b/parm/soca/obs/config/insitu_surface_trkob.yaml @@ -27,3 +27,10 @@ obs filters: - ObsError/seaSurfaceTemperature coefs: - 1000.0 +obs localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 diff --git a/parm/soca/obs/obs_list.yaml b/parm/soca/obs/obs_list.yaml index 0ac8ab5af..c11dc1ace 100644 --- a/parm/soca/obs/obs_list.yaml +++ b/parm/soca/obs/obs_list.yaml @@ -25,15 +25,15 @@ observers: #- !INC ${MARINE_OBS_YAML_DIR}/icec_ssmis_f17_l2.yaml # in situ: monthly -- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_bathy.yaml +#- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_bathy.yaml - !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_argo.yaml -- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_glider.yaml -- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_tesac.yaml +#- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_glider.yaml +#- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_tesac.yaml #- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_tesac_salinity.yaml #- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_marinemammal.yaml -- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_xbtctd.yaml +#- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_xbtctd.yaml #- !INC ${MARINE_OBS_YAML_DIR}/insitu_surface_altkob.yaml -- !INC ${MARINE_OBS_YAML_DIR}/insitu_surface_trkob.yaml +#- !INC ${MARINE_OBS_YAML_DIR}/insitu_surface_trkob.yaml #- !INC ${MARINE_OBS_YAML_DIR}/insitu_surface_trkob_salinity.yaml # in situ: daily diff --git a/sorc/fv3-jedi b/sorc/fv3-jedi index 88279a632..136dfb9c2 160000 --- a/sorc/fv3-jedi +++ b/sorc/fv3-jedi @@ -1 +1 @@ -Subproject commit 88279a63280c23d6b8974991a8c89380afaf5db7 +Subproject 
commit 136dfb9c2f8541584e62fa74c616d686602bcdad diff --git a/sorc/ioda b/sorc/ioda index 3fa4a997e..22cd20eae 160000 --- a/sorc/ioda +++ b/sorc/ioda @@ -1 +1 @@ -Subproject commit 3fa4a997e25b3bd018d30e308a26b3e98af0fe6f +Subproject commit 22cd20eae0685914a5b967e13f95779b57bb448c diff --git a/sorc/iodaconv b/sorc/iodaconv index 23e58ed76..6f87a0f27 160000 --- a/sorc/iodaconv +++ b/sorc/iodaconv @@ -1 +1 @@ -Subproject commit 23e58ed76da3628cbd508bd4ac40f8a01c789d7d +Subproject commit 6f87a0f279e836fd604e5b313a25bd1e54bff80e diff --git a/sorc/oops b/sorc/oops index 0d2c235d7..1ba321ff9 160000 --- a/sorc/oops +++ b/sorc/oops @@ -1 +1 @@ -Subproject commit 0d2c235d791e1ba0023ce300103174dddf71aed7 +Subproject commit 1ba321ff912c6338d7362667eff37ddbf569cb18 diff --git a/sorc/saber b/sorc/saber index 1f23a3665..de5015c83 160000 --- a/sorc/saber +++ b/sorc/saber @@ -1 +1 @@ -Subproject commit 1f23a36657f6d10b770348de0f5454e01d377105 +Subproject commit de5015c8328f5b3d64acc99739fbaa64ef571172 diff --git a/sorc/soca b/sorc/soca index 4d7ef21e7..7f2ddb61b 160000 --- a/sorc/soca +++ b/sorc/soca @@ -1 +1 @@ -Subproject commit 4d7ef21e74d78a065156c942a72806ef2e2eb08e +Subproject commit 7f2ddb61bc86796c83dfcd4801c91bffd829ffb4 diff --git a/sorc/ufo b/sorc/ufo index b0cd94558..85ef98cb9 160000 --- a/sorc/ufo +++ b/sorc/ufo @@ -1 +1 @@ -Subproject commit b0cd94558643380ccceea864abac2c34fa291677 +Subproject commit 85ef98cb99f3eae1ed15e39f0cb05046af36fef1 diff --git a/sorc/vader b/sorc/vader index 05eb007e2..3049658d1 160000 --- a/sorc/vader +++ b/sorc/vader @@ -1 +1 @@ -Subproject commit 05eb007e242af3fdc4969c7146a480e12663e452 +Subproject commit 3049658d185c8095caf0e506795d0e5995fa92cf diff --git a/test/fv3jedi/testinput/gdasapp_fv3jedi_fv3inc.yaml b/test/fv3jedi/testinput/gdasapp_fv3jedi_fv3inc.yaml index f59603d92..7ee403ccc 100644 --- a/test/fv3jedi/testinput/gdasapp_fv3jedi_fv3inc.yaml +++ b/test/fv3jedi/testinput/gdasapp_fv3jedi_fv3inc.yaml @@ -1,6 +1,6 @@ variable change: variable change name: Model2GeoVaLs - input variables: &bkgvars [ua,va,t,ps,sphum,ice_wat,liq_wat,o3mr,surface_geopotential_height] + input variables: &bkgvars [ua,va,t,ps,sphum,ice_wat,liq_wat,o3mr,hgtsfc] output variables: &fv3incrvars [ua,va,t,sphum,ice_wat,liq_wat,o3mr,delp,delz] jedi increment variables: [ua,va,t,ps,sphum,ice_wat,liq_wat,o3mr] fv3 increment variables: *fv3incrvars diff --git a/utils/soca/gdas_soca_diagb.h b/utils/soca/gdas_soca_diagb.h index f45ac8313..563d303b9 100644 --- a/utils/soca/gdas_soca_diagb.h +++ b/utils/soca/gdas_soca_diagb.h @@ -261,7 +261,7 @@ namespace gdasapp { // Get the layer thicknesses and convert to layer depth oops::Log::info() << "====================== calculate layer depth" << std::endl; - auto viewHocn = atlas::array::make_view(xbFs["hocn"]); + auto viewHocn = atlas::array::make_view(xbFs["sea_water_cell_thickness"]); atlas::array::ArrayT depth(viewHocn.shape(0), viewHocn.shape(1)); auto viewDepth = atlas::array::make_view(depth); for (atlas::idx_t jnode = 0; jnode < depth.shape(0); ++jnode) { @@ -299,7 +299,7 @@ namespace gdasapp { } // Update the layer thickness halo - nodeColumns.haloExchange(xbFs["hocn"]); + nodeColumns.haloExchange(xbFs["sea_water_cell_thickness"]); // Loop through variables for (auto & var : configD.socaVars.variables()) { @@ -307,7 +307,7 @@ namespace gdasapp { nodeColumns.haloExchange(xbFs[var]); // Skip the layer thickness variable - if (var == "hocn") { + if (var == "sea_water_cell_thickness") { continue; } oops::Log::info() << "====================== std 
dev for " << var << std::endl; @@ -330,7 +330,7 @@ namespace gdasapp { stdDevFilt(jnode, 0, 0, configD.depthMin, neighbors, 0, viewHocn, bkg, viewBathy, stdDevBkg, false, 4); - if (var == "ssh") { + if (var == "sea_surface_height_above_geoid") { // TODO(G): Extract the unbalanced ssh variance, in the mean time, do this: stdDevBkg(jnode, 0) = std::min(configD.sshMax, stdDevBkg(jnode, 0)); } @@ -353,7 +353,7 @@ namespace gdasapp { if (configD.simpleSmoothing) { for (auto & var : configD.socaVars.variables()) { // Skip the layer thickness variable - if (var == "hocn") { + if (var == "sea_water_cell_thickness") { continue; } @@ -365,7 +365,8 @@ namespace gdasapp { // Loops through nodes and levels for (atlas::idx_t level = 0; level < xbFs[var].shape(1); ++level) { - for (atlas::idx_t jnode = 0; jnode < xbFs["tocn"].shape(0); ++jnode) { + for (atlas::idx_t jnode = 0; + jnode < xbFs["sea_water_potential_temperature"].shape(0); ++jnode) { // Early exit if on a ghost cell if (ghostView(jnode) > 0) { continue; @@ -403,7 +404,8 @@ namespace gdasapp { auto stdDevBkg = atlas::array::make_view(bkgErrFs[var]); auto tmpArray(stdDevBkg); for (int iter = 0; iter < configD.niterVert; ++iter) { - for (atlas::idx_t jnode = 0; jnode < xbFs["tocn"].shape(0); ++jnode) { + for (atlas::idx_t jnode = 0; + jnode < xbFs["sea_water_potential_temperature"].shape(0); ++jnode) { for (atlas::idx_t level = 1; level < xbFs[var].shape(1)-1; ++level) { stdDevBkg(jnode, level) = (tmpArray(jnode, level-1) + tmpArray(jnode, level) + @@ -424,14 +426,15 @@ namespace gdasapp { << std::endl; // Create the diffusion object oops::GeometryData geometryData(geom.functionSpace(), - bkgErrFs["tocn"], true, this->getComm()); + bkgErrFs["sea_water_potential_temperature"], + true, this->getComm()); oops::Diffusion diffuse(geometryData); diffuse.calculateDerivedGeom(geometryData); // Lambda function to construct a field with a constant filtering value auto assignScale = [&](double scale, const std::string& fieldName) { atlas::Field field; - auto levels = xbFs["tocn"].shape(1); + auto levels = xbFs["sea_water_potential_temperature"].shape(1); field = geom.functionSpace().createField(atlas::option::levels(levels) | atlas::option::name(fieldName)); auto viewField = atlas::array::make_view(field); From 2bcedf26cea991fc61f640ccb5be39409b1bd931 Mon Sep 17 00:00:00 2001 From: Mindo Choi <141867620+apchoiCMD@users.noreply.github.com> Date: Fri, 15 Nov 2024 15:45:14 -0500 Subject: [PATCH 2/2] Enable the manual operation of the marine verification tool (#1373) #### This PR enables the Marine Verification Tool to run outside of the g-w CI workflow by submitting an `sbatch` job manually on Hera Includes, - Vrfy task run by a simple driver in the offline #1345 - Improve cosmetic issues we found #1349 - Bug fixes and more #1314 - ~~Move `exgdas_global_marine_analysis_vrfy.py` to `scripts/old` directory~~ Most up-to-date plots can be found at ``` /scratch1/NCEPDEV/da/Mindo.Choi/sandbox/marine_vrfy/gdas.20210827/00/analysis/ocean/vrfy_final_PR ``` The wall time is as follows: ``` [Mindo.Choi@hfe02 vrfy]$ sacct -j 2477688 --format=JobID,JobName,State,ExitCode,Elapsed JobID JobName State ExitCode Elapsed ------------ ---------- ---------- -------- ---------- 2477688 marine_vr+ COMPLETED 0:0 00:11:54 2477688.bat+ batch COMPLETED 0:0 00:11:54 2477688.ext+ extern COMPLETED 0:0 00:11:54 ``` Additional plotting work will be added by consolidating vrfy task as follows: - SST/SSH time series - Omb time series - Spatial SSH/SST/OHC - HTML (?) 
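
For reference, a minimal sketch of a manual run using the job card and driver added in this PR (paths are illustrative; adjust `HOMEgfs` and the `COM_*` exports in the job card first):
```
cd global-workflow/sorc/gdas.cd/utils/soca/fig_gallery
sbatch run_marine_analysis_vrfy_manual.job
sacct -j <jobid> --format=JobID,JobName,State,ExitCode,Elapsed
```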
Close #1314 , Close #1345 , Close #1349 --------- Co-authored-by: Guillaume Vernieres --- scripts/exgdas_global_marine_analysis_vrfy.py | 0 ush/eva/marine_eva_post.py | 4 +- ush/eva/marine_gdas_plots.yaml | 6 +- ush/soca/soca_vrfy.py | 50 +++-- ...gdas_global_marine_analysis_vrfy_manual.py | 210 ++++++++++++++++++ .../run_marine_analysis_vrfy_manual.job | 45 ++++ 6 files changed, 297 insertions(+), 18 deletions(-) mode change 100755 => 100644 scripts/exgdas_global_marine_analysis_vrfy.py create mode 100644 utils/soca/fig_gallery/exgdas_global_marine_analysis_vrfy_manual.py create mode 100644 utils/soca/fig_gallery/run_marine_analysis_vrfy_manual.job diff --git a/scripts/exgdas_global_marine_analysis_vrfy.py b/scripts/exgdas_global_marine_analysis_vrfy.py old mode 100755 new mode 100644 diff --git a/ush/eva/marine_eva_post.py b/ush/eva/marine_eva_post.py index a355621a1..b537ddb3a 100755 --- a/ush/eva/marine_eva_post.py +++ b/ush/eva/marine_eva_post.py @@ -12,7 +12,9 @@ vminmax = {'seaSurfaceTemperature': {'vmin': -2.0, 'vmax': 2.0}, 'seaIceFraction': {'vmin': -0.2, 'vmax': 0.2}, 'seaSurfaceSalinity': {'vmin': -0.2, 'vmax': 0.2}, # TODO: this should be changed - 'absoluteDynamicTopography': {'vmin': -0.2, 'vmax': 0.2}} + 'absoluteDynamicTopography': {'vmin': -0.2, 'vmax': 0.2}, + 'waterTemperature': {'vmin': -2.0, 'vmax': 2.0}, + 'salinity': {'vmin': -0.2, 'vmax': 0.2}} def marine_eva_post(inputyaml, outputdir, diagdir): diff --git a/ush/eva/marine_gdas_plots.yaml b/ush/eva/marine_gdas_plots.yaml index 5bedd1f69..0a903d0c4 100644 --- a/ush/eva/marine_gdas_plots.yaml +++ b/ush/eva/marine_gdas_plots.yaml @@ -73,7 +73,7 @@ graphics: data variable: experiment::OmBQC::${variable} figure: layout: [1,1] - figure size: [11,5] + figure size: [20,10] title: 'OmB post QC | @NAME@ @CYCLE@ | ${variable_title}' output name: map_plots/@NAME@/${variable}/@CHANNELVAR@/@NAME@_${variable}@CHANNELVAR@OmBQC.png tight_layout: true @@ -94,11 +94,11 @@ graphics: data: variable: experiment::OmBQC::${variable} @CHANNELKEY@ - markersize: 1 + markersize: 0.01 label: '$(variable)' colorbar: true # below may need to be edited/removed - cmap: ${dynamic_cmap} + cmap: 'seismic' vmin: ${dynamic_vmin} vmax: ${dynamic_vmax} diff --git a/ush/soca/soca_vrfy.py b/ush/soca/soca_vrfy.py index 854d7ab69..a4060fecd 100755 --- a/ush/soca/soca_vrfy.py +++ b/ush/soca/soca_vrfy.py @@ -38,6 +38,18 @@ def plotConfig(grid_file=[], proj='set me', projs=['Global']): + # Map variable names to their units + variable_units = { + 'ave_ssh': 'meter', + 'Temp': 'deg C', + 'Salt': 'psu', + 'aice_h': 'meter', + 'hi_h': 'meter', + 'hs_h': 'meter', + 'u': 'm/s', + 'v': 'm/s' + } + """ Prepares the configuration for the plotting functions below """ @@ -64,6 +76,9 @@ def plotConfig(grid_file=[], config['variable'] = variable # the variable currently plotted config['projs'] = projs # all the projections etc. 
config['proj'] = proj + + # Add units to the config for each variable + config['variable_units'] = variable_units return config @@ -78,6 +93,7 @@ def plotHorizontalSlice(config): os.makedirs(dirname, exist_ok=True) variable = config['variable'] + unit = config['variable_units'].get(config['variable'], 'unknown') exp = config['exp'] PDY = config['PDY'] cyc = config['cyc'] @@ -85,12 +101,12 @@ def plotHorizontalSlice(config): if variable in ['Temp', 'Salt', 'u', 'v']: level = config['levels'][0] slice_data = np.squeeze(data[variable])[level, :, :] - label_colorbar = variable + ' Level ' + str(level) + label_colorbar = f"{variable} ({unit}) Level {level}" figname = os.path.join(dirname, variable + '_Level_' + str(level)) title = f"{exp} {PDY} {cyc} {variable} Level {level}" else: slice_data = np.squeeze(data[variable]) - label_colorbar = variable + label_colorbar = f"{variable} ({unit})" figname = os.path.join(dirname, variable + '_' + config['proj']) title = f"{exp} {PDY} {cyc} {variable}" @@ -99,17 +115,17 @@ def plotHorizontalSlice(config): fig, ax = plt.subplots(figsize=(8, 5), subplot_kw={'projection': projs[config['proj']]}) - # Plot the filled contours - contourf_plot = ax.contourf(np.squeeze(grid.lon), + # Use pcolor to plot the data + pcolor_plot = ax.pcolormesh(np.squeeze(grid.lon), np.squeeze(grid.lat), slice_data, - levels=100, vmin=bounds[0], vmax=bounds[1], transform=ccrs.PlateCarree(), - cmap=config['colormap']) + cmap=config['colormap'], + zorder=0) # Add colorbar for filled contours - cbar = fig.colorbar(contourf_plot, ax=ax, shrink=0.75, orientation='horizontal') + cbar = fig.colorbar(pcolor_plot, ax=ax, shrink=0.75, orientation='horizontal') cbar.set_label(label_colorbar) # Add contour lines with specified linewidths @@ -120,16 +136,20 @@ def plotHorizontalSlice(config): levels=contour_levels, colors='black', linewidths=0.1, - transform=ccrs.PlateCarree()) + transform=ccrs.PlateCarree(), + zorder=2) - ax.coastlines() # TODO: make this work on hpc + try: + ax.coastlines() # TODO: make this work on hpc + except Exception as e: + print(f"Warning: could not add coastlines. 
{e}") ax.set_title(title) if config['proj'] == 'South': ax.set_extent([-180, 180, -90, -50], ccrs.PlateCarree()) if config['proj'] == 'North': ax.set_extent([-180, 180, 50, 90], ccrs.PlateCarree()) # ax.add_feature(cartopy.feature.LAND) # TODO: make this work on hpc - plt.savefig(figname, bbox_inches='tight', dpi=600) + plt.savefig(figname, bbox_inches='tight', dpi=300) plt.close(fig) @@ -138,6 +158,7 @@ def plotZonalSlice(config): Contourf of a zonal slice of an ocean field """ variable = config['variable'] + unit = config['variable_units'].get(config['variable'], 'unknown') exp = config['exp'] PDY = config['PDY'] cyc = config['cyc'] @@ -171,7 +192,7 @@ def plotZonalSlice(config): # Add colorbar for filled contours cbar = fig.colorbar(contourf_plot, ax=ax, shrink=0.5, orientation='horizontal') - cbar.set_label(variable + ' Lat ' + str(lat)) + cbar.set_label(f"{config['variable']} ({unit}) Lat {lat}") # Set the colorbar ticks cbar.set_ticks(contour_levels) @@ -184,7 +205,7 @@ def plotZonalSlice(config): os.makedirs(dirname, exist_ok=True) figname = os.path.join(dirname, config['variable'] + 'zonal_lat_' + str(int(lat)) + '_' + str(int(config['max depth'])) + 'm') - plt.savefig(figname, bbox_inches='tight', dpi=600) + plt.savefig(figname, bbox_inches='tight', dpi=300) plt.close(fig) @@ -193,6 +214,7 @@ def plotMeridionalSlice(config): Contourf of a Meridional slice of an ocean field """ variable = config['variable'] + unit = config['variable_units'].get(config['variable'], 'unknown') exp = config['exp'] PDY = config['PDY'] cyc = config['cyc'] @@ -226,7 +248,7 @@ def plotMeridionalSlice(config): # Add colorbar for filled contours cbar = fig.colorbar(contourf_plot, ax=ax, shrink=0.5, orientation='horizontal') - cbar.set_label(variable + ' Lon ' + str(lon)) + cbar.set_label(f"{config['variable']} ({unit}) Lon {lon}") # Set the colorbar ticks cbar.set_ticks(contour_levels) @@ -239,7 +261,7 @@ def plotMeridionalSlice(config): os.makedirs(dirname, exist_ok=True) figname = os.path.join(dirname, config['variable'] + 'meridional_lon_' + str(int(lon)) + '_' + str(int(config['max depth'])) + 'm') - plt.savefig(figname, bbox_inches='tight', dpi=600) + plt.savefig(figname, bbox_inches='tight', dpi=300) plt.close(fig) diff --git a/utils/soca/fig_gallery/exgdas_global_marine_analysis_vrfy_manual.py b/utils/soca/fig_gallery/exgdas_global_marine_analysis_vrfy_manual.py new file mode 100644 index 000000000..7c8efd0a6 --- /dev/null +++ b/utils/soca/fig_gallery/exgdas_global_marine_analysis_vrfy_manual.py @@ -0,0 +1,210 @@ +import os +import numpy as np +import gen_eva_obs_yaml +import marine_eva_post +import diag_statistics +from multiprocessing import Process +from soca_vrfy import statePlotter, plotConfig +import subprocess + +comout = os.getenv('COM_OCEAN_ANALYSIS') +com_ice_history = os.getenv('COM_ICE_HISTORY_PREV') +com_ocean_history = os.getenv('COM_OCEAN_HISTORY_PREV') +cyc = os.getenv('cyc') +RUN = os.getenv('RUN') + +bcyc = str((int(cyc) - 3) % 24).zfill(2) +gcyc = str((int(cyc) - 6) % 24).zfill(2) +grid_file = os.path.join(comout, f'{RUN}.t'+bcyc+'z.ocngrid.nc') +layer_file = os.path.join(comout, f'{RUN}.t'+cyc+'z.ocninc.nc') + +# for eva +diagdir = os.path.join(comout, 'diags') +HOMEgfs = os.getenv('HOMEgfs') + +# Get flags from environment variables (set in the bash driver) +run_ensemble_analysis = os.getenv('RUN_ENSENBLE_ANALYSIS', 'OFF').upper() == 'ON' +run_bkgerr_analysis = os.getenv('RUN_BACKGROUND_ERROR_ANALYSIS', 'OFF').upper() == 'ON' +run_bkg_analysis = 
os.getenv('RUN_BACKGROUND_ANALYSIS', 'OFF').upper() == 'ON' +run_increment_analysis = os.getenv('RUN_INCREMENT_ANLYSIS', 'OFF').upper() == 'ON' + +# Initialize an empty list for the main config +configs = [plotConfig(grid_file=grid_file, + data_file=os.path.join(comout, f'{RUN}.t'+cyc+'z.ocnana.nc'), + variables_horiz={'ave_ssh': [-1.8, 1.3], + 'Temp': [-1.8, 34.0], + 'Salt': [32, 40]}, + colormap='nipy_spectral', + comout=os.path.join(comout, 'vrfy', 'ana')), # ocean surface analysis + plotConfig(grid_file=grid_file, + data_file=os.path.join(comout, f'{RUN}.t'+cyc+'z.iceana.nc'), + variables_horiz={'aice_h': [0.0, 1.0], + 'hi_h': [0.0, 4.0], + 'hs_h': [0.0, 0.5]}, + colormap='jet', + projs=['North', 'South', 'Global'], + comout=os.path.join(comout, 'vrfy', 'ana'))] # sea ice analysis + +# Define each config and add to main_config if its flag is True +if run_ensemble_analysis: + config_ens = [plotConfig(grid_file=grid_file, + data_file=os.path.join(comout, f'{RUN}.t{cyc}z.ocn.recentering_error.nc'), + variables_horiz={'ave_ssh': [-1, 1]}, + colormap='seismic', + comout=os.path.join(comout, 'vrfy', 'recentering_error')), # recentering error + plotConfig(grid_file=grid_file, + data_file=os.path.join(comout, f'{RUN}.t{cyc}z.ocn.ssh_steric_stddev.nc'), + variables_horiz={'ave_ssh': [0, 0.8]}, + colormap='gist_ncar', + comout=os.path.join(comout, 'vrfy', 'bkgerr', 'ssh_steric_stddev')), # ssh steric stddev + plotConfig(grid_file=grid_file, + data_file=os.path.join(comout, f'{RUN}.t{cyc}z.ocn.ssh_unbal_stddev.nc'), + variables_horiz={'ave_ssh': [0, 0.8]}, + colormap='gist_ncar', + comout=os.path.join(comout, 'vrfy', 'bkgerr', 'ssh_unbal_stddev')), # ssh unbal stddev + plotConfig(grid_file=grid_file, + data_file=os.path.join(comout, f'{RUN}.t{cyc}z.ocn.ssh_total_stddev.nc'), + variables_horiz={'ave_ssh': [0, 0.8]}, + colormap='gist_ncar', + comout=os.path.join(comout, 'vrfy', 'bkgerr', 'ssh_total_stddev')), # ssh total stddev + plotConfig(grid_file=grid_file, + data_file=os.path.join(comout, f'{RUN}.t{cyc}z.ocn.steric_explained_variance.nc'), + variables_horiz={'ave_ssh': [0, 1]}, + colormap='seismic', + comout=os.path.join(comout, 'vrfy', 'bkgerr', 'steric_explained_variance'))] # steric explained variance + configs.extend(config_ens) + +if run_bkgerr_analysis: + config_bkgerr = [plotConfig(grid_file=grid_file, + layer_file=layer_file, + data_file=os.path.join(comout, os.path.pardir, os.path.pardir, + 'bmatrix', 'ocean', f'{RUN}.t'+cyc+'z.ocean.bkgerr_stddev.nc'), + lats=np.arange(-60, 60, 10), + lons=np.arange(-280, 80, 30), + variables_zonal={'Temp': [0, 2], + 'Salt': [0, 0.2], + 'u': [0, 0.2], + 'v': [0, 0.2]}, + variables_meridional={'Temp': [0, 2], + 'Salt': [0, 0.2], + 'u': [0, 0.2], + 'v': [0, 0.2]}, + variables_horiz={'Temp': [0, 2], + 'Salt': [0, 0.2], + 'u': [0, 0.2], + 'v': [0, 0.2], + 'ave_ssh': [0, 0.1]}, + colormap='jet', + comout=os.path.join(comout, 'vrfy', 'bkgerr'))] # ocn bkgerr stddev + configs.extend(config_bkgerr) + +if run_bkg_analysis: + config_bkg = [plotConfig(grid_file=grid_file, + data_file=os.path.join(com_ice_history, f'{RUN}.ice.t{gcyc}z.inst.f006.nc'), + variables_horiz={'aice_h': [0.0, 1.0], + 'hi_h': [0.0, 4.0], + 'hs_h': [0.0, 0.5]}, + colormap='jet', + projs=['North', 'South', 'Global'], + comout=os.path.join(comout, 'vrfy', 'bkg')), # sea ice background + plotConfig(grid_file=grid_file, + layer_file=layer_file, + data_file=os.path.join(com_ocean_history, f'{RUN}.ocean.t{gcyc}z.inst.f006.nc'), + lats=np.arange(-60, 60, 10), + lons=np.arange(-280, 80, 30), + 
variables_zonal={'Temp': [-1.8, 34.0], + 'Salt': [32, 40]}, + variables_meridional={'Temp': [-1.8, 34.0], + 'Salt': [32, 40]}, + variables_horiz={'ave_ssh': [-1.8, 1.3], + 'Temp': [-1.8, 34.0], + 'Salt': [32, 40]}, + colormap='nipy_spectral', + comout=os.path.join(comout, 'vrfy', 'bkg'))] + configs.extend(config_bkg) + +if run_increment_analysis: + config_incr = [plotConfig(grid_file=grid_file, + layer_file=layer_file, + data_file=os.path.join(comout, f'{RUN}.t'+cyc+'z.ocninc.nc'), + lats=np.arange(-60, 60, 10), + lons=np.arange(-280, 80, 30), + variables_zonal={'Temp': [-0.5, 0.5], + 'Salt': [-0.1, 0.1]}, + variables_horiz={'Temp': [-0.5, 0.5], + 'Salt': [-0.1, 0.1], + 'ave_ssh': [-0.1, 0.1]}, + variables_meridional={'Temp': [-0.5, 0.5], + 'Salt': [-0.1, 0.1]}, + colormap='seismic', + comout=os.path.join(comout, 'vrfy', 'incr')), # ocean increment + plotConfig(grid_file=grid_file, + data_file=os.path.join(comout, f'{RUN}.t'+cyc+'z.ice.incr.nc'), + lats=np.arange(-60, 60, 10), + variables_horiz={'aice_h': [-0.2, 0.2], + 'hi_h': [-0.5, 0.5], + 'hs_h': [-0.1, 0.1]}, + colormap='seismic', + projs=['North', 'South'], + comout=os.path.join(comout, 'vrfy', 'incr'))] # sea ice increment + configs.extend(config_incr) + + +# plot marine analysis vrfy + +def plot_marine_vrfy(config): + ocnvrfyPlotter = statePlotter(config) + ocnvrfyPlotter.plot() + + +# Number of processes +num_processes = len(configs) + +# Create a list to store the processes +processes = [] + +# Iterate over configs +for config in configs[:num_processes]: + process = Process(target=plot_marine_vrfy, args=(config,)) + process.start() + processes.append(process) + +# Wait for all processes to finish +for process in processes: + process.join() + +####################################### +# eva plots +####################################### + +evadir = os.path.join(HOMEgfs, 'sorc', f'{RUN}.cd', 'ush', 'eva') +marinetemplate = os.path.join(evadir, 'marine_gdas_plots.yaml') +varyaml = os.path.join(comout, 'yaml', 'var_original.yaml') + +# it would be better to refrence the dirs explicitly with the comout path +# but eva doesn't allow for specifying output directories +os.chdir(os.path.join(comout, 'vrfy')) +if not os.path.exists('preevayamls'): + os.makedirs('preevayamls') +if not os.path.exists('evayamls'): + os.makedirs('evayamls') + +gen_eva_obs_yaml.gen_eva_obs_yaml(varyaml, marinetemplate, 'preevayamls') + +files = os.listdir('preevayamls') +for file in files: + infile = os.path.join('preevayamls', file) + marine_eva_post.marine_eva_post(infile, 'evayamls', diagdir) + +files = os.listdir('evayamls') +for file in files: + infile = os.path.join('evayamls', file) + print('running eva on', infile) + subprocess.run(['eva', infile], check=True) + +####################################### +# calculate diag statistics +####################################### + +# As of 11/12/2024 not working +# diag_statistics.get_diag_stats() diff --git a/utils/soca/fig_gallery/run_marine_analysis_vrfy_manual.job b/utils/soca/fig_gallery/run_marine_analysis_vrfy_manual.job new file mode 100644 index 000000000..38ce48ffc --- /dev/null +++ b/utils/soca/fig_gallery/run_marine_analysis_vrfy_manual.job @@ -0,0 +1,45 @@ +#!/bin/bash +#SBATCH --job-name=marine_vrfy # Assign a name to the job (customize as needed) +#SBATCH --account=da-cpu +#SBATCH --qos=debug +#SBATCH -A da-cpu +#SBATCH --output=run_marine_vrfy_analysis.out +#SBATCH --nodes=1 # Request 1 node +#SBATCH --ntasks=40 # Request 40 total tasks (processors across nodes) +#SBATCH --partition=hera # 
Specify the partition (cluster) named "hera"
+#SBATCH --cpus-per-task=1          # Set 1 CPU per task (equivalent to ppn=40 and tpp=1)
+#SBATCH --mem=24GB                 # Request 24GB of memory
+#SBATCH --time=00:30:00            # Set the walltime limit to 30 minutes
+
+# Define HOMEgfs
+export HOMEgfs="/scratch1/NCEPDEV/da/Mindo.Choi/workflow_11122024/global-workflow/"
+
+# Load EVA module
+module use ${HOMEgfs}sorc/gdas.cd/modulefiles
+module load EVA/hera
+
+# Set PYTHONPATH using HOMEgfs
+export PYTHONPATH="${HOMEgfs}sorc/gdas.cd/ush/:\
+${HOMEgfs}sorc/gdas.cd/ush/eva/:\
+${HOMEgfs}sorc/gdas.cd/ush/soca/:\
+$PYTHONPATH"
+
+# Set flags to control plotConfig in the Python script
+export RUN_ENSENBLE_ANALYSIS=OFF             # set to ON to enable the ensemble plots
+export RUN_BACKGROUND_ERROR_ANALYSIS=ON
+export RUN_BACKGROUND_ANALYSIS=ON
+export RUN_INCREMENT_ANLYSIS=ON
+
+# Define and export the environment variables
+export cyc="00"
+export RUN="gdas"
+export PSLOT="gdas_test"
+export PDY="20210827"
+
+# Define and export environment variables with paths
+export COM_OCEAN_ANALYSIS="/scratch1/NCEPDEV/da/Mindo.Choi/sandbox/marine_vrfy/gdas.20210827/00/analysis/ocean"
+export COM_ICE_HISTORY_PREV="/scratch1/NCEPDEV/da/Mindo.Choi/sandbox/marine_vrfy/gdas.20210826/18/model/ice/history"
+export COM_OCEAN_HISTORY_PREV="/scratch1/NCEPDEV/da/Mindo.Choi/sandbox/marine_vrfy/gdas.20210826/18/model/ocean/history"
+
+# Execute the marine verification analysis
+python3 ${HOMEgfs}sorc/gdas.cd/utils/soca/fig_gallery/exgdas_global_marine_analysis_vrfy_manual.py
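+
+# Figures are written under ${COM_OCEAN_ANALYSIS}/vrfy/{ana,bkg,incr,bkgerr,...};
+# eva runs from ${COM_OCEAN_ANALYSIS}/vrfy since it does not take an output directory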