diff --git a/docs/Users_Guide/figure/tmp.vert_profile.MID_CONUS.png b/docs/Users_Guide/figure/tmp.vert_profile.MID_CONUS.png index 7f138422..a9b38633 100644 Binary files a/docs/Users_Guide/figure/tmp.vert_profile.MID_CONUS.png and b/docs/Users_Guide/figure/tmp.vert_profile.MID_CONUS.png differ diff --git a/docs/Users_Guide/figure/tmp.vert_profile.png b/docs/Users_Guide/figure/tmp.vert_profile.png index 4cd47657..363d30bb 100644 Binary files a/docs/Users_Guide/figure/tmp.vert_profile.png and b/docs/Users_Guide/figure/tmp.vert_profile.png differ diff --git a/docs/Users_Guide/figure/tmp_32.0N-115.0E-34.0N-82.0E.png b/docs/Users_Guide/figure/tmp_32.0N-115.0E-34.0N-82.0E.png deleted file mode 100644 index 3d196e80..00000000 Binary files a/docs/Users_Guide/figure/tmp_32.0N-115.0E-34.0N-82.0E.png and /dev/null differ diff --git a/docs/Users_Guide/figure/tmp_500hPa.png b/docs/Users_Guide/figure/tmp_500hPa.png index 0cc05b58..ea48477e 100644 Binary files a/docs/Users_Guide/figure/tmp_500hPa.png and b/docs/Users_Guide/figure/tmp_500hPa.png differ diff --git a/docs/Users_Guide/figure/tmp_pbl.png b/docs/Users_Guide/figure/tmp_pbl.png index b8c98595..097b5d6a 100644 Binary files a/docs/Users_Guide/figure/tmp_pbl.png and b/docs/Users_Guide/figure/tmp_pbl.png differ diff --git a/docs/Users_Guide/figure/ugrd_28.0N-120.0E-26.0N-75.0E.png b/docs/Users_Guide/figure/ugrd_28.0N-120.0E-26.0N-75.0E.png new file mode 100644 index 00000000..c2ec6088 Binary files /dev/null and b/docs/Users_Guide/figure/ugrd_28.0N-120.0E-26.0N-75.0E.png differ diff --git a/docs/Users_Guide/fv3_physics.rst b/docs/Users_Guide/fv3_physics.rst index 19700858..cdd3e86a 100644 --- a/docs/Users_Guide/fv3_physics.rst +++ b/docs/Users_Guide/fv3_physics.rst @@ -9,9 +9,14 @@ and spatial domain. Tendencies are partitioned into physics parameterizations an dynamics. 
Physics parameterizations include schemes like deep convection, convective gravity wave drag, short wave radiation, planetary boundary layer, microphysics, and others listed below. Non-physics tendencies (or dynamics) are due to horizontal -and vertical motion. The residual (which should be zero) is the difference between +and vertical motion (advection). + +residual = all tendencies - actual tendency + +The residual (which should be close to zero) is the +difference between the actual change in the state variable over the requested time window and the -expected change due to physics parameterizations and dynamics tendencies. One can plot +combined change due to all physics parameterizations and dynamics tendencies. One can plot a single tendency component at multiple pressure levels or plot all tendency components at a single pressure level. Plan views (horizontal cross sections), vertical profiles, and difference plots are also available. @@ -19,7 +24,7 @@ and difference plots are also available. Required Packages: ================== -* cartopy (0.20.3 only) +* cartopy * matplotlib @@ -47,80 +52,81 @@ Save this file in a directory where you have read and write permissions, such as $WORKING_DIR/data/fv3_physics_tend, where $WORKING_DIR is the path to the directory where you will save input data. -For additional details see `grid description in UFS Short Range Weather App user manual `_ - - -Available Tendency Variables ----------------------------- - -A small description of each tendency variable and their nicknames are shown below. Some -tendencies do not apply to all four state variables, so these cells are blank. 
- -+----------------------------+-------------+-------------------+-------------+-------------+ -| tendency | temperature | specific humidity | u-wind | v-wind | -+============================+=============+===================+=============+=============+ -|convective gravity wave drag| congwd| | congwd | congwd | -+----------------------------+-------------+-------------------+-------------+-------------+ -| deep convection | deepcnv| deepcnv | deepcnv| deepcnv| -+----------------------------+-------------+-------------------+-------------+-------------+ -| long wave radiation | lw | | | | -+----------------------------+-------------+-------------------+-------------+-------------+ -| microphysics | mp | mp | mp | mp | -+----------------------------+-------------+-------------------+-------------+-------------+ -|orographic gravity wave drag| orogwd| | orogwd | orogwd | -+----------------------------+-------------+-------------------+-------------+-------------+ -| planetary boundary layer | pbl | pbl | pbl | pbl | -+----------------------------+-------------+-------------------+-------------+-------------+ -| Rayleigh damping | rdamp | | rdamp | rdamp | -+----------------------------+-------------+-------------------+-------------+-------------+ -| shallow convection | shalcnv| shalcnv | shalcnv| shalcnv| -+----------------------------+-------------+-------------------+-------------+-------------+ -| short wave radiation | sw | | | | -+----------------------------+-------------+-------------------+-------------+-------------+ -| total physics (all above) | phys | phys | phys | phys | -+----------------------------+-------------+-------------------+-------------+-------------+ -| dynamics | nophys| nophys | nophys| nophys| -+----------------------------+-------------+-------------------+-------------+-------------+ -| state variable at validtime| tmp | spfh | ugrd | vgrd | 
-+----------------------------+-------------+-------------------+-------------+-------------+ -| actual change in state var | dtmp | dspfh | dugrd | dvgrd | -+----------------------------+-------------+-------------------+-------------+-------------+ - - - - -The expected names of the netCDF variables in the history file are shown below. If your history -file is different, one can change them in YAML config file -*$METPLOTPY_BASE/test/fv3_physics_tend/fv3_physics_tend_defaults.yaml* +For additional details see +`grid description in UFS Short Range Weather App user manual `_ + + +Default tendency variable names +------------------------------- + +Default tendency variable names are below. The tendencies that are available depend on the +physics suite that the user selects when running FV3; more specifically, its contents are +determined by the diag_table file that the user sets up. The history file that we +use in our example is for a specific diag_table and so may change with different FV3 configurations. +The user must make sure the names in the configuration file +*$METPLOTPY_BASE/test/fv3_physics_tend/fv3_physics_tend_defaults.yaml* +match the names used in fv3_history.nc for their case. +Some tendencies do not apply to all four state variables, so these cells are left blank. **NOTE**: *$METPLOTPY_BASE* is the directory where the METplotpy code is saved (e.g. */path/to/user/dir/METplotpy*). 
-+----------------------------+-------------+-------------------+-------------+-------------+ -| tendency | temperature | specific humidity | u-wind | v-wind | -+============================+=============+===================+=============+=============+ -|convective gravity wave drag| dt3dt_congwd| |du3dt_congwd |dv3dt_congwd | -+----------------------------+-------------+-------------------+-------------+-------------+ -| deep convection |dt3dt_deepcnv| dq3dt_deepcnv |du3dt_deepcnv|dv3dt_deepcnv| -+----------------------------+-------------+-------------------+-------------+-------------+ -| long wave radiation | dt3dt_lw | | | | -+----------------------------+-------------+-------------------+-------------+-------------+ -| microphysics | dt3dt_mp | dq3dt_mp | du3dt_mp | dv3dt_mp | -+----------------------------+-------------+-------------------+-------------+-------------+ -|orographic gravity wave drag| dt3dt_orogwd| |du3dt_orogwd |dv3dt_orogwd | -+----------------------------+-------------+-------------------+-------------+-------------+ -| planetary boundary layer | dt3dt_pbl | dq3dt_pbl | du3dt_pbl | dv3dt_pbl | -+----------------------------+-------------+-------------------+-------------+-------------+ -| Rayleigh damping | dt3dt_rdamp | | du3dt_rdamp | dv3dt_rdamp | -+----------------------------+-------------+-------------------+-------------+-------------+ -| shallow convection |dt3dt_shalcnv| dq3dt_shalcnv |du3dt_shalcnv|dv3dt_shalcnv| -+----------------------------+-------------+-------------------+-------------+-------------+ -| short wave radiation | dt3dt_sw | | | | -+----------------------------+-------------+-------------------+-------------+-------------+ -| total physics (all above) | dt3dt_phys | dq3dt_phys |du3dt_phys | dv3dt_phys | -+----------------------------+-------------+-------------------+-------------+-------------+ -| dynamics | dt3dt_nophys| dq3dt_nophys | du3dt_nophys| dv3dt_nophys| 
-+----------------------------+-------------+-------------------+-------------+-------------+ ++-----------------------------+-------------+-------------------+-------------+-------------+ +| State Variable | temperature | specific humidity | u-wind | v-wind | ++=============================+=============+===================+=============+=============+ +| expected name | tmp | spfh | ugrd | vgrd | ++-----------------------------+-------------+-------------------+-------------+-------------+ + +Tendency variables: + ++-----------------------------+-------------------+-------------------+----------------+----------------+ +| Tendency Variable | temperature | specific humidity | u-wind | v-wind | ++=============================+===================+===================+================+================+ +| convective gravity wave drag| dtend_temp_cnvgwd | | dtend_u_cnvgwd | dtend_v_cnvgwd | ++-----------------------------+-------------------+-------------------+----------------+----------------+ +| deep convection | dtend_temp_deepcnv| dtend_qv_deepcnv | dtend_u_deepcnv| dtend_v_deepcnv| ++-----------------------------+-------------------+-------------------+----------------+----------------+ +| long wave radiation | dtend_temp_lw | | | | ++-----------------------------+-------------------+-------------------+----------------+----------------+ +| microphysics | dtend_temp_mp | dtend_qv_mp | | | ++-----------------------------+-------------------+-------------------+----------------+----------------+ +| orographic gravity wave drag| dtend_temp_orogwd | | dtend_u_orogwd | dtend_v_orogwd | ++-----------------------------+-------------------+-------------------+----------------+----------------+ +| planetary boundary layer | dtend_temp_pbl | dtend_qv_pbl | dtend_u_pbl | dtend_v_pbl | ++-----------------------------+-------------------+-------------------+----------------+----------------+ +| Rayleigh damping | dtend_temp_rdamp | | dtend_u_rdamp | dtend_v_rdamp | 
++-----------------------------+-------------------+-------------------+----------------+----------------+ +| shallow convection | dtend_temp_shalcnv| dtend_qv_shalcnv | dtend_u_shalcnv| dtend_v_shalcnv| ++-----------------------------+-------------------+-------------------+----------------+----------------+ +| short wave radiation | dtend_temp_sw | | | | ++-----------------------------+-------------------+-------------------+----------------+----------------+ +| all physics tendencies | dtend_temp_phys | dtend_qv_phys | dtend_u_phys | dtend_v_phys | ++-----------------------------+-------------------+-------------------+----------------+----------------+ +| dynamics (advection) | dtend_temp_nophys | dtend_qv_nophys | dtend_u_nophys | dtend_v_nophys | ++-----------------------------+-------------------+-------------------+----------------+----------------+ + + +Derived tendency variables that show up in plots: + ++-----------------------------+-------------------+-------------------+----------------+----------------+ +| Derived Variable | temperature | specific humidity | u-wind | v-wind | ++=============================+===================+===================+================+================+ +| all phys and nophys | all | all | all | all | ++-----------------------------+-------------------+-------------------+----------------+----------------+ +| actual tendency | actual | actual | actual | actual | ++-----------------------------+-------------------+-------------------+----------------+----------------+ +| residual tend. (all-actual) | resid | resid | resid | resid | ++-----------------------------+-------------------+-------------------+----------------+----------------+ + +If time window overlaps initialization time +------------------------------------------- + +The history file does not necessarily have the temperature, moisture, or wind at the exact +time of model initialization. It is usually the next timestep (e.g. 180 seconds later). 
+This means you cannot derive the actual change in temperature starting at the model initialization +time. You must choose a later valid time and/or a shorter time window that does not overlap +the initialization time. In other words, it is a problem if your model initialization time is 0z, your +valid time is 1z and your time window is one hour. Example ======= @@ -149,7 +155,8 @@ There is a YAML config file located in Run from the Command Line ========================= -To generate example tendency plots using settings in the **fv3_physics_defaults.yaml** configuration file, perform the following: +To generate example tendency plots using settings in the **fv3_physics_defaults.yaml** +configuration file, perform the following: .. code-block:: bash @@ -164,8 +171,9 @@ Plan View :: usage: planview_fv3.py [-h] [-d] [--method {nearest,linear,loglinear}] [--ncols NCOLS] - [--nofineprint] [-o OFILE] [-p PFULL [PFULL ...]] [-s SHP] - [--subtract SUBTRACT] [-t TWINDOW] [-v VALIDTIME] + [--nofineprint] [--norobust] [-o OFILE] [-p PFULL [PFULL ...]] + [-s SHP] [--subtract SUBTRACT] [-t TWINDOW] [-v VALIDTIME] + [--vmin VMIN] [--vmax VMAX] config historyfile gridfile statevariable fill Plan view of FV3 diagnostic tendency @@ -185,6 +193,8 @@ Plan View --ncols NCOLS number of columns (default: None) --nofineprint Don't add metadata and created by date (for comparing images) (default: False) + --norobust compute colormap range with extremes, not 2nd and 98th + percentiles (default: False) -o OFILE, --ofile OFILE name of output image file (default: None) -p PFULL [PFULL ...], --pfull PFULL [PFULL ...] 
@@ -198,13 +208,16 @@ Plan View time window in hours (default: 3) -v VALIDTIME, --validtime VALIDTIME valid time (default: None) + --vmin VMIN color bar minimum (overrides robust=True) (default: None) + --vmax VMAX color bar maximum (overrides robust=True) (default: None) -Generate a plan view of all tendencies at 500 hPa: +Generate a plan view of all tendencies at 500 hPa for the 1-hour time window ending 20190615 20z: .. code-block:: bash - python planview_fv3.py $CONFIG $WORKING_DIR/fv3_history.nc $WORKING_DIR/grid_spec.nc tmp pbl -p 500 -t 1 -v 20190504T14 --nofineprint + python planview_fv3.py $CONFIG $WORKING_DIR/fv3_history.nc $WORKING_DIR/grid_spec.nc tmp pbl \ + -p 500 -t 1 -v 20190615T20 --nofineprint .. image:: figure/tmp_500hPa.png @@ -212,7 +225,8 @@ Generate a plan view of PBL tendency at default pressure levels: .. code-block:: bash - python planview_fv3.py $CONFIG $WORKING_DIR/fv3_history.nc $WORKING_DIR/grid_spec.nc tmp pbl -t 1 -v 20190504T13 --nofineprint + python planview_fv3.py $CONFIG $WORKING_DIR/fv3_history.nc $WORKING_DIR/grid_spec.nc tmp pbl \ + -t 1 -v 20190615T20 --nofineprint .. image:: figure/tmp_pbl.png @@ -227,6 +241,7 @@ Vertical Profile usage: vert_profile_fv3.py [-h] [-d] [--nofineprint] [-o OFILE] [--resid] [-s SHP] [--subtract SUBTRACT] [-t TWINDOW] [-v VALIDTIME] + [--xmin XMIN] [--xmax XMAX] config historyfile gridfile statevariable Vertical profile of FV3 diagnostic tendencies @@ -251,12 +266,16 @@ Vertical Profile time window in hours (default: 3) -v VALIDTIME, --validtime VALIDTIME valid time (default: None) - -Generate vertical profile of temperature tendencies averaged over the mid-CONUS region: + --xmin XMIN x-axis minimum (default: None) + --xmax XMAX x-axis maximum (default: None) + +Generate vertical profile of temperature tendencies averaged over the central US. Plot residual +tendency and its components. Limit the x-axis range with --xmin and --xmax. .. 
code-block:: bash - python vert_profile_fv3.py $CONFIG $WORKING_DIR/fv3_history.nc $WORKING_DIR/grid_spec.nc tmp -t 2 -v 20190504T14 -s $METPLOTPY_BASE/metplotpy/contributed/fv3_physics_tend/shapefiles/MID_CONUS --nofineprint + python vert_profile_fv3.py $CONFIG $WORKING_DIR/fv3_history.nc $WORKING_DIR/grid_spec.nc tmp \ + -t 1 -v 20190615T20 -s shapefiles/MID_CONUS --resid --xmin -0.0005 --xmax 0.0004 --nofineprint .. image:: figure/tmp.vert_profile.MID_CONUS.png @@ -267,14 +286,14 @@ Vertical Cross Section python cross_section_vert.py -h -Usage:: +:: - usage: cross_section_vert.py [-h] [-d] [--dindex DINDEX] [--ncols NCOLS] [--nofineprint] - [-o OFILE] [-s START START] [-e END END] - [--subtract SUBTRACT] [-t TWINDOW] [-v VALIDTIME] + usage: cross_section_vert.py [-h] [-d] [--ncols NCOLS] [--nofineprint] [--norobust] [-o OFILE] + [-s START START] [-e END END] [--subtract SUBTRACT] [-t TWINDOW] + [-v VALIDTIME] [--vmin VMIN] [--vmax VMAX] config historyfile gridfile statevariable - Vertical cross section of FV3 diagnostic tendency + Vertical cross section of FV3 diagnostic tendencies positional arguments: config yaml configuration file @@ -285,29 +304,33 @@ Usage:: optional arguments: -h, --help show this help message and exit -d, --debug - --dindex DINDEX tick and gridline interval along cross section (default: 20) --ncols NCOLS number of columns (default: None) - --nofineprint Don't add metadata and created by date (for comparing images) - (default: False) + --nofineprint Don't add metadata and created by date (for comparing images) (default: False) + --norobust compute colormap range with extremes, not 2nd and 98th percentiles (default: + False) -o OFILE, --ofile OFILE name of output image file (default: None) -s START START, --start START START - start point (default: (28, -115)) + start point lat lon (default: (28, -115)) -e END END, --end END END - end point (default: (30, -82)) + end point lat lon (default: (30, -82)) --subtract SUBTRACT FV3 history file to 
subtract (default: None) -t TWINDOW, --twindow TWINDOW time window in hours (default: 3) -v VALIDTIME, --validtime VALIDTIME valid time (default: None) - -Generate vertical cross section from 32°N 115°W to 34°N 82°W: + --vmin VMIN color bar minimum (overrides robust=True) (default: None) + --vmax VMAX color bar maximum (overrides robust=True) (default: None) + +Generate vertical cross section of u-wind tendencies from 28°N 120°W to 26°N 75°W over one-hour +time window ending 23z June 15, 2019. .. code-block:: bash - python cross_section_vert.py $CONFIG $WORKING_DIR/fv3_history.nc $WORKING_DIR/grid_spec.nc tmp -t 2 -v 20190504T14 -s 32 -115 -e 34 -82 --nofineprint + python cross_section_vert.py $CONFIG $WORKING_DIR/fv3_history.nc $WORKING_DIR/grid_spec.nc tmp \ + -t 1 -v "2019-06-15 23" -s 28 -120 -e 26 -75 --nofineprint -.. image:: figure/tmp_32.0N-115.0E-34.0N-82.0E.png +.. image:: figure/ugrd_28.0N-120.0E-26.0N-75.0E.png Difference Plot --------------- @@ -317,7 +340,8 @@ Put file you want to subtract after the --subtract argument: .. code-block:: bash - python vert_profile_fv3.py $CONFIG $WORKING_DIR/fv3_history.nc $WORKING_DIR/grid_spec.nc tmp -t 1 --subtract $WORKING_DIR/fv3_history.nc --nofineprint + python vert_profile_fv3.py $CONFIG $WORKING_DIR/fv3_history.nc $WORKING_DIR/grid_spec.nc tmp \ + -t 1 --subtract $WORKING_DIR/fv3_history.nc --nofineprint .. 
image:: figure/tmp.vert_profile.png diff --git a/metplotpy/contributed/fv3_physics_tend/cross_section_vert.py b/metplotpy/contributed/fv3_physics_tend/cross_section_vert.py index a5970d2b..32310cde 100644 --- a/metplotpy/contributed/fv3_physics_tend/cross_section_vert.py +++ b/metplotpy/contributed/fv3_physics_tend/cross_section_vert.py @@ -1,70 +1,89 @@ +""" vertical cross section of tendencies """ import argparse -import cartopy import datetime import logging +import os +import cartopy import matplotlib.pyplot as plt from matplotlib.ticker import MultipleLocator from metpy.interpolate import cross_section from metpy.units import units import numpy as np -import os import pandas as pd -import pdb -from . import physics_tend -import sys import xarray import yaml - -""" -Plan view of tendencies of t, q, u, or v from physics parameterizations, dynamics (non-physics), their total, and residual. Total change is the actual change in state variable from first time to last time. Total change differs from cumulative change -attributed to physics and non-physics tendencies when residual is not zero. 
-""" +import physics_tend def parse_args(): + """ + parse command line arguments + """ # =============Arguments=================== - parser = argparse.ArgumentParser(description = "Vertical cross section of FV3 diagnostic tendency", formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser = argparse.ArgumentParser( + description="Vertical cross section of FV3 diagnostic tendencies", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) # ==========Mandatory Arguments=================== - parser.add_argument("config", type=argparse.FileType('r'), help="yaml configuration file") - parser.add_argument("historyfile", type=argparse.FileType("r"), help="FV3 history file") - parser.add_argument("gridfile", type=argparse.FileType("r"), help="FV3 grid spec file") - parser.add_argument("statevariable", type=str, help="moisture, temperature, or wind component variable name") + parser.add_argument("config", help="yaml configuration file") + parser.add_argument("historyfile", help="FV3 history file") + parser.add_argument("gridfile", help="FV3 grid spec file") + parser.add_argument( + "statevariable", help="moisture, temperature, or wind component variable name") # ==========Optional Arguments=================== parser.add_argument("-d", "--debug", action='store_true') - parser.add_argument("--dindex", type=int, default=20, help="tick and gridline interval along cross section") - parser.add_argument("--ncols", type=int, default=None, help="number of columns") - parser.add_argument("--nofineprint", action='store_true', help="Don't add metadata and created by date (for comparing images)") - parser.add_argument("-o", "--ofile", type=str, help="name of output image file") - parser.add_argument("-s", "--start", nargs=2, type=float, default=(28, -115), help="start point") - parser.add_argument("-e", "--end", nargs=2, type=float, default=(30, -82), help="end point") - parser.add_argument("--subtract", type=argparse.FileType("r"), help="FV3 history file to subtract") - 
parser.add_argument("-t", "--twindow", type=int, default=3, help="time window in hours") - parser.add_argument("-v", "--validtime", type=lambda x:pd.to_datetime(x), help="valid time") + parser.add_argument("--ncols", type=int, default=None, + help="number of columns") + parser.add_argument("--nofineprint", action='store_true', + help="Don't add metadata and created by date (for comparing images)") + parser.add_argument("--norobust", action='store_true', + help="compute colormap range with extremes, not 2nd and 98th percentiles") + parser.add_argument("-o", "--ofile", help="name of output image file") + parser.add_argument("-s", "--start", nargs=2, type=float, + default=(28, -115), help="start point lat lon") + parser.add_argument("-e", "--end", nargs=2, type=float, + default=(30, -82), help="end point lat lon") + parser.add_argument("--subtract", help="FV3 history file to subtract") + parser.add_argument("-t", "--twindow", type=float, + default=3, help="time window in hours") + parser.add_argument("-v", "--validtime", help="valid time") + parser.add_argument("--vmin", type=float, + help="color bar minimum (overrides robust=True)") + parser.add_argument("--vmax", type=float, + help="color bar maximum (overrides robust=True)") args = parser.parse_args() return args def main(): + """ + Vertical cross section view of tendencies of t, q, u, or v from physics parameterizations, + dynamics (non-physics), the combination of all tendencies (physics and non-physics), + the actual tendency, and the residual. Residual is the sum of all tendencies minus the + actual tendency. 
+ """ args = parse_args() - gfile = args.gridfile - ifile = args.historyfile - variable = args.statevariable - config = args.config - debug = args.debug - dindex = args.dindex - ncols = args.ncols - nofineprint= args.nofineprint - ofile = args.ofile - startpt = args.start - endpt = args.end - subtract = args.subtract - twindow = datetime.timedelta(hours = args.twindow) - validtime = args.validtime + gfile = args.gridfile + ifile = args.historyfile + variable = args.statevariable + config = args.config + ncols = args.ncols + nofineprint = args.nofineprint + ofile = args.ofile + startpt = args.start + endpt = args.end + robust = not args.norobust + subtract = args.subtract + twindow = datetime.timedelta(hours=args.twindow) + twindow_quantity = twindow.total_seconds() * units.seconds + validtime = pd.to_datetime(args.validtime) + vmin = args.vmin + vmax = args.vmax level = logging.INFO - if debug: level = logging.DEBUG - logging.basicConfig(format='%(asctime)s - %(message)s', level=level) # prepend log message with time + if args.debug: + level = logging.DEBUG + # prepend log message with time + logging.basicConfig(format='%(asctime)s - %(message)s', level=level) logging.debug(args) # Output filename. @@ -74,135 +93,171 @@ def main(): ofile = os.path.realpath(args.ofile) odir = os.path.dirname(ofile) if not os.path.exists(odir): - logging.info(f"output directory {odir} does not exist. Creating it") + logging.info( + f"output directory {odir} does not exist. 
Creating it") os.mkdir(odir) - logging.info(f"output filename={ofile}") - + logging.debug("output filename=%s", ofile) # Reload fv3 in case user specifies a custom --config file - fv3 = yaml.load(open(config.name), Loader=yaml.FullLoader) + fv3 = yaml.load(open(config, encoding="utf8"), Loader=yaml.FullLoader) # Read lat/lon from gfile logging.debug(f"read lat/lon from {gfile}") - gds = xarray.open_dataset(gfile.name) + gds = xarray.open_mfdataset(gfile) lont = gds[fv3["lon_name"]] latt = gds[fv3["lat_name"]] # Open input file - logging.debug(f"About to open {ifile}") - fv3ds = xarray.open_dataset(ifile.name) + logging.debug("open %s", ifile) + fv3ds = xarray.open_dataset(ifile) + + if subtract: + logging.info("subtracting %s", subtract) + with xarray.set_options(keep_attrs=True): + fv3ds -= xarray.open_mfdataset(subtract) + datetimeindex = fv3ds.indexes['time'] if hasattr(datetimeindex, "to_datetimeindex"): - # Convert from CFTime to pandas datetime. I get a warning CFTimeIndex from non-standard calendar 'julian'. Maybe history file should be saved with standard calendar. + # Convert from CFTime to pandas datetime or get warning + # CFTimeIndex from non-standard calendar 'julian'. + # Maybe history file should be saved with standard calendar. # To turn off warning, set unsafe=True. datetimeindex = datetimeindex.to_datetimeindex(unsafe=True) + ragged_times = datetimeindex != datetimeindex.round('1ms') + if any(ragged_times): + logging.info( + f"round times to nearest millisec. 
before: {datetimeindex[ragged_times].values}") + datetimeindex = datetimeindex.round('1ms') + logging.info(f"after: {datetimeindex[ragged_times].values}") fv3ds['time'] = datetimeindex - if subtract: - logging.debug(f"subtracting {subtract.name}") - with xarray.set_options(keep_attrs=True): - fv3ds -= xarray.open_dataset(subtract.name) - fv3ds = fv3ds.assign_coords(lont=lont, latt=latt) # lont and latt used by pcolorfill() - tendency_vars = fv3["tendency_varnames"][variable] # list of tendency variable names for requested state variable - fv3ds = physics_tend.add_time0(fv3ds, variable, fv3) - tendencies = fv3ds[tendency_vars] # subset of original Dataset + # lont and latt used by pcolorfill() + fv3ds = fv3ds.assign_coords(lont=lont, latt=latt) if validtime is None: - logging.debug("validtime not provided on command line, so use latest time in history file.") validtime = fv3ds.time.values[-1] validtime = pd.to_datetime(validtime) + logging.info( + "validtime not provided on command line. Using last time in history file %s.", + validtime) time0 = validtime - twindow - time1 = time0 + datetime.timedelta(hours=1) - logging.info(f"Sum tendencies {time1}-{validtime}") - tindex = dict(time=slice(time1, validtime)) # slice of time from hour after time0 through validtime - tendencies_avg = tendencies.sel(tindex).mean(dim="time") # average tendencies in time - - # Dynamics (nophys) tendency is not reset every hour. Just calculate change from time0 to validtime. - nophys_var = [x for x in tendency_vars if x.endswith("_nophys")] - assert len(nophys_var) == 1 - nophys_var = nophys_var[0] # we don't want a 1-element list; we want a string. So that tendencies[nophys_var] is a DataArray, not a Dataset. - logging.info(f"Subtract nophys tendency at {time0} from {validtime}") - nophys_delta = tendencies[nophys_var].sel(time=validtime) - tendencies[nophys_var].sel(time=time0) - tendencies_avg[nophys_var] = nophys_delta / args.twindow - - - # Restore units after .mean() removed them. 
Copy units from 1st tendency variable. - tendency_units = units.parse_expression(fv3ds[tendency_vars[0]].units) - logging.debug(f"restoring {tendency_units} units after .mean() method removed them.") - tendencies_avg *= tendency_units - for da in tendencies_avg: - tendencies_avg[da] = tendencies_avg[da].metpy.convert_units("K/hour") - long_names = [fv3ds[da].attrs["long_name"] for da in tendencies_avg] # Make list of long_names before .to_array() loses them. - - # Remove characters up to and including 1st underscore (e.g. du3dt_) in DataArray name. - # for example dt3dt_pbl -> pbl - name_dict = {da : "_".join(da.split("_")[1:]) for da in tendencies_avg.data_vars} + assert time0 in fv3ds.time, (f"time0 {time0} not in history file. Closest is " + f"{fv3ds.time.sel(time=time0, method='nearest').time.data}") + + # list of tendency variable names for requested state variable + tendency_vars = fv3["tendency_varnames"][variable] + tendencies = fv3ds[tendency_vars] # subset of original Dataset + # convert DataArrays to Quantities to protect units. DataArray.mean drops units attribute. + tendencies = tendencies.metpy.quantify() + + # Define time slice starting with time-after-time0 and ending with validtime. + # We use the time *after* time0 because the time range corresponding to the tendency + # output is the period immediately prior to the tendency timestamp. + # That way, slice(time_after_time0, validtime) has a time range of [time0,validtime]. + idx_first_time_after_time0 = (fv3ds.time > time0).argmax() + time_after_time0 = fv3ds.time[idx_first_time_after_time0] + tindex = {"time": slice(time_after_time0, validtime)} + logging.debug( + "Time-weighted mean tendencies for time index slice %s", tindex) + timeweights = fv3ds.time.diff("time").sel(tindex) + time_weighted_tendencies = tendencies.sel(tindex) * timeweights + tendencies_avg = time_weighted_tendencies.sum( + dim="time") / timeweights.sum(dim="time") + + # Make list of long_names before .to_array() loses them. 
+ long_names = [fv3ds[da].attrs["long_name"] for da in tendencies_avg] + + # Keep characters after final underscore. The first part is redundant. + # for example dtend_u_pbl -> pbl + name_dict = {da: "_".join(da.split("_")[-1:]) + for da in tendencies_avg.data_vars} + logging.debug("rename %s", name_dict) tendencies_avg = tendencies_avg.rename(name_dict) # Stack variables along new tendency dimension of new DataArray. tendency_dim = f"{variable} tendency" tendencies_avg = tendencies_avg.to_array(dim=tendency_dim) - # Assign long_names to a new DataArray coordinate. It will have the same shape as tendency dimension. - tendencies_avg = tendencies_avg.assign_coords({"long_name":(tendency_dim,long_names)}) - - logging.info(f"calculate actual change in {variable}") - state_variable = fv3ds[variable].metpy.quantify() # Tried metpy.quantify() with open_dataset, but pint.errors.UndefinedUnitError: 'dBz' is not defined in the unit registry - dstate_variable = state_variable.sel(time = validtime) - state_variable.sel(time = time0) - dstate_variable = dstate_variable.assign_coords(time=validtime) - dstate_variable.attrs["long_name"] = f"actual change in {state_variable.attrs['long_name']}" - - # Add all tendencies together and subtract actual rate of change in state variable. - # This is residual. - total = tendencies_avg.sum(dim=tendency_dim) - twindow_quantity = twindow.total_seconds() * units.seconds - resid = total - dstate_variable/twindow_quantity - - - logging.info("Define DataArray to plot (da2plot).") - # Plot all tendencies. - da2plot = tendencies_avg - # Add total and resid DataArrays to tendency_dim. - total = total.expand_dims({tendency_dim:["total"]}).assign_coords(long_name="sum of tendencies") - resid = resid.expand_dims({tendency_dim:["resid"]}).assign_coords(long_name=f"sum of tendencies - actual rate of change of {variable} (residual)") - da2plot = xarray.concat([da2plot, total, resid], dim=tendency_dim) + # Assign long_names to a new DataArray coordinate. 
+ # It will have the same shape as tendency dimension. + tendencies_avg = tendencies_avg.assign_coords( + {"long_name": (tendency_dim, long_names)}) + + logging.info("calculate actual change in %s", variable) + # Tried metpy.quantify() with open_dataset, but + # pint.errors.UndefinedUnitError: 'dBz' is not defined in the unit registry + state_variable = fv3ds[variable].metpy.quantify() + actual_change = state_variable.sel(time=validtime) - state_variable.sel( + time=time0, method="nearest", tolerance=datetime.timedelta(milliseconds=1)) + actual_change = actual_change.assign_coords(time=validtime) + actual_change.attrs["long_name"] = f"actual change in {state_variable.attrs['long_name']}" + + # Sum all tendencies (physics and non-physics) + all_tendencies = tendencies_avg.sum(dim=tendency_dim) + + # Subtract physics tendency variable if it was in tendency_vars. Don't want to double-count. + phys_var = [x for x in tendency_vars if x.endswith("_phys")] + if phys_var: + logging.info("subtracting 'phys' tendency variable " + "from all_tendencies to avoid double-counting") + # use .data to avoid re-introducing tendency coordinate + all_tendencies = all_tendencies - \ + tendencies_avg.sel({tendency_dim: "phys"}).data + + # Calculate actual tendency of state variable. + actual_tendency = actual_change / twindow_quantity + logging.info( + "subtract actual tendency from all_tendencies to get residual") + resid = all_tendencies - actual_tendency + + # Concatenate all_tendencies, actual_tendency, and resid DataArrays. + # Give them a name and long_name along tendency_dim. 
+ all_tendencies = all_tendencies.expand_dims({tendency_dim: ["all"]}).assign_coords( + long_name="sum of tendencies") + actual_tendency = actual_tendency.expand_dims({tendency_dim: ["actual"]}).assign_coords( + long_name=f"actual rate of change of {variable}") + resid = resid.expand_dims({tendency_dim: ["resid"]}).assign_coords( + long_name=f"sum of tendencies - actual rate of change of {variable} (residual)") + da2plot = xarray.concat( + [tendencies_avg, all_tendencies, actual_tendency, resid], dim=tendency_dim) col = tendency_dim if da2plot.metpy.vertical.attrs["units"] == "mb": - da2plot.metpy.vertical.attrs["units"] = "hPa" # For MetPy. Otherwise, mb is interpreted as millibarn. - + # For MetPy. Otherwise, mb is interpreted as millibarn. + da2plot.metpy.vertical.attrs["units"] = "hPa" # Make default dimensions of facetgrid kind of square. if not ncols: # Default # of cols is square root of # of panels ncols = int(np.ceil(np.sqrt(len(da2plot)))) - nrows = int(np.ceil(len(da2plot)/ncols)) - - # dequantify moves units from DataArray to Attributes. Now they show up in colorbar. - da2plot = da2plot.metpy.dequantify() - - if da2plot["pfull"].size == 1: - # Avoid ValueError: ('grid_yt', 'grid_xt') must be a permuted list of ('pfull', 'grid_yt', 'grid_xt'), unless `...` is included - # Error occurs in pcolormesh(). - da2plot=da2plot.squeeze() - # Kludgy steps to prepare metadata for metpy cross section - da2plot = da2plot.drop_vars(['grid_yt','grid_xt','long_name']).rename(dict(grid_yt="y",grid_xt="x")) # these confuse metpy + # grid_yt and grid_xt confuse metpy + da2plot = da2plot.drop_vars(['grid_yt', 'grid_xt', 'long_name']).rename( + {"grid_yt":"y", "grid_xt":"x"}) # fv3 uses Extended Schmidt Gnomomic grid for regional applications. This is not in cartopy. # Found similar Lambert Conformal projection by trial and error. 
- da2plot = da2plot.metpy.assign_crs( grid_mapping_name="lambert_conformal_conic", standard_parallel=fv3["standard_parallel"], longitude_of_central_meridian=-97.5, latitude_of_projection_origin=fv3["standard_parallel"]).metpy.assign_y_x(force=True, tolerance=44069*units.m) - # Define cross section. Use different variable than da2plot because da2plot is used later for inset. - # upgraded xarray to 0.21.1 to avoid FutureWarning: Passing method to Float64Index.get_loc is deprecated + da2plot = da2plot.metpy.assign_crs( + grid_mapping_name="lambert_conformal_conic", + standard_parallel=fv3["standard_parallel"], + longitude_of_central_meridian=-97.5, + latitude_of_projection_origin=fv3["standard_parallel"]).metpy.assign_y_x( + force=True, tolerance=44069*units.m) + logging.info("Define cross section.") cross = cross_section(da2plot, startpt, endpt) + # dequantify moves units from DataArray to attributes. Now they show up in colorbar. + da2plot = da2plot.metpy.dequantify() logging.info("plot pcolormesh") - w,h = 0.18, 0.18 # normalized width and height of inset. Shrink colorbar to provide space. - pc = cross.plot.pcolormesh(x="index", y="pfull", yincrease=False, col=col, col_wrap=ncols, robust=True, infer_intervals=True, - cmap=fv3["cmap"], cbar_kwargs={'shrink':1-h, 'anchor':(0,0.25-h)}) # robust (bool, optional) – If True and vmin or vmax are absent, the colormap range is computed with 2nd and 98th percentiles instead of the extreme values - for ax in pc.axes.flat: + # normalized width and height of inset. Shrink colorbar to provide space. 
+ wid_inset, hgt_inset = 0.18, 0.18 + pcm = cross.squeeze().plot.pcolormesh(x="lont", y="pfull", yincrease=False, + col=col, col_wrap=ncols, robust=robust, infer_intervals=True, + vmin=vmin, vmax=vmax, cmap=fv3["cmap"], + cbar_kwargs={'shrink': 1-hgt_inset, 'anchor': (0, 0.25-hgt_inset)}) + + for ax in pcm.axes.flat: ax.grid(visible=True, color="grey", alpha=0.5, lw=0.5) - ax.xaxis.set_major_locator(MultipleLocator(dindex)) ax.yaxis.set_major_locator(MultipleLocator(100)) ax.yaxis.set_minor_locator(MultipleLocator(25)) ax.grid(which="minor", alpha=0.3, lw=0.4) @@ -210,36 +265,39 @@ def main(): # Add time to title title = f'{time0}-{validtime} ({twindow_quantity.to("hours"):~} time window)' plt.suptitle(title, wrap=True) - # pad top and bottom for title and fineprint. Unfortunately, you must redefine right pad, as xarray no longer controls it. - plt.subplots_adjust(top=0.9,right=0.8,bottom=0.1) + # pad top and bottom for title and fineprint. + # Unfortunately, you must redefine right pad, as xarray no longer controls it. + plt.subplots_adjust(top=0.9, right=0.8, bottom=0.1) # Locate cross section on conus map background. Put in inset. 
data_crs = da2plot.metpy.cartopy_crs - ax_inset = plt.gcf().add_axes([.999-w,.999-h, w, h], projection=data_crs) + ax_inset = plt.gcf().add_axes( + [.995-wid_inset, .999-hgt_inset, wid_inset, hgt_inset], projection=data_crs) # Plot the endpoints of the cross section (make sure they match path) - endpoints = data_crs.transform_points(cartopy.crs.Geodetic(), *np.vstack([startpt, endpt]).transpose()[::-1]) - bb = ax_inset.scatter(endpoints[:, 0], endpoints[:, 1], s=1.3, c='k', zorder=2) - ax_inset.scatter(cross['x'][dindex::dindex], cross['y'][dindex::dindex], s=3.4, c='white', linewidths=0.2, edgecolors='k', zorder=bb.get_zorder()+1) + endpoints = data_crs.transform_points( + cartopy.crs.Geodetic(), *np.vstack([startpt, endpt]).transpose()[::-1]) + bb = ax_inset.scatter(endpoints[:, 0], endpoints[:, 1], c='k', zorder=2) + ax_inset.scatter(cross['x'], cross['y'], + s=3.4, c='white', linewidths=0.2, edgecolors='k', zorder=bb.get_zorder()+1) # Plot the path of the cross section ax_inset.plot(cross['x'], cross['y'], c='k', zorder=2) physics_tend.add_conus_features(ax_inset) extent = fv3["extent"] ax_inset.set_extent(extent) - # Annotate figure with details about figure creation. 
- fineprint = f"history: {os.path.realpath(ifile.name)}" - if subtract: - fineprint += f"\nsubtract: {os.path.realpath(subtract.name)}" - fineprint += f"\ngrid_spec: {os.path.realpath(gfile.name)}" - fineprint += f"\ncreated {datetime.datetime.now(tz=None)}" + # Annotate figure with args namespace and timestamp + fineprint = f"{args} " + fineprint += f", created {datetime.datetime.now(tz=None)}" if nofineprint: - logging.info(fineprint) + logging.debug(fineprint) else: - fineprint_obj = plt.annotate(text=fineprint, xy=(1,1), xycoords='figure pixels', fontsize=5) - + logging.debug("add fineprint to image") + plt.figtext(0, 0, fineprint, fontsize='xx-small', + va="bottom", wrap=True) plt.savefig(ofile, dpi=fv3["dpi"]) - logging.info(f'created {os.path.realpath(ofile)}') + logging.info('created %s', os.path.realpath(ofile)) + if __name__ == "__main__": main() diff --git a/metplotpy/contributed/fv3_physics_tend/physics_tend.py b/metplotpy/contributed/fv3_physics_tend/physics_tend.py index e21444e0..dad0dab0 100644 --- a/metplotpy/contributed/fv3_physics_tend/physics_tend.py +++ b/metplotpy/contributed/fv3_physics_tend/physics_tend.py @@ -1,65 +1,25 @@ +"""Common functions for fv3_physics_tend""" +import logging +import os import cartopy.io.shapereader as shpreader import cartopy.feature as cfeature -import datetime -import logging import matplotlib.path import numpy as np -import os -import pandas as pd -from shapely.geometry import Point, multipolygon -#from tqdm import tqdm # progress bar +from shapely.geometry import multipolygon +# from tqdm import tqdm # progress bar import xarray -import yaml def add_conus_features(ax): - cl = ax.add_feature(cfeature.COASTLINE.with_scale('50m'), linewidth=0.3) - bd = ax.add_feature(cfeature.BORDERS.with_scale('50m'), linewidth=0.3) - st = ax.add_feature(cfeature.STATES.with_scale('50m'), linewidth=0.1) - lk = ax.add_feature(cfeature.LAKES.with_scale('50m'), edgecolor='k', linewidth=0.25, facecolor='k', alpha=0.1) - return (cl, 
bd, st, lk) - - -def add_time0(ds, variable, fv3, interval=np.timedelta64(1,'h')): - tendency_varnames = fv3["tendency_varnames"] - time0_varname = fv3["time0_varname"] - # This takes a while so only keep requested state variable and - # its tendency_varnames. - tokeep = tendency_varnames[variable].copy() - tokeep.append(time0_varname[variable]) - tokeep.append(variable) - ds = ds[tokeep] - - # Return new Dataset with time0 (initialization time) - # Assume initialization time is an interval before first time - # in ds. Interval is 1 hour by default, but can be changed. - time0 = ds.time.values[0] - interval - times = np.insert(ds.time.values,0,time0) # new time array - # Allocate new Dataset with additional time at time0 - coords = dict(ds.coords) # extract ds.coords as dictionary so it can be changed. - coords.update(dict(time=times)) - data_vars = {} - dims = ds[variable].dims - # state variable at time0 and other times - vari = ds[time0_varname[variable]] - data = np.insert(ds[variable].data, 0, vari.data, axis=0) - data_vars[variable] = (dims, data) - # tendency=0 at time0 - logging.info(f"Adding time0 to {variable}. 
This could take a while...") - #for tendency in tqdm(tendency_varnames[variable]): - for tendency in tendency_varnames[variable]: - logging.debug(tendency) - dims = ds[tendency].dims - data = ds[tendency].data - data = np.insert(data, 0, np.zeros_like(data[0]), axis=0) - data_vars[tendency] = (dims, data) - ds0 = xarray.Dataset(data_vars=data_vars, coords=coords) - ds0.attrs = ds.attrs - for da in ds0: - ds0[da].attrs = ds[da].attrs - return ds0 + """ add borders """ + ax.add_feature(cfeature.COASTLINE.with_scale('50m'), linewidth=0.3) + ax.add_feature(cfeature.BORDERS.with_scale('50m'), linewidth=0.3) + ax.add_feature(cfeature.STATES.with_scale('50m'), linewidth=0.1) + ax.add_feature(cfeature.LAKES.with_scale( + '50m'), edgecolor='k', linewidth=0.25, facecolor='k', alpha=0.1) + return ax -def pts_in_shp(lats, lons, shp, debug=False): +def pts_in_shp(lats, lons, shp): # Map longitude to -180 to +180 range lons = np.where(lons > 180, lons-360, lons) # If shp is a directory, point to .shp file of same name in it. @@ -67,7 +27,8 @@ def pts_in_shp(lats, lons, shp, debug=False): if os.path.isdir(shp): shp = shp + "/" + os.path.basename(shp) + ".shp" shape = shpreader.Reader(shp) - ll_array = np.hstack((lons.flatten()[:,np.newaxis],lats.flatten()[:,np.newaxis])) + ll_array = np.hstack( + (lons.flatten()[:, np.newaxis], lats.flatten()[:, np.newaxis])) mask = np.full(lats.flatten().shape, False) # How to make shapefile for EAST_CONUS (CONUS east of 105W) # import shapefile @@ -79,20 +40,21 @@ def pts_in_shp(lats, lons, shp, debug=False): # shape.to_file("EAST_CONUS") # It is as simple as that. - # This seems kind of hacky. Can you recurse through a mixture of Polygons and Multipolygons more elegantly? + # This seems kind of hacky. + # Can you recurse through a mixture of Polygons and Multipolygons more elegantly? # Tried geopandas read_shape . geometry but it was no more elegant. 
for g in shape.geometries(): logging.debug(f"{__name__} pts_in_shp area {g.area}") # How to deal with 3-D polygons (i.e. POLYGON Z)? some shape files are 3D. - if g.has_z: - logging.error(f"Uh oh. shape geometry has z-coordinate in {shp}") - logging.error("I don't know how to process 3-D polygons (i.e. POLYGON Z).") - sys.exit(1) + assert not g.has_z, (f"Uh oh. shape geometry has z-coordinate in {shp}" + "I don't know how to process 3-D polygons (i.e. POLYGON Z).") if isinstance(g, multipolygon.MultiPolygon): for mp in g.geoms: - mask = mask | matplotlib.path.Path(mp.exterior.coords).contains_points(ll_array) + mask = mask | matplotlib.path.Path( + mp.exterior.coords).contains_points(ll_array) else: - mask = mask | matplotlib.path.Path(g.exterior.coords).contains_points(ll_array) - logging.debug(f"pts_in_shp: {mask.sum()} points") + mask = mask | matplotlib.path.Path( + g.exterior.coords).contains_points(ll_array) + logging.debug("pts_in_shp: %s points", mask.sum()) shape.close() return np.reshape(mask, lats.shape) diff --git a/metplotpy/contributed/fv3_physics_tend/planview_fv3.py b/metplotpy/contributed/fv3_physics_tend/planview_fv3.py index 22f31628..acde5971 100644 --- a/metplotpy/contributed/fv3_physics_tend/planview_fv3.py +++ b/metplotpy/contributed/fv3_physics_tend/planview_fv3.py @@ -1,70 +1,96 @@ +""" Plan view of tendencies """ import argparse -import cartopy import datetime import logging +import os +import cartopy import matplotlib.pyplot as plt from metpy.units import units import numpy as np -import os import pandas as pd -import pdb -from . import physics_tend -import sys import xarray import yaml - -""" -Plan view of tendencies of t, q, u, or v from physics parameterizations, dynamics (non-physics), their total, and residual. -Total change is the actual change in state variable from first time to last time. Total change differs from cumulative change -attributed to physics and non-physics tendencies when residual is not zero. 
-""" +import physics_tend def parse_args(): + """ + parse command line arguments + """ # =============Arguments=================== - parser = argparse.ArgumentParser(description = "Plan view of FV3 diagnostic tendency", formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser = argparse.ArgumentParser( + description="Plan view of FV3 diagnostic tendencies", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) # ==========Mandatory Arguments=================== - parser.add_argument("config", type=argparse.FileType('r'), help="yaml configuration file") - parser.add_argument("historyfile", type=argparse.FileType("r"), help="FV3 history file") - parser.add_argument("gridfile", type=argparse.FileType("r"), help="FV3 grid spec file") - parser.add_argument("statevariable", type=str, help="moisture, temperature, or wind component variable name") - parser.add_argument("fill", type=str, help='type of tendency. ignored if pfull is a single level') + parser.add_argument("config", help="yaml configuration file") + parser.add_argument("historyfile", help="FV3 history file") + parser.add_argument("gridfile", help="FV3 grid spec file") + parser.add_argument( + "statevariable", help="moisture, temperature, or wind component variable name") + parser.add_argument( + "tendencytype", help='type of tendency. 
ignored if pfull is a single level') # ==========Optional Arguments=================== parser.add_argument("-d", "--debug", action='store_true') - parser.add_argument("--method", type=str, choices=["nearest", "linear","loglinear"], default="nearest", help="vertical interpolation method") - parser.add_argument("--ncols", type=int, default=None, help="number of columns") - parser.add_argument("--nofineprint", action='store_true', help="Don't add metadata and created by date (for comparing images)") - parser.add_argument("-o", "--ofile", type=str, help="name of output image file") - parser.add_argument("-p", "--pfull", nargs='+', type=float, default=[1000,925,850,700,500,300,200,100,0], help="pressure level(s) in hPa to plot. If only one pressure level is provided, the type-of-tendency argument will be ignored and all tendencies will be plotted.") - parser.add_argument("-s", "--shp", type=str, default=None, help="shape file directory for mask") - parser.add_argument("--subtract", type=argparse.FileType("r"), help="FV3 history file to subtract") - parser.add_argument("-t", "--twindow", type=int, default=3, help="time window in hours") - parser.add_argument("-v", "--validtime", type=lambda x:pd.to_datetime(x), help="valid time") + parser.add_argument("--method", choices=["nearest", "linear", "loglinear"], default="nearest", + help="vertical interpolation method") + parser.add_argument("--ncols", type=int, default=None, + help="number of columns") + parser.add_argument("--nofineprint", action='store_true', + help="Don't add metadata and created by date (for comparing images)") + parser.add_argument("--norobust", action='store_true', + help="compute colormap range with extremes, not 2nd and 98th percentiles") + parser.add_argument("-o", "--ofile", help="name of output image file") + parser.add_argument("-p", "--pfull", nargs='+', type=float, + default=[1000, 925, 850, 700, 500, 300, 200, 100, 0], + help=("pressure level(s) in hPa to plot. 
" + "If only one pressure level is provided, the type-of-tendency " + "argument will be ignored and all tendencies will be plotted.") + ) + parser.add_argument("-s", "--shp", help="shape file directory for mask") + parser.add_argument("--subtract", help="FV3 history file to subtract") + parser.add_argument("-t", "--twindow", type=float, + default=3, help="time window in hours") + parser.add_argument("-v", "--validtime", help="valid time") + parser.add_argument("--vmin", type=float, + help="color bar minimum (overrides robust=True)") + parser.add_argument("--vmax", type=float, + help="color bar maximum (overrides robust=True)") args = parser.parse_args() return args def main(): + """ + Plan view of tendencies of t, q, u, or v from physics parameterizations, + dynamics (non-physics), the combination of all tendencies (physics and non-physics), + the actual tendency, and the residual. Residual is the sum of all tendencies minus the + actual tendency. + """ args = parse_args() - gfile = args.gridfile - ifile = args.historyfile - variable = args.statevariable - fill = args.fill - config = args.config - debug = args.debug - method = args.method - ncols = args.ncols - nofineprint= args.nofineprint - ofile = args.ofile - pfull = args.pfull * units.hPa - shp = args.shp - subtract = args.subtract - twindow = datetime.timedelta(hours = args.twindow) - validtime = args.validtime + gfile = args.gridfile + ifile = args.historyfile + variable = args.statevariable + tendencytype = args.tendencytype + config = args.config + method = args.method + ncols = args.ncols + nofineprint = args.nofineprint + ofile = args.ofile + pfull = args.pfull * units.hPa + robust = not args.norobust + shp = args.shp + subtract = args.subtract + twindow = datetime.timedelta(hours=args.twindow) + twindow_quantity = twindow.total_seconds() * units.seconds + validtime = pd.to_datetime(args.validtime) + vmin = args.vmin + vmax = args.vmax level = logging.INFO - if debug: level = logging.DEBUG - 
logging.basicConfig(format='%(asctime)s - %(message)s', level=level) # prepend log message with time + if args.debug: + level = logging.DEBUG + # prepend log message with time + logging.basicConfig(format='%(asctime)s - %(message)s', level=level) logging.debug(args) # Output filename. @@ -74,136 +100,170 @@ def main(): ofile = os.path.realpath(args.ofile) odir = os.path.dirname(ofile) if not os.path.exists(odir): - logging.info(f"output directory {odir} does not exist. Creating it") + logging.info( + f"output directory {odir} does not exist. Creating it") os.mkdir(odir) - logging.info(f"output filename={ofile}") - + logging.debug("output filename=%s", ofile) # Reload fv3 in case user specifies a custom --config file - fv3 = yaml.load(open(config.name), Loader=yaml.FullLoader) + fv3 = yaml.load(open(config, encoding="utf8"), Loader=yaml.FullLoader) # Read lat/lon/area from gfile logging.debug(f"read lat/lon/area from {gfile}") - gds = xarray.open_dataset(gfile.name) + gds = xarray.open_mfdataset(gfile) lont = gds[fv3["lon_name"]] latt = gds[fv3["lat_name"]] area = gds["area"] # Open input file - logging.debug(f"About to open {ifile}") - fv3ds = xarray.open_dataset(ifile.name) + logging.debug("open %s", ifile) + fv3ds = xarray.open_dataset(ifile) + + if subtract: + logging.info("subtracting %s", subtract) + with xarray.set_options(keep_attrs=True): + fv3ds -= xarray.open_mfdataset(subtract) + datetimeindex = fv3ds.indexes['time'] if hasattr(datetimeindex, "to_datetimeindex"): - # Convert from CFTime to pandas datetime. I get a warning CFTimeIndex from non-standard calendar 'julian'. Maybe history file should be saved with standard calendar. + # Convert from CFTime to pandas datetime or get warning + # CFTimeIndex from non-standard calendar 'julian'. + # Maybe history file should be saved with standard calendar. # To turn off warning, set unsafe=True. 
datetimeindex = datetimeindex.to_datetimeindex(unsafe=True) + ragged_times = datetimeindex != datetimeindex.round('1ms') + if any(ragged_times): + logging.info( + f"round times to nearest millisec. before: {datetimeindex[ragged_times].values}") + datetimeindex = datetimeindex.round('1ms') + logging.info(f"after: {datetimeindex[ragged_times].values}") fv3ds['time'] = datetimeindex - if subtract: - logging.debug(f"subtracting {subtract.name}") - with xarray.set_options(keep_attrs=True): - fv3ds -= xarray.open_dataset(subtract.name) - fv3ds = fv3ds.assign_coords(lont=lont, latt=latt) # lont and latt used by pcolorfill() - tendency_vars = fv3["tendency_varnames"][variable] # list of tendency variable names for requested state variable - fv3ds = physics_tend.add_time0(fv3ds, variable, fv3) - tendencies = fv3ds[tendency_vars] # subset of original Dataset + # lont and latt used by pcolorfill() + fv3ds = fv3ds.assign_coords(lont=lont, latt=latt) if validtime is None: - logging.debug("validtime not provided on command line, so use latest time in history file.") validtime = fv3ds.time.values[-1] validtime = pd.to_datetime(validtime) + logging.info( + "validtime not provided on command line. Using last time in history file %s.", + validtime) time0 = validtime - twindow - time1 = time0 + datetime.timedelta(hours=1) - logging.info(f"Sum tendencies {time1}-{validtime}") - tindex = dict(time=slice(time1, validtime)) # slice of time from hour after time0 through validtime - tendencies_avg = tendencies.sel(tindex).mean(dim="time") # average tendencies in time - - # Dynamics (nophys) tendency is not reset every hour. Just calculate change from time0 to validtime. - nophys_var = [x for x in tendency_vars if x.endswith("_nophys")] - assert len(nophys_var) == 1 - nophys_var = nophys_var[0] # we don't want a 1-element list; we want a string. So that tendencies[nophys_var] is a DataArray, not a Dataset. 
- logging.info(f"Subtract nophys tendency at {time0} from {validtime}") - nophys_delta = tendencies[nophys_var].sel(time=validtime) - tendencies[nophys_var].sel(time=time0) - tendencies_avg[nophys_var] = nophys_delta / args.twindow - - - # Restore units after .mean() removed them. Copy units from 1st tendency variable. - tendency_units = units.parse_expression(fv3ds[tendency_vars[0]].units) - logging.debug(f"restoring {tendency_units} units after .mean() method removed them.") - tendencies_avg *= tendency_units - for da in tendencies_avg: - tendencies_avg[da] = tendencies_avg[da].metpy.convert_units("K/hour") - long_names = [fv3ds[da].attrs["long_name"] for da in tendencies_avg] # Make list of long_names before .to_array() loses them. - - # Remove characters up to and including 1st underscore (e.g. du3dt_) in DataArray name. - # for example dt3dt_pbl -> pbl - name_dict = {da : "_".join(da.split("_")[1:]) for da in tendencies_avg.data_vars} + assert time0 in fv3ds.time, (f"time0 {time0} not in history file. Closest is " + f"{fv3ds.time.sel(time=time0, method='nearest').time.data}") + + # list of tendency variable names for requested state variable + tendency_vars = fv3["tendency_varnames"][variable] + tendencies = fv3ds[tendency_vars] # subset of original Dataset + # convert DataArrays to Quantities to protect units. DataArray.mean drops units attribute. + tendencies = tendencies.metpy.quantify() + + # Define time slice starting with time-after-time0 and ending with validtime. + # We use the time *after* time0 because the time range corresponding to the tendency + # output is the period immediately prior to the tendency timestamp. + # That way, slice(time_after_time0, validtime) has a time range of [time0,validtime]. 
+ idx_first_time_after_time0 = (fv3ds.time > time0).argmax() + time_after_time0 = fv3ds.time[idx_first_time_after_time0] + tindex = {"time": slice(time_after_time0, validtime)} + logging.debug( + "Time-weighted mean tendencies for time index slice %s", tindex) + timeweights = fv3ds.time.diff("time").sel(tindex) + time_weighted_tendencies = tendencies.sel(tindex) * timeweights + tendencies_avg = time_weighted_tendencies.sum( + dim="time") / timeweights.sum(dim="time") + + # Make list of long_names before .to_array() loses them. + long_names = [fv3ds[da].attrs["long_name"] for da in tendencies_avg] + + # Keep characters after final underscore. The first part is redundant. + # for example dtend_u_pbl -> pbl + name_dict = {da: "_".join(da.split("_")[-1:]) + for da in tendencies_avg.data_vars} + logging.debug("rename %s", name_dict) tendencies_avg = tendencies_avg.rename(name_dict) # Stack variables along new tendency dimension of new DataArray. tendency_dim = f"{variable} tendency" tendencies_avg = tendencies_avg.to_array(dim=tendency_dim) - # Assign long_names to a new DataArray coordinate. It will have the same shape as tendency dimension. - tendencies_avg = tendencies_avg.assign_coords({"long_name":(tendency_dim,long_names)}) - - logging.info(f"calculate actual change in {variable}") - state_variable = fv3ds[variable].metpy.quantify() # Tried metpy.quantify() with open_dataset, but pint.errors.UndefinedUnitError: 'dBz' is not defined in the unit registry - dstate_variable = state_variable.sel(time = validtime) - state_variable.sel(time = time0) - dstate_variable = dstate_variable.assign_coords(time=validtime) - dstate_variable.attrs["long_name"] = f"actual change in {state_variable.attrs['long_name']}" - - # Add all tendencies together and subtract actual rate of change in state variable. - # This is residual. 
- total = tendencies_avg.sum(dim=tendency_dim) - twindow_quantity = twindow.total_seconds() * units.seconds - resid = total - dstate_variable/twindow_quantity - - - logging.info("Define DataArray to plot (da2plot).") - if len(pfull) == 1: - # If only 1 pressure level was requested, plot all tendencies. - da2plot = tendencies_avg - # Add total and resid DataArrays to tendency_dim. - total = total.expand_dims({tendency_dim:["total"]}).assign_coords(long_name="sum of tendencies") - resid = resid.expand_dims({tendency_dim:["resid"]}).assign_coords(long_name=f"sum of tendencies - actual rate of change of {variable} (residual)") - da2plot = xarray.concat([da2plot, total, resid], dim=tendency_dim) - col = tendency_dim - else: - # otherwise pick a DataArray (resid, state_variable, dstate_variable, tendency) + # Assign long_names to a new DataArray coordinate. + # It will have the same shape as tendency dimension. + tendencies_avg = tendencies_avg.assign_coords( + {"long_name": (tendency_dim, long_names)}) + + logging.info("calculate actual change in %s", variable) + # Tried metpy.quantify() with open_dataset, but + # pint.errors.UndefinedUnitError: 'dBz' is not defined in the unit registry + state_variable = fv3ds[variable].metpy.quantify() + actual_change = state_variable.sel(time=validtime) - state_variable.sel( + time=time0, method="nearest", tolerance=datetime.timedelta(milliseconds=1)) + actual_change = actual_change.assign_coords(time=validtime) + actual_change.attrs["long_name"] = f"actual change in {state_variable.attrs['long_name']}" + + # Sum all tendencies (physics and non-physics) + all_tendencies = tendencies_avg.sum(dim=tendency_dim) + + # Subtract physics tendency variable if it was in tendency_vars. Don't want to double-count. 
+ phys_var = [x for x in tendency_vars if x.endswith("_phys")] + if phys_var: + logging.info("subtracting 'phys' tendency variable " + "from all_tendencies to avoid double-counting") + # use .data to avoid re-introducing tendency coordinate + all_tendencies = all_tendencies - \ + tendencies_avg.sel({tendency_dim: "phys"}).data + + # Calculate actual tendency of state variable. + actual_tendency = actual_change / twindow_quantity + logging.info( + "subtract actual tendency from all_tendencies to get residual") + resid = all_tendencies - actual_tendency + + # Concatenate all_tendencies, actual_tendency, and resid DataArrays. + # Give them a name and long_name along tendency_dim. + all_tendencies = all_tendencies.expand_dims({tendency_dim: ["all"]}).assign_coords( + long_name="sum of tendencies") + actual_tendency = actual_tendency.expand_dims({tendency_dim: ["actual"]}).assign_coords( + long_name=f"actual rate of change of {variable}") + resid = resid.expand_dims({tendency_dim: ["resid"]}).assign_coords( + long_name=f"sum of tendencies - actual rate of change of {variable} (residual)") + da2plot = xarray.concat( + [tendencies_avg, all_tendencies, actual_tendency, resid], dim=tendency_dim) + col = tendency_dim + + if len(pfull) > 1: + # If more than one pressure level was specified col = "pfull" - if fill == 'resid': # residual - da2plot = resid - elif fill == "": # plain-old state variable - da2plot = state_variable.sel(time=validtime) - elif fill == "d"+variable: # actual change in state variable - da2plot = dstate_variable - else: # expected change in state variable from tendencies - da2plot = tendencies_avg.sel({tendency_dim:fill}) + # just select one type of tendency + da2plot = da2plot.sel({tendency_dim: tendencytype}) if da2plot.metpy.vertical.attrs["units"] == "mb": - da2plot.metpy.vertical.attrs["units"] = "hPa" # For MetPy. Otherwise, mb is interpreted as millibarn. + # For MetPy. Otherwise, mb is interpreted as millibarn. 
+ da2plot.metpy.vertical.attrs["units"] = "hPa" - # dequantify moves units from DataArray to Attributes. Now they show up in colorbar. And they aren't lost in xarray.DataArray.interp. + # dequantify moves units from DataArray to attributes. Now they show up in colorbar. + # And they aren't lost in xarray.DataArray.interp. da2plot = da2plot.metpy.dequantify() - # Select vertical levels. + logging.info(f"Select vertical levels with '{method}' method") if method == "nearest": - da2plot = da2plot.metpy.sel(vertical=pfull, method=method, tolerance=10.*units.hPa) + da2plot = da2plot.metpy.sel( + vertical=pfull, method=method, tolerance=10.*units.hPa) elif method == "linear": - da2plot = da2plot.interp(coords={"pfull":pfull}, method=method) - elif method == "loglinear": # interpolate in log10(pressure) - da2plot["pfull"] = np.log10(da2plot.pfull) - da2plot = da2plot.interp(coords={"pfull":np.log10(pfull.m)}, method="linear") + da2plot = da2plot.interp(coords={"pfull": pfull}, method=method) + elif method == "loglinear": # interpolate in log10(pressure) + da2plot["pfull"] = np.log10(da2plot.pfull) + da2plot = da2plot.interp( + coords={"pfull": np.log10(pfull.m)}, method="linear") da2plot["pfull"] = 10**da2plot.pfull - # Mask points outside shape. 
- if shp: - # mask points outside shape - mask = physics_tend.pts_in_shp(latt.values, lont.values, shp, debug=debug) # Use .values to avoid AttributeError: 'DataArray' object has no attribute 'flatten' - mask = xarray.DataArray(mask, coords=[da2plot.grid_yt, da2plot.grid_xt]) + if shp is not None: + # Use .values to avoid AttributeError: 'DataArray' object has no attribute 'flatten' + mask = physics_tend.pts_in_shp( + latt.values, lont.values, shp) + mask = xarray.DataArray( + mask, coords=[da2plot.grid_yt, da2plot.grid_xt]) da2plot = da2plot.where(mask, drop=True) - area = area.where(mask).fillna(0) + area = area.where(mask).fillna(0) totalarea = area.metpy.convert_units("km**2").sum() @@ -212,61 +272,62 @@ def main(): # Default # of cols is square root of # of panels ncols = int(np.ceil(np.sqrt(len(da2plot)))) - nrows = int(np.ceil(len(da2plot)/ncols)) - - if da2plot["pfull"].size == 1: - # Avoid ValueError: ('grid_yt', 'grid_xt') must be a permuted list of ('pfull', 'grid_yt', 'grid_xt'), unless `...` is included - # Error occurs in pcolormesh(). - da2plot=da2plot.squeeze() - - - # central lon/lat from https://github.com/NOAA-EMC/regional_workflow/blob/release/public-v1/ush/Python/plot_allvars.py - subplot_kws = dict(projection=cartopy.crs.LambertConformal(central_longitude=-97.6, central_latitude=35.4)) - - - logging.info("plot pcolormesh") - pc = da2plot.plot.pcolormesh(x="lont", y="latt", col=col, col_wrap=ncols, robust=True, infer_intervals=True, - transform=cartopy.crs.PlateCarree(), - cmap=fv3["cmap"], cbar_kwargs={'shrink':0.8}, subplot_kws=subplot_kws) # robust (bool, optional) – If True and vmin or vmax are absent, the colormap range is computed with 2nd and 98th percentiles instead of the extreme values - for ax in pc.axes.flat: - ax.set_extent(fv3["extent"]) # Why needed only when col=tendency_dim? With col="pfull" it shrinks to unmasked size. + # Avoid ValueError in pcolormesh(). 
+ da2plot = da2plot.squeeze() + + # central lon/lat from https://github.com/NOAA-EMC/regional_workflow/blob/ + # release/public-v1/ush/Python/plot_allvars.py + subplot_kws = { + "projection": cartopy.crs.LambertConformal( + central_longitude=-97.6, central_latitude=35.4, standard_parallels=None)} + + logging.debug("plot pcolormesh") + if robust: + logging.warning("compute colormap range with 2nd and 98th percentiles") + pcm = da2plot.plot.pcolormesh(x="lont", y="latt", col=col, col_wrap=ncols, robust=robust, + infer_intervals=True, transform=cartopy.crs.PlateCarree(), + vmin=vmin, vmax=vmax, cmap=fv3["cmap"], + cbar_kwargs={'shrink': 0.8}, subplot_kws=subplot_kws) + for ax in pcm.axes.flat: + # Why needed only when col=tendency_dim? With col="pfull" it shrinks to unmasked size. + ax.set_extent(fv3["extent"]) physics_tend.add_conus_features(ax) - # Add time to title + # Add time to title title = f'{time0}-{validtime} ({twindow_quantity.to("hours"):~} time window)' if col == tendency_dim: - title = f'pfull={pfull[0]:~.0f} {title}' + title = f'pfull={da2plot.pfull.metpy.quantify().data:~.1f} {title}' elif 'long_name' in da2plot.coords: title = f'{da2plot.coords["long_name"].data} {title}' plt.suptitle(title, wrap=True) - # Annotate figure with details about figure creation. 
- fineprint = f"history: {os.path.realpath(ifile.name)}" - if subtract: - fineprint += f"\nsubtract: {os.path.realpath(subtract.name)}" - fineprint += f"\ngrid_spec: {os.path.realpath(gfile.name)}" - if shp: fineprint += f"\nmask: {shp}" - fineprint += f"\ntotal area: {totalarea.data:~.0f}" - fineprint += f"\nvertical interpolation method: {method} requested levels: {pfull}" - fineprint += f"\ncreated {datetime.datetime.now(tz=None)}" + # Annotate figure with args namespace, total area, and timestamp + fineprint = f"{args} " + fineprint += (f"total area={totalarea.compute().data:~.0f}, " + f"created {datetime.datetime.now(tz=None)}") if nofineprint: - logging.info(fineprint) + logging.debug(fineprint) else: - fineprint_obj = plt.annotate(text=fineprint, xy=(1,1), xycoords='figure pixels', fontsize=5) - + logging.debug("add fineprint to image") + plt.figtext(0, 0, fineprint, fontsize='xx-small', + va="bottom", wrap=True) plt.savefig(ofile, dpi=fv3["dpi"]) - logging.info(f'created {os.path.realpath(ofile)}') + logging.info('created %s', os.path.realpath(ofile)) + def default_ofile(args): + """ + Return default output filename. 
+ """ pfull = args.pfull * units.hPa if len(pfull) == 1: - pfull_str = f"{pfull[0]:~.0f}".replace(" ","") + pfull_str = f"{pfull[0]:~.0f}".replace(" ", "") ofile = f"{args.statevariable}_{pfull_str}.png" else: - ofile = f"{args.statevariable}_{args.fill}.png" - if args.shp: + ofile = f"{args.statevariable}_{args.tendencytype}.png" + if args.shp is not None: shp = shp.rstrip("/") # Add shapefile name to output filename shapename = os.path.basename(shp) diff --git a/metplotpy/contributed/fv3_physics_tend/vert_profile_fv3.py b/metplotpy/contributed/fv3_physics_tend/vert_profile_fv3.py index 19d4534f..20072ed9 100644 --- a/metplotpy/contributed/fv3_physics_tend/vert_profile_fv3.py +++ b/metplotpy/contributed/fv3_physics_tend/vert_profile_fv3.py @@ -1,65 +1,77 @@ +""" Vertical profile of tendencies """ import argparse -import cartopy import datetime import logging +import os +import cartopy import matplotlib.pyplot as plt from matplotlib.ticker import MultipleLocator from metpy.units import units -import numpy as np -import os import pandas as pd -import pdb -from . import physics_tend -import sys import xarray import yaml - -""" -Vertical profile of tendencies of t, q, u, or v from physics parameterizations, dynamics (non-physics), their total, and residual. -Total change is the actual change in state variable from first time to last time. Total change differs from cumulative change -attributed to physics and non-physics tendencies when residual is not zero. 
-""" +import physics_tend def parse_args(): + """ + parse command line arguments + """ # =============Arguments=================== - parser = argparse.ArgumentParser(description = "Vertical profile of FV3 diagnostic tendencies", formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser = argparse.ArgumentParser( + description="Vertical profile of FV3 diagnostic tendencies", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) # ==========Mandatory Arguments=================== - parser.add_argument("config", type=argparse.FileType('r'), help="yaml configuration file") - parser.add_argument("historyfile", type=argparse.FileType("r"), help="FV3 history file") - parser.add_argument("gridfile", type=argparse.FileType("r"), help="FV3 grid spec file") - parser.add_argument("statevariable", type=str, help="moisture, temperature, or wind component variable name") + parser.add_argument("config", help="yaml configuration file") + parser.add_argument("historyfile", help="FV3 history file") + parser.add_argument("gridfile", help="FV3 grid spec file") + parser.add_argument( + "statevariable", help="moisture, temperature, or wind component variable name") # ==========Optional Arguments=================== parser.add_argument("-d", "--debug", action='store_true') - parser.add_argument("--nofineprint", action='store_true', help="Don't add metadata and created by date (for comparing images)") - parser.add_argument("-o", "--ofile", type=str, help="name of output image file") - parser.add_argument("--resid", action="store_true", help="calculate residual") - parser.add_argument("-s", "--shp", type=str, default=None, help="shape file directory for mask") - parser.add_argument("--subtract", type=argparse.FileType("r"), help="FV3 history file to subtract") - parser.add_argument("-t", "--twindow", type=int, default=3, help="time window in hours") - parser.add_argument("-v", "--validtime", type=lambda x:pd.to_datetime(x), help="valid time") + parser.add_argument("--nofineprint", 
action='store_true', + help="Don't add metadata and created by date (for comparing images)") + parser.add_argument("-o", "--ofile", help="name of output image file") + parser.add_argument("--resid", action="store_true", + help="calculate residual") + parser.add_argument("-s", "--shp", help="shape file directory for mask") + parser.add_argument("--subtract", help="FV3 history file to subtract") + parser.add_argument("-t", "--twindow", type=float, + default=3, help="time window in hours") + parser.add_argument("-v", "--validtime", help="valid time") + parser.add_argument("--xmin", type=float, help="x-axis minimum") + parser.add_argument("--xmax", type=float, help="x-axis maximum") args = parser.parse_args() return args def main(): + """ + Vertical profile of tendencies of t, q, u, or v from physics parameterizations, + dynamics (non-physics), the combination of all tendencies (physics and non-physics), + the actual tendency, and the residual. Residual is the sum of all tendencies minus the + actual tendency. 
+ """ args = parse_args() - gfile = args.gridfile - ifile = args.historyfile - variable = args.statevariable - config = args.config - debug = args.debug - resid = args.resid - nofineprint= args.nofineprint - ofile = args.ofile - shp = args.shp - subtract = args.subtract - twindow = datetime.timedelta(hours = args.twindow) - validtime = args.validtime + gfile = args.gridfile + ifile = args.historyfile + variable = args.statevariable + config = args.config + nofineprint = args.nofineprint + ofile = args.ofile + shp = args.shp + subtract = args.subtract + twindow = datetime.timedelta(hours=args.twindow) + twindow_quantity = twindow.total_seconds() * units.seconds + validtime = pd.to_datetime(args.validtime) + xmin = args.xmin + xmax = args.xmax level = logging.INFO - if debug: level = logging.DEBUG - logging.basicConfig(format='%(asctime)s - %(message)s', level=level) # prepend log message with time + if args.debug: + level = logging.DEBUG + # prepend log message with time + logging.basicConfig(format='%(asctime)s - %(message)s', level=level) logging.debug(args) # Output filename. @@ -75,134 +87,174 @@ def main(): ofile = os.path.realpath(args.ofile) odir = os.path.dirname(ofile) if not os.path.exists(odir): - logging.info(f"output directory {odir} does not exist. Creating it") + logging.info( + f"output directory {odir} does not exist. 
Creating it") os.mkdir(odir) - logging.info(f"output filename={ofile}") - + logging.debug("output filename=%s", ofile) # Reload fv3 in case user specifies a custom --config file - fv3 = yaml.load(open(config.name), Loader=yaml.FullLoader) + fv3 = yaml.load(open(config, encoding="utf8"), Loader=yaml.FullLoader) # Read lat/lon/area from gfile logging.debug(f"read lat/lon/area from {gfile}") - gds = xarray.open_dataset(gfile.name) + gds = xarray.open_mfdataset(gfile) lont = gds[fv3["lon_name"]] latt = gds[fv3["lat_name"]] area = gds["area"] # Open input file - logging.debug(f"About to open {ifile}") - fv3ds = xarray.open_dataset(ifile.name) + logging.debug("open %s", ifile) + fv3ds = xarray.open_dataset(ifile) + + if subtract: + logging.info("subtracting %s", subtract) + with xarray.set_options(keep_attrs=True): + fv3ds -= xarray.open_mfdataset(subtract) + datetimeindex = fv3ds.indexes['time'] if hasattr(datetimeindex, "to_datetimeindex"): - # Convert from CFTime to pandas datetime. I get a warning CFTimeIndex from non-standard calendar 'julian'. Maybe history file should be saved with standard calendar. + # Convert from CFTime to pandas datetime or get warning + # CFTimeIndex from non-standard calendar 'julian'. + # Maybe history file should be saved with standard calendar. # To turn off warning, set unsafe=True. datetimeindex = datetimeindex.to_datetimeindex(unsafe=True) + ragged_times = datetimeindex != datetimeindex.round('1ms') + if any(ragged_times): + logging.info( + f"round times to nearest millisec. 
before: {datetimeindex[ragged_times].values}") + datetimeindex = datetimeindex.round('1ms') + logging.info(f"after: {datetimeindex[ragged_times].values}") fv3ds['time'] = datetimeindex - if subtract: - logging.debug(f"subtracting {subtract.name}") - with xarray.set_options(keep_attrs=True): - fv3ds -= xarray.open_dataset(subtract.name) - fv3ds = fv3ds.assign_coords(lont=lont, latt=latt) # lont and latt used by pcolorfill() - tendency_vars = fv3["tendency_varnames"][variable] # list of tendency variable names for requested state variable - fv3ds = physics_tend.add_time0(fv3ds, variable, fv3) - tendencies = fv3ds[tendency_vars] # subset of original Dataset + # lont and latt used by pcolorfill() + fv3ds = fv3ds.assign_coords(lont=lont, latt=latt) if validtime is None: - logging.debug("validtime not provided on command line, so use latest time in history file.") validtime = fv3ds.time.values[-1] validtime = pd.to_datetime(validtime) + logging.info( + "validtime not provided on command line. Using last time in history file %s.", + validtime) time0 = validtime - twindow - time1 = time0 + datetime.timedelta(hours=1) - logging.info(f"Sum tendencies {time1}-{validtime}") - tindex = dict(time=slice(time1, validtime)) # slice of time from hour after time0 through validtime - tendencies_avg = tendencies.sel(tindex).mean(dim="time") # average tendencies in time - - # Dynamics (nophys) tendency is not reset every hour. Just calculate change from time0 to validtime. - nophys_var = [x for x in tendency_vars if x.endswith("_nophys")] - assert len(nophys_var) == 1 - nophys_var = nophys_var[0] # we don't want a 1-element list; we want a string. So that tendencies[nophys_var] is a DataArray, not a Dataset. - logging.info(f"Subtract nophys tendency at {time0} from {validtime}") - nophys_delta = tendencies[nophys_var].sel(time=validtime) - tendencies[nophys_var].sel(time=time0) - tendencies_avg[nophys_var] = nophys_delta / args.twindow - - - # Restore units after .mean() removed them. 
Copy units from 1st tendency variable. - tendency_units = units.parse_expression(fv3ds[tendency_vars[0]].units) - logging.debug(f"restoring {tendency_units} units after .mean() method removed them.") - tendencies_avg *= tendency_units - for da in tendencies_avg: - tendencies_avg[da] = tendencies_avg[da].metpy.convert_units("K/hour") - long_names = [fv3ds[da].attrs["long_name"] for da in tendencies_avg] # Make list of long_names before .to_array() loses them. - - # Remove characters up to and including 1st underscore (e.g. du3dt_) in DataArray name. - # for example dt3dt_pbl -> pbl - name_dict = {da : "_".join(da.split("_")[1:]) for da in tendencies_avg.data_vars} + assert time0 in fv3ds.time, (f"time0 {time0} not in history file. Closest is " + f"{fv3ds.time.sel(time=time0, method='nearest').time.data}") + + # list of tendency variable names for requested state variable + tendency_vars = fv3["tendency_varnames"][variable] + tendencies = fv3ds[tendency_vars] # subset of original Dataset + # convert DataArrays to Quantities to protect units. DataArray.mean drops units attribute. + tendencies = tendencies.metpy.quantify() + + # Define time slice starting with time-after-time0 and ending with validtime. + # We use the time *after* time0 because the time range corresponding to the tendency + # output is the period immediately prior to the tendency timestamp. + # That way, slice(time_after_time0, validtime) has a time range of [time0,validtime]. + idx_first_time_after_time0 = (fv3ds.time > time0).argmax() + time_after_time0 = fv3ds.time[idx_first_time_after_time0] + tindex = {"time": slice(time_after_time0, validtime)} + logging.debug( + "Time-weighted mean tendencies for time index slice %s", tindex) + timeweights = fv3ds.time.diff("time").sel(tindex) + time_weighted_tendencies = tendencies.sel(tindex) * timeweights + tendencies_avg = time_weighted_tendencies.sum( + dim="time") / timeweights.sum(dim="time") + + # Make list of long_names before .to_array() loses them. 
+ long_names = [fv3ds[da].attrs["long_name"] for da in tendencies_avg] + + # Keep characters after final underscore. The first part is redundant. + # for example dtend_u_pbl -> pbl + name_dict = {da: "_".join(da.split("_")[-1:]) + for da in tendencies_avg.data_vars} + logging.debug("rename %s", name_dict) tendencies_avg = tendencies_avg.rename(name_dict) # Stack variables along new tendency dimension of new DataArray. tendency_dim = f"{variable} tendency" tendencies_avg = tendencies_avg.to_array(dim=tendency_dim) - # Assign long_names to a new DataArray coordinate. It will have the same shape as tendency dimension. - tendencies_avg = tendencies_avg.assign_coords({"long_name":(tendency_dim,long_names)}) - - logging.info(f"calculate actual change in {variable}") - state_variable = fv3ds[variable].metpy.quantify() # Tried metpy.quantify() with open_dataset, but pint.errors.UndefinedUnitError: 'dBz' is not defined in the unit registry - dstate_variable = state_variable.sel(time = validtime) - state_variable.sel(time = time0) - dstate_variable = dstate_variable.assign_coords(time=validtime) - dstate_variable.attrs["long_name"] = f"actual change in {state_variable.attrs['long_name']}" - - # Add all tendencies together and subtract actual rate of change in state variable. - # This is residual. - total = tendencies_avg.sum(dim=tendency_dim) - twindow_quantity = twindow.total_seconds() * units.seconds - resid = total - dstate_variable/twindow_quantity + # Assign long_names to a new DataArray coordinate. + # It will have the same shape as tendency dimension. 
+ tendencies_avg = tendencies_avg.assign_coords( + {"long_name": (tendency_dim, long_names)}) + + logging.info("calculate actual change in %s", variable) + # Tried metpy.quantify() with open_dataset, but + # pint.errors.UndefinedUnitError: 'dBz' is not defined in the unit registry + state_variable = fv3ds[variable].metpy.quantify() + actual_change = state_variable.sel(time=validtime) - state_variable.sel( + time=time0, method="nearest", tolerance=datetime.timedelta(milliseconds=1)) + actual_change = actual_change.assign_coords(time=validtime) + actual_change.attrs["long_name"] = f"actual change in {state_variable.attrs['long_name']}" + + # Sum all tendencies (physics and non-physics) + all_tendencies = tendencies_avg.sum(dim=tendency_dim) + + # Subtract physics tendency variable if it was in tendency_vars. Don't want to double-count. + phys_var = [x for x in tendency_vars if x.endswith("_phys")] + if phys_var: + logging.info("subtracting 'phys' tendency variable " + "from all_tendencies to avoid double-counting") + # use .data to avoid re-introducing tendency coordinate + all_tendencies = all_tendencies - \ + tendencies_avg.sel({tendency_dim: "phys"}).data + + # Calculate actual tendency of state variable. + actual_tendency = actual_change / twindow_quantity + logging.info( + "subtract actual tendency from all_tendencies to get residual") + resid = all_tendencies - actual_tendency da2plot = tendencies_avg - if resid is not None: - # Add total and resid DataArrays to tendency_dim. - total = total.expand_dims({tendency_dim:["total"]}).assign_coords(long_name="sum of tendencies") - resid = resid.expand_dims({tendency_dim:["resid"]}).assign_coords(long_name=f"sum of tendencies - actual rate of change of {variable} (residual)") - da2plot = xarray.concat([da2plot, total, resid], dim=tendency_dim) - + if args.resid: + # Concatenate all_tendencies, actual_tendency, and resid DataArrays. + # Give them a name and long_name along tendency_dim. 
+ all_tendencies = all_tendencies.expand_dims({tendency_dim: ["all"]}).assign_coords( + long_name="sum of tendencies") + actual_tendency = actual_tendency.expand_dims({tendency_dim: ["actual"]}).assign_coords( + long_name=f"actual rate of change of {variable}") + resid = resid.expand_dims({tendency_dim: ["resid"]}).assign_coords( + long_name=f"sum of tendencies - actual rate of change of {variable} (residual)") + da2plot = xarray.concat( + [da2plot, all_tendencies, actual_tendency, resid], dim=tendency_dim) # Mask points outside shape. - if shp: - # mask points outside shape - mask = physics_tend.pts_in_shp(latt.values, lont.values, shp, debug=debug) # Use .values to avoid AttributeError: 'DataArray' object has no attribute 'flatten' - mask = xarray.DataArray(mask, coords=[da2plot.grid_yt, da2plot.grid_xt]) + if shp is not None: + # Use .values to avoid AttributeError: 'DataArray' object has no attribute 'flatten' + mask = physics_tend.pts_in_shp( + latt.values, lont.values, shp) + mask = xarray.DataArray( + mask, coords=[da2plot.grid_yt, da2plot.grid_xt]) da2plot = da2plot.where(mask, drop=True) - area = area.where(mask).fillna(0) + area = area.where(mask).fillna(0) totalarea = area.metpy.convert_units("km**2").sum() - - logging.info(f"area-weighted spatial average") + logging.info("area-weighted spatial average") da2plot = da2plot.weighted(area).mean(area.dims) - + # Put units in attributes so they show up in xlabel. + # dequantify after area-weighted mean to preserve units. 
+ da2plot = da2plot.metpy.dequantify() logging.info("creating figure") - fig, ax = plt.subplots() - plt.subplots_adjust(bottom=0.18) # add space at bottom for fine print - ax.invert_yaxis() # pressure increases from top to bottom + _, ax = plt.subplots() + plt.subplots_adjust(bottom=0.18) # add space at bottom for fine print + ax.invert_yaxis() # pressure increases from top to bottom ax.grid(visible=True, color="grey", alpha=0.5, lw=0.5) ax.yaxis.set_major_locator(MultipleLocator(100)) ax.yaxis.set_minor_locator(MultipleLocator(25)) ax.grid(which='minor', alpha=0.3, lw=0.4) - # Put units in attributes so they show up in xlabel. - da2plot = da2plot.metpy.dequantify() - logging.info("plot area-weighted spatial average...") - lines = da2plot.plot.line(y="pfull", ax=ax, hue=tendency_dim) + lines = da2plot.plot.line( + y="pfull", ax=ax, xlim=(xmin, xmax), hue=tendency_dim) - if resid is not None: # resid might have been turned from a Boolean scalar variable to a DataArray. - # Add special marker to dstate_variable and residual lines. - # DataArray plot legend handles differ from the plot lines, for some reason. So if you + if args.resid: + # Add special marker to actual_change and residual lines. + # DataArray plot legend handles differ from the plot lines, for some reason. So if you # change the style of a line later, it is not automatically changed in the legend. - # zip d{variable}, resid line and their respective legend handles together and change their style together. + # zip d{variable}, resid line and their respective legend handles together and change + # their style together. # [-2:] means take last two elements of da2plot. special_lines = list(zip(lines, ax.get_legend().legendHandles))[-2:] special_marker = 'o' @@ -217,36 +269,39 @@ def main(): title = f'{time0}-{validtime} ({twindow_quantity.to("hours"):~} time window)' ax.set_title(title, wrap=True) - if shp: + if shp is not None: # Locate region of interest on conus map background. Put in inset. 
- projection = cartopy.crs.LambertConformal(central_longitude=-97.5, central_latitude=fv3["standard_parallel"]) - ax_inset = plt.gcf().add_axes([.7, .001, .19, .13], projection=projection) + projection = cartopy.crs.LambertConformal( + central_longitude=-97.5, central_latitude=fv3["standard_parallel"]) + ax_inset = plt.gcf().add_axes( + [.7, .001, .19, .13], projection=projection) # astype(int) to avoid TypeError: numpy boolean subtract - cbar_kwargs = dict(ticks=[0.25,0.75],shrink=0.6) - pc = mask.assign_coords(lont=lont, latt=latt).astype(int).plot.pcolormesh(ax=ax_inset, x="lont", y="latt", infer_intervals=True, - transform=cartopy.crs.PlateCarree(), cmap=plt.cm.get_cmap('cool',2), add_labels=False, cbar_kwargs=cbar_kwargs) - pc.colorbar.ax.set_yticklabels(["masked","valid"], fontsize='xx-small') - pc.colorbar.outline.set_visible(False) + cbar_kwargs = {"ticks":[0.25, 0.75], "shrink":0.6} + pcm = mask.assign_coords(lont=lont, latt=latt).astype(int).plot.pcolormesh( + ax=ax_inset, x="lont", y="latt", infer_intervals=True, + transform=cartopy.crs.PlateCarree(), cmap=plt.cm.get_cmap('cool', 2), + add_labels=False, cbar_kwargs=cbar_kwargs) + pcm.colorbar.ax.set_yticklabels( + ["masked", "valid"], fontsize='xx-small') + pcm.colorbar.outline.set_visible(False) physics_tend.add_conus_features(ax_inset) extent = fv3["extent"] ax_inset.set_extent(extent) - # Annotate figure with details about figure creation. 
- fineprint = f"history: {os.path.realpath(ifile.name)}" - if subtract: - fineprint += f"\nsubtract: {os.path.realpath(subtract.name)}" - fineprint += f"\ngrid_spec: {os.path.realpath(gfile.name)}" - if shp: fineprint += f"\nmask: {shp}" - fineprint += f"\ntotal area: {totalarea.data:~.0f}" - fineprint += f"\ncreated {datetime.datetime.now(tz=None)}" + # Annotate figure with args namespace, total area, and timestamp + fineprint = f"{args} " + fineprint += (f"total area={totalarea.compute().data:~.0f}, " + f"created {datetime.datetime.now(tz=None)}") if nofineprint: - logging.info(fineprint) + logging.debug(fineprint) else: - fineprint_obj = plt.annotate(text=fineprint, xy=(1,1), xycoords='figure pixels', fontsize=5) - + logging.debug("add fineprint to image") + plt.figtext(0, 0, fineprint, fontsize='xx-small', + va="bottom", wrap=True) plt.savefig(ofile, dpi=fv3["dpi"]) - logging.info(f'created {os.path.realpath(ofile)}') + logging.info('created %s', os.path.realpath(ofile)) + if __name__ == "__main__": main() diff --git a/test/fv3_physics_tend/fv3_physics_tend_defaults.yaml b/test/fv3_physics_tend/fv3_physics_tend_defaults.yaml index 3e0d825b..748a2d8b 100644 --- a/test/fv3_physics_tend/fv3_physics_tend_defaults.yaml +++ b/test/fv3_physics_tend/fv3_physics_tend_defaults.yaml @@ -2,47 +2,40 @@ # Each type of tendency (moisture, temperature, wind component) has its own set of variables. 
tendency_varnames: spfh: - - dq3dt_deepcnv - - dq3dt_mp - - dq3dt_pbl - - dq3dt_shalcnv - - dq3dt_nophys + - dtend_qv_pbl + - dtend_qv_deepcnv + - dtend_qv_shalcnv + - dtend_qv_mp + - dtend_qv_phys + - dtend_qv_nophys tmp: - - dt3dt_congwd - - dt3dt_deepcnv - - dt3dt_lw - - dt3dt_mp - - dt3dt_orogwd - - dt3dt_pbl - - dt3dt_rdamp - - dt3dt_shalcnv - - dt3dt_sw - - dt3dt_nophys + - dtend_temp_lw + - dtend_temp_sw + - dtend_temp_pbl + - dtend_temp_deepcnv + - dtend_temp_shalcnv + - dtend_temp_mp + - dtend_temp_orogwd + - dtend_temp_cnvgwd + - dtend_temp_phys + - dtend_temp_nophys ugrd: - - du3dt_congwd - - du3dt_deepcnv - - du3dt_mp - - du3dt_orogwd - - du3dt_pbl - - du3dt_rdamp - - du3dt_shalcnv - - du3dt_nophys + - dtend_u_pbl + - dtend_u_orogwd + - dtend_u_deepcnv + - dtend_u_cnvgwd + - dtend_u_shalcnv + - dtend_u_phys + - dtend_u_nophys vgrd: - - dv3dt_congwd - - dv3dt_deepcnv - - dv3dt_mp - - dv3dt_orogwd - - dv3dt_pbl - - dv3dt_rdamp - - dv3dt_shalcnv - - dv3dt_nophys + - dtend_v_pbl + - dtend_v_orogwd + - dtend_v_deepcnv + - dtend_v_cnvgwd + - dtend_v_shalcnv + - dtend_v_phys + - dtend_v_nophys -# Name of variables in history file that contain the temperature, moisture, wind at time zero (initialization time). -time0_varname: - tmp : tmp_i - spfh: qv_i - ugrd: ugrd_i - vgrd: vgrd_i # Name of the longitude and latitude variables in the grid specification file. @@ -65,5 +58,5 @@ standard_parallel : 38.139 cmap : "Spectral_r" # resolution (dots per inch) of output -dpi : 150 +dpi : 100