Update global_data_process.py
angehung5 authored Oct 16, 2023
1 parent 26fa597 commit c692c70
Showing 1 changed file with 68 additions and 19 deletions.
87 changes: 68 additions & 19 deletions python/global_data_process.py
@@ -1,5 +1,6 @@
"""
-Created on Sun Jun 4 15:45:29 2023
+Created on Sun Jun 4 2023
+Updated on Mon Oct 16 2023: Use daily gfs.canopy files
Author: Wei-Ting Hung
"""
@@ -65,7 +66,9 @@
f_met = (
path + "/gfs.t" + HHI + "z." + YY + MM + DD + ".sfcf0" + HH + ".nc"
) # gfs met file
-f_can = path + "/gfs.canopy.t" + HHI + "z." + YY + MM + "01.sfcf000.nc" # canopy file
+f_can = (
+    path + "/gfs.canopy.t" + HHI + "z." + YY + MM + DD + ".sfcf000.nc"
+) # canopy file
f_output = (
path + "/gfs.canopy.t" + HHI + "z." + YY + MM + DD + ".global.f0" + HH + ".nc"
) # output file
@@ -88,7 +91,7 @@
+ ".nc "
)
elif frp_src == 2: # climatological frp
-f_frp = path + "/gfs.canopy.t" + HHI + "z." + YY + MM + "01.sfcf000.nc"
+f_frp = path + "/gfs.canopy.t" + HHI + "z." + YY + MM + DD + ".sfcf000.nc"


# required variables
@@ -109,7 +112,7 @@
"hpbl",
"prate_ave",
]
canlist = ["lai", "clu", "ffrac", "fh", "mol", "csz", "frp", "href"]
canlist = ["lai", "clu", "ffrac", "fh", "pavd", "mol", "csz", "frp", "href"]


# constants
@@ -149,12 +152,25 @@ def mapping(xgrid, ygrid, data, xdata, ydata, map_method, fvalue):
def read_gfs_climatology(filename, lat, lon, varname):
readin = Dataset(filename)

-# map to met grids
-yt = readin["lat"][:]
-xt = readin["lon"][:]
-data = np.squeeze(readin[varname][0, :, :])
+if varname == 'pavd':
+    ## map to met grids
+    yt = readin['lat'][:]
+    xt = readin['lon'][:]
+    data = np.squeeze(readin[varname][:])

+    DATA = np.empty([data.shape[0], lat.shape[0], lat.shape[1]])

+    for ll in np.arange(data.shape[0]):
+        DATA[ll, :, :] = mapping(lat, lon, data[ll, :, :].flatten(), yt.flatten(), xt.flatten(), 'linear', np.nan)

+else:
+    # map to met grids
+    yt = readin["lat"][:]
+    xt = readin["lon"][:]
+    data = np.squeeze(readin[varname][0, :, :])

-DATA = mapping(lat, lon, data.flatten(), yt.flatten(), xt.flatten(), "linear", np.nan)
+    DATA = mapping(lat, lon, data.flatten(), yt.flatten(), xt.flatten(), "linear", np.nan)

DATA[np.isnan(DATA)] = 0
DATA[DATA < 0] = 0
return DATA
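With this change the helper returns a 2-D (grid_yt, grid_xt) field for the scalar canopy variables but a 3-D (level, grid_yt, grid_xt) stack for pavd, regridded one vertical level at a time. A self-contained sketch of that per-level regridding, assuming mapping() wraps scipy.interpolate.griddata (all sizes and grids below are made up for illustration):

# Illustrative stand-in for the new pavd branch; not the repository's code.
import numpy as np
from scipy.interpolate import griddata

nlev, ny_src, nx_src = 14, 10, 20    # source canopy grid (placeholder sizes)
ny, nx = 25, 50                      # target met grid (placeholder sizes)
yt, xt = np.meshgrid(np.linspace(-60, 60, ny_src), np.linspace(0, 359, nx_src), indexing="ij")
lat, lon = np.meshgrid(np.linspace(-60, 60, ny), np.linspace(0, 359, nx), indexing="ij")
data = np.random.rand(nlev, ny_src, nx_src)  # fake pavd profile on the source grid

DATA = np.empty([data.shape[0], lat.shape[0], lat.shape[1]])
for ll in np.arange(data.shape[0]):
    # regrid one vertical level at a time, as the new if-branch does
    DATA[ll, :, :] = griddata(
        (yt.flatten(), xt.flatten()), data[ll, :, :].flatten(),
        (lat, lon), method="linear", fill_value=np.nan,
    )

print(DATA.shape)  # (14, 25, 50)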
@@ -226,7 +242,8 @@ def read_frp_local(filename, lat, lon, fill_value):
+ "z."
+ YY
+ MM
+ "01.sfcf000.nc",
+ DD
+ ".sfcf000.nc",
]
)
if os.path.isfile(f_can) is True:
@@ -262,7 +279,7 @@ def read_frp_local(filename, lat, lon, fill_value):
else:
print("---- No available FRP file. Switch to Climatology FRP...")
frp_src = 2
-f_frp = path + "/gfs.canopy.t" + HHI + "z." + YY + MM + "01.sfcf000.nc"
+f_frp = path + "/gfs.canopy.t" + HHI + "z." + YY + MM + DD + ".sfcf000.nc"

if frp_src == 2: # 12 month climatology frp
if os.path.isfile(f_frp) is True:
@@ -281,7 +298,8 @@ def read_frp_local(filename, lat, lon, fill_value):
+ "z."
+ YY
+ MM
+ "01.sfcf000.nc",
+ DD
+ ".sfcf000.nc",
]
)
if os.path.isfile(f_met) is True:
@@ -354,6 +372,11 @@ def read_frp_local(filename, lat, lon, fill_value):
ATT = ["Canopy height above the surface", "m", fill_value]
DATA = read_gfs_climatology(f_can, lat, lon, "fh")

+elif varname == 'pavd':
+    ATTNAME = ['long_name', 'units', 'missing_value']
+    ATT = ['Plant area volume density profile', 'm2/m3', fill_value]
+    DATA = read_gfs_climatology(f_can, lat, lon, 'pavd')

elif varname == "mol":
# Reference:
# Essa 1999, ESTIMATION OF MONIN-OBUKHOV LENGTH USING RICHARDSON AND BULK RICHARDSON NUMBER
@@ -409,15 +432,41 @@ def read_frp_local(filename, lat, lon, fill_value):
print(ATT)

# adding to output file
output = Dataset(f_output, "a")
if varname == 'pavd':
output = Dataset(f_output, 'a')
output.createDimension('level', DATA.shape[0])

var = output.createVariable(
varname, "float", ("time", "grid_yt", "grid_xt"), fill_value=fill_value
)
write_varatt(var, ATTNAME, ATT)
var[:] = DATA
var_bot = output.createVariable(
'layer_bottom', 'i4', ('level', )
)
var_top = output.createVariable(
'layer_top', 'i4', ('level', )
)
var = output.createVariable(
varname, 'float', ('time', 'level', 'grid_yt', 'grid_xt'), fill_value=fill_value
)

write_varatt(var, ATTNAME, ATT)
write_varatt(var_bot, ['long_name', 'units'], ['height of the layer bottom above the ground', 'm'])
write_varatt(var_top, ['long_name', 'units'], ['height of the layer top above the ground', 'm'])

var_bot[:] = np.arange(0, 65+1, 5)
var_top[:] = np.arange(5, 70+1, 5)
var[:] = DATA

output.close()
del [var_bot, var_top]

else:
output = Dataset(f_output, "a")

var = output.createVariable(
varname, "float", ("time", "grid_yt", "grid_xt"), fill_value=fill_value
)
write_varatt(var, ATTNAME, ATT)
var[:] = DATA

output.close()
output.close()

print("---- " + varname + " complete!")

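Net effect on the output file: pavd is written on a new level dimension, with integer layer_bottom (0, 5, ..., 65 m) and layer_top (5, 10, ..., 70 m) coordinates describing fourteen 5 m layers. A minimal sketch of that layout, with placeholder grid sizes, fill value, and file name (not values from the repository):

# Writes a small demo file with the same pavd dimension/coordinate layout.
import numpy as np
from netCDF4 import Dataset

nlev, ny, nx = 14, 190, 384          # placeholder sizes
out = Dataset("pavd_layout_demo.nc", "w")
out.createDimension("time", 1)
out.createDimension("level", nlev)
out.createDimension("grid_yt", ny)
out.createDimension("grid_xt", nx)

var_bot = out.createVariable("layer_bottom", "i4", ("level",))
var_top = out.createVariable("layer_top", "i4", ("level",))
var = out.createVariable("pavd", "float", ("time", "level", "grid_yt", "grid_xt"), fill_value=-9999.0)

var_bot[:] = np.arange(0, 65 + 1, 5)   # layer bottoms: 0, 5, ..., 65 m
var_top[:] = np.arange(5, 70 + 1, 5)   # layer tops:    5, 10, ..., 70 m
var[:] = np.zeros((1, nlev, ny, nx))   # placeholder field

out.close()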
