Skip to content

Commit

Permalink
Merge pull request #13 from bcgov/dev
Browse files Browse the repository at this point in the history
fixes provided by weather logics
  • Loading branch information
franTarkenton authored Apr 14, 2021
2 parents 7580252 + 1941ae2 commit 4cb4491
Show file tree
Hide file tree
Showing 4 changed files with 42 additions and 6 deletions.
2 changes: 1 addition & 1 deletion process.jenkins
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ node('zavijava_rfc') {
%condaEnvPath%\\python src\\extractClimateObservations.py
'''
}
stage('download_weather_data') {
stage('processWeatherData') {
bat '''
echo running script
:: ----- run ens weather ------
Expand Down
1 change: 1 addition & 0 deletions src/ens_processing.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,7 @@ def main(args):
else:
bias_correction.main(run_time)
except Exception as e:
LOGGER.exception("")
LOGGER.error(f'Failure running program. {e}')


Expand Down
16 changes: 11 additions & 5 deletions src/processing/bias_correction.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ def get_observations(date_tm):
Returns:
pd.DataFrame: Observational data stored in pandas dataframe
"""

#climate_path_str = f'{gs.SRCDIR}/resources/climate_obs_{date_tm.year}.csv'
#climate_path_str = os.path.join(gs.CLIMATE_OBS_DIR, f'climate_obs_{date_tm.year}.csv')
climate_path_str = os.path.join(gs.CLIMATE_OBS_DIR, f'{gs.CLIMATE_OBS_FILE}{date_tm.year}.csv')
Expand All @@ -48,7 +48,7 @@ def get_observations(date_tm):
#clim_obs_path_str = f'{gs.SRCDIR}/resources/climate_obs_{start_bias.year}.csv'
climate_path_str = os.path.join(gs.CLIMATE_OBS_DIR, f'{gs.CLIMATE_OBS_FILE}{start_bias.year}.csv')

clim_obj_path = pathlib.Path(clim_obs_path_str)
clim_obj_path = pathlib.Path(climate_path_str)
LOGGER.debug(f"clim_obj_path: {clim_obj_path}")
df_two = pd.read_csv(str(clim_obj_path))
df = df.append(df_two)
Expand Down Expand Up @@ -378,24 +378,30 @@ def normalize_precip(forecast, individual, model=None):
"""
last_day = forecast['agg_day'].max()
copy = forecast.copy()
forecast.set_index(['stn_id', 'agg_day'], inplace=True, drop=True)
copy = copy.loc[copy['agg_day'] < last_day]
copy['agg_day'] += 1
copy.set_index(['stn_id', 'agg_day'], inplace=True, drop=True)
if not individual:
forecast.set_index(['stn_id', 'forecast', 'agg_day'], inplace=True, drop=True)
copy.set_index(['stn_id', 'forecast', 'agg_day'], inplace=True, drop=True)
for suffix in vs.metvars['precip']['ensemble_values']:
forecast.loc[copy.index, f'precip_{suffix}'] -= copy.loc[copy.index, f'precip_{suffix}']
else:
forecast.set_index(['stn_id', 'agg_day'], inplace=True, drop=True)
copy.set_index(['stn_id', 'agg_day'], inplace=True, drop=True)
for suffix in range(1, models[model]['ensemble_members'] + 1):
forecast.loc[copy.index, f'precip_{suffix}'] -= copy.loc[copy.index, f'precip_{suffix}']

forecast.reset_index(drop=False, inplace=True)


def get_raw_forecasts(date_tm):
raw_files = glob(date_tm.strftime(f'{gs.DIR}/tmp/%Y%m%d%H_*'))
globstr = date_tm.strftime(f'{gs.DIR}/tmp/%Y%m%d%H_*')
LOGGER.debug(f"glob str: {globstr}")
raw_files = glob(globstr)
dfs = []
LOGGER.debug(f"datetime: {date_tm}, glob length: {len(raw_files)}")
for raw in raw_files:
LOGGER.debug(f"input raw file: {raw}")
hour = int(raw.split('_')[-1])
df = pd.read_csv(raw)
df['datetime'] = date_tm + timedelta(hours=hour)
Expand Down
29 changes: 29 additions & 0 deletions tests/processing/test_bias_correction.py
Original file line number Diff line number Diff line change
Expand Up @@ -499,13 +499,15 @@ def test_normalize_precip(self, monkeypatch):
}
monkeypatch.setattr(bc.vs, 'metvars', metvars)
df = pd.DataFrame({
'forecast': [1, 1, 1, 1, 1, 1],
'precip_mean': [1, 2, 3, 2, 3.5, 5.5],
'precip_min': [0, 1, 1, 1.5, 2.5, 4.5],
'precip_max': [2, 4, 6, 2.5, 5.5, 8],
'stn_id': [1, 1, 1, 2, 2, 2],
'agg_day': [0, 1, 2, 0, 1, 2],
})
exp = pd.DataFrame({
'forecast': [1, 1, 1, 1, 1, 1],
'precip_mean': [1, 1, 1, 2, 1.5, 2],
'precip_min': [0, 1, 0, 1.5, 1, 2],
'precip_max': [2, 2, 2, 2.5, 3, 2.5],
Expand All @@ -515,6 +517,33 @@ def test_normalize_precip(self, monkeypatch):
bc.normalize_precip(df, False)
assert_frame_equal(df, exp, check_like=True)

def test_normalize_precip_multiple_forecasts(self, monkeypatch):
    """De-accumulation is applied per forecast run.

    One station with two forecast runs: normalize_precip must subtract
    the previous day's accumulated precip within each run independently,
    never mixing values across the 'forecast' grouping.
    """
    # Patch the metvar config so only precip (ratio-corrected) is in play.
    monkeypatch.setattr(
        bc.vs,
        'metvars',
        {
            'precip': {
                'correction': 'ratio',
                'ensemble_values': ['mean', 'max', 'min'],
            },
        },
    )
    # Identifier columns shared by the input and expected frames.
    keys = {
        'forecast': [1, 1, 1, 2, 2, 2],
        'stn_id': [1, 1, 1, 1, 1, 1],
        'agg_day': [0, 1, 2, 0, 1, 2],
    }
    # Accumulated precipitation as produced by the raw model output.
    df = pd.DataFrame({
        **keys,
        'precip_mean': [1, 2, 3, 2, 3.5, 5.5],
        'precip_min': [0, 1, 1, 1.5, 2.5, 4.5],
        'precip_max': [2, 4, 6, 2.5, 5.5, 8],
    })
    # Per-day amounts after removing each prior day's accumulation.
    exp = pd.DataFrame({
        **keys,
        'precip_mean': [1, 1, 1, 2, 1.5, 2],
        'precip_min': [0, 1, 0, 1.5, 1, 2],
        'precip_max': [2, 2, 2, 2.5, 3, 2.5],
    })
    bc.normalize_precip(df, False)
    # check_like=True: column/row order is irrelevant, values must match.
    assert_frame_equal(df, exp, check_like=True)

@pytest.mark.pre_commit
class Test_Pre_Commit:

Expand Down

0 comments on commit 4cb4491

Please sign in to comment.