Changed the damage outputs #10079

Merged
merged 8 commits on Oct 23, 2024
2 changes: 2 additions & 0 deletions debian/changelog
@@ -1,4 +1,6 @@
[Michele Simionato]
* Specifying total_losses is now mandatory in damage calculations with
multiple loss types
* Added support for consequence=losses for liquefaction and landslides
* Added a check for missing secondary perils
* Added loss types liquefaction and landslide
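As a rough illustration of the first changelog entry: when a job defines more than one loss type, the engine now requires an explicit total_losses line in the job.ini (for example total_losses = structural+nonstructural). A minimal sketch of the kind of check this implies, with invented names (not the engine's actual validation code):

def check_total_losses(loss_types, total_losses):
    # loss_types: loss types defined in the job, e.g. ['structural', 'nonstructural']
    # total_losses: value of the job.ini parameter, or None if it is missing
    if len(loss_types) > 1 and not total_losses:
        raise ValueError('total_losses must be specified in damage '
                         'calculations with multiple loss types')

check_total_losses(['structural', 'nonstructural'], 'structural+nonstructural')  # passes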
13 changes: 6 additions & 7 deletions openquake/calculators/classical_damage.py
@@ -39,21 +39,21 @@ def classical_damage(riskinputs, param, monitor):
dictionaries asset_ordinal -> damage(R, L, D)
"""
crmodel = monitor.read('crmodel')
total_loss_types = crmodel.oqparam.total_loss_types
mon = monitor('getting hazard', measuremem=False)
for ri in riskinputs:
R = ri.hazard_getter.R
L = len(crmodel.lti)
D = len(crmodel.damage_states)
result = AccumDict(accum=numpy.zeros((R, L, D), F32))
result = AccumDict(accum=numpy.zeros((R, D), F32))
with mon:
haz = ri.hazard_getter.get_hazard()
for taxo, assets in ri.asset_df.groupby('taxonomy'):
for rlz in range(R):
hcurve = haz[:, rlz]
out = crmodel.get_output(assets, hcurve)
for li, loss_type in enumerate(crmodel.loss_types):
for loss_type in total_loss_types:
for a, frac in zip(assets.ordinal, out[loss_type]):
result[a][rlz, li] = frac
result[a][rlz] += frac
yield result


@@ -70,16 +70,15 @@ def post_execute(self, result):
Export the result in CSV format.

:param result:
a dictionary asset_ordinal -> array(R, L, D)
a dictionary asset_ordinal -> array(R, D)
"""
D = len(self.crmodel.damage_states)
damages = numpy.zeros((self.A, self.R, self.L, D), numpy.float32)
damages = numpy.zeros((self.A, self.R, D), numpy.float32)
for a in result:
damages[a] = result[a]
self.datastore['damages-rlzs'] = damages
stats.set_rlzs_stats(self.datastore, 'damages-rlzs',
assets=self.assetcol['id'],
loss_type=self.oqparam.loss_types,
dmg_state=self.crmodel.damage_states)
dmg = views.view('portfolio_damage', self.datastore)
logging.info('\n' + views.text_table(dmg, ext='org'))
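The net effect of the classical_damage changes above is that the per-loss-type axis disappears: damage fractions for every loss type in total_loss_types are summed into a single (R, D) block per asset, and damages-rlzs is stored as (A, R, D). A standalone numpy sketch of that accumulation, with invented shapes and values:

import numpy

F32 = numpy.float32
R, D = 2, 4                                   # realizations, damage states
total_loss_types = ['structural', 'nonstructural']
# fake per-loss-type damage fractions for one asset, each of shape (R, D)
out = {lt: numpy.full((R, D), .1, F32) for lt in total_loss_types}

result = numpy.zeros((R, D), F32)             # was (R, L, D) before this PR
for loss_type in total_loss_types:
    result += out[loss_type]                  # accumulated, not kept per loss type
print(result)                                 # every entry is 0.2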
31 changes: 14 additions & 17 deletions openquake/calculators/event_based_damage.py
@@ -53,12 +53,11 @@ class Dparam:

def zero_dmgcsq(A, R, crmodel):
"""
:returns: an array of zeros of shape (A, R, L, Dc)
:returns: an array of zeros of shape (A, R, Dc)
"""
dmg_csq = crmodel.get_dmg_csq()
L = len(crmodel.loss_types)
Dc = len(dmg_csq) + 1 # damages + consequences
return numpy.zeros((A, R, L, Dc), F32)
return numpy.zeros((A, R, Dc), F32)


def damage_from_gmfs(gmfslices, oqparam, dstore, monitor):
@@ -152,15 +151,14 @@ def event_based_damage(df, oq, dstore, monitor):
dmg_csq = crmodel.get_dmg_csq()
csqidx = {dc: i + 1 for i, dc in enumerate(dmg_csq)}
dmgcsq = zero_dmgcsq(len(assetcol), oq.R, crmodel)
_A, R, L, Dc = dmgcsq.shape
_A, R, Dc = dmgcsq.shape
D = Dc - len(crmodel.get_consequences())
if R > 1:
allrlzs = dstore['events']['rlz_id']
else:
allrlzs = U32([0])
assert len(oq.loss_types) == L
with mon_risk:
dddict = general.AccumDict(accum=numpy.zeros((L, Dc), F32)) # eid, kid
dddict = general.AccumDict(accum=numpy.zeros(Dc, F32)) # eid, kid
for sid, asset_df in assetcol.to_dframe().groupby('site_id'):
# working one site at the time
gmf_df = df[df.sid == sid]
@@ -181,17 +179,17 @@ def event_based_damage(df, oq, dstore, monitor):
for aids, d4 in _gen_d4(asset_df, gmf_df, crmodel, dparam):
for lti, d3 in enumerate(d4):
if R == 1:
dmgcsq[aids, 0, lti] += d3.sum(axis=1)
dmgcsq[aids, 0] += d3.sum(axis=1)
else:
for e, rlz in enumerate(dparam.rlzs):
dmgcsq[aids, rlz, lti] += d3[:, e]
dmgcsq[aids, rlz] += d3[:, e]
tot = d3.sum(axis=0) # sum on the assets
for e, eid in enumerate(eids):
dddict[eid, oq.K][lti] += tot[e]
dddict[eid, oq.K] += tot[e]
if oq.K:
for kids in dparam.aggids:
for a, aid in enumerate(aids):
dddict[eid, kids[aid]][lti] += d3[a, e]
dddict[eid, kids[aid]] += d3[a, e]

return _dframe(dddict, csqidx, oq.loss_types), dmgcsq

@@ -205,7 +203,7 @@ def _dframe(adic, csqidx, loss_types):
dic['event_id'].append(eid)
dic['loss_id'].append(scientific.LOSSID[lt])
for cname, ci in csqidx.items():
dic[cname].append(dd[li, ci])
dic[cname].append(dd[ci])
fix_dtypes(dic)
return pandas.DataFrame(dic)

@@ -281,7 +279,7 @@ def post_execute(self, dummy):
"""
oq = self.oqparam
# no damage check, perhaps the sites were disjoint from gmf_data
if self.dmgcsq[:, :, :, 1:].sum() == 0:
if self.dmgcsq[:, :, 1:].sum() == 0:
haz_sids = self.datastore['gmf_data/sid'][:]
count = numpy.isin(haz_sids, self.sitecol.sids).sum()
if count == 0:
@@ -299,22 +297,21 @@ def post_execute(self, dummy):
with prc.datastore:
prc.run(exports='')

_A, _R, L, _Dc = self.dmgcsq.shape
_A, _R, _Dc = self.dmgcsq.shape
D = len(self.crmodel.damage_states)
# fix no_damage distribution for events with zero damage
number = self.assetcol['value-number']
nl = len(oq.total_loss_types)
for r in range(self.R):
ne = prc.num_events[r]
for li in range(L):
self.dmgcsq[:, r, li, 0] = ( # no damage
number * ne - self.dmgcsq[:, r, li, 1:D].sum(axis=1))
self.dmgcsq[:, r, 0] = ( # no damage
nl * number * ne - self.dmgcsq[:, r, 1:D].sum(axis=1))
self.dmgcsq[:, r] /= ne
self.datastore['damages-rlzs'] = self.dmgcsq
set_rlzs_stats(self.datastore,
'damages-rlzs',
asset_id=self.assetcol['id'],
rlz=numpy.arange(self.R),
loss_type=oq.loss_types,
dmg_state=['no_damage'] + self.crmodel.get_dmg_csq())

if (hasattr(oq, 'infrastructure_connectivity_analysis')
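The one non-mechanical change in post_execute above is the no_damage row: with the loss-type axis removed, the total number of damage slots per asset becomes buildings x events x number of total loss types (nl), and no_damage is that product minus the damaged counts. A hedged numeric sketch with invented values:

import numpy

F32 = numpy.float32
number = numpy.array([10.], F32)              # buildings in the single asset
ne = 5                                        # events for this realization
nl = 2                                        # len(oq.total_loss_types)
D = 4                                         # no_damage + 3 damage states

dmgcsq = numpy.zeros((1, D), F32)             # one asset, one realization
dmgcsq[:, 1:D] = [30., 15., 5.]               # accumulated damaged buildings

dmgcsq[:, 0] = nl * number * ne - dmgcsq[:, 1:D].sum(axis=1)   # no damage
dmgcsq /= ne                                  # average over the events
print(dmgcsq)                                 # [[10.  6.  3.  1.]]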
18 changes: 7 additions & 11 deletions openquake/calculators/export/risk.py
@@ -380,13 +380,10 @@ def export_loss_maps_npz(ekey, dstore):

def modal_damage_array(data, damage_dt):
# determine the damage state with the highest probability
A, _L, _D = data.shape
dmgstate = damage_dt['structural'].names
arr = numpy.zeros(A, [('modal-ds-' + lt, hdf5.vstr)
for lt in damage_dt.names])
for li, loss_type in enumerate(damage_dt.names):
arr['modal-ds-' + loss_type] = [dmgstate[data[a, li].argmax()]
for a in range(A)]
A, _D = data.shape
dmgstate = damage_dt.names
arr = numpy.zeros(A, [('modal-ds', hdf5.vstr)])
arr['modal-ds'] = [dmgstate[data[a].argmax()] for a in range(A)]
return arr


@@ -397,7 +394,7 @@ def export_damages_csv(ekey, dstore):
ebd = oq.calculation_mode == 'event_based_damage'
dmg_dt = build_damage_dt(dstore)
rlzs = dstore['full_lt'].get_realizations()
orig = dstore[ekey[0]][:] # shape (A, R, L, D)
orig = dstore[ekey[0]][:] # shape (A, R, D)
writer = writers.CsvWriter(fmt='%.6E')
assets = get_assets(dstore)
md = dstore.metadata
@@ -418,14 +415,13 @@ def export_damages_csv(ekey, dstore):
if ebd: # export only the consequences from damages-rlzs, i == 0
rate = len(dstore['events']) * oq.time_ratio / len(rlzs)
data = orig[:, i] * rate
A, _L, Dc = data.shape
A, Dc = data.shape
if Dc == D: # no consequences, export nothing
return []
csq_dt = build_csq_dt(dstore)
damages = numpy.zeros(A, csq_dt)
for a in range(A):
for li, lt in enumerate(csq_dt.names):
damages[lt][a] = tuple(data[a, li, D:Dc])
damages[a] = tuple(data[a, D:Dc])
fname = dstore.build_fname('avg_risk', ros, ekey[1])
else: # scenario_damage, classical_damage
if oq.modal_damage_state:
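With one damage distribution per asset instead of one per loss type, modal_damage_array reduces to a single argmax per asset and the exported record loses its loss-type nesting. A small sketch of the new modal-damage-state behaviour (state names and data invented; the engine uses hdf5.vstr where object is used here):

import numpy

states = ('no_damage', 'moderate', 'complete')
data = numpy.array([[0.7, 0.2, 0.1],          # asset 0: mostly undamaged
                    [0.1, 0.3, 0.6]])         # asset 1: mostly complete

A, _D = data.shape
arr = numpy.zeros(A, [('modal-ds', object)])
arr['modal-ds'] = [states[data[a].argmax()] for a in range(A)]
print(list(arr['modal-ds']))                  # ['no_damage', 'complete']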
42 changes: 15 additions & 27 deletions openquake/calculators/extract.py
@@ -716,15 +716,6 @@ def _filter_agg(assetcol, losses, selected, stats=''):
dict(selected=encode(selected), tags=encode(tags), stats=stats))


def get_loss_type_tags(what):
try:
loss_type, query_string = what.rsplit('?', 1)
except ValueError: # no question mark
loss_type, query_string = what, ''
tags = query_string.split('&') if query_string else []
return loss_type, tags


# probably not used
@extract.add('csq_curves')
def extract_csq_curves(dstore, what):
@@ -832,7 +823,11 @@ def extract_agg_losses(dstore, what):
an array of shape (R,), being R the number of realizations
an array of length 0 if there is no data for the given tags
"""
loss_type, tags = get_loss_type_tags(what)
if '?' in what:
loss_type, query_string = what.rsplit('?', 1)
else:
loss_type, query_string = what, ''
tags = query_string.split('&') if query_string else []
if not loss_type:
raise ValueError('loss_type not passed in agg_losses/<loss_type>')
if 'avg_losses-stats/' + loss_type in dstore:
@@ -850,18 +845,16 @@
def extract_agg_damages(dstore, what):
"""
Aggregate damages of the given loss type and tags. Use it as
/extract/agg_damages/structural?taxonomy=RC&custom_site_id=20126
/extract/agg_damages?taxonomy=RC&custom_site_id=20126

:returns:
array of shape (R, D), being R the number of realizations and D the
number of damage states, or an array of length 0 if there is no data
for the given tags
"""
loss_type, tags = get_loss_type_tags(what)
tags = what.split('&') if what else []
if 'damages-rlzs' in dstore:
oq = dstore['oqparam']
lti = oq.lti[loss_type]
damages = dstore['damages-rlzs'][:, :, lti]
damages = dstore['damages-rlzs'][:, :]
else:
raise KeyError('No damages found in %s' % dstore)
return _filter_agg(dstore['assetcol'], damages, tags)
@@ -1044,42 +1037,37 @@ def build_damage_dt(dstore):
:returns:
a composite dtype loss_type -> (ds1, ds2, ...)
"""
oq = dstore['oqparam']
attrs = json.loads(dstore.get_attr('damages-rlzs', 'json'))
limit_states = list(dstore.get_attr('crm', 'limit_states'))
csqs = attrs['dmg_state'][len(limit_states) + 1:] # consequences
dt_list = [(ds, F32) for ds in ['no_damage'] + limit_states + csqs]
damage_dt = numpy.dtype(dt_list)
loss_types = oq.loss_dt().names
return numpy.dtype([(lt, damage_dt) for lt in loss_types])
return damage_dt


def build_csq_dt(dstore):
"""
:param dstore: a datastore instance
:returns:
a composite dtype loss_type -> (csq1, csq2, ...)
a composite dtype (csq1, csq2, ...)
"""
oq = dstore['oqparam']
attrs = json.loads(dstore.get_attr('damages-rlzs', 'json'))
limit_states = list(dstore.get_attr('crm', 'limit_states'))
csqs = attrs['dmg_state'][len(limit_states) + 1:] # consequences
dt = numpy.dtype([(csq, F32) for csq in csqs])
loss_types = oq.loss_dt().names
return numpy.dtype([(lt, dt) for lt in loss_types])
return dt


def build_damage_array(data, damage_dt):
"""
:param data: an array of shape (A, L, D)
:param damage_dt: a damage composite data type loss_type -> states
:param data: an array of shape (A, D)
:param damage_dt: a damage data type
:returns: a composite array of length N and dtype damage_dt
"""
A, _L, _D = data.shape
A, _D = data.shape
dmg = numpy.zeros(A, damage_dt)
for a in range(A):
for li, lt in enumerate(damage_dt.names):
dmg[lt][a] = tuple(data[a, li])
dmg[a] = tuple(data[a])
return dmg


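Two user-visible effects of the extract.py changes: agg_damages is now queried without a loss type in the key (extract(dstore, 'agg_damages?taxonomy=RC') rather than 'agg_damages/structural?taxonomy=RC'), and build_damage_array packs a flat (A, D) array into a flat composite dtype. A self-contained sketch of the latter, mirroring the new code with invented data:

import numpy

F32 = numpy.float32
damage_dt = numpy.dtype([(ds, F32) for ds in ('no_damage', 'moderate', 'complete')])

def build_damage_array(data, damage_dt):
    # data: array of shape (A, D); returns a composite array of length A
    A, _D = data.shape
    dmg = numpy.zeros(A, damage_dt)
    for a in range(A):
        dmg[a] = tuple(data[a])
    return dmg

data = numpy.array([[0.6, 0.3, 0.1], [0.2, 0.5, 0.3]], F32)
print(build_damage_array(data, damage_dt)['moderate'])   # [0.3 0.5]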
17 changes: 7 additions & 10 deletions openquake/calculators/tests/scenario_damage_test.py
@@ -55,12 +55,10 @@ def test_case_1(self):
view('num_units', self.calc.datastore)

# test agg_damages, 1 realization x 3 damage states
[dmg] = extract(self.calc.datastore, 'agg_damages/structural?'
'taxonomy=RC&CRESTA=01.1')
[dmg] = extract(self.calc.datastore, 'agg_damages?taxonomy=RC&CRESTA=01.1')
aac([1482., 489., 29.], dmg, atol=1E-4)
# test no intersection
dmg = extract(self.calc.datastore, 'agg_damages/structural?'
'taxonomy=RM&CRESTA=01.1')
dmg = extract(self.calc.datastore, 'agg_damages?taxonomy=RM&CRESTA=01.1')
self.assertEqual(dmg.shape, ())

# missing fragility functions
@@ -79,16 +77,15 @@ def test_case_1c(self):
[fname] = export(('damages-rlzs', 'csv'), self.calc.datastore)
self.assertEqualFiles('expected/' + strip_calc_id(fname), fname)
df = self.calc.datastore.read_df('damages-rlzs', 'asset_id')
self.assertEqual(list(df.columns),
['rlz', 'loss_type', 'dmg_state', 'value'])
self.assertEqual(list(df.columns), ['rlz', 'dmg_state', 'value'])

# check risk_by_event
[fname] = export(('risk_by_event', 'csv'), self.calc.datastore)
self.assertEqualFiles('expected/' + strip_calc_id(fname), fname,
delta=1E-5)

# check agg_damages extraction
total = extract(self.calc.datastore, 'agg_damages/structural')
total = extract(self.calc.datastore, 'agg_damages')

aac(total, [[27652.219, 28132.8, 9511.933, 2870.9312, 11832.913]],
atol=.1)
@@ -138,15 +135,15 @@ def test_case_5(self):
def test_case_5a(self):
# this is a case with two gsims and one asset
self.assert_ok(case_5a, 'job_haz.ini,job_risk.ini')
dmg = extract(self.calc.datastore, 'agg_damages/structural?taxonomy=*')
dmg = extract(self.calc.datastore, 'agg_damages?taxonomy=*')
self.assertEqual(dmg.array.shape, (1, 2, 5)) # (T, R, D)
aac(dmg.array[0].sum(axis=0),
[0.68951, 0.623331, 0.305033, 0.155678, 0.22645], atol=1E-5)

def test_case_6(self):
# this is a case with 5 assets on the same point
self.assert_ok(case_6, 'job_h.ini,job_r.ini')
dmg = extract(self.calc.datastore, 'agg_damages/structural?taxonomy=*')
dmg = extract(self.calc.datastore, 'agg_damages?taxonomy=*')
tmpname = write_csv(None, dmg, fmt='%.5E') # (T, R, D) == (5, 1, 5)
self.assertEqualFiles('expected/dmg_by_taxon.csv', tmpname,
delta=1E-5)
@@ -165,7 +162,7 @@ def test_case_7(self):
'risk_by_event', ['event_id', 'loss_id', 'agg_id'],
dict(agg_id=K))
self.assertEqual(len(df), 300)
self.assertEqual(len(df[df.dmg_1 > 0]), 72) # only 72/300 are nonzero
self.assertEqual(len(df[df.dmg_1 > 0]), 174) # only 174/300 are nonzero

def test_case_8(self):
# case with a shakemap
12 changes: 5 additions & 7 deletions openquake/calculators/views.py
@@ -560,9 +560,8 @@ def portfolio_dmgdist(token, dstore):
oq = dstore['oqparam']
dstates = ['no_damage'] + oq.limit_states
D = len(dstates)
arr = dstore['damages-rlzs'][:, 0, :, :D].sum(axis=0) # shape (L, D)
tbl = numpy.zeros(len(arr), dt(['loss_type', 'total'] + dstates))
tbl['loss_type'] = oq.loss_types
arr = dstore['damages-rlzs'][:, 0, :D].sum(axis=0) # shape D
tbl = numpy.zeros(len(arr), dt(['total'] + dstates))
tbl['total'] = arr.sum(axis=1)
for dsi, ds in enumerate(dstates):
tbl[ds] = arr[:, dsi]
@@ -584,15 +583,14 @@ def view_portfolio_damage(token, dstore):
del df['agg_id']
del df['return_period']
return df.set_index('loss_type')
# dimensions assets, stat, loss_types, dmg_state
# dimensions assets, stat, dmg_state
if 'damages-stats' in dstore:
attrs = get_shape_descr(dstore['damages-stats'].attrs['json'])
arr = dstore.sel('damages-stats', stat='mean').sum(axis=(0, 1))
else:
attrs = get_shape_descr(dstore['damages-rlzs'].attrs['json'])
arr = dstore.sel('damages-rlzs', rlz=0).sum(axis=(0, 1))
rows = [(lt,) + tuple(row) for lt, row in zip(attrs['loss_type'], arr)]
return numpy.array(rows, dt(['loss_type'] + list(attrs['dmg_state'])))
arr = dstore.sel('damages-rlzs', rlz=0).sum(axis=(0, 1)) # shape D
return numpy.array(arr, dt(list(attrs['dmg_state'])))


def sum_table(records):
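After these changes the portfolio damage views carry no loss_type column: the damages-rlzs array (now shaped (A, R, D)) is summed over assets and reported per damage state only. A minimal numpy sketch of that reduction, with invented numbers and state names:

import numpy

F32 = numpy.float32
dmg_states = ['no_damage', 'moderate', 'complete']
# damages-rlzs for rlz=0, i.e. shape (A, D) after this PR
damages = numpy.array([[8., 1.5, 0.5],
                       [6., 3.0, 1.0]], F32)

row = damages.sum(axis=0)                     # one value per damage state
tbl = numpy.array([tuple(row)],
                  numpy.dtype([(ds, F32) for ds in dmg_states]))
print(tbl)                                    # [(14., 4.5, 1.5)]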
@@ -1,3 +1,3 @@
#,,,,,,,,"generated_by='OpenQuake engine 3.13.0-git63b4e49e3c', start_date='2022-01-04T10:03:02', checksum=2908959360, investigation_time=50.0, risk_investigation_time=100.0"
asset_id,taxonomy,lon,lat,structural~no_damage,structural~slight,structural~moderate,structural~extensive,structural~complete
#,,,,,,,,"generated_by='OpenQuake engine 3.22.0-gitcb9ea472a1', start_date='2024-10-23T02:12:41', checksum=2021864103, investigation_time=50.0, risk_investigation_time=100.0"
asset_id,taxonomy,lon,lat,no_damage,slight,moderate,extensive,complete
a1,W,83.31382,29.46117,5.834455E-01,1.166002E-01,1.639836E-01,6.452727E-02,7.144331E-02