def export_avg_losses(ekey, dstore):
    """
    Export the average losses as CSV, one file per realization or statistic.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    dskey = ekey[0]
    oqparam = dstore['oqparam']
    loss_dt = oqparam.loss_dt()
    assets = get_assets(dstore)
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    name, kind = dskey.split('-')
    if kind == 'stats':
        weights = dstore['csm_info'].rlzs['weight']
        tags, statfuncs = zip(*oqparam.risk_stats())
        if dskey in dstore:  # already stored
            data = dstore[dskey].value
        else:  # recompute the statistics on the fly from the rlzs
            data = compute_stats2(
                dstore['avg_losses-rlzs'].value, statfuncs, weights)
    else:  # kind == 'rlzs'; data has shape (A, R, LI)
        data = dstore[dskey].value
        tags = ['rlz-%03d' % r for r in range(data.shape[1])]
    # write one file per tag (realization or statistic)
    for tag, values in zip(tags, data.transpose(1, 0, 2)):
        dest = dstore.build_fname(name, tag, 'csv')
        arr = numpy.zeros(len(values), loss_dt)
        for idx, ltype in enumerate(loss_dt.names):
            arr[ltype] = values[:, idx]
        writer.save(compose_arrays(assets, arr), dest)
    return writer.getsaved()
def pair(self, array, stats):
    """
    :return (array, array_stats) if stats, else (array, None)
    """
    # nothing to compute with a single weight or without statistics
    if not stats or len(self.weights) <= 1:
        return array, None
    _statnames, statfuncs = zip(*stats)
    return array, compute_stats2(array, statfuncs, self.weights)
def execute(self):
    """
    Build the loss curves per realization (rcurves-rlzs) and, when there
    is more than one realization, the statistical outputs
    avg_losses-stats and rcurves-stats; finally build the aggregate loss
    curve if an agg_loss_table is present in the datastore.
    """
    A = len(self.assetcol)  # number of assets
    ltypes = self.riskmodel.loss_types
    # I is 1 or 2 — assumes insured_losses is a boolean flag (TODO confirm)
    I = self.oqparam.insured_losses + 1
    R = len(self.rlzs_assoc.realizations)
    self.vals = self.assetcol.values()
    # loss curves
    # composite dtype with one field per loss type, each holding a
    # float32 vector as long as that loss type's curve ratios
    multi_lr_dt = numpy.dtype(
        [(ltype, (F32, len(cbuilder.ratios))) for ltype, cbuilder in zip(
            ltypes, self.riskmodel.curve_builders)])
    rcurves = numpy.zeros((A, R, I), multi_lr_dt)
    # build rcurves-rlzs
    if self.oqparam.loss_ratios:
        assets = list(self.assetcol)
        cb_inputs = self.cb_inputs('all_loss_ratios')
        mon = self.monitor('build_rcurves')
        res = parallel.apply(
            build_rcurves, (cb_inputs, assets, mon)).reduce()
        # res keys are pairs (loss type index, realization index)
        for l, r in res:
            aids, curves = res[l, r]
            rcurves[ltypes[l]][aids, r] = curves
        self.datastore['rcurves-rlzs'] = rcurves
    # build rcurves-stats (sequentially)
    # this is a fundamental output, being used to compute loss_maps-stats
    if R > 1:
        weights = self.datastore['realizations']['weight']
        quantiles = self.oqparam.quantile_loss_curves
        with self.monitor('computing avg_losses-stats'):
            self.datastore['avg_losses-stats'] = compute_stats2(
                self.datastore['avg_losses-rlzs'], quantiles, weights)
        with self.monitor('computing rcurves-stats'):
            self.datastore['rcurves-stats'] = compute_stats2(
                rcurves, quantiles, weights)
    # build an aggregate loss curve per realization
    if 'agg_loss_table' in self.datastore:
        with self.monitor('building agg_curve'):
            self.build_agg_curve()
def view_mean_avg_losses(token, dstore):
    """
    Show the mean of the average losses per asset as an RST table,
    sorted on the asset fields.
    """
    loss_dt = dstore['oqparam'].loss_dt()
    weights = dstore['realizations']['weight']
    avg_losses = dstore['avg_losses-rlzs'].value  # shape (N, R)
    if len(weights) == 1:  # one realization: its values are the mean
        mean = avg_losses[:, 0]
    else:
        mean = hstats.compute_stats2(
            avg_losses, [hstats.mean_curve], weights)[:, 0]
    data = numpy.array([tuple(row) for row in mean], loss_dt)
    losses = util.compose_arrays(util.get_assets(dstore), data)
    losses.sort()
    return rst_table(losses, fmt=FIVEDIGITS)
def post_execute(self, result):
    """
    Store bcr-rlzs and, when there is more than one realization,
    bcr-stats in the datastore.

    :param result: a dictionary (aid, loss type, rlz index) -> data
    """
    bcr_data = numpy.zeros((self.N, self.R), self.oqparam.loss_dt(bcr_dt))
    for (aid, lt, r), data in result.items():
        bcr_data[lt][aid, r] = data
    self.datastore['bcr-rlzs'] = bcr_data
    weights = [rlz.weight for rlz in self.rlzs_assoc.realizations]
    if len(weights) <= 1:
        return  # no statistics with a single realization
    _snames, sfuncs = zip(*self.oqparam.risk_stats())
    bcr_stats = numpy.zeros(
        (self.N, len(sfuncs)), self.oqparam.loss_dt(bcr_dt))
    # compute the statistics field by field (one field per loss type)
    for lt in bcr_data.dtype.names:
        bcr_stats[lt] = compute_stats2(bcr_data[lt], sfuncs, weights)
    self.datastore['bcr-stats'] = bcr_stats
def _get_data(dstore, dskey, stats): name, kind = dskey.split('-') # i.e. ('avg_losses', 'stats') if kind == 'stats': weights = dstore['weights'][()] tags, stats = zip(*stats) if dskey in set(dstore): # precomputed value = dstore[dskey][()] # shape (A, S, LI) else: # computed on the fly value = compute_stats2(dstore[name + '-rlzs'][()], stats, weights) else: # rlzs value = dstore[dskey][()] # shape (A, R, LI) R = value.shape[1] tags = ['rlz-%03d' % r for r in range(R)] return name, value, tags
def post_execute(self, result):
    """
    Store bcr-rlzs and, when there is more than one realization,
    bcr-stats in the datastore.

    NB: defined only for loss_type = 'structural'.

    :param result: a dictionary aid -> array of shape (R, 3) whose
        columns are annual_loss_orig, annual_loss_retro and bcr
    """
    bcr_data = numpy.zeros((self.A, self.R), bcr_dt)
    for aid, data in result.items():
        bcr_data[aid]['annual_loss_orig'] = data[:, 0]
        bcr_data[aid]['annual_loss_retro'] = data[:, 1]
        bcr_data[aid]['bcr'] = data[:, 2]
    self.datastore['bcr-rlzs'] = bcr_data
    weights = [rlz.weight for rlz in self.rlzs_assoc.realizations]
    if len(weights) > 1:
        # NB: removed a dead numpy.zeros allocation that was immediately
        # overwritten by the compute_stats2 result
        _snames, sfuncs = zip(*self.oqparam.risk_stats())
        self.datastore['bcr-stats'] = compute_stats2(
            bcr_data, sfuncs, weights)
def _get_data(dstore, dskey, stats): name, kind = dskey.split('-') # i.e. ('avg_losses', 'stats') if kind == 'stats': weights = dstore['weights'].value tags, stats = zip(*stats) if dskey in set(dstore): # precomputed value = dstore[dskey].value # shape (A, S, LI) else: # computed on the fly value = compute_stats2( dstore[name + '-rlzs'].value, stats, weights) else: # rlzs value = dstore[dskey].value # shape (A, R, LI) R = value.shape[1] tags = ['rlz-%03d' % r for r in range(R)] return name, value, tags
def _get_data(dstore, dskey, stats): name, kind = dskey.split('-') # i.e. ('avg_losses', 'stats') if kind == 'stats': weights = dstore['weights'][()] if dskey in set(dstore): # precomputed tags = [decode(s) for s in dstore.get_attr(dskey, 'stats')] statfuncs = [stats[tag] for tag in tags] value = dstore[dskey][()] # shape (A, S, LI) else: # computed on the fly tags, statfuncs = zip(*stats.items()) value = compute_stats2(dstore[name + '-rlzs'][()], statfuncs, weights) else: # rlzs value = dstore[dskey][()] # shape (A, R, LI) R = value.shape[1] tags = ['rlz-%03d' % r for r in range(R)] return name, value, tags
def _get_data(dstore, dskey, stats): name, kind = dskey.split('-') # i.e. ('avg_losses', 'stats') if kind == 'stats': weights = dstore['weights'][()] if dskey in set(dstore): # precomputed rlzs_or_stats = list(stats) statfuncs = [stats[ros] for ros in stats] value = dstore[dskey][()] # shape (A, S, LI) else: # compute on the fly rlzs_or_stats, statfuncs = zip(*stats.items()) value = compute_stats2( dstore[name + '-rlzs'][()], statfuncs, weights) else: # rlzs value = dstore[dskey][()] # shape (A, R, LI) R = value.shape[1] rlzs_or_stats = ['rlz-%03d' % r for r in range(R)] return name, value, rlzs_or_stats
def post_execute(self, result):
    """
    Store damages-rlzs and, when there is more than one realization,
    damages-stats in the datastore.

    :param result: a dictionary r -> aid -> fractions per damage state
    """
    damages_dt = numpy.dtype([(ds, numpy.float32)
                              for ds in self.riskmodel.damage_states])
    damages = numpy.zeros((self.A, self.R), damages_dt)
    for r, fractions_by_aid in result.items():
        for aid, fractions in fractions_by_aid.items():
            damages[aid, r] = tuple(fractions)
    self.datastore['damages-rlzs'] = damages
    weights = [rlz.weight for rlz in self.rlzs_assoc.realizations]
    if len(weights) > 1:  # compute stats
        _snames, sfuncs = zip(*self.oqparam.risk_stats())
        self.datastore['damages-stats'] = compute_stats2(
            damages, sfuncs, weights)
def export_losses_by_taxon_csv(ekey, dstore):
    """
    Export the losses by taxonomy as CSV, one file per realization or
    statistic.
    """
    oq = dstore['oqparam']
    taxonomies = add_quotes(dstore['assetcol/taxonomies'].value)
    rlzs = dstore['csm_info'].get_rlzs_assoc().realizations
    loss_types = oq.loss_dt().names
    key, kind = ekey[0].split('-')
    data = dstore[key + '-rlzs'].value
    if kind == 'stats':
        weights = dstore['realizations']['weight']
        tags, statfuncs = zip(*oq.risk_stats())
        data = compute_stats2(data, statfuncs, weights)
    else:  # rlzs
        tags = rlzs
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    out_dt = numpy.dtype(
        [('taxonomy', taxonomies.dtype)] + oq.loss_dt_list())
    # one output file per tag (realization or statistic)
    for tag, values in zip(tags, data.transpose(1, 0, 2)):
        fname = dstore.build_fname(key, tag, ekey[1])
        rec = numpy.zeros(len(values), out_dt)
        rec['taxonomy'] = taxonomies
        for idx, ltype in enumerate(loss_types):
            rec[ltype] = values[:, idx]
        writer.save(rec, fname)
    return writer.getsaved()