def post_execute(self, result):
    """
    Saving loss curves in the datastore.

    :param result: aggregated result of the task classical_risk, a dict
        with keys 'loss_curves' and 'stat_curves'
    """
    # curve resolution per loss type, only for user-provided curves
    loss_ratios = {cb.loss_type: cb.curve_resolution
                   for cb in self.riskmodel.curve_builder
                   if cb.user_provided}
    # composite dtype with one field per loss type
    # (presumably with subfields 'losses', 'poes', 'avg...' — see the
    # name-based dispatch below; confirm against build_loss_curve_dt)
    self.loss_curve_dt = scientific.build_loss_curve_dt(
        loss_ratios, self.oqparam.conditional_loss_poes, self.I)
    ltypes = self.riskmodel.loss_types
    # loss curves per asset (N) and realization (R)
    loss_curves = numpy.zeros((self.N, self.R), self.loss_curve_dt)
    for l, r, aid, lcurve in result['loss_curves']:
        loss_curves_lt = loss_curves[ltypes[l]]
        # NOTE(review): relies on lcurve's tuple order matching
        # loss_curves_lt.dtype.names order — TODO confirm in the task code
        for i, name in enumerate(loss_curves_lt.dtype.names):
            if name.startswith('avg'):
                # scalar average loss, assigned directly
                loss_curves_lt[name][aid, r] = lcurve[i]
            else:  # 'losses', 'poes'
                # array-valued fields filled in place
                base.set_array(loss_curves_lt[name][aid, r], lcurve[i])
    self.datastore['loss_curves-rlzs'] = loss_curves
    self.datastore.set_nbytes('loss_curves-rlzs')

    # loss curves stats, only if there is more than one realization
    if self.R > 1:
        stat_curves = numpy.zeros((self.N, self.S), self.loss_curve_dt)
        for l, aid, losses, statpoes, statloss in result['stat_curves']:
            stat_curves_lt = stat_curves[ltypes[l]]
            for s in range(self.S):
                stat_curves_lt['avg'][aid, s] = statloss[s]
                base.set_array(stat_curves_lt['poes'][aid, s], statpoes[s])
                # the loss abscissa is shared by all statistics
                base.set_array(stat_curves_lt['losses'][aid, s], losses)
        self.datastore['loss_curves-stats'] = stat_curves
        self.datastore.set_nbytes('loss_curves-stats')
def post_execute(self, result):
    """
    Save loss curves and average losses in the datastore.

    :param result: aggregated result of the task classical_risk,
        a dict with keys 'stat_curves' and 'loss_curves'
    """
    # curve resolution per loss type, restricted to user-provided curves
    resolutions = {}
    for cp in self.crmodel.curve_params:
        if cp.user_provided:
            resolutions[cp.loss_type] = cp.curve_resolution
    self.loss_curve_dt = scientific.build_loss_curve_dt(
        resolutions, insured_losses=False)
    ltypes = self.crmodel.loss_types

    # the statistical outputs are generated in any case
    stats = list(self.oqparam.hazard_stats())
    stat_curves = numpy.zeros((self.A, self.S), self.loss_curve_dt)
    stat_avg = numpy.zeros((self.A, self.S, self.L), F32)
    for lti, aid, losses, statpoes, statloss in result['stat_curves']:
        curves_for_lt = stat_curves[ltypes[lti]]
        for s in range(self.S):
            stat_avg[aid, s, lti] = statloss[s]
            base.set_array(curves_for_lt['poes'][aid, s], statpoes[s])
            # the loss abscissa is shared across the statistics
            base.set_array(curves_for_lt['losses'][aid, s], losses)
    self.datastore['avg_losses-stats'] = stat_avg
    self.datastore.set_shape_descr('avg_losses-stats',
                                   asset_id=self.assetcol['id'],
                                   stat=stats,
                                   loss_type=self.oqparam.loss_types)
    self.datastore['loss_curves-stats'] = stat_curves
    self.datastore.set_attrs('loss_curves-stats', stat=stats)

    if self.R > 1:  # individual realizations saved only if many
        rlz_curves = numpy.zeros((self.A, self.R), self.loss_curve_dt)
        rlz_avg = numpy.zeros((self.A, self.R, self.L), F32)
        for lti, rlz, aid, (losses, poes, avg) in result['loss_curves']:
            rec = rlz_curves[aid, rlz][ltypes[lti]]
            rlz_avg[aid, rlz, lti] = avg
            base.set_array(rec['losses'], losses)
            base.set_array(rec['poes'], poes)
        self.datastore['avg_losses-rlzs'] = rlz_avg
        self.datastore.set_shape_descr('avg_losses-rlzs',
                                       asset_id=self.assetcol['id'],
                                       rlz=numpy.arange(self.R),
                                       loss_type=self.oqparam.loss_types)
        self.datastore['loss_curves-rlzs'] = rlz_curves
def build_agg_curve(self):
    """
    Build a single loss curve per realization. It is NOT obtained
    by aggregating the loss curves; instead, it is obtained without
    generating the loss curves, directly from the aggregate losses.
    """
    oq = self.oqparam
    weights = self.datastore['realizations']['weight']
    # curve resolution per loss type (here all curve builders are used,
    # not only the user-provided ones)
    cr = {cb.loss_type: cb.curve_resolution
          for cb in self.riskmodel.curve_builder}
    loss_curve_dt = scientific.build_loss_curve_dt(
        cr, oq.conditional_loss_poes, oq.insured_losses)
    cb_inputs = self.cb_inputs('agg_loss_table')
    R = len(weights)  # number of realizations
    # NB: using the Processmap since celery is hanging; the computation
    # is fast anyway and this part will likely be removed in the future
    result = parallel.Processmap.apply(
        build_agg_curve, (cb_inputs, self.monitor('')),
        concurrent_tasks=self.oqparam.concurrent_tasks).reduce()
    agg_curve = numpy.zeros(R, loss_curve_dt)
    # result is keyed by (loss type index, realization index);
    # each value fills the corresponding loss-type field of one record
    for li, r in result:
        lt = loss_curve_dt.names[li]
        agg_curve[r][lt] = result[li, r]
    self.datastore['agg_curve-rlzs'] = agg_curve

    if R > 1:  # save stats too
        statnames, stats = zip(*oq.risk_stats())
        agg_curve_stats = numpy.zeros(len(stats), agg_curve.dtype)
        for loss_type in agg_curve.dtype.names:
            acs = agg_curve_stats[loss_type]
            data = agg_curve[loss_type]
            avg = data['avg']
            # put all realization curves on a common loss abscissa
            # before computing the weighted statistics of the poes
            losses, all_poes = scientific.normalize_curves_eb(
                [(c['losses'], c['poes']) for c in data])
            acs['losses'] = losses
            acs['poes'] = compute_stats(all_poes, stats, weights)
            acs['avg'] = compute_stats(avg, stats, weights)
        self.datastore['agg_curve-stats'] = agg_curve_stats
def post_execute(self, result):
    """
    Save loss curves and average losses in the datastore.

    :param result: aggregated result of the task classical_risk,
        a dict with keys 'stat_curves' and 'loss_curves'
    """
    # curve resolution per loss type, only for user-provided curves
    curve_res = {cp.loss_type: cp.curve_resolution
                 for cp in self.riskmodel.curve_params
                 if cp.user_provided}
    self.loss_curve_dt = scientific.build_loss_curve_dt(
        curve_res, self.oqparam.insured_losses)
    ltypes = self.riskmodel.loss_types

    # loss curves stats are generated always
    stats = [encode(n) for (n, f) in self.oqparam.risk_stats()]
    stat_curves = numpy.zeros((self.A, self.S), self.loss_curve_dt)
    # BUG FIX: this array is indexed by statistics (s in range(self.S)),
    # so its second dimension must be S, not R as before; with S != R the
    # old code either raised IndexError or stored a wrongly-shaped dataset
    avg_losses = numpy.zeros((self.A, self.S, self.L * self.I), F32)
    for l, a, losses, statpoes, statloss in result['stat_curves']:
        stat_curves_lt = stat_curves[ltypes[l]]
        for s in range(self.S):
            avg_losses[a, s, l] = statloss[s]
            base.set_array(stat_curves_lt['poes'][a, s], statpoes[s])
            # the loss abscissa is shared across the statistics
            base.set_array(stat_curves_lt['losses'][a, s], losses)
    self.datastore['avg_losses-stats'] = avg_losses
    self.datastore.set_attrs('avg_losses-stats', stats=stats)
    self.datastore['loss_curves-stats'] = stat_curves
    self.datastore.set_attrs('loss_curves-stats', stats=stats)

    if self.R > 1:  # individual realizations saved only if many
        loss_curves = numpy.zeros((self.A, self.R), self.loss_curve_dt)
        avg_losses = numpy.zeros((self.A, self.R, self.L * self.I), F32)
        for l, r, a, (losses, poes, avg) in result['loss_curves']:
            lc = loss_curves[a, r][ltypes[l]]
            avg_losses[a, r, l] = avg
            base.set_array(lc['losses'], losses)
            base.set_array(lc['poes'], poes)
        self.datastore['avg_losses-rlzs'] = avg_losses
        self.datastore['loss_curves-rlzs'] = loss_curves
def post_execute(self, result):
    """
    Save loss curves and average losses in the datastore.

    :param result: aggregated result of the task classical_risk,
        a dict with keys 'stat_curves' and 'loss_curves'
    """
    # curve resolution per loss type, restricted to user-provided curves
    resolution_by_lt = {
        param.loss_type: param.curve_resolution
        for param in self.riskmodel.curve_params if param.user_provided}
    self.loss_curve_dt = scientific.build_loss_curve_dt(
        resolution_by_lt, insured_losses=False)
    ltypes = self.riskmodel.loss_types

    # the statistical outputs are computed in any case
    stats = encode(list(self.oqparam.hazard_stats()))
    stat_curves = numpy.zeros((self.A, self.S), self.loss_curve_dt)
    stat_avg = numpy.zeros((self.A, self.S, self.L), F32)
    for lti, aid, losses, statpoes, statloss in result['stat_curves']:
        curves_lt = stat_curves[ltypes[lti]]
        for sidx in range(self.S):
            stat_avg[aid, sidx, lti] = statloss[sidx]
            base.set_array(curves_lt['poes'][aid, sidx], statpoes[sidx])
            # the loss abscissa is shared across the statistics
            base.set_array(curves_lt['losses'][aid, sidx], losses)
    self.datastore['avg_losses-stats'] = stat_avg
    self.datastore.set_attrs('avg_losses-stats', stats=stats)
    self.datastore['loss_curves-stats'] = stat_curves
    self.datastore.set_attrs('loss_curves-stats', stats=stats)

    if self.R > 1:  # individual realizations saved only if many
        rlz_curves = numpy.zeros((self.A, self.R), self.loss_curve_dt)
        rlz_avg = numpy.zeros((self.A, self.R, self.L), F32)
        for lti, rlz, aid, (losses, poes, avg) in result['loss_curves']:
            rec = rlz_curves[aid, rlz][ltypes[lti]]
            rlz_avg[aid, rlz, lti] = avg
            base.set_array(rec['losses'], losses)
            base.set_array(rec['poes'], poes)
        self.datastore['avg_losses-rlzs'] = rlz_avg
        self.datastore['loss_curves-rlzs'] = rlz_curves