Example #1
0
    def post_execute(self, result):
        """
        Saving loss curves in the datastore.

        :param result: aggregated result of the task classical_risk;
            a dict with keys 'loss_curves' and 'stat_curves'
        """
        # curve resolution per loss type, restricted to the user-provided
        # curve builders
        loss_ratios = {
            cb.loss_type: cb.curve_resolution
            for cb in self.riskmodel.curve_builder if cb.user_provided
        }
        self.loss_curve_dt, _ = scientific.build_loss_dtypes(
            loss_ratios, self.oqparam.conditional_loss_poes, self.I)
        ltypes = self.riskmodel.loss_types
        # composite array indexed by (asset, realization); presumably
        # N = #assets and R = #realizations -- TODO confirm against the class
        loss_curves = numpy.zeros((self.N, self.R), self.loss_curve_dt)
        for l, r, aid, lcurve in result['loss_curves']:
            # view on the subarray for the l-th loss type
            loss_curves_lt = loss_curves[ltypes[l]]
            for i, name in enumerate(loss_curves_lt.dtype.names):
                if name.startswith('avg'):
                    # scalar average loss, assigned directly
                    loss_curves_lt[name][aid, r] = lcurve[i]
                else:  # 'losses', 'poes'
                    # array-valued field; base.set_array copies lcurve[i]
                    # into the preallocated slot (semantics of set_array
                    # not visible here -- verify in base module)
                    base.set_array(loss_curves_lt[name][aid, r], lcurve[i])
        self.datastore['loss_curves-rlzs'] = loss_curves
        self.datastore.set_nbytes('loss_curves-rlzs')

        # loss curves stats, only meaningful with more than one realization
        if self.R > 1:
            # S is presumably the number of statistics -- TODO confirm
            stat_curves = numpy.zeros((self.N, self.S), self.loss_curve_dt)
            for l, aid, losses, statpoes, statloss in result['stat_curves']:
                stat_curves_lt = stat_curves[ltypes[l]]
                for s in range(self.S):
                    stat_curves_lt['avg'][aid, s] = statloss[s]
                    base.set_array(stat_curves_lt['poes'][aid, s], statpoes[s])
                    base.set_array(stat_curves_lt['losses'][aid, s], losses)
            self.datastore['loss_curves-stats'] = stat_curves
            self.datastore.set_nbytes('loss_curves-stats')
Example #2
0
def get_loss_maps(dstore, kind):
    """
    Extract the loss maps of the given kind from the datastore.

    :param dstore: a DataStore instance
    :param kind: 'rlzs' or 'stats'
    :returns: a composite array of loss maps indexed by (asset, realization)
    :raises KeyError:
        if the datastore contains neither ``rcurves-<kind>`` nor
        ``loss_curves-<kind>``
    """
    oq = dstore['oqparam']
    name = 'rcurves-%s' % kind
    if name in dstore:  # event_based risk
        values = dstore['assetcol'].values()
        _, loss_maps_dt = scientific.build_loss_dtypes(
            {lt: len(oq.loss_ratios[lt]) for lt in oq.loss_ratios},
            oq.conditional_loss_poes, oq.insured_losses)
        rcurves = dstore[name].value  # to support Ubuntu 14
        A, R, I = rcurves.shape
        # suffix selecting the plain ('') or insured ('_ins') field
        ins = ['', '_ins']
        loss_maps = numpy.zeros((A, R), loss_maps_dt)
        for ltype, lratios in oq.loss_ratios.items():
            for (a, r, i) in indices(A, R, I):
                rcurve = rcurves[ltype][a, r, i]
                # absolute losses = ratios * asset value
                losses = numpy.array(lratios) * values[ltype][a]
                tup = tuple(
                    scientific.conditional_loss_ratio(losses, rcurve, poe)
                    for poe in oq.conditional_loss_poes)
                loss_maps[ltype + ins[i]][a, r] = tup
        return loss_maps
    name = 'loss_curves-%s' % kind
    if name in dstore:  # classical_risk
        return scientific.broadcast(
            scientific.loss_maps, dstore[name], oq.conditional_loss_poes)
    # previously this fell through with `loss_curves` unbound, dying
    # with a confusing NameError; fail explicitly instead
    raise KeyError('No rcurves-%s or loss_curves-%s in the datastore'
                   % (kind, kind))
Example #3
0
    def post_execute(self, result):
        """
        Save the losses in a compact form.
        """
        # collect the curve resolution of each user-provided curve builder,
        # keyed by loss type
        resolutions = {}
        for builder in self.riskmodel.curve_builders:
            if builder.user_provided:
                resolutions[builder.loss_type] = builder.curve_resolution
        # build the composite dtypes for loss curves and loss maps
        dtypes = scientific.build_loss_dtypes(
            resolutions, self.oqparam.conditional_loss_poes, self.I)
        self.loss_curve_dt, self.loss_maps_dt = dtypes
        self.save_loss_curves(result)
Example #4
0
    def build_agg_curve(self):
        """
        Build a single loss curve per realization. It is NOT obtained
        by aggregating the loss curves; instead, it is obtained without
        generating the loss curves, directly from the aggregate losses.
        """
        oq = self.oqparam
        # curve resolution per loss type
        cr = {
            cb.loss_type: cb.curve_resolution
            for cb in self.riskmodel.curve_builder
        }
        loss_curve_dt, _ = scientific.build_loss_dtypes(
            cr, oq.conditional_loss_poes)
        lts = self.riskmodel.loss_types
        cb_inputs = self.cb_inputs('agg_loss_table')
        I = oq.insured_losses + 1  # 2 if insured losses are computed, else 1
        R = len(self.rlzs_assoc.realizations)
        # NB: using the Processmap since celery is hanging; the computation
        # is fast anyway and this part will likely be removed in the future
        result = parallel.Processmap.apply(
            build_agg_curve, (cb_inputs, self.monitor('')),
            concurrent_tasks=self.oqparam.concurrent_tasks).reduce()
        agg_curve = numpy.zeros((I, R), loss_curve_dt)
        for l, r, i in result:
            agg_curve[lts[l]][i, r] = result[l, r, i]
        self.datastore['agg_curve-rlzs'] = agg_curve

        if R > 1:  # save stats too
            # risk_stats() yields (name, stat) pairs; only the stat
            # callables are needed here
            _statnames, stats = zip(*oq.risk_stats())
            weights = self.datastore['realizations']['weight']
            agg_curve_stats = numpy.zeros((I, len(stats)), agg_curve.dtype)
            # iterate the loss types directly (the old enumerate index
            # was unused)
            for loss_type in agg_curve.dtype.names:
                acs = agg_curve_stats[loss_type]
                data = agg_curve[loss_type]
                for i in range(I):
                    avg = data['avg'][i]
                    losses, all_poes = scientific.normalize_curves_eb([
                        (c['losses'], c['poes']) for c in data[i]
                    ])
                    acs['losses'][i] = losses
                    acs['poes'][i] = compute_stats(all_poes, stats, weights)
                    acs['avg'][i] = compute_stats(avg, stats, weights)

            self.datastore['agg_curve-stats'] = agg_curve_stats
Example #5
0
    def build_agg_curve(self):
        """
        Build a single loss curve per realization. It is NOT obtained
        by aggregating the loss curves; instead, it is obtained without
        generating the loss curves, directly from the aggregate losses.
        """
        oq = self.oqparam
        # curve resolution per loss type
        cr = {cb.loss_type: cb.curve_resolution
              for cb in self.riskmodel.curve_builders}
        loss_curve_dt, _ = scientific.build_loss_dtypes(
            cr, oq.conditional_loss_poes)
        lts = self.riskmodel.loss_types
        cb_inputs = self.cb_inputs('agg_loss_table')
        I = oq.insured_losses + 1  # 2 if insured losses are computed, else 1
        R = len(self.rlzs_assoc.realizations)
        result = parallel.apply(
            build_agg_curve, (cb_inputs, self.monitor('')),
            concurrent_tasks=self.oqparam.concurrent_tasks).reduce()
        agg_curve = numpy.zeros((I, R), loss_curve_dt)
        for l, r, i in result:
            agg_curve[lts[l]][i, r] = result[l, r, i]
        self.datastore['agg_curve-rlzs'] = agg_curve

        if R > 1:  # save stats too
            weights = self.datastore['realizations']['weight']
            # mean + the requested quantiles
            Q1 = len(oq.quantile_loss_curves) + 1
            agg_curve_stats = numpy.zeros((I, Q1), agg_curve.dtype)
            # iterate the loss types directly (the old enumerate index
            # was unused)
            for loss_type in agg_curve.dtype.names:
                acs = agg_curve_stats[loss_type]
                data = agg_curve[loss_type]
                for i in range(I):
                    losses, all_poes = scientific.normalize_curves_eb(
                        [(c['losses'], c['poes']) for c in data[i]])
                    acs['losses'][i] = losses
                    acs['poes'][i] = compute_stats(
                        all_poes, oq.quantile_loss_curves, weights)
                    acs['avg'][i] = compute_stats(
                        data['avg'][i], oq.quantile_loss_curves, weights)

            self.datastore['agg_curve-stats'] = agg_curve_stats