Ejemplo n.º 1
0
def classical_risk(riskinput, riskmodel, param, monitor):
    """
    Compute and return the average losses for each asset.

    :param riskinput:
        a :class:`openquake.risklib.riskinput.RiskInput` object
    :param riskmodel:
        a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance
    :param param:
        dictionary of extra parameters
    :param monitor:
        :class:`openquake.baselib.performance.Monitor` instance
    :returns:
        a dict with keys 'loss_curves' and 'stat_curves'
    """
    ins = param['insured_losses']
    result = dict(loss_curves=[], stat_curves=[])
    # materialize the outputs: they are iterated twice (curves, then stats)
    all_outputs = list(riskmodel.gen_outputs(riskinput, monitor))
    for outputs in all_outputs:
        r = outputs.r  # realization index
        outputs.average_losses = AccumDict(accum=[])  # l -> array
        for l, (loss_curves, insured_curves) in enumerate(outputs):
            for i, asset in enumerate(outputs.assets):
                aid = asset.ordinal
                avg = scientific.average_loss(loss_curves[i])
                outputs.average_losses[l].append(avg)
                # lcurve is (losses, poes, avg, ins_losses, ins_poes, ins_avg)
                lcurve = (loss_curves[i, 0], loss_curves[i, 1], avg)
                if ins:
                    lcurve += (
                        insured_curves[i, 0], insured_curves[i, 1],
                        scientific.average_loss(insured_curves[i]))
                else:
                    lcurve += (None, None, None)
                result['loss_curves'].append((l, r, aid, lcurve))

    # compute statistics
    rlzs = riskinput.rlzs
    if len(rlzs) > 1 and param['stats']:
        w = param['weights']
        statnames, stats = zip(*param['stats'])
        l_idxs = range(len(riskmodel.lti))
        # group the outputs by the tuple of assets they refer to
        for assets, rows in groupby(
                all_outputs, lambda o: tuple(o.assets)).items():
            weights = [w[row.r] for row in rows]
            # NOTE(review): only the first output's loss levels are used
            # below — presumably identical across realizations; confirm
            row = rows[0]
            for l in l_idxs:
                for i, asset in enumerate(assets):
                    avgs = numpy.array([r.average_losses[l][i] for r in rows])
                    avg_stats = compute_stats(avgs, stats, weights)
                    # row is index by the loss type index l and row[l]
                    # is a pair loss_curves, insured_loss_curves
                    # loss_curves[i, 0] are the i-th losses,
                    # loss_curves[i, 1] are the i-th poes
                    losses = row[l][0][i, 0]
                    # the comprehension variable `row` below shadows the
                    # outer `row` only inside the comprehension (Python 3)
                    poes_stats = compute_stats(
                        numpy.array([row[l][0][i, 1] for row in rows]),
                        stats, weights)
                    result['stat_curves'].append(
                        (l, asset.ordinal, losses, poes_stats, avg_stats))
    return result
Ejemplo n.º 2
0
def classical_risk(riskinputs, riskmodel, param, monitor):
    """
    Compute and return the average losses for each asset.

    :param riskinputs:
        :class:`openquake.risklib.riskinput.RiskInput` objects
    :param riskmodel:
        a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance
    :param param:
        dictionary of extra parameters
    :param monitor:
        :class:`openquake.baselib.performance.Monitor` instance
    :returns:
        a dict with keys 'loss_curves' and 'stat_curves'
    """
    result = dict(loss_curves=[], stat_curves=[])
    for ri in riskinputs:
        # materialize the outputs: they are iterated twice (curves, stats)
        all_outputs = list(riskmodel.gen_outputs(ri, monitor))
        for outputs in all_outputs:
            r = outputs.rlzi  # realization index
            outputs.average_losses = AccumDict(accum=[])  # l -> array
            for l, loss_curves in enumerate(outputs):
                # loss_curves has shape (C, N, 2)
                for i, asset in enumerate(outputs.assets):
                    aid = asset.ordinal
                    avg = scientific.average_loss(loss_curves[:, i].T)
                    outputs.average_losses[l].append(avg)
                    lcurve = (loss_curves[:, i, 0], loss_curves[:, i, 1], avg)
                    result['loss_curves'].append((l, r, aid, lcurve))

        # compute statistics
        R = ri.hazard_getter.num_rlzs
        w = param['weights']
        statnames, stats = zip(*param['stats'])
        l_idxs = range(len(riskmodel.lti))
        # group the outputs by the tuple of assets they refer to
        for assets, outs in groupby(all_outputs,
                                    lambda o: tuple(o.assets)).items():
            weights = [w[out.rlzi] for out in outs]
            # NOTE(review): only the first output's loss levels are used
            # below — presumably identical across realizations; confirm
            out = outs[0]
            for l in l_idxs:
                for i, asset in enumerate(assets):
                    avgs = numpy.array([r.average_losses[l][i] for r in outs])
                    avg_stats = compute_stats(avgs, stats, weights)
                    # is a pair loss_curves, insured_loss_curves
                    # out[l][:, i, 0] are the i-th losses
                    # out[l][:, i, 1] are the i-th poes
                    losses = out[l][:, i, 0]
                    poes_stats = compute_stats(
                        numpy.array([out[l][:, i, 1] for out in outs]), stats,
                        weights)
                    result['stat_curves'].append(
                        (l, asset.ordinal, losses, poes_stats, avg_stats))
    # NOTE(review): R comes from the LAST riskinput and raises NameError if
    # riskinputs is empty — presumably all inputs share num_rlzs; confirm
    if R == 1:  # the realization is the same as the mean
        del result['loss_curves']
    return result
Ejemplo n.º 3
0
def classical_risk(riskinputs, param, monitor):
    """
    Compute and return the average losses for each asset.

    :param riskinputs:
        :class:`openquake.risklib.riskinput.RiskInput` objects
    :param param:
        dictionary of extra parameters
    :param monitor:
        :class:`openquake.baselib.performance.Monitor` instance
    :returns:
        a dict with keys 'loss_curves' and 'stat_curves'
    """
    crmodel = monitor.read('crmodel')
    result = {'loss_curves': [], 'stat_curves': []}
    weights = [w['default'] for w in param['weights']]
    statnames, stats = zip(*param['stats'])
    haz_mon = monitor('getting hazard', measuremem=False)
    for ri in riskinputs:
        num_assets = len(ri.asset_df)
        num_lt = len(crmodel.lti)
        R = ri.hazard_getter.num_rlzs
        # per-(rlz, loss type, asset) loss curves and average losses
        curves = numpy.zeros((R, num_lt, num_assets), object)
        averages = numpy.zeros((R, num_lt, num_assets))
        with haz_mon:
            haz = ri.hazard_getter.get_hazard()
        for taxo, asset_df in ri.asset_df.groupby('taxonomy'):
            for rlz in range(R):
                pcurve = haz.extract(rlz)
                out = crmodel.get_output(taxo, asset_df, pcurve, rlz=rlz)
                for li, loss_type in enumerate(crmodel.loss_types):
                    for i, asset in enumerate(asset_df.to_records()):
                        lc = out[loss_type][i]
                        curves[rlz, li, i] = lc
                        avg = scientific.average_loss(lc)
                        averages[rlz, li, i] = avg
                        result['loss_curves'].append(
                            (li, rlz, asset['ordinal'],
                             (lc['loss'], lc['poe'], avg)))

        # statistics across the realizations
        for li, loss_type in enumerate(crmodel.loss_types):
            for i, asset in enumerate(ri.asset_df.to_records()):
                avg_stats = compute_stats(averages[:, li, i], stats, weights)
                losses = curves[0, li, i]['loss']
                all_poes = numpy.array(
                    [curves[r, li, i]['poe'] for r in range(R)])
                poes_stats = compute_stats(all_poes, stats, weights)
                result['stat_curves'].append(
                    (li, asset['ordinal'], losses, poes_stats, avg_stats))
    if R == 1:  # the realization is the same as the mean
        del result['loss_curves']
    return result
Ejemplo n.º 4
0
def classical_risk(riskinput, riskmodel, monitor):
    """
    Compute and return the average losses for each asset.

    :param riskinput:
        a :class:`openquake.risklib.riskinput.RiskInput` object
    :param riskmodel:
        a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance
    :param monitor:
        :class:`openquake.baselib.performance.Monitor` instance
    :returns:
        a dict with keys 'loss_curves' and 'stat_curves'
    """
    oq = monitor.oqparam
    ins = oq.insured_losses
    result = dict(loss_curves=[], stat_curves=[])
    outputs = list(riskmodel.gen_outputs(riskinput, monitor))
    for out in outputs:
        l, r = out.lr  # loss type index and realization index
        for i, asset in enumerate(out.assets):
            aid = asset.ordinal
            avg = out.average_losses[i]
            avg_ins = (out.average_insured_losses[i]
                       if ins else numpy.nan)
            # lcurve is (losses, poes, avg, ins_losses, ins_poes, ins_avg)
            lcurve = (
                out.loss_curves[i, 0],
                out.loss_curves[i, 1], avg)
            if ins:
                lcurve += (
                    out.insured_curves[i, 0],
                    out.insured_curves[i, 1], avg_ins)
            else:
                lcurve += (None, None, None)
            result['loss_curves'].append((l, r, aid, lcurve))

        # compute statistics
        # NOTE(review): this block is INSIDE the `for out in outputs` loop,
        # so the statistics are recomputed and re-appended once per output;
        # other versions of this function keep it outside — confirm intent
        if len(riskinput.rlzs) > 1:
            for (l, assets), outs in groupby(outputs, by_l_assets).items():
                weights = []
                for out in outs:  # outputs with the same loss type and assets
                    weights.append(riskinput.rlzs[out.lr[1]].weight)
                for i, asset in enumerate(assets):
                    # NOTE(review): out.average_losses is used whole (not
                    # indexed by i), and `out` below refers to the last
                    # element of `outs` from the loop above — verify
                    avg_stats = compute_stats(
                        numpy.array([out.average_losses for out in outs]),
                        oq.quantile_loss_curves, weights)
                    losses = out.loss_curves[i, 0]
                    poes_stats = compute_stats(
                        numpy.array([out.loss_curves[i, 1] for out in outs]),
                        oq.quantile_loss_curves, weights)
                    result['stat_curves'].append(
                        (l, asset.ordinal, losses, poes_stats, avg_stats))

    return result
Ejemplo n.º 5
0
 def compute_pmap(self, sids, pmaps):
     """
     :params sids: array of N site IDs
     :param pmaps: array of R simple ProbabilityMaps
     :returns: a ProbabilityMap with arrays of size (num_levels, num_stats)
     """
     npmaps = len(pmaps)
     if npmaps == 0:
         raise ValueError('No probability maps!')
     if npmaps == 1:  # the mean is the only pmap
         assert not self.quantiles, self.quantiles
         return pmaps[0]
     if sum(len(pmap) for pmap in pmaps) == 0:  # all empty pmaps
         raise ValueError('All empty probability maps!')
     N, L, I = get_shape(pmaps)
     nstats = len(self.quantiles) + 1
     result = ProbabilityMap.build(L, nstats, sids)
     # one curve of L levels per (realization, site); missing sites stay 0
     curves = numpy.zeros((npmaps, len(sids), L), numpy.float64)
     for i, pmap in enumerate(pmaps):
         for j, sid in enumerate(sids):
             if sid in pmap:
                 curves[i, j] = pmap[sid].array[:, 0]
     mean_quantiles = compute_stats(curves, self.quantiles, self.weights)
     for i, array in enumerate(mean_quantiles):
         for j, sid in enumerate(sids):
             result[sid].array[:, i] = array[j]
     return result
Ejemplo n.º 6
0
 def compute_pmap(self, sids, pmaps):
     """
     :params sids: array of N site IDs
     :param pmaps: array of R simple ProbabilityMaps
     :returns: a ProbabilityMap with arrays of size (num_levels, num_stats)
     """
     if not len(pmaps):
         raise ValueError('No probability maps!')
     elif len(pmaps) == 1:  # the mean is the only pmap
         assert not self.quantiles, self.quantiles
         return pmaps[0]
     elif not any(len(pmap) for pmap in pmaps):  # all empty pmaps
         raise ValueError('All empty probability maps!')
     N, L, I = get_shape(pmaps)
     num_stats = len(self.quantiles) + 1
     out = ProbabilityMap.build(L, num_stats, sids)
     # collect one hazard curve per (realization, site) pair
     rlz_curves = numpy.zeros((len(pmaps), len(sids), L), numpy.float64)
     for r, pmap in enumerate(pmaps):
         for s, sid in enumerate(sids):
             if sid in pmap:
                 rlz_curves[r][s] = pmap[sid].array[:, 0]
     stat_curves = compute_stats(rlz_curves, self.quantiles, self.weights)
     for stat_idx, curves in enumerate(stat_curves):
         for s, sid in enumerate(sids):
             out[sid].array[:, stat_idx] = curves[s]
     return out
Ejemplo n.º 7
0
def classical_risk(riskinputs, crmodel, param, monitor):
    """
    Compute and return the average losses for each asset.

    :param riskinputs:
        :class:`openquake.risklib.riskinput.RiskInput` objects
    :param crmodel:
        a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance
    :param param:
        dictionary of extra parameters
    :param monitor:
        :class:`openquake.baselib.performance.Monitor` instance
    :returns:
        a dict with keys 'loss_curves' and 'stat_curves'
    """
    result = {'loss_curves': [], 'stat_curves': []}
    weights = [w['default'] for w in param['weights']]
    statnames, stats = zip(*param['stats'])
    for ri in riskinputs:
        R = ri.hazard_getter.num_rlzs
        shape = (R, len(crmodel.lti), len(ri.assets))
        # per-(rlz, loss type, asset) loss curves and average losses
        curve_matrix = numpy.zeros(shape, object)
        avg_matrix = numpy.zeros(shape)
        for out in ri.gen_outputs(crmodel, monitor):
            rlz = out.rlzi
            for li, loss_type in enumerate(crmodel.loss_types):
                curves = out[loss_type]  # shape (A, C)
                for i, asset in enumerate(ri.assets):
                    lc = curves[i]
                    curve_matrix[rlz, li, i] = lc
                    avg = scientific.average_loss(lc)
                    avg_matrix[rlz, li, i] = avg
                    result['loss_curves'].append(
                        (li, rlz, asset['ordinal'],
                         (lc['loss'], lc['poe'], avg)))

        # statistics across the realizations
        for li, loss_type in enumerate(crmodel.loss_types):
            for i, asset in enumerate(ri.assets):
                avg_stats = compute_stats(
                    avg_matrix[:, li, i], stats, weights)
                losses = curve_matrix[0, li, i]['loss']
                all_poes = numpy.array(
                    [curve_matrix[r, li, i]['poe'] for r in range(R)])
                poes_stats = compute_stats(all_poes, stats, weights)
                result['stat_curves'].append(
                    (li, asset['ordinal'], losses, poes_stats, avg_stats))
    if R == 1:  # the realization is the same as the mean
        del result['loss_curves']
    return result
Ejemplo n.º 8
0
def classical_risk(riskinputs, riskmodel, param, monitor):
    """
    Compute and return the average losses for each asset.

    :param riskinputs:
        :class:`openquake.risklib.riskinput.RiskInput` objects
    :param riskmodel:
        a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance
    :param param:
        dictionary of extra parameters
    :param monitor:
        :class:`openquake.baselib.performance.Monitor` instance
    :returns:
        a dict with keys 'loss_curves' and 'stat_curves'
    """
    result = {'loss_curves': [], 'stat_curves': []}
    weights = [w['default'] for w in param['weights']]
    statnames, stats = zip(*param['stats'])
    loss_types = riskmodel.loss_types
    for ri in riskinputs:
        R = ri.hazard_getter.num_rlzs
        dims = (R, len(riskmodel.lti), len(ri.assets))
        # per-(rlz, loss type, asset) loss curves and average losses
        all_curves = numpy.zeros(dims, object)
        all_avgs = numpy.zeros(dims)
        for out in riskmodel.gen_outputs(ri, monitor):
            rlz = out.rlzi
            for lt_idx, loss_type in enumerate(loss_types):
                curves = out[loss_type]  # shape (A, C)
                for a_idx, asset in enumerate(ri.assets):
                    curve = curves[a_idx]
                    all_curves[rlz, lt_idx, a_idx] = curve
                    mean_loss = scientific.average_loss(curve)
                    all_avgs[rlz, lt_idx, a_idx] = mean_loss
                    result['loss_curves'].append(
                        (lt_idx, rlz, asset['ordinal'],
                         (curve['loss'], curve['poe'], mean_loss)))

        # statistics across the realizations
        for lt_idx, loss_type in enumerate(loss_types):
            for a_idx, asset in enumerate(ri.assets):
                avg_stats = compute_stats(
                    all_avgs[:, lt_idx, a_idx], stats, weights)
                losses = all_curves[0, lt_idx, a_idx]['loss']
                all_poes = numpy.array(
                    [all_curves[rlz, lt_idx, a_idx]['poe']
                     for rlz in range(R)])
                poes_stats = compute_stats(all_poes, stats, weights)
                result['stat_curves'].append(
                    (lt_idx, asset['ordinal'], losses,
                     poes_stats, avg_stats))
    if R == 1:  # the realization is the same as the mean
        del result['loss_curves']
    return result
Ejemplo n.º 9
0
    def build_agg_curve(self):
        """
        Build a single loss curve per realization. It is NOT obtained
        by aggregating the loss curves; instead, it is obtained without
        generating the loss curves, directly from the the aggregate losses.
        """
        oq = self.oqparam
        # curve resolution per loss type
        cr = {
            cb.loss_type: cb.curve_resolution
            for cb in self.riskmodel.curve_builder
        }
        loss_curve_dt, _ = scientific.build_loss_dtypes(
            cr, oq.conditional_loss_poes)
        lts = self.riskmodel.loss_types
        cb_inputs = self.cb_inputs('agg_loss_table')
        I = oq.insured_losses + 1  # NOTE(review): 2 when insured — confirm
        R = len(self.rlzs_assoc.realizations)
        # NB: using the Processmap since celery is hanging; the computation
        # is fast anyway and this part will likely be removed in the future
        result = parallel.Processmap.apply(
            build_agg_curve, (cb_inputs, self.monitor('')),
            concurrent_tasks=self.oqparam.concurrent_tasks).reduce()
        agg_curve = numpy.zeros((I, R), loss_curve_dt)
        # result is keyed by (loss type index, realization index, i)
        for l, r, i in result:
            agg_curve[lts[l]][i, r] = result[l, r, i]
        self.datastore['agg_curve-rlzs'] = agg_curve

        if R > 1:  # save stats too
            statnames, stats = zip(*oq.risk_stats())
            weights = self.datastore['realizations']['weight']
            agg_curve_stats = numpy.zeros((I, len(stats)), agg_curve.dtype)
            for l, loss_type in enumerate(agg_curve.dtype.names):
                acs = agg_curve_stats[loss_type]
                data = agg_curve[loss_type]
                for i in range(I):
                    avg = data['avg'][i]
                    # put the R curves on a common set of loss levels
                    losses, all_poes = scientific.normalize_curves_eb([
                        (c['losses'], c['poes']) for c in data[i]
                    ])
                    acs['losses'][i] = losses
                    acs['poes'][i] = compute_stats(all_poes, stats, weights)
                    acs['avg'][i] = compute_stats(avg, stats, weights)

            self.datastore['agg_curve-stats'] = agg_curve_stats
0
    def build_agg_curve(self):
        """
        Build a single loss curve per realization. It is NOT obtained
        by aggregating the loss curves; instead, it is obtained without
        generating the loss curves, directly from the the aggregate losses.
        """
        oq = self.oqparam
        # curve resolution per loss type
        cr = {cb.loss_type: cb.curve_resolution
              for cb in self.riskmodel.curve_builders}
        loss_curve_dt, _ = scientific.build_loss_dtypes(
            cr, oq.conditional_loss_poes)
        lts = self.riskmodel.loss_types
        cb_inputs = self.cb_inputs('agg_loss_table')
        I = oq.insured_losses + 1  # NOTE(review): 2 when insured — confirm
        R = len(self.rlzs_assoc.realizations)
        result = parallel.apply(
            build_agg_curve, (cb_inputs, self.monitor('')),
            concurrent_tasks=self.oqparam.concurrent_tasks).reduce()
        agg_curve = numpy.zeros((I, R), loss_curve_dt)
        # result is keyed by (loss type index, realization index, i)
        for l, r, i in result:
            agg_curve[lts[l]][i, r] = result[l, r, i]
        self.datastore['agg_curve-rlzs'] = agg_curve

        if R > 1:  # save stats too
            weights = self.datastore['realizations']['weight']
            Q1 = len(oq.quantile_loss_curves) + 1  # mean + quantiles
            agg_curve_stats = numpy.zeros((I, Q1), agg_curve.dtype)
            for l, loss_type in enumerate(agg_curve.dtype.names):
                acs = agg_curve_stats[loss_type]
                data = agg_curve[loss_type]
                for i in range(I):
                    # put the R curves on a common set of loss levels
                    losses, all_poes = scientific.normalize_curves_eb(
                        [(c['losses'], c['poes']) for c in data[i]])
                    acs['losses'][i] = losses
                    acs['poes'][i] = compute_stats(
                        all_poes, oq.quantile_loss_curves, weights)
                    acs['avg'][i] = compute_stats(
                        data['avg'][i], oq.quantile_loss_curves, weights)

            self.datastore['agg_curve-stats'] = agg_curve_stats
Ejemplo n.º 11
0
 def build_stats(self, results, hstats):
     """
     :param results: dict key -> 6D disagg_matrix
     :param hstats: (statname, statfunc) pairs
     :returns: dict (sid, statname, poe, imt) -> disagg_matrix
     """
     weights = [rlz.weight for rlz in self.rlzs_assoc.realizations]
     num_rlzs = len(weights)
     num_trts = len(self.trts)
     # accumulate the per-realization matrices: sid, poe, imt -> array
     acc = {}
     for sid in self.sitecol.sids:
         shape = disagg.get_shape(self.bin_edges, sid)
         for poe in self.oqparam.poes_disagg or (None,):
             for imt in self.oqparam.imtls:
                 acc[sid, poe, imt] = numpy.zeros(
                     (num_rlzs, num_trts) + shape)
     for (sid, rlzi, poe, imt), matrix in results.items():
         acc[sid, poe, imt][rlzi] = matrix
     res = {}  # sid, stat, poe, imt -> disagg_matrix
     for (sid, poe, imt), array in acc.items():
         for stat, func in hstats:
             res[sid, stat, poe, imt] = compute_stats(
                 array, [func], weights)[0]
     return res
Ejemplo n.º 12
0
 def build_stats(self, results, hstats):
     """
     :param results: dict key -> 6D disagg_matrix
     :param hstats: (statname, statfunc) pairs
     :returns: dict (sid, statname, poe, imt) -> disagg_matrix
     """
     weights = [rlz.weight for rlz in self.rlzs_assoc.realizations]
     num_rlzs = len(weights)
     num_trts = len(self.trts)
     # accumulate the per-realization matrices: sid, poe, imt -> array
     by_key = {}
     for sid in self.sitecol.sids:
         shape = disagg.get_shape(self.bin_edges, sid)
         for poe in self.oqparam.poes_disagg or (None,):
             for imt in self.oqparam.imtls:
                 by_key[sid, poe, imt] = numpy.zeros(
                     (num_rlzs, num_trts) + shape)
     for (sid, rlzi, poe, imt), matrix in results.items():
         by_key[sid, poe, imt][rlzi] = matrix
     res = {}  # sid, stat, poe, imt -> disagg_matrix
     for (sid, poe, imt), array in by_key.items():
         # the realization weights are IMT-dependent here
         wei_imt = [weight[imt] for weight in weights]
         for stat, func in hstats:
             [matrix] = compute_stats(array, [func], wei_imt)
             res[sid, stat, poe, imt] = matrix
     return res