Example #1
    def __call__(self,
                 loss_type,
                 assets,
                 hazard_curves,
                 _epsilons=None,
                 _tags=None):
        """
        :param str loss_type:
            the loss type considered
        :param assets:
            assets is an iterator over N
            :class:`openquake.risklib.scientific.Asset` instances
        :param hazard_curves:
            an iterator over N arrays with the poes
        :param _epsilons:
            ignored, here only for API compatibility with other calculators
        :returns:
            a :class:`openquake.risklib.scientific.Classical.Output` instance.
        """
        curves = utils.numpy_map(self.curves[loss_type], hazard_curves)
        average_losses = utils.numpy_map(scientific.average_loss, curves)
        maps = scientific.loss_map_matrix(self.conditional_loss_poes, curves)
        fractions = scientific.loss_map_matrix(self.poes_disagg, curves)

        if self.insured_losses and loss_type != 'fatalities':
            deductibles = [a.deductible(loss_type) for a in assets]
            limits = [a.insurance_limit(loss_type) for a in assets]

            insured_curves = utils.numpy_map(scientific.insured_loss_curve,
                                             curves, deductibles, limits)
            average_insured_losses = utils.numpy_map(scientific.average_loss,
                                                     insured_curves)
        else:
            insured_curves = None
            average_insured_losses = None

        return scientific.Output(assets,
                                 loss_type,
                                 loss_curves=curves,
                                 average_losses=average_losses,
                                 insured_curves=insured_curves,
                                 average_insured_losses=average_insured_losses,
                                 loss_maps=maps,
                                 loss_fractions=fractions)
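
The examples above lean heavily on `utils.numpy_map`, whose implementation is not shown here. A minimal stand-in consistent with how it is called (one function applied across one or more parallel sequences, results stacked into a numpy array) could look like the sketch below; this is an assumption, not the original helper from openquake.risklib.utils.

import numpy


def numpy_map(func, *sequences):
    # Hypothetical stand-in for openquake.risklib.utils.numpy_map: apply
    # `func` element-wise across parallel sequences and stack the results
    # into a numpy array, one row per input element.
    return numpy.array([func(*args) for args in zip(*sequences)])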
Example #2
    def __call__(self,
                 loss_type,
                 assets,
                 hazard_curve,
                 _epsilons=None,
                 _eids=None):
        """
        :param str loss_type:
            the loss type considered
        :param assets:
            assets is an iterator over N
            :class:`openquake.risklib.scientific.Asset` instances
        :param hazard_curve:
            an array of poes
        :param _epsilons:
            ignored, here only for API compatibility with other calculators
        :returns:
            a :class:`openquake.risklib.scientific.Classical.Output` instance.
        """
        n = len(assets)
        vf = self.risk_functions[loss_type]
        imls = self.hazard_imtls[vf.imt]
        curves = [
            scientific.classical(vf, imls, hazard_curve,
                                 self.lrem_steps_per_interval)
        ] * n
        average_losses = utils.numpy_map(scientific.average_loss, curves)
        maps = scientific.loss_map_matrix(self.conditional_loss_poes, curves)
        values = get_values(loss_type, assets)

        if self.insured_losses and loss_type != 'occupants':
            deductibles = [a.deductible(loss_type) for a in assets]
            limits = [a.insurance_limit(loss_type) for a in assets]

            insured_curves = rescale(
                utils.numpy_map(scientific.insured_loss_curve, curves,
                                deductibles, limits), values)
            average_insured_losses = utils.numpy_map(scientific.average_loss,
                                                     insured_curves)
        else:
            insured_curves = None
            average_insured_losses = None

        return scientific.Output(assets,
                                 loss_type,
                                 loss_curves=rescale(numpy.array(curves),
                                                     values),
                                 average_losses=values * average_losses,
                                 insured_curves=insured_curves,
                                 average_insured_losses=average_insured_losses,
                                 loss_maps=values * maps)
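
This variant also uses a `rescale` helper to turn loss-ratio curves into absolute-loss curves. A hypothetical stand-in, assuming `curves` is an array of shape (N, 2, C) holding (losses, poes) pairs and `values` holds the N asset values:

import numpy


def rescale(curves, values):
    # Hypothetical stand-in for the `rescale` helper used above: multiply
    # the loss axis of each (losses, poes) curve by the matching asset
    # value, leaving the poes unchanged.
    return numpy.array([[losses * value, poes]
                        for (losses, poes), value in zip(curves, values)])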
Example #3
    def __call__(self, loss_type, assets, hazard_curves, _epsilons=None,
                 _tags=None):
        """
        :param str loss_type:
            the loss type considered
        :param assets:
            assets is an iterator over N
            :class:`openquake.risklib.scientific.Asset` instances
        :param hazard_curves:
            an iterator over N arrays with the poes
        :param _epsilons:
            ignored, here only for API compatibility with other calculators
        :returns:
            a :class:`openquake.risklib.scientific.Classical.Output` instance.
        """
        curves = utils.numpy_map(self.curves[loss_type], hazard_curves)
        average_losses = utils.numpy_map(scientific.average_loss, curves)
        maps = scientific.loss_map_matrix(self.conditional_loss_poes, curves)
        fractions = scientific.loss_map_matrix(self.poes_disagg, curves)

        if self.insured_losses and loss_type != 'fatalities':
            deductibles = [a.deductible(loss_type) for a in assets]
            limits = [a.insurance_limit(loss_type) for a in assets]

            insured_curves = utils.numpy_map(
                scientific.insured_loss_curve, curves, deductibles, limits)
            average_insured_losses = utils.numpy_map(
                scientific.average_loss, insured_curves)
        else:
            insured_curves = None
            average_insured_losses = None

        return scientific.Output(
            assets, loss_type, loss_curves=curves,
            average_losses=average_losses, insured_curves=insured_curves,
            average_insured_losses=average_insured_losses,
            loss_maps=maps, loss_fractions=fractions)
Example #4
    def build_agg_loss_curve_and_map(self, losses):
        """
        Build a loss curve from a set of losses; the length of the curve
        is given by the parameter loss_curve_resolution.

        :param losses: a sequence of losses
        :returns: a quartet (losses, poes, avg, loss_map)
        """
        oq = self.oqparam
        clp = oq.conditional_loss_poes
        losses_poes = scientific.event_based(
            losses, tses=oq.tses, time_span=oq.risk_investigation_time or
            oq.investigation_time, curve_resolution=oq.loss_curve_resolution)
        loss_map = scientific.loss_map_matrix(
            clp, [losses_poes]).reshape(len(clp)) if clp else None
        return (losses_poes[0], losses_poes[1],
                scientific.average_loss(losses_poes), loss_map)
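
With a single aggregate curve, `scientific.loss_map_matrix(clp, [losses_poes])` returns a matrix of shape (P, 1), and the `.reshape(len(clp))` flattens it into one loss per conditional poe. A small illustration with made-up numbers (the curve and poes below are illustrative, not taken from the calculator):

import numpy
from openquake.risklib import scientific

# An illustrative aggregate loss curve: losses from 0 to 100 with poes
# falling linearly from 1 to 0, plus two conditional poes of interest.
losses_poes = (numpy.linspace(0, 100, 11), numpy.linspace(1, 0, 11))
clp = [0.5, 0.1]
loss_map = scientific.loss_map_matrix(clp, [losses_poes]).reshape(len(clp))
print(loss_map)  # one interpolated loss per poe, approximately [50., 90.]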
Example #5
    def __call__(self, loss_type, assets, hazard_curve, _eps=None):
        """
        :param str loss_type:
            the loss type considered
        :param assets:
            assets is an iterator over N
            :class:`openquake.risklib.scientific.Asset` instances
        :param hazard_curve:
            an array of poes
        :param _eps:
            ignored, here only for API compatibility with other calculators
        :returns:
            a :class:`openquake.risklib.scientific.Classical.Output` instance.
        """
        n = len(assets)
        vf = self.risk_functions[loss_type]
        imls = self.hazard_imtls[vf.imt]
        curves = [scientific.classical(
            vf, imls, hazard_curve, self.lrem_steps_per_interval)] * n
        average_losses = utils.numpy_map(scientific.average_loss, curves)
        maps = scientific.loss_map_matrix(self.conditional_loss_poes, curves)
        values = get_values(loss_type, assets)

        if self.insured_losses and loss_type != 'occupants':
            deductibles = [a.deductible(loss_type) for a in assets]
            limits = [a.insurance_limit(loss_type) for a in assets]

            insured_curves = rescale(
                utils.numpy_map(scientific.insured_loss_curve,
                                curves, deductibles, limits), values)
            average_insured_losses = utils.numpy_map(
                scientific.average_loss, insured_curves)
        else:
            insured_curves = None
            average_insured_losses = None

        return scientific.Output(
            assets, loss_type,
            loss_curves=rescale(numpy.array(curves), values),
            average_losses=values * average_losses,
            insured_curves=insured_curves,
            average_insured_losses=average_insured_losses,
            loss_maps=values * maps)
Example #6
    def test_more_poes(self):
        numpy.testing.assert_allclose(
            [[4.5, 9], [5, 10]],
            scientific.loss_map_matrix([0.55, 0.5], self.curves))
Example #7
    def test_one_poe(self):
        numpy.testing.assert_allclose(
            [[3.5, 7]], scientific.loss_map_matrix([0.65], self.curves))
Example #8
    def test_no_poes(self):
        self.assertEqual(0, scientific.loss_map_matrix([], self.curves).size)
Example #9
    def test_more_poes(self):
        numpy.testing.assert_allclose(
            [[4.5, 9], [5, 10]],
            scientific.loss_map_matrix([0.55, 0.5], self.curves))
Example #10
    def test_one_poe(self):
        numpy.testing.assert_allclose(
            [[3.5, 7]], scientific.loss_map_matrix([0.65], self.curves))
Example #11
    def test_no_poes(self):
        self.assertEqual(0, scientific.loss_map_matrix([], self.curves).size)
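
The test methods above rely on a `self.curves` fixture defined in the test case's setUp, which is not part of these snippets. A fixture consistent with the asserted values (reconstructed from the expected results, not copied from the original setUp) would be two loss curves sharing linearly decreasing poes, with losses spanning 0-10 and 0-20:

import numpy
from openquake.risklib import scientific

# Reconstructed fixture (assumed, not the original setUp): two loss curves
# of the form (losses, poes) with the same poes but different loss ranges.
poes = numpy.linspace(1, 0, 11)
curves = [(numpy.linspace(0, 10, 11), poes),
          (numpy.linspace(0, 20, 11), poes)]

# loss_map_matrix interpolates, for each requested poe, the loss level on
# each curve; the result has one row per poe and one column per curve.
print(scientific.loss_map_matrix([0.65], curves))       # ~[[3.5, 7.0]]
print(scientific.loss_map_matrix([0.55, 0.5], curves))  # ~[[4.5, 9.0], [5.0, 10.0]]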
Example #12
    def post_execute(self, result):
        """
        Extract from the result dictionary
        rlz.ordinal -> (loss_type, tag) -> [(asset.id, loss), ...]
        several interesting outputs.
        """
        oq = self.oqparam
        # take the cached self.rlzs_assoc and write it on the datastore
        self.rlzs_assoc = self.rlzs_assoc
        rlzs = self.rlzs_assoc.realizations
        loss_types = self.riskmodel.get_loss_types()

        C = oq.loss_curve_resolution
        self.loss_curve_dt = numpy.dtype(
            [('losses', (float, C)), ('poes', (float, C)), ('avg', float)])

        if oq.conditional_loss_poes:
            lm_names = _loss_map_names(oq.conditional_loss_poes)
            self.loss_map_dt = numpy.dtype([(f, float) for f in lm_names])

        self.assets = assets = riskinput.sorted_assets(self.assets_by_site)

        self.specific_assets = specific_assets = [
            a for a in assets if a.id in self.oqparam.specific_assets]
        specific_asset_refs = set(self.oqparam.specific_assets)

        N = len(assets)

        event_loss_asset = [{} for rlz in rlzs]
        event_loss = [{} for rlz in rlzs]

        loss_curves = self.zeros(N, self.loss_curve_dt)
        ins_curves = self.zeros(N, self.loss_curve_dt)
        if oq.conditional_loss_poes:
            loss_maps = self.zeros(N, self.loss_map_dt)
        agg_loss_curve = self.zeros(1, self.loss_curve_dt)

        for i in sorted(result):
            rlz = rlzs[i]

            data_by_lt_tag = result[i]
            # (loss_type, asset_id) -> [(tag, loss, ins_loss), ...]
            elass = {(loss_type, asset.id): [] for asset in assets
                     for loss_type in loss_types}
            elagg = []  # aggregate event loss
            nonzero = total = 0
            for loss_type, tag in data_by_lt_tag:
                d = data_by_lt_tag[loss_type, tag]
                if tag == 'counts_matrix':
                    assets, counts = d.keys(), d.values()
                    indices = numpy.array([asset.idx for asset in assets])
                    asset_values = workflows.get_values(loss_type, assets)
                    poes = scientific.build_poes(
                        counts, oq.ses_per_logic_tree_path)
                    cb = scientific.CurveBuilder(
                        loss_type, numpy.linspace(0, 1, C))
                    lcurves = cb.build_loss_curves(
                        poes, asset_values, indices, N)
                    self.store('lcurves/' + loss_type, rlz, lcurves)
                    continue

                for aid, loss, ins_loss in d['data']:
                    elass[loss_type, aid].append((tag, loss, ins_loss))

                # aggregates
                elagg.append((loss_type, tag, d['loss'], d['ins_loss']))
                nonzero += d['nonzero']
                total += d['total']
            logging.info('rlz=%d: %d/%d nonzero losses', i, nonzero, total)

            if elass:
                data_by_lt = collections.defaultdict(list)
                for (loss_type, asset_id), rows in elass.items():
                    for tag, loss, ins_loss in rows:
                        data_by_lt[loss_type].append(
                            (tag, asset_id, loss, ins_loss))
                for loss_type, data in data_by_lt.items():
                    event_loss_asset[i][loss_type] = sorted(
                        # data contains rows (tag, asset, loss, ins_loss)
                        (t, a, l, i) for t, a, l, i in data
                        if a in specific_asset_refs)

                    # build the loss curves per asset
                    lc = self.build_loss_curves(elass, loss_type, 1)
                    loss_curves[loss_type] = lc

                    if oq.insured_losses:
                        # build the insured loss curves per asset
                        ic = self.build_loss_curves(elass, loss_type, 2)
                        ins_curves[loss_type] = ic

                    if oq.conditional_loss_poes:
                        # build the loss maps per asset, array of shape (N, P)
                        losses_poes = numpy.array(  # shape (N, 2, C)
                            [lc['losses'], lc['poes']]).transpose(1, 0, 2)
                        lmaps = scientific.loss_map_matrix(
                            oq.conditional_loss_poes, losses_poes)  # (P, N)
                        for lm, lmap in zip(lm_names, lmaps):
                            loss_maps[loss_type][lm] = lmap

            self.store('loss_curves', rlz, loss_curves)
            if oq.insured_losses:
                self.store('ins_curves', rlz, ins_curves)
            if oq.conditional_loss_poes:
                self.store('loss_maps', rlz, loss_maps)

            if elagg:
                for loss_type, rows in groupby(
                        elagg, operator.itemgetter(0)).items():
                    event_loss[i][loss_type] = [row[1:] for row in rows]
                    # aggregate loss curve for all tags
                    losses, poes, avg, _ = self.build_agg_loss_curve_and_map(
                        [loss for _lt, _tag, loss, _ins_loss in rows])
                    # NB: there is no aggregate insured loss curve
                    agg_loss_curve[loss_type][0] = (losses, poes, avg)
                    # NB: the aggregated loss_map is not stored
                self.store('agg_loss_curve', rlz, agg_loss_curve)

        if specific_assets:
            self.event_loss_asset = event_loss_asset
        self.event_loss = event_loss

        # store statistics (i.e. mean and quantiles) for curves and maps
        if len(self.rlzs_assoc.realizations) > 1:
            self.compute_store_stats('loss_curves')
            self.compute_store_stats('agg_loss_curve')
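
post_execute groups the aggregate event losses with `groupby(elagg, operator.itemgetter(0))`. That helper is not shown here; it is assumed to behave like openquake.baselib.general.groupby, i.e. to return an ordered mapping from key to the list of items sharing that key. A minimal sketch of that assumed behaviour:

import collections


def groupby(items, keyfunc):
    # Assumed behaviour of the `groupby` helper used in post_execute:
    # group `items` by `keyfunc`, preserving insertion order.
    out = collections.OrderedDict()
    for item in items:
        out.setdefault(keyfunc(item), []).append(item)
    return out

# e.g. groupby(elagg, operator.itemgetter(0)) would map each loss_type to
# its rows (loss_type, tag, loss, ins_loss), matching how the result is
# consumed in the loop above.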
Example #13
    def __call__(self, loss_type, assets, ground_motion_values, epsilons,
                 event_ids):
        """
        :param str loss_type: the loss type considered

        :param assets:
           assets is an iterator over
           :class:`openquake.risklib.scientific.Asset` instances

        :param ground_motion_values:
           a numpy array with ground_motion_values of shape N x R

        :param epsilons:
           a numpy array with stochastic values of shape N x R

        :param event_ids:
           a numpy array of R event IDs (integers)

        :returns:
            a
            :class:`openquake.risklib.scientific.ProbabilisticEventBased.Output`
            instance.
        """
        loss_matrix = self.risk_functions[loss_type].apply_to(
            ground_motion_values, epsilons)
        values = get_values(loss_type, assets)
        ela = loss_matrix.T * values  # matrix with T x N elements
        if self.insured_losses and loss_type != 'fatalities':
            deductibles = [a.deductible(loss_type) for a in assets]
            limits = [a.insurance_limit(loss_type) for a in assets]
            ila = utils.numpy_map(
                scientific.insured_losses, loss_matrix, deductibles, limits)
        else:  # build a zero matrix of size T x N
            ila = numpy.zeros((len(ground_motion_values[0]), len(assets)))
        if isinstance(assets[0].id, str):
            # in oq-lite return early, with just the losses per asset
            cb = self.riskmodel.curve_builders[self.riskmodel.lti[loss_type]]
            return scientific.Output(
                assets, loss_type,
                event_loss_per_asset=ela,
                insured_loss_per_asset=ila,
                counts_matrix=cb.build_counts(loss_matrix),
                insured_counts_matrix=cb.build_counts(ila),
                tags=event_ids)

        # in the engine, compute more stuff on the workers
        curves = utils.numpy_map(self.curves, loss_matrix)
        average_losses = utils.numpy_map(scientific.average_loss, curves)
        stddev_losses = numpy.std(loss_matrix, axis=1)
        maps = scientific.loss_map_matrix(self.conditional_loss_poes, curves)
        elt = self.event_loss(ela, event_ids)

        if self.insured_losses and loss_type != 'fatalities':
            insured_curves = utils.numpy_map(self.curves, ila)
            average_insured_losses = utils.numpy_map(
                scientific.average_loss, insured_curves)
            stddev_insured_losses = numpy.std(ila, axis=1)
        else:
            insured_curves = None
            average_insured_losses = None
            stddev_insured_losses = None
        return scientific.Output(
            assets, loss_type,
            loss_matrix=loss_matrix if self.return_loss_matrix else None,
            loss_curves=curves, average_losses=average_losses,
            stddev_losses=stddev_losses, insured_curves=insured_curves,
            average_insured_losses=average_insured_losses,
            stddev_insured_losses=stddev_insured_losses,
            loss_maps=maps, event_loss_table=elt)
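
The `ela = loss_matrix.T * values` line relies on numpy broadcasting: `apply_to` presumably returns one row of loss ratios per asset, so the transpose gives one row per event and multiplying by the per-asset values yields absolute event losses. A self-contained shape check with illustrative numbers (none of these values come from the original code):

import numpy

N, T = 3, 5                                       # assets, events (illustrative)
loss_matrix = numpy.random.uniform(0, 1, (N, T))  # loss ratios, one row per asset
values = numpy.array([100., 250., 400.])          # exposed value per asset

ela = loss_matrix.T * values                      # event losses, shape (T, N)
assert ela.shape == (T, N)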
Example #14
    def __call__(self, loss_type, assets, ground_motion_values, epsilons,
                 event_ids):
        """
        :param str loss_type: the loss type considered

        :param assets:
           assets is an iterator over
           :class:`openquake.risklib.scientific.Asset` instances

        :param ground_motion_values:
           a numpy array with ground_motion_values of shape N x R

        :param epsilons:
           a numpy array with stochastic values of shape N x R

        :param event_ids:
           a numpy array of R event IDs (integers)

        :returns:
            a
            :class:`openquake.risklib.scientific.ProbabilisticEventBased.Output`
            instance.
        """
        loss_matrix = self.risk_functions[loss_type].apply_to(
            ground_motion_values, epsilons)
        values = get_values(loss_type, assets)
        ela = loss_matrix.T * values  # matrix with T x N elements
        if self.insured_losses and loss_type != 'fatalities':
            deductibles = [a.deductible(loss_type) for a in assets]
            limits = [a.insurance_limit(loss_type) for a in assets]
            ila = utils.numpy_map(scientific.insured_losses, loss_matrix,
                                  deductibles, limits)
        else:  # build a zero matrix of size T x N
            ila = numpy.zeros((len(ground_motion_values[0]), len(assets)))
        if isinstance(assets[0].id, str):
            # in oq-lite return early, with just the losses per asset
            cb = self.riskmodel.curve_builders[self.riskmodel.lti[loss_type]]
            return scientific.Output(
                assets,
                loss_type,
                event_loss_per_asset=ela,
                insured_loss_per_asset=ila,
                counts_matrix=cb.build_counts(loss_matrix),
                insured_counts_matrix=cb.build_counts(ila),
                tags=event_ids)

        # in the engine, compute more stuff on the workers
        curves = utils.numpy_map(self.curves, loss_matrix)
        average_losses = utils.numpy_map(scientific.average_loss, curves)
        stddev_losses = numpy.std(loss_matrix, axis=1)
        maps = scientific.loss_map_matrix(self.conditional_loss_poes, curves)
        elt = self.event_loss(ela, event_ids)

        if self.insured_losses and loss_type != 'fatalities':
            insured_curves = utils.numpy_map(self.curves, ila)
            average_insured_losses = utils.numpy_map(scientific.average_loss,
                                                     insured_curves)
            stddev_insured_losses = numpy.std(ila, axis=1)
        else:
            insured_curves = None
            average_insured_losses = None
            stddev_insured_losses = None
        return scientific.Output(
            assets,
            loss_type,
            loss_matrix=loss_matrix if self.return_loss_matrix else None,
            loss_curves=curves,
            average_losses=average_losses,
            stddev_losses=stddev_losses,
            insured_curves=insured_curves,
            average_insured_losses=average_insured_losses,
            stddev_insured_losses=stddev_insured_losses,
            loss_maps=maps,
            event_loss_table=elt)