Beispiel #1
0
    def test_compute_insured_losses(self):
        """Check compute_insured_losses against a precomputed loss vector
        for a fixed deductible and insurance limit."""
        self.asset.deductible = 150
        self.asset.ins_limit = 300

        expected_losses = numpy.array(
            [0, 300, 180.02423357, 171.02684563, 250.77079384,
             0, 0, 288.28653452, 300, 300])

        actual_losses = compute_insured_losses(self.asset, self.losses)
        self.assertTrue(numpy.allclose(expected_losses, actual_losses))
Beispiel #2
0
    def test_compute_insured_losses(self):
        """compute_insured_losses must reproduce the reference values for a
        deductible of 150 and an insurance limit of 300."""
        self.asset.deductible = 150
        self.asset.ins_limit = 300

        reference = numpy.array([0, 300, 180.02423357, 171.02684563,
                                 250.77079384, 0, 0, 288.28653452, 300, 300])

        computed = compute_insured_losses(self.asset, self.losses)
        self.assertTrue(numpy.allclose(reference, computed))
Beispiel #3
0
    def _compute_loss(self, block_id):
        """Compute risk for a block of sites, that means:

        * loss ratio curves
        * loss curves
        * conditional losses
        * (partial) aggregate loss curve
        """

        # The vulnerability model is shared by every asset in the block.
        self.vulnerability_curves = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)

        # Collects per-asset losses so a (partial) aggregate curve can be
        # produced for the whole block.
        aggregate_curve = general.AggregateLossCurve()

        for site in block.sites:
            grid_point = self.job_ctxt.region.grid.point_at(site)
            ground_motion_field = self._load_ground_motion_field(site)

            for asset in general.BaseRiskCalculator.assets_at(
                    self.job_ctxt.job_id, site):

                # Loss ratios drive both the per-asset curve and the
                # block-level aggregation.
                ratios = self._compute_loss_ratios(asset, ground_motion_field)

                ratio_curve = self._compute_loss_ratio_curve(
                    asset, ground_motion_field, ratios)

                self._loss_ratio_curve_on_kvs(
                    grid_point.column, grid_point.row, ratio_curve, asset)

                losses = ratios * asset.value
                aggregate_curve.append(losses)

                # Everything below needs a usable loss ratio curve.
                if not ratio_curve:
                    continue

                loss_curve = self._compute_loss_curve(ratio_curve, asset)
                self._loss_curve_on_kvs(
                    grid_point.column, grid_point.row, loss_curve, asset)

                for poe in general.conditional_loss_poes(
                        self.job_ctxt.params):
                    general.compute_conditional_loss(
                        self.job_ctxt.job_id, grid_point.column,
                        grid_point.row, loss_curve, asset, poe)

                if self.job_ctxt.params.get("INSURED_LOSSES"):
                    insured = general.compute_insured_losses(asset, losses)

                    insured_ratio_curve = (
                        self._compute_insured_loss_ratio_curve(
                            insured, asset, ground_motion_field))

                    self._insured_loss_ratio_curve_on_kvs(
                        grid_point.column, grid_point.row,
                        insured_ratio_curve, asset)

                    insured_curve = self._compute_loss_curve(
                        insured_ratio_curve, asset)

                    self._insured_loss_curve_on_kvs(
                        grid_point.column, grid_point.row,
                        insured_curve, asset)

        return aggregate_curve.losses
Beispiel #4
0
    def compute_risk(self, block_id, **kwargs):
        """
        This method will perform two distinct (but similar) computations and
        return a result for each computation. The computations are as follows:

        First:

        For a given block of sites, compute loss values for all assets in the
        block. This computation will yield a single loss value per realization
        for the region block.

        Second:

        For each asset in the given block of sites, we need to compute loss
        (where loss = loss_ratio * asset_value) for each realization. This
        gives 1 loss value _per_ asset _per_ realization. We then need to take
        the mean & standard deviation.

        Other info:

        The GMF data for each realization is stored in the KVS by the preceding
        scenario hazard job.

        :param block_id: id of the region block data we need to pull from the
            KVS
        :type block_id: str
        :keyword vuln_model:
            dict of :py:class:`openquake.shapes.VulnerabilityFunction` objects,
            keyed by the vulnerability function name as a string
        :keyword insured_losses:
            if true, the computed losses are replaced by their insured
            counterparts via ``general.compute_insured_losses`` before being
            aggregated

        :returns: 2-tuple of the following data:
            * 1-dimensional :py:class:`numpy.ndarray` of loss values for this
                region block (again, 1 value per realization)

            * the ``loss_map_data`` mapping accumulated by
                ``collect_block_data``, holding site, loss, and asset
                information. Each entry associates a
                :py:class:`openquake.shapes.Site` object (the geographical
                location of the asset loss) with a list of 2-tuples of dicts
                representing the loss and asset data (in that order).

                Example of the site/loss-list pairs::

                    [(<Site(-117.0, 38.0)>, [
                        ({'mean_loss': 200.0, 'stddev_loss': 100},
                            {'assetID': 'a171'}),
                        ({'mean_loss': 200.0, 'stddev_loss': 100},
                            {'assetID': 'a187'})
                    ]),
                     (<Site(-118.0, 39.0)>, [
                        ({'mean_loss': 50, 'stddev_loss': 50.0},
                            {'assetID': 'a192'})
                    ])]
        """

        vuln_model = kwargs["vuln_model"]
        insured_losses = kwargs["insured_losses"]
        epsilon_provider = general.EpsilonProvider(self.job_ctxt.params)
        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)

        block_losses = []
        loss_map_data = {}

        for site in block.sites:
            # Ground motion values written to the KVS by the preceding
            # scenario hazard job.
            gmvs = {"IMLs": general.load_gmvs_at(
                    self.job_ctxt.job_id, general.hazard_input_site(
                    self.job_ctxt, site))}

            assets = general.BaseRiskCalculator.assets_at(
                self.job_ctxt.job_id, site)

            for asset in assets:
                vuln_function = vuln_model[asset.taxonomy]

                loss_ratios = general.compute_loss_ratios(
                    vuln_function, gmvs, epsilon_provider, asset)
                losses = loss_ratios * asset.value

                if insured_losses:
                    losses = general.compute_insured_losses(asset, losses)

                asset_site = shapes.Site(asset.site.x, asset.site.y)

                # ddof=1: sample (unbiased) standard deviation across
                # realizations.
                loss = ({
                    "mean_loss": numpy.mean(losses),
                    "stddev_loss": numpy.std(losses, ddof=1)}, {
                    "assetID": asset.asset_ref
                })

                block_losses.append(losses)
                collect_block_data(loss_map_data, asset_site, loss)

        # `sum` instead of `reduce(lambda x, y: x + y, ...)`: `reduce` is
        # not a builtin in Python 3 (NameError without a functools import),
        # and `sum` also tolerates an empty block instead of raising.
        sum_block_losses = sum(block_losses)
        return sum_block_losses, loss_map_data
Beispiel #5
0
    def _compute_loss(self, block_id):
        """Compute risk for a single block of sites.

        Produces loss ratio curves, loss curves, conditional losses and
        this block's (partial) contribution to the aggregate loss curve.
        """

        self.vulnerability_curves = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)

        # Accumulates the losses of every asset in this block.
        aggregate_curve = general.AggregateLossCurve()

        for site in block.sites:
            point = self.job_ctxt.region.grid.point_at(site)
            gmf = self._load_ground_motion_field(site)
            site_assets = general.BaseRiskCalculator.assets_at(
                self.job_ctxt.job_id, site)

            for asset in site_assets:
                # Ratios are needed twice: for the per-asset curve and
                # for the aggregate losses.
                asset_ratios = self._compute_loss_ratios(asset, gmf)

                ratio_curve = self._compute_loss_ratio_curve(
                    asset, gmf, asset_ratios)

                self._loss_ratio_curve_on_kvs(
                    point.column, point.row, ratio_curve, asset)

                asset_losses = asset_ratios * asset.value
                aggregate_curve.append(asset_losses)

                if ratio_curve:
                    curve = self._compute_loss_curve(ratio_curve, asset)

                    self._loss_curve_on_kvs(
                        point.column, point.row, curve, asset)

                    poes = general.conditional_loss_poes(self.job_ctxt.params)
                    for loss_poe in poes:
                        general.compute_conditional_loss(
                            self.job_ctxt.job_id, point.column, point.row,
                            curve, asset, loss_poe)

                    if self.job_ctxt.params.get("INSURED_LOSSES"):
                        clipped_losses = general.compute_insured_losses(
                            asset, asset_losses)

                        ins_ratio_curve = (
                            self._compute_insured_loss_ratio_curve(
                                clipped_losses, asset, gmf))

                        self._insured_loss_ratio_curve_on_kvs(
                            point.column, point.row, ins_ratio_curve, asset)

                        ins_curve = self._compute_loss_curve(
                            ins_ratio_curve, asset)

                        self._insured_loss_curve_on_kvs(
                            point.column, point.row, ins_curve, asset)

        return aggregate_curve.losses
Beispiel #6
0
    def compute_risk(self, block_id, **kwargs):
        """
        This method will perform two distinct (but similar) computations and
        return a result for each computation. The computations are as follows:

        First:

        For a given block of sites, compute loss values for all assets in the
        block. This computation will yield a single loss value per realization
        for the region block.

        Second:

        For each asset in the given block of sites, we need to compute loss
        (where loss = loss_ratio * asset_value) for each realization. This
        gives 1 loss value _per_ asset _per_ realization. We then need to take
        the mean & standard deviation.

        Other info:

        The GMF data for each realization is stored in the KVS by the preceding
        scenario hazard job.

        :param block_id: id of the region block data we need to pull from the
            KVS
        :type block_id: str
        :keyword vuln_model:
            dict of :py:class:`openquake.shapes.VulnerabilityFunction` objects,
            keyed by the vulnerability function name as a string
        :keyword insured_losses:
            if true, the computed losses are replaced by their insured
            counterparts via ``general.compute_insured_losses`` before being
            aggregated

        :returns: 2-tuple of the following data:
            * 1-dimensional :py:class:`numpy.ndarray` of loss values for this
                region block (again, 1 value per realization)

            * the ``loss_map_data`` mapping accumulated by
                ``collect_block_data``, holding site, loss, and asset
                information. Each entry associates a
                :py:class:`openquake.shapes.Site` object (the geographical
                location of the asset loss) with a list of 2-tuples of dicts
                representing the loss and asset data (in that order).

                Example of the site/loss-list pairs::

                    [(<Site(-117.0, 38.0)>, [
                        ({'mean_loss': 200.0, 'stddev_loss': 100},
                            {'assetID': 'a171'}),
                        ({'mean_loss': 200.0, 'stddev_loss': 100},
                            {'assetID': 'a187'})
                    ]),
                     (<Site(-118.0, 39.0)>, [
                        ({'mean_loss': 50, 'stddev_loss': 50.0},
                            {'assetID': 'a192'})
                    ])]
        """

        vuln_model = kwargs["vuln_model"]
        insured_losses = kwargs["insured_losses"]
        epsilon_provider = general.EpsilonProvider(self.job_ctxt.params)
        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)

        block_losses = []
        loss_map_data = {}

        for site in block.sites:
            # Ground motion values written to the KVS by the preceding
            # scenario hazard job.
            gmvs = {
                "IMLs":
                general.load_gmvs_at(
                    self.job_ctxt.job_id,
                    general.hazard_input_site(self.job_ctxt, site))
            }

            assets = general.BaseRiskCalculator.assets_at(
                self.job_ctxt.job_id, site)

            for asset in assets:
                vuln_function = vuln_model[asset.taxonomy]

                loss_ratios = general.compute_loss_ratios(
                    vuln_function, gmvs, epsilon_provider, asset)
                losses = loss_ratios * asset.value

                if insured_losses:
                    losses = general.compute_insured_losses(asset, losses)

                asset_site = shapes.Site(asset.site.x, asset.site.y)

                # ddof=1: sample (unbiased) standard deviation across
                # realizations.
                loss = ({
                    "mean_loss": numpy.mean(losses),
                    "stddev_loss": numpy.std(losses, ddof=1)
                }, {
                    "assetID": asset.asset_ref
                })

                block_losses.append(losses)
                collect_block_data(loss_map_data, asset_site, loss)

        # `sum` replaces `reduce(lambda x, y: x + y, ...)`: `reduce` is not
        # a builtin in Python 3 (NameError without a functools import), and
        # `sum` also tolerates an empty block instead of raising.
        sum_block_losses = sum(block_losses)
        return sum_block_losses, loss_map_data