Beispiel #1
0
    def _compute_loss_ratio_curve(self, asset, gmf, loss_ratios):
        """
        Generates a loss ratio curve for the given asset.

        Returns ``None`` (after logging an error) when no vulnerability
        function is known for the asset's taxonomy.
        """
        vuln_function = self.vulnerability_curves.get(asset.taxonomy, None)

        if not vuln_function:
            LOGGER.error("Unknown vulnerability function %s for asset %s" %
                         (asset.taxonomy, asset.asset_ref))
            return None

        # Build the supporting inputs and delegate the actual computation.
        return general.compute_loss_ratio_curve(
            vuln_function,
            gmf,
            general.EpsilonProvider(self.job_ctxt.params),
            asset,
            self.job_ctxt.oq_job_profile.loss_histogram_bins,
            loss_ratios=loss_ratios)
Beispiel #2
0
 def setUp(self):
     """Parse the schema example exposure file and persist it to the DB.

     Stores the resulting exposure model on ``self.model`` and prepares
     an epsilon provider with a fixed random seed for the tests.
     """
     path = os.path.join(helpers.SCHEMA_EXAMPLES_DIR, TEST_FILE)
     inputs = [("exposure", path)]
     self.job = self.setup_classic_job(inputs=inputs)
     # Renamed from `input` to avoid shadowing the builtin.
     [exposure_input] = models.inputs4job(self.job.id,
                                          input_type="exposure",
                                          path=path)
     writer = ExposureDBWriter(exposure_input)
     exposure_parser = exposure.ExposureModelFile(path)
     writer.serialize(exposure_parser)
     self.model = writer.model
     # Fixed seed keeps epsilon sampling deterministic across test runs.
     self.epsilon_provider = general.EpsilonProvider(
         dict(EPSILON_RANDOM_SEED=37))
Beispiel #3
0
    def _compute_bcr(self, block_id):
        """
        Calculate and store in the kvs the benefit-cost ratio data for block.

        A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
        See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
        data structure spec.

        :param block_id: id of the block of sites to process.
        :returns: the losses accumulated on the block-wide aggregate curve
            (``aggregate_curve.losses``).
        """

        # aggregate the losses for this block
        aggregate_curve = general.AggregateLossCurve()
        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)
        # One provider is shared by every per-asset computation below.
        epsilon_provider = general.EpsilonProvider(self.job_ctxt.params)

        def get_loss_curve(site, vuln_function, asset):
            "Compute loss curve basing on GMF data"
            # Ground motion values at the hazard site corresponding to
            # this risk site.
            gmvs = self._get_gmvs_at(
                general.hazard_input_site(self.job_ctxt, site))

            gmf_slice = {
                "IMLs": gmvs,
                "TSES": self._tses(),
                "TimeSpan": self._time_span()
            }

            loss_ratios = general.compute_loss_ratios(vuln_function, gmf_slice,
                                                      epsilon_provider, asset)
            loss_ratio_curve = general.compute_loss_ratio_curve(
                vuln_function,
                gmf_slice,
                epsilon_provider,
                asset,
                self.job_ctxt.oq_job_profile.loss_histogram_bins,
                loss_ratios=loss_ratios)

            # Side effect: fold this asset's absolute losses into the
            # block-wide aggregate curve.
            aggregate_curve.append(loss_ratios * asset.value)

            # Scale the ratio curve by the asset value to obtain losses.
            return loss_ratio_curve.rescale_abscissae(asset.value)

        result = general.compute_bcr_for_block(
            self.job_ctxt, block.sites, get_loss_curve,
            float(self.job_ctxt.params['INTEREST_RATE']),
            float(self.job_ctxt.params['ASSET_LIFE_EXPECTANCY']))

        bcr_block_key = kvs.tokens.bcr_block_key(self.job_ctxt.job_id,
                                                 block_id)

        # Persist the per-block BCR results so they can be collected later.
        kvs.set_value_json_encoded(bcr_block_key, result)
        LOGGER.debug('bcr result for block %s: %r', block_id, result)

        return aggregate_curve.losses
Beispiel #4
0
    def _compute_bcr(self, block_id):
        """
        Calculate and store in the kvs the benefit-cost ratio data for block.

        A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
        See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
        data structure spec.

        :param block_id: id of the block of sites to process.
        :returns: the losses accumulated on the block-wide aggregate curve
            (``aggregate_curve.losses``).
        """
        # Make sure the GMF data for this block is available in the kvs.
        self.slice_gmfs(block_id)

        # aggregate the losses for this block
        aggregate_curve = general.AggregateLossCurve()

        points = list(
            general.Block.from_kvs(self.job_ctxt.job_id,
                                   block_id).grid(self.job_ctxt.region))
        # Pre-fetch the GMF slice for every grid point, keyed by site.
        gmf_slices = dict(
            (point.site,
             kvs.get_value_json_decoded(
                 kvs.tokens.gmf_set_key(self.job_ctxt.job_id, point.column,
                                        point.row))) for point in points)
        # One provider is shared by every per-asset computation below.
        epsilon_provider = general.EpsilonProvider(self.job_ctxt.params)

        def get_loss_curve(point, vuln_function, asset):
            "Compute loss curve basing on GMF data"
            gmf_slice = gmf_slices[point.site]
            loss_ratios = general.compute_loss_ratios(vuln_function, gmf_slice,
                                                      epsilon_provider, asset)
            loss_ratio_curve = general.compute_loss_ratio_curve(
                vuln_function,
                gmf_slice,
                epsilon_provider,
                asset,
                self.job_ctxt.oq_job_profile.loss_histogram_bins,
                loss_ratios=loss_ratios)

            # Side effect: fold this asset's absolute losses into the
            # block-wide aggregate curve.
            aggregate_curve.append(loss_ratios * asset.value)

            # Scale the ratio curve by the asset value to obtain losses.
            return loss_ratio_curve.rescale_abscissae(asset.value)

        result = general.compute_bcr_for_block(
            self.job_ctxt.job_id, points, get_loss_curve,
            float(self.job_ctxt.params['INTEREST_RATE']),
            float(self.job_ctxt.params['ASSET_LIFE_EXPECTANCY']))

        bcr_block_key = kvs.tokens.bcr_block_key(self.job_ctxt.job_id,
                                                 block_id)
        # Persist the per-block BCR results so they can be collected later.
        kvs.set_value_json_encoded(bcr_block_key, result)
        LOGGER.debug('bcr result for block %s: %r', block_id, result)

        return aggregate_curve.losses
Beispiel #5
0
    def compute_loss_ratios(self, asset, gmf_slice):
        """For a given asset and ground motion field, computes
        the loss ratios used to obtain the related loss ratio curve
        and aggregate loss curve.

        :returns: the loss ratios, or ``None`` (after logging an error)
            when no vulnerability function is defined for the asset's
            taxonomy.
        """
        vuln_function = self.vuln_curves.get(asset.taxonomy, None)

        if not vuln_function:
            LOGGER.error("Unknown vulnerability function %s for asset %s" %
                         (asset.taxonomy, asset.asset_ref))
            return None

        # Build the epsilon provider only once we know the asset can
        # actually be processed (avoids wasted work on the error path).
        epsilon_provider = general.EpsilonProvider(self.job_ctxt.params)

        return general.compute_loss_ratios(vuln_function, gmf_slice,
                                           epsilon_provider, asset)
Beispiel #6
0
    def _compute_loss_ratios(self, asset, gmf):
        """
        Compute the loss ratios for the given asset and associated ground
        motion field.

        :returns: the loss ratios, or ``None`` (after logging an error)
            when no vulnerability function is defined for the asset's
            taxonomy.
        """
        vulnerability_function = self.vulnerability_curves.get(
            asset.taxonomy, None)

        if not vulnerability_function:
            LOGGER.error("Unknown vulnerability function %s for asset %s" %
                         (asset.taxonomy, asset.asset_ref))
            return None

        # Build the epsilon provider only once we know the asset can
        # actually be processed (avoids wasted work on the error path).
        epsilon_provider = general.EpsilonProvider(self.job_ctxt.params)

        return general.compute_loss_ratios(vulnerability_function, gmf,
                                           epsilon_provider, asset)
Beispiel #7
0
    def compute_loss_ratio_curve(self, col, row, asset, gmf_slice,
                                 loss_ratios):
        """Compute the loss ratio curve for a single asset.

        :param asset: the asset used to compute loss
        :type asset: an :py:class:`openquake.db.model.ExposureData` instance
        :returns: the loss ratio curve, or ``None`` when no vulnerability
            function is known for the asset's taxonomy or the curve is
            entirely zero.
        """
        job_ctxt = self.job_ctxt

        vuln_function = self.vuln_curves.get(asset.taxonomy, None)

        if not vuln_function:
            LOGGER.error("Unknown vulnerability function %s for asset %s" %
                         (asset.taxonomy, asset.asset_ref))
            return None

        epsilon_provider = general.EpsilonProvider(job_ctxt.params)

        loss_histogram_bins = job_ctxt.oq_job_profile.loss_histogram_bins
        loss_ratio_curve = general.compute_loss_ratio_curve(
            vuln_function,
            gmf_slice,
            epsilon_provider,
            asset,
            loss_histogram_bins,
            loss_ratios=loss_ratios)

        # NOTE (jmc): Early exit if the loss ratio is all zeros.
        # `.all()` replaces the unidiomatic `not False in (... == 0.0)`
        # membership test on the numpy boolean array.
        if (loss_ratio_curve.ordinates == 0.0).all():
            return None

        key = kvs.tokens.loss_ratio_key(self.job_ctxt.job_id, row, col,
                                        asset.asset_ref)

        # Persist the curve so the serializers can pick it up later.
        kvs.get_client().set(key, loss_ratio_curve.to_json())

        LOGGER.debug("Loss ratio curve is %s, write to key %s" %
                     (loss_ratio_curve, key))

        return loss_ratio_curve
Beispiel #8
0
    def execute(self):
        """Entry point for triggering the computation."""
        LOGGER.debug("Executing scenario risk computation.")
        LOGGER.debug("This will calculate mean and standard deviation loss"
                     "values for the region defined in the job config.")

        tasks = []

        vuln_model = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        epsilon_provider = general.EpsilonProvider(self.job_ctxt.params)

        sum_per_gmf = SumPerGroundMotionField(vuln_model, epsilon_provider)

        region_loss_map_data = {}

        for block_id in self.job_ctxt.blocks_keys:
            LOGGER.debug("Dispatching task for block %s of %s" %
                         (block_id, len(self.job_ctxt.blocks_keys)))
            a_task = general.compute_risk.delay(self.job_ctxt.job_id,
                                                block_id,
                                                vuln_model=vuln_model)
            tasks.append(a_task)

        for task in tasks:
            task.wait()
            if not task.successful():
                raise Exception(task.result)

            block_loss, block_loss_map_data = task.result

            # do some basic validation on our results
            assert block_loss is not None, "Expected a result != None"
            assert isinstance(block_loss, numpy.ndarray), \
                "Expected a numpy array"

            # our result should be a 1-dimensional numpy.array of loss values
            sum_per_gmf.sum_losses(block_loss)

            collect_region_data(block_loss_map_data, region_loss_map_data)

        loss_map_data = [(site, data)
                         for site, data in region_loss_map_data.iteritems()]

        # serialize the loss map data to XML
        loss_map_path = os.path.join(self.job_ctxt['BASE_PATH'],
                                     self.job_ctxt['OUTPUT_DIR'],
                                     'loss-map-%s.xml' % self.job_ctxt.job_id)
        loss_map_writer = risk_output.create_loss_map_writer(
            self.job_ctxt.job_id, self.job_ctxt.serialize_results_to,
            loss_map_path, True)

        if loss_map_writer:
            LOGGER.debug("Starting serialization of the loss map...")

            # Add a metadata dict in the first list position
            # Note: the metadata is still incomplete (see bug 809410)
            loss_map_metadata = {'scenario': True}
            loss_map_data.insert(0, loss_map_metadata)
            loss_map_writer.serialize(loss_map_data)

        # For now, just print these values.
        # These are not debug statements; please don't remove them!
        print "Mean region loss value: %s" % sum_per_gmf.mean
        print "Standard deviation region loss value: %s" % sum_per_gmf.stddev
Beispiel #9
0
    def compute_risk(self, block_id, **kwargs):
        """
        This method will perform two distinct (but similar) computations and
        return a result for each computation. The computations are as follows:

        First:

        For a given block of sites, compute loss values for all assets in the
        block. This computation will yield a single loss value per realization
        for the region block.

        Second:

        For each asset in the given block of sites, we need compute loss
        (where loss = loss_ratio * asset_value) for each realization. This
        gives 1 loss value _per_ asset _per_ realization. We then need to take
        the mean & standard deviation.

        Other info:

        The GMF data for each realization is stored in the KVS by the preceding
        scenario hazard job.

        :param block_id: id of the region block data we need to pull from the
            KVS
        :type block_id: str
        :keyword vuln_model:
            dict of :py:class:`openquake.shapes.VulnerabilityFunction` objects,
            keyed by the vulnerability function name as a string
        :keyword epsilon_provider:
            :py:class:`openquake.risk.job.EpsilonProvider` object

        :returns: 2-tuple of the following data:
            * 1-dimensional :py:class:`numpy.ndarray` of loss values for this
                region block (again, 1 value per realization)

            * list of 2-tuples containing site, loss, and asset
                information.

                The first element of each 2-tuple shall be a
                :py:class:`openquake.shapes.Site` object, which represents the
                geographical location of the asset loss.

                The second element shall be a list of
                2-tuples of dicts representing the loss and asset data (in that
                order).

                Example::

                    [(<Site(-117.0, 38.0)>, [
                        ({'mean_loss': 200.0, 'stddev_loss': 100},
                            {'assetID': 'a171'}),
                        ({'mean_loss': 200.0, 'stddev_loss': 100},
                            {'assetID': 'a187'})
                    ]),
                     (<Site(-118.0, 39.0)>, [
                        ({'mean_loss': 50, 'stddev_loss': 50.0},
                            {'assetID': 'a192'})
                    ])]
        """

        vuln_model = kwargs['vuln_model']
        # NOTE(review): despite the docstring, the epsilon provider is
        # built from the job params rather than taken from kwargs.
        epsilon_provider = general.EpsilonProvider(self.job_ctxt.params)
        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)

        loss_data = {}

        # used to sum the losses for the whole block
        sum_per_gmf = SumPerGroundMotionField(vuln_model, epsilon_provider)

        for site in block.sites:
            point = self.job_ctxt.region.grid.point_at(site)

            # the scientific functions used below
            # require the gmvs to be wrapped in a dict with a single key, IMLs
            gmvs = {'IMLs': general.load_gmvs_at(self.job_ctxt.job_id, point)}

            assets = general.BaseRiskCalculator.assets_at(
                self.job_ctxt.job_id, site)

            for asset in assets:
                vuln_function = vuln_model[asset.taxonomy]

                asset_mean_loss = compute_mean_loss(vuln_function, gmvs,
                                                    epsilon_provider, asset)

                asset_stddev_loss = compute_stddev_loss(
                    vuln_function, gmvs, epsilon_provider, asset)

                asset_site = shapes.Site(asset.site.x, asset.site.y)

                # Per-asset loss statistics paired with the asset id.
                loss = ({
                    'mean_loss': asset_mean_loss,
                    'stddev_loss': asset_stddev_loss
                }, {
                    'assetID': asset.asset_ref
                })

                # Fold this asset's losses into the block-wide totals.
                sum_per_gmf.add(gmvs, asset)
                collect_block_data(loss_data, asset_site, loss)

        return sum_per_gmf.losses, loss_data
Beispiel #10
0
    def compute_risk(self, block_id, **kwargs):
        """
        This method will perform two distinct (but similar) computations and
        return a result for each computation. The computations are as follows:

        First:

        For a given block of sites, compute loss values for all assets in the
        block. This computation will yield a single loss value per realization
        for the region block.

        Second:

        For each asset in the given block of sites, we need compute loss
        (where loss = loss_ratio * asset_value) for each realization. This
        gives 1 loss value _per_ asset _per_ realization. We then need to take
        the mean & standard deviation.

        Other info:

        The GMF data for each realization is stored in the KVS by the preceding
        scenario hazard job.

        :param block_id: id of the region block data we need to pull from the
            KVS
        :type block_id: str
        :keyword vuln_model:
            dict of :py:class:`openquake.shapes.VulnerabilityFunction` objects,
            keyed by the vulnerability function name as a string
        :keyword epsilon_provider:
            :py:class:`openquake.risk.job.EpsilonProvider` object

        :returns: 2-tuple of the following data:
            * 1-dimensional :py:class:`numpy.ndarray` of loss values for this
                region block (again, 1 value per realization)

            * list of 2-tuples containing site, loss, and asset
                information.

                The first element of each 2-tuple shall be a
                :py:class:`openquake.shapes.Site` object, which represents the
                geographical location of the asset loss.

                The second element shall be a list of
                2-tuples of dicts representing the loss and asset data (in that
                order).

                Example::

                    [(<Site(-117.0, 38.0)>, [
                        ({'mean_loss': 200.0, 'stddev_loss': 100},
                            {'assetID': 'a171'}),
                        ({'mean_loss': 200.0, 'stddev_loss': 100},
                            {'assetID': 'a187'})
                    ]),
                     (<Site(-118.0, 39.0)>, [
                        ({'mean_loss': 50, 'stddev_loss': 50.0},
                            {'assetID': 'a192'})
                    ])]
        """

        vuln_model = kwargs["vuln_model"]
        # When True, losses are additionally run through the insurance
        # model before statistics are taken.
        insured_losses = kwargs["insured_losses"]
        # NOTE(review): despite the docstring, the epsilon provider is
        # built from the job params rather than taken from kwargs.
        epsilon_provider = general.EpsilonProvider(self.job_ctxt.params)
        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)

        block_losses = []
        loss_map_data = {}

        for site in block.sites:
            # The scientific functions require the gmvs wrapped in a dict
            # with a single key, "IMLs".
            gmvs = {
                "IMLs":
                general.load_gmvs_at(
                    self.job_ctxt.job_id,
                    general.hazard_input_site(self.job_ctxt, site))
            }

            assets = general.BaseRiskCalculator.assets_at(
                self.job_ctxt.job_id, site)

            for asset in assets:
                vuln_function = vuln_model[asset.taxonomy]

                loss_ratios = general.compute_loss_ratios(
                    vuln_function, gmvs, epsilon_provider, asset)
                # Absolute losses: one value per realization.
                losses = loss_ratios * asset.value

                if insured_losses:
                    losses = general.compute_insured_losses(asset, losses)

                asset_site = shapes.Site(asset.site.x, asset.site.y)

                # Per-asset loss statistics paired with the asset id.
                # ddof=1 gives the sample (unbiased) standard deviation.
                loss = ({
                    "mean_loss": numpy.mean(losses),
                    "stddev_loss": numpy.std(losses, ddof=1)
                }, {
                    "assetID": asset.asset_ref
                })

                block_losses.append(losses)
                collect_block_data(loss_map_data, asset_site, loss)

        # Element-wise sum across assets: one total loss per realization.
        sum_block_losses = reduce(lambda x, y: x + y, block_losses)
        return sum_block_losses, loss_map_data