Пример #1
0
def compute_bcr_for_block(job_ctxt, sites, get_loss_curve, interest_rate,
                          asset_life_expectancy):
    """
    Compute and return Benefit-Cost Ratio data for a number of sites.

    :param get_loss_curve:
        Function that takes three positional arguments: point object,
        vulnerability function object and asset object and is supposed
        to return a loss curve.
    :return:
        A list of tuples::

            [((site_lat, site_lon), [
                ({'bcr': 1, 'eal_retrofitted': 2, 'eal_original': 3}, assetID),
                ({'bcr': 3, 'eal_retrofitted': 4, 'eal_original': 5}, assetID),
                ...]),
             ...]
    """
    # too many local vars (16/15) -- pylint: disable=R0914
    bcr_per_site = defaultdict(list)
    job_id = job_ctxt.job_id

    # one vulnerability model for the assets as-is, one for the
    # retrofitted configuration
    original_curves = vulnerability.load_vuln_model_from_kvs(job_id)
    retrofitted_curves = vulnerability.load_vuln_model_from_kvs(
        job_id, retrofitted=True)

    for site in sites:
        for asset in BaseRiskCalculator.assets_at(job_id, site):
            curve = get_loss_curve(
                site, original_curves[asset.taxonomy], asset)
            LOG.info('for asset %s loss_curve = %s', asset.asset_ref,
                     curve)
            eal_original = compute_mean_loss(curve)

            curve = get_loss_curve(
                site, retrofitted_curves[asset.taxonomy], asset)
            LOG.info('for asset %s loss_curve retrofitted = %s',
                     asset.asset_ref, curve)
            eal_retrofitted = compute_mean_loss(curve)

            bcr = compute_bcr(eal_original, eal_retrofitted, interest_rate,
                              asset_life_expectancy, asset.retrofitting_cost)

            LOG.info(
                'for asset %s EAL original = %f, '
                'EAL retrofitted = %f, BCR = %f', asset.asset_ref,
                eal_original, eal_retrofitted, bcr)

            bcr_per_site[(asset.site.x, asset.site.y)].append(
                ({'bcr': bcr,
                  'eal_original': eal_original,
                  'eal_retrofitted': eal_retrofitted},
                 asset.asset_ref))

    return bcr_per_site.items()
Пример #2
0
def compute_bcr_for_block(job_ctxt, sites, get_loss_curve,
                          interest_rate, asset_life_expectancy):
    """
    Compute and return Benefit-Cost Ratio data for a number of sites.

    :param get_loss_curve:
        Function that takes three positional arguments: point object,
        vulnerability function object and asset object and is supposed
        to return a loss curve.
    :return:
        A list of tuples::

            [((site_lat, site_lon), [
                ({'bcr': 1, 'eal_retrofitted': 2, 'eal_original': 3}, assetID),
                ({'bcr': 3, 'eal_retrofitted': 4, 'eal_original': 5}, assetID),
                ...]),
             ...]
    """
    # too many local vars (16/15) -- pylint: disable=R0914
    per_site = defaultdict(list)
    job_id = job_ctxt.job_id

    original_model = vulnerability.load_vuln_model_from_kvs(job_id)
    retrofitted_model = vulnerability.load_vuln_model_from_kvs(
        job_id, retrofitted=True)

    for site in sites:
        for asset in BaseRiskCalculator.assets_at(job_id, site):
            # expected annual loss with the asset as it currently is
            loss_curve = get_loss_curve(
                site, original_model[asset.taxonomy], asset)
            LOG.info('for asset %s loss_curve = %s',
                     asset.asset_ref, loss_curve)
            eal_original = compute_mean_loss(loss_curve)

            # expected annual loss after retrofitting
            loss_curve = get_loss_curve(
                site, retrofitted_model[asset.taxonomy], asset)
            LOG.info('for asset %s loss_curve retrofitted = %s',
                     asset.asset_ref, loss_curve)
            eal_retrofitted = compute_mean_loss(loss_curve)

            bcr = compute_bcr(eal_original, eal_retrofitted,
                              interest_rate, asset_life_expectancy,
                              asset.retrofitting_cost)

            LOG.info('for asset %s EAL original = %f, '
                     'EAL retrofitted = %f, BCR = %f',
                     asset.asset_ref, eal_original, eal_retrofitted, bcr)

            per_site[(asset.site.x, asset.site.y)].append(
                ({'bcr': bcr,
                  'eal_original': eal_original,
                  'eal_retrofitted': eal_retrofitted},
                 asset.asset_ref))

    return per_site.items()
Пример #3
0
def compute_bcr_for_block(job_id, points, get_loss_curve,
                          interest_rate, asset_life_expectancy):
    """
    Compute and return Benefit-Cost Ratio data for a number of points.

    :param get_loss_curve:
        Function that takes three positional arguments: point object,
        vulnerability function object and asset object and is supposed
        to return a loss curve.
    :return:
        A list of tuples::

            [((site_lat, site_lon), [
                ({'bcr': 1, 'eal_retrofitted': 2, 'eal_original': 3}, assetID),
                ({'bcr': 3, 'eal_retrofitted': 4, 'eal_original': 5}, assetID),
                ...]),
             ...]
    """
    # too many local vars (16/15) -- pylint: disable=R0914
    bcr_data = defaultdict(list)

    model = vulnerability.load_vuln_model_from_kvs(job_id)
    model_retrofitted = vulnerability.load_vuln_model_from_kvs(
        job_id, retrofitted=True)

    for point in points:
        # assets are stored in the kvs as JSON, keyed per grid cell
        cell_key = kvs.tokens.asset_key(job_id, point.row, point.column)

        for asset in kvs.get_list_json_decoded(cell_key):
            curve = get_loss_curve(point, model[asset['taxonomy']], asset)
            LOG.info('for asset %s loss_curve = %s',
                     asset['assetID'], curve)
            eal_original = compute_mean_loss(curve)

            curve = get_loss_curve(
                point, model_retrofitted[asset['taxonomy']], asset)
            LOG.info('for asset %s loss_curve retrofitted = %s',
                     asset['assetID'], curve)
            eal_retrofitted = compute_mean_loss(curve)

            bcr = compute_bcr(eal_original, eal_retrofitted,
                              interest_rate, asset_life_expectancy,
                              asset['retrofittingCost'])

            LOG.info('for asset %s EAL original = %f, '
                     'EAL retrofitted = %f, BCR = %f',
                     asset['assetID'], eal_original, eal_retrofitted, bcr)

            bcr_data[(asset['lat'], asset['lon'])].append(
                ({'bcr': bcr,
                  'eal_original': eal_original,
                  'eal_retrofitted': eal_retrofitted},
                 asset['assetID']))

    return bcr_data.items()
Пример #4
0
    def _compute_bcr(self, block_id):
        """
        Calculate and store in the kvs the benefit-cost ratio data for block.

        A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
        See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
        data structure spec.

        :param block_id: id of the block of sites to process.
        """

        # maps (site.x, site.y) -> list of (bcr-data dict, asset_ref)
        result = defaultdict(list)
        seed, correlation_type = self._get_correlation_type()
        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)
        loss_histogram_bins = self.job_ctxt.oq_job_profile.loss_histogram_bins

        # two vulnerability models are needed: one for the assets as they
        # are and one for the retrofitted configuration
        vulnerability_model_original = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        vulnerability_model_retrofitted = (
            vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id, retrofitted=True))

        assets_getter = lambda site: general.BaseRiskCalculator.assets_at(
            self.job_ctxt.job_id, site)

        def hazard_getter(site):
            # ground motion values plus the time parameters required by the
            # probabilistic event based calculator
            gmvs = self._get_gmvs_at(general.hazard_input_site(
                self.job_ctxt, site))

            return {"IMLs": gmvs, "TSES": self._tses(),
                "TimeSpan": self._time_span()}

        # BCR calculator assembled from two probabilistic event based
        # calculators (original vs. retrofitted model) plus the economic
        # parameters taken from the job configuration
        bcr = api.bcr(api.probabilistic_event_based(
            vulnerability_model_original, loss_histogram_bins, seed,
            correlation_type), api.probabilistic_event_based(
            vulnerability_model_retrofitted, loss_histogram_bins, seed,
            correlation_type), float(self.job_ctxt.params["INTEREST_RATE"]),
            float(self.job_ctxt.params["ASSET_LIFE_EXPECTANCY"]))

        for asset_output in api.compute_on_sites(
            block.sites, assets_getter, hazard_getter, bcr):

            asset = asset_output.asset

            result[(asset.site.x, asset.site.y)].append(({
                "bcr": asset_output.bcr,
                "eal_original": asset_output.eal_original,
                "eal_retrofitted": asset_output.eal_retrofitted},
                asset.asset_ref))

        bcr_block_key = kvs.tokens.bcr_block_key(
            self.job_ctxt.job_id, block_id)

        # store the accumulated block result under the block key
        result = result.items()
        kvs.set_value_json_encoded(bcr_block_key, result)
        LOGGER.debug("bcr result for block %s: %r", block_id, result)
Пример #5
0
    def _compute_bcr(self, block_id):
        """
        Calculate and store in the kvs the benefit-cost ratio data for block.

        A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
        See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
        data structure spec.
        """

        # maps (site.x, site.y) -> list of (bcr-data dict, asset_ref)
        asset_data = defaultdict(list)
        block = Block.from_kvs(self.job_ctxt.job_id, block_id)

        vuln_original = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        vuln_retrofitted = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id, retrofitted=True)

        steps = self.job_ctxt.oq_job_profile.lrem_steps_per_interval

        def assets_getter(site):
            return BaseRiskCalculator.assets_at(self.job_ctxt.job_id, site)

        def hazard_getter(site):
            return self._get_db_curve(hazard_input_site(self.job_ctxt, site))

        # BCR calculator built from two classical calculators: one for the
        # original model, one for the retrofitted model
        calculator = api.bcr(
            api.classical(vuln_original, steps=steps),
            api.classical(vuln_retrofitted, steps=steps),
            float(self.job_ctxt.params["INTEREST_RATE"]),
            float(self.job_ctxt.params["ASSET_LIFE_EXPECTANCY"]))

        outputs = api.compute_on_sites(
            block.sites, assets_getter, hazard_getter, calculator)

        for output in outputs:
            site_key = (output.asset.site.x, output.asset.site.y)
            asset_data[site_key].append(
                ({"bcr": output.bcr,
                  "eal_original": output.eal_original,
                  "eal_retrofitted": output.eal_retrofitted},
                 output.asset.asset_ref))

        payload = asset_data.items()
        block_key = kvs.tokens.bcr_block_key(self.job_ctxt.job_id, block_id)

        kvs.set_value_json_encoded(block_key, payload)
        LOGGER.debug("bcr result for block %s: %r", block_id, payload)

        return True
Пример #6
0
    def execute(self):
        """
        Entry point for triggering the computation.
        """

        LOGGER.debug("Executing scenario risk computation.")
        LOGGER.debug("This will calculate mean and standard deviation loss"
                     "values for the region defined in the job config.")

        vuln_model = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        region_loss_map_data = {}
        region_losses = []

        # fan the per-block computation out to the workers
        region_data = distribute(
            general.compute_risk, ("block_id", self.job_ctxt.blocks_keys),
            tf_args=dict(job_id=self.job_ctxt.job_id,
                         vuln_model=vuln_model,
                         insured_losses=self._insured_losses))

        # each block result carries (losses, loss map data)
        for block_data in region_data:
            region_losses.append(block_data[0])
            collect_region_data(block_data[1], region_loss_map_data)

        self._sum_region_losses = reduce(lambda x, y: x + y, region_losses)

        self._loss_map_data = [(site, data)
                               for site, data in region_loss_map_data.iteritems()]
Пример #7
0
    def _compute_loss(self, block_id):
        """
        Calculate and store in the kvs the loss data.
        """
        block = general.Block.from_kvs(self.calc_proxy.job_id, block_id)

        vuln_curves = vulnerability.load_vuln_model_from_kvs(
            self.calc_proxy.job_id)

        for point in block.grid(self.calc_proxy.region):
            hazard_curve = self._get_db_curve(point.site)

            asset_key = kvs.tokens.asset_key(
                self.calc_proxy.job_id, point.row, point.column)

            for asset in kvs.get_list_json_decoded(asset_key):
                LOGGER.debug("processing asset %s" % (asset))

                ratio_curve = self.compute_loss_ratio_curve(
                    point, asset, hazard_curve, vuln_curves)

                # no ratio curve -> nothing more to derive for this asset
                if not ratio_curve:
                    continue

                loss_curve = self.compute_loss_curve(
                    point, ratio_curve, asset)

                for loss_poe in conditional_loss_poes(
                    self.calc_proxy.params):

                    compute_conditional_loss(
                        self.calc_proxy.job_id, point.column,
                        point.row, loss_curve, asset, loss_poe)

        return True
Пример #8
0
    def _compute_loss(self, block_id):
        """
        Calculate and store in the kvs the loss data.
        """
        job_id = self.job_ctxt.job_id
        block = general.Block.from_kvs(job_id, block_id)
        vuln_curves = vulnerability.load_vuln_model_from_kvs(job_id)

        for point in block.grid(self.job_ctxt.region):
            hazard_curve = self._get_db_curve(point.site)

            for asset in self.assets_for_cell(job_id, point.site):
                LOGGER.debug("processing asset %s" % asset)

                ratio_curve = self.compute_loss_ratio_curve(
                    point, asset, hazard_curve, vuln_curves)

                # only assets with a ratio curve get a loss curve and
                # conditional losses
                if ratio_curve:
                    loss_curve = self.compute_loss_curve(
                        point, ratio_curve, asset)

                    for poe in conditional_loss_poes(self.job_ctxt.params):
                        compute_conditional_loss(
                            job_id, point.column, point.row,
                            loss_curve, asset, poe)

        return True
Пример #9
0
    def compute_risk(self, block_id, **kwargs):  # pylint: disable=W0613
        """This task computes risk for a block of sites. It requires to have
        pre-initialized in kvs:
         1) list of sites
         2) exposure portfolio (=assets)
         3) vulnerability

        :param block_id: id of the block of sites to process.
        :returns: True (results are stored in the kvs as a side effect).
        """

        block = job.Block.from_kvs(block_id)

        #pylint: disable=W0201
        self.vuln_curves = \
                vulnerability.load_vuln_model_from_kvs(self.job_id)

        for point in block.grid(self.region):
            curve_token = kvs.tokens.mean_hazard_curve_key(
                self.job_id, point.site)

            decoded_curve = kvs.get_value_json_decoded(curve_token)

            # stored hazard curve abscissae are in log space, hence exp()
            hazard_curve = Curve([(exp(float(el['x'])), el['y'])
                                  for el in decoded_curve['curve']])

            # consistency fix: this method keys the kvs by `self.job_id`
            # (see the hazard curve lookup above) but read assets with
            # `self.id`; presumably both alias the same job identifier --
            # use `self.job_id` uniformly so the keys cannot diverge.
            asset_key = kvs.tokens.asset_key(
                self.job_id, point.row, point.column)
            assets = kvs.get_client().lrange(asset_key, 0, -1)

            # json.loads avoids building a new JSONDecoder per asset
            for asset in (json.loads(x) for x in assets):
                LOGGER.debug("processing asset %s" % (asset))
                loss_ratio_curve = self.compute_loss_ratio_curve(
                    point, asset, hazard_curve)

                self.compute_loss_curve(point, loss_ratio_curve, asset)

        return True
Пример #10
0
    def compute_risk(self, block_id, **kwargs):  # pylint: disable=W0613
        """This task computes risk for a block of sites. It requires to have
        pre-initialized in kvs:
         1) list of sites
         2) exposure portfolio (=assets)
         3) vulnerability

        """

        block = general.Block.from_kvs(block_id)

        #pylint: disable=W0201
        self.vuln_curves = vulnerability.load_vuln_model_from_kvs(
            self.job_id)

        for point in block.grid(self.region):
            hazard_curve = self._get_db_curve(point.site)

            cell_key = kvs.tokens.asset_key(
                self.job_id, point.row, point.column)

            for asset in kvs.get_list_json_decoded(cell_key):
                LOGGER.debug("processing asset %s" % (asset))
                ratio_curve = self.compute_loss_ratio_curve(
                    point, asset, hazard_curve)
                self.compute_loss_curve(point, ratio_curve, asset)

        return True
Пример #11
0
    def _compute_loss(self, block_id):
        """
        Calculate and store in the kvs the loss data.
        """
        job_id = self.job_ctxt.job_id
        block = general.Block.from_kvs(job_id, block_id)
        vuln_curves = vulnerability.load_vuln_model_from_kvs(job_id)

        for site in block.sites:
            point = self.job_ctxt.region.grid.point_at(site)

            # the hazard curve comes from the corresponding hazard site
            hazard_site = general.hazard_input_site(self.job_ctxt, site)
            hazard_curve = self._get_db_curve(hazard_site)

            for asset in general.BaseRiskCalculator.assets_at(job_id, site):
                ratio_curve = self.compute_loss_ratio_curve(
                    point, asset, hazard_curve, vuln_curves)

                # without a ratio curve there is nothing else to compute
                if not ratio_curve:
                    continue

                loss_curve = self.compute_loss_curve(
                    point, ratio_curve, asset)

                for poe in conditional_loss_poes(self.job_ctxt.params):
                    compute_conditional_loss(
                        job_id, point.column, point.row,
                        loss_curve, asset, poe)

        return True
Пример #12
0
    def _compute_loss(self, block_id):
        """
        Calculate and store in the kvs the loss data.
        """
        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)

        vulnerability_model = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        for site in block.sites:
            grid_point = self.job_ctxt.region.grid.point_at(site)
            hazard_curve = self._get_db_curve(
                general.hazard_input_site(self.job_ctxt, site))

            site_assets = general.BaseRiskCalculator.assets_at(
                self.job_ctxt.job_id, site)

            for asset in site_assets:
                ratio_curve = self.compute_loss_ratio_curve(
                    grid_point, asset, hazard_curve, vulnerability_model)

                if ratio_curve:
                    loss_curve = self.compute_loss_curve(
                        grid_point, ratio_curve, asset)

                    # one conditional loss per requested PoE
                    for poe in conditional_loss_poes(self.job_ctxt.params):
                        compute_conditional_loss(
                            self.job_ctxt.job_id, grid_point.column,
                            grid_point.row, loss_curve, asset, poe)

        return True
Пример #13
0
    def _compute_loss(self, block_id):
        """Compute risk for a block of sites, that means:

        * loss ratio curves
        * loss curves
        * conditional losses
        * (partial) aggregate loss curve

        :param block_id: id of the block of sites to process.
        :returns: the losses returned by :func:`event_based.compute`
            for this block.
        """

        self.vulnerability_curves = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)
        seed, correlation_type = self._get_correlation_type()

        job_id = self.job_ctxt.job_id

        def hazard_getter(site):
            # return the grid point plus the ground motion field data
            # (IMLs and time parameters) for the given site
            point = self.job_ctxt.region.grid.point_at(site)
            gmvs = self._get_gmvs_at(general.hazard_input_site(
                    self.job_ctxt, site))
            gmf = {"IMLs": gmvs, "TSES": self._tses(),
                    "TimeSpan": self._time_span()}
            return point, gmf

        def on_asset_complete(asset, point, loss_ratio_curve,
                              loss_curve, loss_conditionals,
                              insured_curve, insured_loss_ratio_curve):
            # callback: persist each per-asset result in the kvs as soon
            # as the calculator produces it
            self._loss_ratio_curve_on_kvs(
                point.column, point.row, loss_ratio_curve, asset)

            self._loss_curve_on_kvs(
                point.column, point.row, loss_curve, asset)

            # one conditional loss value per requested PoE
            for loss_poe, loss_conditional in loss_conditionals.items():
                key = kvs.tokens.loss_key(job_id,
                                          point.row, point.column,
                                          asset.asset_ref, loss_poe)
                kvs.get_client().set(key, loss_conditional)

            # insured curves are only stored when the job asks for them
            if self.job_ctxt.params.get("INSURED_LOSSES"):
                self._insured_loss_curve_on_kvs(
                    point.column, point.row, insured_curve, asset)

                self._insured_loss_ratio_curve_on_kvs(
                    point.column, point.row, insured_loss_ratio_curve, asset)

        losses = event_based.compute(
            block.sites,
            lambda site: general.BaseRiskCalculator.assets_at(
                self.job_ctxt.job_id, site),
            self.vulnerability_curves,
            hazard_getter,
            self.job_ctxt.oq_job_profile.loss_histogram_bins,
            general.conditional_loss_poes(self.job_ctxt.params),
            self.job_ctxt.params.get("INSURED_LOSSES"),
            seed, correlation_type,
            on_asset_complete)

        return losses
Пример #14
0
    def _compute_loss(self, block_id):
        """
        Calculate and store in the kvs the loss data.
        """
        block = Block.from_kvs(self.job_ctxt.job_id, block_id)

        vuln_curves = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        lrem_steps = self.job_ctxt.oq_job_profile.lrem_steps_per_interval
        loss_poes = conditional_loss_poes(self.job_ctxt.params)

        def assets_getter(site):
            return BaseRiskCalculator.assets_at(self.job_ctxt.job_id, site)

        def hazard_getter(site):
            return (self.job_ctxt.region.grid.point_at(site),
                    self._get_db_curve(
                        hazard_input_site(self.job_ctxt, site)))

        def on_asset_complete(asset, point, loss_ratio_curve, loss_curve,
                              loss_conditionals):
            # callback: store every per-asset result in the kvs
            loss_key = kvs.tokens.loss_curve_key(
                self.job_ctxt.job_id, point.row, point.column,
                asset.asset_ref)
            kvs.get_client().set(loss_key, loss_curve.to_json())

            for poe, loss in loss_conditionals.items():
                key = kvs.tokens.loss_key(
                    self.job_ctxt.job_id, point.row, point.column,
                    asset.asset_ref, poe)
                kvs.get_client().set(key, loss)

            loss_ratio_key = kvs.tokens.loss_ratio_key(
                self.job_ctxt.job_id, point.row, point.column,
                asset.asset_ref)
            kvs.get_client().set(loss_ratio_key, loss_ratio_curve.to_json())

        classical.compute(block.sites, assets_getter, vuln_curves,
                          hazard_getter, lrem_steps, loss_poes,
                          on_asset_complete)
    def from_kvs(job_id, epsilon_provider):
        """Return an aggregate curve using the GMFs and assets
        stored in the underlying kvs system.

        :param job_id: id of the job whose GMFs/assets are read.
        :param epsilon_provider: source of epsilon values for the curve.
        :returns: the populated :class:`AggregateLossCurve`.
        """

        vuln_model = vulnerability.load_vuln_model_from_kvs(job_id)
        aggregate_curve = AggregateLossCurve(vuln_model, epsilon_provider)

        gmfs_keys = kvs.get_keys("%s*%s*" % (
                job_id, kvs.tokens.GMF_KEY_TOKEN))

        LOG.debug("Found %s stored GMFs..." % len(gmfs_keys))
        asset_counter = 0

        for gmfs_key in gmfs_keys:
            assets = _assets_keys_for_gmfs(job_id, gmfs_key)

            # the GMF set does not depend on the asset: fetch and decode it
            # once per key instead of repeating the kvs round-trip and the
            # JSON decode for every asset
            gmfs = kvs.get_value_json_decoded(gmfs_key)

            for asset in assets:
                asset_counter += 1
                # json.loads avoids building a new JSONDecoder per asset
                aggregate_curve.append(gmfs, json.loads(asset))

        LOG.debug("Found %s stored assets..." % asset_counter)
        return aggregate_curve
Пример #16
0
    def compute_risk(self, block_id, **kwargs):  # pylint: disable=W0613
        """This task computes risk for a block of sites. It requires to have
        pre-initialized in kvs:
         1) list of sites
         2) exposure portfolio (=assets)
         3) vulnerability

        :param block_id: id of the block of sites to process.
        :returns: True (results are stored in the kvs as a side effect).
        """

        block = job.Block.from_kvs(block_id)

        #pylint: disable=W0201
        self.vuln_curves = \
                vulnerability.load_vuln_model_from_kvs(self.job_id)

        for point in block.grid(self.region):
            curve_token = kvs.tokens.mean_hazard_curve_key(self.job_id,
                                point.site)

            decoded_curve = kvs.get_value_json_decoded(curve_token)

            # stored hazard curve abscissae are in log space, hence exp()
            hazard_curve = Curve([(exp(float(el['x'])), el['y'])
                            for el in decoded_curve['curve']])

            # consistency fix: the hazard curve above is keyed by
            # `self.job_id` but assets were read with `self.id`;
            # presumably both alias the same job identifier -- use
            # `self.job_id` uniformly so the kvs keys cannot diverge.
            asset_key = kvs.tokens.asset_key(self.job_id,
                            point.row, point.column)
            assets = kvs.get_client().lrange(asset_key, 0, -1)

            # json.loads avoids building a new JSONDecoder per asset
            for asset in (json.loads(x) for x in assets):
                LOGGER.debug("processing asset %s" % (asset))
                loss_ratio_curve = self.compute_loss_ratio_curve(
                    point, asset, hazard_curve)

                self.compute_loss_curve(point, loss_ratio_curve, asset)

        return True
Пример #17
0
    def execute(self):
        """
        Entry point for triggering the computation.
        """

        LOGGER.debug("Executing scenario risk computation.")
        LOGGER.debug(
            "This will calculate mean and standard deviation loss"
            "values for the region defined in the job config.")

        vuln_model = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        region_loss_map_data = {}

        region_losses = distribute(
            general.compute_risk,
            ("block_id", self.job_ctxt.blocks_keys),
            tf_args=dict(job_id=self.job_ctxt.job_id,
                         vuln_model=vuln_model,
                         insured_losses=self._insured_losses))

        # NOTE(review): assumes self._region_losses was initialized
        # elsewhere (e.g. in __init__) -- confirm.
        for block_data in region_losses:
            self._region_losses.append(block_data[0])
            collect_region_data(block_data[1], region_loss_map_data)

        self._loss_map_data = [
            (site, data)
            for site, data in region_loss_map_data.iteritems()]
    def test_loading_and_storing_model_in_kvs(self):
        # load the model from file, store it in the kvs and read it back
        path = os.path.join(test.SCHEMA_DIR, TEST_FILE)
        vulnerability.load_vulnerability_model(1234, path)
        model = vulnerability.load_vuln_model_from_kvs(1234)

        self.assertEqual(NO_OF_CURVES_IN_TEST_FILE, len(model))

        imls = [5.0, 5.5, 6.0, 6.5, 7.0, 7.5, 8.0, 8.5, 9.0, 9.5, 10.0]
        ratios = [0.00, 0.00, 0.00, 0.00, 0.00, 0.01,
                  0.06, 0.18, 0.36, 0.36, 0.36]

        # every ordinate carries the same coefficient of variation (0.3)
        expected_curve = shapes.Curve(
            [(iml, (ratio, 0.3)) for iml, ratio in zip(imls, ratios)])

        self.assertEqual(expected_curve, model["PK"])
        self.assertEqual(expected_curve, model["IR"])
Пример #19
0
    def _compute_bcr(self, block_id):
        """
        Calculate and store in the kvs the benefit-cost ratio data for block.

        A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
        See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
        data structure spec.

        :param block_id: id of the block of sites to process.
        """

        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)
        seed, correlation_type = self._get_correlation_type()

        def hazard_getter(site):
            "Return the GMF data (IMLs plus time parameters) for the site"
            gmvs = self._get_gmvs_at(general.hazard_input_site(
                    self.job_ctxt, site))

            return {"IMLs": gmvs, "TSES": self._tses(),
                    "TimeSpan": self._time_span()}

        # maps (site.x, site.y) -> list of (bcr-data dict, asset_ref)
        result = defaultdict(list)

        def on_asset_complete(asset, bcr, eal_original, eal_retrofitted):
            # callback invoked by the calculator once per processed asset
            result[(asset.site.x, asset.site.y)].append(
                ({'bcr': bcr,
                  'eal_original': eal_original,
                  'eal_retrofitted': eal_retrofitted},
                  asset.asset_ref))

        job_id = self.job_ctxt.job_id

        # run the probabilistic BCR computation over the whole block using
        # both the original and the retrofitted vulnerability models
        benefit_cost_ratio.compute_probabilistic(
            block.sites,
            lambda site: general.BaseRiskCalculator.assets_at(job_id, site),
            vulnerability.load_vuln_model_from_kvs(job_id),
            vulnerability.load_vuln_model_from_kvs(job_id, retrofitted=True),
            hazard_getter,
            float(self.job_ctxt.params['INTEREST_RATE']),
            float(self.job_ctxt.params['ASSET_LIFE_EXPECTANCY']),
            self.job_ctxt.oq_job_profile.loss_histogram_bins,
            seed, correlation_type, on_asset_complete)

        bcr_block_key = kvs.tokens.bcr_block_key(
            self.job_ctxt.job_id, block_id)
        # store the accumulated block result under the block key
        result = result.items()
        kvs.set_value_json_encoded(bcr_block_key, result)
        LOGGER.debug('bcr result for block %s: %r', block_id, result)
Пример #20
0
    def _compute_loss(self, block_id):
        """Compute risk for a block of sites, that means:

        * loss ratio curves
        * loss curves
        * conditional losses
        * (partial) aggregate loss curve
        """

        self.slice_gmfs(block_id)

        self.vuln_curves = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)

        # accumulates the losses of every asset in this block
        aggregate_curve = general.AggregateLossCurve()

        for site in block.sites:
            point = self.job_ctxt.region.grid.point_at(site)

            gmf = kvs.get_value_json_decoded(
                kvs.tokens.gmf_set_key(
                    self.job_ctxt.job_id, point.column, point.row))

            assets = general.BaseRiskCalculator.assets_at(
                self.job_ctxt.job_id, site)

            for asset in assets:
                # loss ratios feed both the per-asset curve and the
                # aggregate losses
                loss_ratios = self.compute_loss_ratios(asset, gmf)

                ratio_curve = self.compute_loss_ratio_curve(
                    point.column, point.row, asset, gmf, loss_ratios)

                aggregate_curve.append(loss_ratios * asset.value)

                if not ratio_curve:
                    continue

                loss_curve = self.compute_loss_curve(
                    point.column, point.row, ratio_curve, asset)

                for loss_poe in general.conditional_loss_poes(
                        self.job_ctxt.params):
                    general.compute_conditional_loss(
                        self.job_ctxt.job_id, point.column, point.row,
                        loss_curve, asset, loss_poe)

        return aggregate_curve.losses
Пример #21
0
    def _compute_loss(self, block_id):
        """Compute risk for a block of sites, that means:

        * loss ratio curves
        * loss curves
        * conditional losses
        * (partial) aggregate loss curve
        """

        self.slice_gmfs(block_id)

        job_id = self.job_ctxt.job_id
        self.vuln_curves = vulnerability.load_vuln_model_from_kvs(job_id)

        block = general.Block.from_kvs(job_id, block_id)

        # partial aggregate curve for this block
        aggregate_curve = general.AggregateLossCurve()

        for site in block.sites:
            point = self.job_ctxt.region.grid.point_at(site)

            gmf_key = kvs.tokens.gmf_set_key(
                job_id, point.column, point.row)
            gmf = kvs.get_value_json_decoded(gmf_key)

            for asset in general.BaseRiskCalculator.assets_at(job_id, site):
                # loss ratios are used both to produce the curve and to
                # aggregate the losses
                loss_ratios = self.compute_loss_ratios(asset, gmf)

                ratio_curve = self.compute_loss_ratio_curve(
                    point.column, point.row, asset, gmf, loss_ratios)

                aggregate_curve.append(loss_ratios * asset.value)

                if ratio_curve:
                    loss_curve = self.compute_loss_curve(
                        point.column, point.row, ratio_curve, asset)

                    for loss_poe in general.conditional_loss_poes(
                            self.job_ctxt.params):
                        general.compute_conditional_loss(
                            job_id, point.column, point.row,
                            loss_curve, asset, loss_poe)

        return aggregate_curve.losses
Пример #22
0
    def _compute_loss(self, block_id):
        """Compute risk for a single block of sites.

        For each asset found at every point of the block this yields a
        loss ratio curve, a loss curve and conditional losses per
        configured PoE; the per-asset losses are summed into a (partial)
        aggregate loss curve whose losses are returned.

        :param block_id: id of the block of sites to process
        :returns: the losses accumulated in the aggregate curve
        """

        self.slice_gmfs(block_id)

        job_id = self.calc_proxy.job_id
        self.vuln_curves = vulnerability.load_vuln_model_from_kvs(job_id)

        block = general.Block.from_kvs(job_id, block_id)

        # collects the losses of every asset in this block
        aggregate_curve = general.AggregateLossCurve()

        for point in block.grid(self.calc_proxy.region):
            gmf_slice = kvs.get_value_json_decoded(
                kvs.tokens.gmf_set_key(job_id, point.column, point.row))

            asset_key = kvs.tokens.asset_key(job_id, point.row, point.column)

            for asset in kvs.get_list_json_decoded(asset_key):
                LOGGER.debug("Processing asset %s" % (asset))

                # loss ratios are used both to produce the curve
                # and to aggregate the losses
                loss_ratios = self.compute_loss_ratios(asset, gmf_slice)

                loss_ratio_curve = self.compute_loss_ratio_curve(
                    point.column, point.row, asset, gmf_slice, loss_ratios)

                aggregate_curve.append(loss_ratios * asset["assetValue"])

                if not loss_ratio_curve:
                    continue

                loss_curve = self.compute_loss_curve(
                    point.column, point.row, loss_ratio_curve, asset)

                for loss_poe in general.conditional_loss_poes(
                        self.calc_proxy.params):
                    general.compute_conditional_loss(
                        job_id, point.column, point.row,
                        loss_curve, asset, loss_poe)

        return aggregate_curve.losses
Пример #23
0
    def _compute_loss(self, block_id):
        """Compute risk for a single block of sites.

        For every asset on every site of the block this produces a loss
        ratio curve, a loss curve and conditional losses per configured
        PoE; the per-asset losses feed a (partial) aggregate loss curve.

        :param block_id: id of the block of sites to process
        :returns: the losses accumulated in the aggregate curve
        """

        job_id = self.job_ctxt.job_id
        self.vuln_curves = vulnerability.load_vuln_model_from_kvs(job_id)

        block = general.Block.from_kvs(job_id, block_id)

        # collects the losses of every asset in this block
        aggregate_curve = general.AggregateLossCurve()

        for site in block.sites:
            point = self.job_ctxt.region.grid.point_at(site)

            # assemble the ground motion field from the stored ground
            # motion values at the corresponding hazard site
            gmvs = self._get_gmvs_at(
                general.hazard_input_site(self.job_ctxt, site))

            gmf = {"IMLs": gmvs, "TSES": self._tses(),
                   "TimeSpan": self._time_span()}

            for asset in general.BaseRiskCalculator.assets_at(job_id, site):
                # loss ratios are used both to produce the curve
                # and to aggregate the losses
                loss_ratios = self.compute_loss_ratios(asset, gmf)

                loss_ratio_curve = self.compute_loss_ratio_curve(
                    point.column, point.row, asset, gmf, loss_ratios)

                aggregate_curve.append(loss_ratios * asset.value)

                if not loss_ratio_curve:
                    continue

                loss_curve = self.compute_loss_curve(
                    point.column, point.row, loss_ratio_curve, asset)

                for loss_poe in general.conditional_loss_poes(
                        self.job_ctxt.params):
                    general.compute_conditional_loss(
                        job_id, point.column, point.row,
                        loss_curve, asset, loss_poe)

        return aggregate_curve.losses
Пример #24
0
    def __init__(self, job_id, block_id):
        """Prepare the calculator for computations.

        :param job_id: id of the job this calculator belongs to
        :param block_id: id of the block of sites to work on
        """
        self.job_id = job_id
        self.block_id = block_id

        # dict of {string: Curve}, keyed by taxonomy
        self.vuln_curves = vulnerability.load_vuln_model_from_kvs(job_id)

        LOGGER.debug("ProbabilisticLossRatioCalculator init: vuln curves are")
        for taxonomy, curve in self.vuln_curves.items():
            LOGGER.debug("%s: %s" % (taxonomy, curve))
Пример #25
0
    def compute_risk(self, block_id, **kwargs):  # pylint: disable=W0613
        """This task computes risk for a block of sites. It requires to have
        pre-initialized in kvs:
         1) list of sites
         2) gmfs
         3) exposure portfolio (=assets)
         4) vulnerability

        Loss ratio curves, loss curves and conditional losses are stored
        in kvs by the compute_* helpers; returns True on completion.

        TODO(fab): make conditional_loss_poe (set of probabilities of
        exceedance for which the loss computation is done)
        a list of floats, and read it from the job configuration.
        """

        # space-separated PoE values read from the job params, "0.01" default
        conditional_loss_poes = [
            float(x)
            for x in self.params.get('CONDITIONAL_LOSS_POE', "0.01").split()
        ]
        self.slice_gmfs(block_id)

        #pylint: disable=W0201
        self.vuln_curves = \
                vulnerability.load_vuln_model_from_kvs(self.job_id)

        # TODO(jmc): DONT assumes that hazard and risk grid are the same
        block = job.Block.from_kvs(block_id)

        for point in block.grid(self.region):
            # per-point GMF slice previously stored by slice_gmfs()
            key = kvs.generate_product_key(self.job_id,
                                           kvs.tokens.GMF_KEY_TOKEN,
                                           point.column, point.row)
            gmf_slice = kvs.get_value_json_decoded(key)

            # NOTE(review): self.id is used here while self.job_id is used
            # above -- presumably both refer to the same job; confirm.
            asset_key = kvs.tokens.asset_key(self.id, point.row, point.column)
            asset_list = kvs.get_client().lrange(asset_key, 0, -1)
            for asset in [json.JSONDecoder().decode(x) for x in asset_list]:
                LOGGER.debug("processing asset %s" % (asset))
                loss_ratio_curve = self.compute_loss_ratio_curve(
                    point.column, point.row, asset, gmf_slice)
                if loss_ratio_curve is not None:

                    # compute loss curve
                    loss_curve = self.compute_loss_curve(
                        point.column, point.row, loss_ratio_curve, asset)

                    for loss_poe in conditional_loss_poes:
                        self.compute_conditional_loss(point.column, point.row,
                                                      loss_curve, asset,
                                                      loss_poe)
        return True
Пример #26
0
    def _compute_bcr(self, block_id):
        """
        Calculate and store in the kvs the benefit-cost ratio data for block.

        A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
        See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
        data structure spec.
        """
        job_ctxt = self.job_ctxt
        job_id = job_ctxt.job_id
        block = Block.from_kvs(job_id, block_id)

        # {(lon, lat): [(bcr-data-dict, asset_ref), ...]}
        result = defaultdict(list)

        def on_asset_complete(asset, bcr, eal_original, eal_retrofitted):
            entry = ({"bcr": bcr,
                      "eal_original": eal_original,
                      "eal_retrofitted": eal_retrofitted}, asset.asset_ref)
            result[(asset.site.x, asset.site.y)].append(entry)

        assets_getter = lambda site: BaseRiskCalculator.assets_at(
            job_id, site)
        hazard_getter = lambda site: self._get_db_curve(
            hazard_input_site(self.job_ctxt, site))

        benefit_cost_ratio.compute(
            block.sites,
            assets_getter,
            vulnerability.load_vuln_model_from_kvs(job_id),
            vulnerability.load_vuln_model_from_kvs(job_id, retrofitted=True),
            hazard_getter,
            self.job_ctxt.oq_job_profile.lrem_steps_per_interval,
            float(job_ctxt.params["INTEREST_RATE"]),
            float(job_ctxt.params["ASSET_LIFE_EXPECTANCY"]),
            on_asset_complete)

        bcr = result.items()
        bcr_block_key = kvs.tokens.bcr_block_key(job_ctxt.job_id, block_id)
        kvs.set_value_json_encoded(bcr_block_key, bcr)
        LOGGER.debug("bcr result for block %s: %r", block_id, bcr)
        return True
    def test_loading_and_storing_model_in_kvs(self):
        """A vulnerability model stored in kvs round-trips intact."""
        path = os.path.join(helpers.SCHEMA_DIR, TEST_FILE)
        vulnerability.load_vulnerability_model(1234, path)
        model = vulnerability.load_vuln_model_from_kvs(1234)

        self.assertEqual(NO_OF_CURVES_IN_TEST_FILE, len(model))

        imls = [5.0, 5.5, 6.0, 6.5, 7.0, 7.5, 8.0, 8.5, 9.0, 9.5, 10.0]
        loss_ratios = [0.00, 0.00, 0.00, 0.00, 0.00,
                       0.01, 0.06, 0.18, 0.36, 0.36, 0.36]
        covs = [0.3] * len(imls)

        expected_curve = shapes.VulnerabilityFunction(
            imls, loss_ratios, covs)

        # both taxonomies in the test file map to the same curve
        self.assertEqual(expected_curve, model["PK"])
        self.assertEqual(expected_curve, model["IR"])
    def test_loading_and_storing_model_in_kvs(self):
        """A vulnerability model stored in kvs round-trips intact."""
        path = os.path.join(test.SCHEMA_DIR, TEST_FILE)
        vulnerability.load_vulnerability_model(1234, path)
        model = vulnerability.load_vuln_model_from_kvs(1234)

        self.assertEqual(NO_OF_CURVES_IN_TEST_FILE, len(model))

        imls = [5.0, 5.5, 6.0, 6.5, 7.0, 7.5, 8.0, 8.5, 9.0, 9.5, 10.0]
        loss_ratios = [0.00, 0.00, 0.00, 0.00, 0.00,
                       0.01, 0.06, 0.18, 0.36, 0.36, 0.36]

        # every IML maps to (loss_ratio, cov) with a constant cov of 0.3
        expected_curve = shapes.Curve(
            [(iml, (ratio, 0.3)) for iml, ratio in zip(imls, loss_ratios)])

        # both taxonomies in the test file map to the same curve
        self.assertEqual(expected_curve, model["PK"])
        self.assertEqual(expected_curve, model["IR"])
Пример #29
0
    def _compute_loss(self, block_id):
        """
        Calculate and store in the kvs the loss data.

        For each asset in the block this stores the loss curve, the loss
        ratio curve and one conditional loss per configured PoE, each
        under its own kvs key.

        :param block_id: id of the block of sites to process
        """
        block = Block.from_kvs(self.job_ctxt.job_id, block_id)

        vulnerability_model = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        steps = self.job_ctxt.oq_job_profile.lrem_steps_per_interval

        # per-site getters used by the api driver below
        assets_getter = lambda site: BaseRiskCalculator.assets_at(
            self.job_ctxt.job_id, site)

        hazard_getter = lambda site: (
            self._get_db_curve(hazard_input_site(self.job_ctxt, site)))

        # classical calculator wrapped so conditional losses are
        # computed for each configured PoE
        calculator = api.conditional_losses(
            conditional_loss_poes(self.job_ctxt.params),
            api.classical(vulnerability_model, steps=steps))

        for asset_output in api.compute_on_sites(block.sites,
            assets_getter, hazard_getter, calculator):

            location = asset_output.asset.site

            # map the asset location back onto the region grid
            point = self.job_ctxt.region.grid.point_at(
                shapes.Site(location.x, location.y))

            loss_key = kvs.tokens.loss_curve_key(
                self.job_ctxt.job_id, point.row,
                point.column, asset_output.asset.asset_ref)

            kvs.get_client().set(loss_key, asset_output.loss_curve.to_json())

            loss_ratio_key = kvs.tokens.loss_ratio_key(self.job_ctxt.job_id,
                point.row, point.column, asset_output.asset.asset_ref)

            kvs.get_client().set(loss_ratio_key,
                asset_output.loss_ratio_curve.to_json())

            # one kvs entry per (asset, PoE) pair
            for poe, loss in asset_output.conditional_losses.items():
                key = kvs.tokens.loss_key(
                    self.job_ctxt.job_id, point.row, point.column,
                    asset_output.asset.asset_ref, poe)

                kvs.get_client().set(key, loss)
    def from_kvs(job_id):
        """Build an aggregate curve from the GMFs and assets stored in
        the underlying kvs system.

        :param job_id: id of the job whose GMFs and assets are read
        :returns: a populated aggregate loss curve
        """
        vuln_model = vulnerability.load_vuln_model_from_kvs(job_id)
        aggregate_curve = AggregateLossCurve(vuln_model)

        client = kvs.get_client(binary=False)
        gmfs_keys = client.keys("%s*%s*" % (job_id, kvs.tokens.GMF_KEY_TOKEN))
        LOG.debug("Found %s stored GMFs..." % len(gmfs_keys))

        # one pass over the keys plus one lookup each -- O(2*n)
        for gmfs_key in gmfs_keys:
            asset = _asset_for_gmfs(job_id, gmfs_key)
            gmfs = kvs.get_value_json_decoded(gmfs_key)
            aggregate_curve.append(gmfs, asset)

        return aggregate_curve
Пример #31
0
    def compute_risk(self, block_id, **kwargs):  # pylint: disable=W0613
        """This task computes risk for a block of sites. It requires to have
        pre-initialized in kvs:
         1) list of sites
         2) gmfs
         3) exposure portfolio (=assets)
         4) vulnerability

        Loss ratio curves, loss curves and conditional losses are stored
        in kvs by the compute_* helpers; returns True on completion.

        TODO(fab): make conditional_loss_poe (set of probabilities of
        exceedance for which the loss computation is done)
        a list of floats, and read it from the job configuration.
        """

        # space-separated PoE values read from the job params, "0.01" default
        conditional_loss_poes = [float(x) for x in self.params.get(
                    'CONDITIONAL_LOSS_POE', "0.01").split()]
        self.slice_gmfs(block_id)

        #pylint: disable=W0201
        self.vuln_curves = \
                vulnerability.load_vuln_model_from_kvs(self.job_id)

        # TODO(jmc): DONT assumes that hazard and risk grid are the same
        block = job.Block.from_kvs(block_id)

        for point in block.grid(self.region):
            # per-point GMF slice previously stored by slice_gmfs()
            key = kvs.generate_product_key(self.job_id,
                kvs.tokens.GMF_KEY_TOKEN, point.column, point.row)
            gmf_slice = kvs.get_value_json_decoded(key)

            # NOTE(review): self.id is used here while self.job_id is used
            # above -- presumably both refer to the same job; confirm.
            asset_key = kvs.tokens.asset_key(self.id, point.row, point.column)
            asset_list = kvs.get_client().lrange(asset_key, 0, -1)
            for asset in [json.JSONDecoder().decode(x) for x in asset_list]:
                LOGGER.debug("processing asset %s" % (asset))
                loss_ratio_curve = self.compute_loss_ratio_curve(
                        point.column, point.row, asset, gmf_slice)
                if loss_ratio_curve is not None:

                    # compute loss curve
                    loss_curve = self.compute_loss_curve(
                            point.column, point.row,
                            loss_ratio_curve, asset)

                    for loss_poe in conditional_loss_poes:
                        self.compute_conditional_loss(point.column, point.row,
                                loss_curve, asset, loss_poe)
        return True
Пример #32
0
    def test_loading_and_storing_model_in_kvs(self):
        """A vulnerability model stored in kvs round-trips intact."""
        path = os.path.join(helpers.SCHEMA_DIR, TEST_FILE)
        vulnerability.load_vulnerability_model(1234, path)
        model = vulnerability.load_vuln_model_from_kvs(1234)

        self.assertEqual(NO_OF_CURVES_IN_TEST_FILE, len(model))

        imls = [5.0, 5.5, 6.0, 6.5, 7.0, 7.5, 8.0, 8.5, 9.0, 9.5, 10.0]
        ratios = [0.00, 0.00, 0.00, 0.00, 0.00,
                  0.01, 0.06, 0.18, 0.36, 0.36, 0.36]
        covs = [0.3 for _ in imls]

        expected_curve = shapes.VulnerabilityFunction(imls, ratios, covs)

        # both taxonomies in the test file map to the same curve
        self.assertEqual(expected_curve, model["PK"])
        self.assertEqual(expected_curve, model["IR"])
Пример #33
0
    def execute(self):
        """Entry point for triggering the computation.

        Fans out one celery task per block, sums the per-block losses
        into a single SumPerGroundMotionField and serializes the merged
        loss map to XML.
        """
        LOGGER.debug("Executing scenario risk computation.")
        LOGGER.debug("This will calculate mean and standard deviation loss"
            "values for the region defined in the job config.")

        tasks = []

        vuln_model = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        epsilon_provider = general.EpsilonProvider(self.job_ctxt.params)

        # accumulates the losses of every block
        sum_per_gmf = SumPerGroundMotionField(vuln_model, epsilon_provider)

        region_loss_map_data = {}

        # dispatch one asynchronous task per block
        for block_id in self.job_ctxt.blocks_keys:
            LOGGER.debug("Dispatching task for block %s of %s"
                % (block_id, len(self.job_ctxt.blocks_keys)))
            a_task = general.compute_risk.delay(
                self.job_ctxt.job_id, block_id, vuln_model=vuln_model,
                epsilon_provider=epsilon_provider)
            tasks.append(a_task)

        # collect the results; any task failure aborts the whole run
        for task in tasks:
            task.wait()
            if not task.successful():
                raise Exception(task.result)

            block_loss, block_loss_map_data = task.result

            # do some basic validation on our results
            assert block_loss is not None, "Expected a result != None"
            assert isinstance(block_loss, numpy.ndarray), \
                "Expected a numpy array"

            # our result should be a 1-dimensional numpy.array of loss values
            sum_per_gmf.sum_losses(block_loss)

            collect_region_data(
                block_loss_map_data, region_loss_map_data)

        loss_map_data = [(site, data)
                for site, data in region_loss_map_data.iteritems()]

        # serialize the loss map data to XML
        loss_map_path = os.path.join(
            self.job_ctxt['BASE_PATH'],
            self.job_ctxt['OUTPUT_DIR'],
            'loss-map-%s.xml' % self.job_ctxt.job_id)
        loss_map_writer = risk_output.create_loss_map_writer(
            self.job_ctxt.job_id, self.job_ctxt.serialize_results_to,
            loss_map_path, True)

        if loss_map_writer:
            LOGGER.debug("Starting serialization of the loss map...")

            # Add a metadata dict in the first list position
            # Note: the metadata is still incomplete (see bug 809410)
            loss_map_metadata = {'scenario': True}
            loss_map_data.insert(0, loss_map_metadata)
            loss_map_writer.serialize(loss_map_data)

        # For now, just print these values.
        # These are not debug statements; please don't remove them!
        print "Mean region loss value: %s" % sum_per_gmf.mean
        print "Standard deviation region loss value: %s" % sum_per_gmf.stddev
Пример #34
0
    def _compute_loss(self, block_id):
        """Compute risk for a block of sites, that means:

        * loss ratio curves
        * loss curves
        * conditional losses
        * (partial) aggregate loss curve

        :param block_id: id of the block of sites to process
        :returns: the aggregate losses collected by the underlying
            probabilistic event based calculator
        """

        self.vulnerability_model = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        seed, correlation_type = self._get_correlation_type()
        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)
        loss_histogram_bins = self.job_ctxt.oq_job_profile.loss_histogram_bins

        def hazard_getter(site):
            # ground motion values plus the time parameters form the GMF
            gmvs = self._get_gmvs_at(general.hazard_input_site(
                self.job_ctxt, site))

            return {"IMLs": gmvs, "TSES": self._tses(),
                "TimeSpan": self._time_span()}

        assets_getter = lambda site: general.BaseRiskCalculator.assets_at(
            self.job_ctxt.job_id, site)

        probabilistic_event_based_calculator = api.probabilistic_event_based(
            self.vulnerability_model, loss_histogram_bins,
            seed, correlation_type)

        # wrap the base calculator so conditional losses are computed
        calculator = api.conditional_losses(general.conditional_loss_poes(
            self.job_ctxt.params), probabilistic_event_based_calculator)

        # NOTE(review): truthiness of the raw param value -- a literal
        # "false" string would still enable this branch; confirm the
        # param is normalized upstream.
        if self.job_ctxt.params.get("INSURED_LOSSES"):
            calculator = api.insured_curves(self.vulnerability_model,
                loss_histogram_bins, seed, correlation_type,
                api.insured_losses(calculator))

        for asset_output in api.compute_on_sites(block.sites,
            assets_getter, hazard_getter, calculator):

            location = asset_output.asset.site

            # map the asset location back onto the region grid
            point = self.job_ctxt.region.grid.point_at(
                shapes.Site(location.x, location.y))

            self._loss_ratio_curve_on_kvs(
                point.column, point.row, asset_output.loss_ratio_curve,
                asset_output.asset)

            self._loss_curve_on_kvs(
                point.column, point.row, asset_output.loss_curve,
                asset_output.asset)

            # one kvs entry per (asset, PoE) pair
            for poe, loss in asset_output.conditional_losses.items():
                key = kvs.tokens.loss_key(
                    self.job_ctxt.job_id, point.row, point.column,
                    asset_output.asset.asset_ref, poe)

                kvs.get_client().set(key, loss)

            if self.job_ctxt.params.get("INSURED_LOSSES"):
                self._insured_loss_curve_on_kvs(
                    point.column, point.row,
                    asset_output.insured_loss_curve, asset_output.asset)

                self._insured_loss_ratio_curve_on_kvs(
                    point.column, point.row,
                    asset_output.insured_loss_ratio_curve, asset_output.asset)

        return probabilistic_event_based_calculator.aggregate_losses
Пример #35
0
    def _compute_loss(self, block_id):
        """Compute risk for a block of sites, that means:

        * loss ratio curves
        * loss curves
        * conditional losses
        * (partial) aggregate loss curve

        Insured loss (ratio) curves are also produced when the
        INSURED_LOSSES parameter is set.

        :param block_id: id of the block of sites to process
        :returns: the losses accumulated in the aggregate curve
        """

        self.vulnerability_curves = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)

        # aggregate the losses for this block
        aggregate_curve = general.AggregateLossCurve()

        for site in block.sites:
            point = self.job_ctxt.region.grid.point_at(site)
            gmf = self._load_ground_motion_field(site)
            assets = general.BaseRiskCalculator.assets_at(
                self.job_ctxt.job_id, site)

            for asset in assets:
                # loss ratios, used both to produce the curve
                # and to aggregate the losses
                loss_ratios = self._compute_loss_ratios(asset, gmf)

                loss_ratio_curve = self._compute_loss_ratio_curve(
                    asset, gmf, loss_ratios)

                self._loss_ratio_curve_on_kvs(point.column, point.row,
                                              loss_ratio_curve, asset)

                losses = loss_ratios * asset.value

                aggregate_curve.append(losses)

                if loss_ratio_curve:
                    loss_curve = self._compute_loss_curve(
                        loss_ratio_curve, asset)

                    self._loss_curve_on_kvs(point.column, point.row,
                                            loss_curve, asset)

                    for loss_poe in general.conditional_loss_poes(
                            self.job_ctxt.params):
                        general.compute_conditional_loss(
                            self.job_ctxt.job_id, point.column, point.row,
                            loss_curve, asset, loss_poe)

                    # NOTE(review): truthiness of the raw param value -- a
                    # literal "false" string would still enable this branch;
                    # confirm the param is normalized upstream.
                    if self.job_ctxt.params.get("INSURED_LOSSES"):
                        insured_losses = general.compute_insured_losses(
                            asset, losses)

                        insured_loss_ratio_curve = (
                            self._compute_insured_loss_ratio_curve(
                                insured_losses, asset, gmf))

                        self._insured_loss_ratio_curve_on_kvs(
                            point.column, point.row, insured_loss_ratio_curve,
                            asset)

                        insured_loss_curve = self._compute_loss_curve(
                            insured_loss_ratio_curve, asset)

                        self._insured_loss_curve_on_kvs(
                            point.column, point.row, insured_loss_curve, asset)

        return aggregate_curve.losses
Пример #36
0
    def execute(self):
        """Entry point for triggering the computation.

        Fans out one celery task per block, sums the per-block losses
        into a single SumPerGroundMotionField and serializes the merged
        loss map to XML.
        """
        LOGGER.debug("Executing scenario risk computation.")
        LOGGER.debug("This will calculate mean and standard deviation loss"
                     "values for the region defined in the job config.")

        tasks = []

        vuln_model = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        epsilon_provider = general.EpsilonProvider(self.job_ctxt.params)

        # accumulates the losses of every block
        sum_per_gmf = SumPerGroundMotionField(vuln_model, epsilon_provider)

        region_loss_map_data = {}

        # dispatch one asynchronous task per block
        for block_id in self.job_ctxt.blocks_keys:
            LOGGER.debug("Dispatching task for block %s of %s" %
                         (block_id, len(self.job_ctxt.blocks_keys)))
            a_task = general.compute_risk.delay(self.job_ctxt.job_id,
                                                block_id,
                                                vuln_model=vuln_model)
            tasks.append(a_task)

        # collect the results; any task failure aborts the whole run
        for task in tasks:
            task.wait()
            if not task.successful():
                raise Exception(task.result)

            block_loss, block_loss_map_data = task.result

            # do some basic validation on our results
            assert block_loss is not None, "Expected a result != None"
            assert isinstance(block_loss, numpy.ndarray), \
                "Expected a numpy array"

            # our result should be a 1-dimensional numpy.array of loss values
            sum_per_gmf.sum_losses(block_loss)

            collect_region_data(block_loss_map_data, region_loss_map_data)

        loss_map_data = [(site, data)
                         for site, data in region_loss_map_data.iteritems()]

        # serialize the loss map data to XML
        loss_map_path = os.path.join(self.job_ctxt['BASE_PATH'],
                                     self.job_ctxt['OUTPUT_DIR'],
                                     'loss-map-%s.xml' % self.job_ctxt.job_id)
        loss_map_writer = risk_output.create_loss_map_writer(
            self.job_ctxt.job_id, self.job_ctxt.serialize_results_to,
            loss_map_path, True)

        if loss_map_writer:
            LOGGER.debug("Starting serialization of the loss map...")

            # Add a metadata dict in the first list position
            # Note: the metadata is still incomplete (see bug 809410)
            loss_map_metadata = {'scenario': True}
            loss_map_data.insert(0, loss_map_metadata)
            loss_map_writer.serialize(loss_map_data)

        # For now, just print these values.
        # These are not debug statements; please don't remove them!
        print "Mean region loss value: %s" % sum_per_gmf.mean
        print "Standard deviation region loss value: %s" % sum_per_gmf.stddev
Пример #37
0
    def execute(self):
        """Entry point for triggering the computation.

        Fans out one celery task per block, sums the per-block losses
        into a single SumPerGroundMotionField and serializes the merged
        loss map to XML. Returns [True] on completion.
        """

        LOGGER.debug("Executing deterministic risk computation.")
        LOGGER.debug("This will calculate mean and standard deviation loss"
            "values for the region defined in the job config.")

        tasks = []

        vuln_model = \
            vulnerability.load_vuln_model_from_kvs(self.job_id)

        epsilon_provider = risk_job.EpsilonProvider(self.params)

        # accumulates the losses of every block
        sum_per_gmf = det.SumPerGroundMotionField(vuln_model, epsilon_provider)

        region_loss_map_data = []

        # dispatch one asynchronous task per block
        for block_id in self.blocks_keys:
            LOGGER.debug("Dispatching task for block %s of %s"
                % (block_id, len(self.blocks_keys)))
            a_task = risk_job.compute_risk.delay(
                self.id, block_id, vuln_model=vuln_model,
                epsilon_provider=epsilon_provider)
            tasks.append(a_task)

        # collect the results; any task failure aborts the whole run
        for task in tasks:
            task.wait()
            if not task.successful():
                raise Exception(task.result)

            block_loss, block_loss_map_data = task.result

            # do some basic validation on our results
            assert block_loss is not None, "Expected a result != None"
            assert isinstance(block_loss, numpy.ndarray), \
                "Expected a numpy array"

            # our result should be a 1-dimensional numpy.array of loss values
            sum_per_gmf.sum_losses(block_loss)

            region_loss_map_data.extend(block_loss_map_data)

        # serialize the loss map data to XML
        loss_map_path = os.path.join(
            self['BASE_PATH'],
            self['OUTPUT_DIR'],
            'loss-map-%s.xml' % self.id)
        loss_map_xml_writer = risk_output.LossMapXMLWriter(loss_map_path)

        # Add a metadata dict in the first list position
        # TODO(LB): we need to define some meaningful values for the metadata
        # here. For now, I'm just going to leave it blank.
        loss_map_metadata = {}
        region_loss_map_data.insert(0, loss_map_metadata)
        loss_map_xml_writer.serialize(region_loss_map_data)

        # For now, just print these values.
        # These are not debug statements; please don't remove them!
        print "Mean region loss value: %s" % sum_per_gmf.mean
        print "Standard deviation region loss value: %s" % sum_per_gmf.stddev
        return [True]
Пример #38
0
    def _compute_loss(self, block_id):
        """Compute risk for a block of sites, that means:

        * loss ratio curves
        * loss curves
        * conditional losses
        * (partial) aggregate loss curve

        Insured loss (ratio) curves are also produced when the
        INSURED_LOSSES parameter is set.

        :param block_id: id of the block of sites to process
        :returns: the losses accumulated in the aggregate curve
        """

        self.vulnerability_curves = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)

        # aggregate the losses for this block
        aggregate_curve = general.AggregateLossCurve()

        for site in block.sites:
            point = self.job_ctxt.region.grid.point_at(site)
            gmf = self._load_ground_motion_field(site)
            assets = general.BaseRiskCalculator.assets_at(
                self.job_ctxt.job_id, site)

            for asset in assets:
                # loss ratios, used both to produce the curve
                # and to aggregate the losses
                loss_ratios = self._compute_loss_ratios(asset, gmf)

                loss_ratio_curve = self._compute_loss_ratio_curve(
                    asset, gmf, loss_ratios)

                self._loss_ratio_curve_on_kvs(
                    point.column, point.row, loss_ratio_curve, asset)

                losses = loss_ratios * asset.value

                aggregate_curve.append(losses)

                if loss_ratio_curve:
                    loss_curve = self._compute_loss_curve(
                        loss_ratio_curve, asset)

                    self._loss_curve_on_kvs(point.column, point.row,
                        loss_curve, asset)

                    for loss_poe in general.conditional_loss_poes(
                        self.job_ctxt.params):
                        general.compute_conditional_loss(
                                self.job_ctxt.job_id, point.column,
                                point.row, loss_curve, asset, loss_poe)

                    # NOTE(review): truthiness of the raw param value -- a
                    # literal "false" string would still enable this branch;
                    # confirm the param is normalized upstream.
                    if self.job_ctxt.params.get("INSURED_LOSSES"):
                        insured_losses = general.compute_insured_losses(
                            asset, losses)

                        insured_loss_ratio_curve = (
                            self._compute_insured_loss_ratio_curve(
                                insured_losses, asset, gmf))

                        self._insured_loss_ratio_curve_on_kvs(point.column,
                            point.row, insured_loss_ratio_curve, asset)

                        insured_loss_curve = self._compute_loss_curve(
                            insured_loss_ratio_curve, asset)

                        self._insured_loss_curve_on_kvs(point.column,
                            point.row, insured_loss_curve, asset)

        return aggregate_curve.losses