Example #1
    def setUp(self):
        inputs = [("fragility", ""), ("exposure", "")]
        self.job = self.setup_classic_job(inputs=inputs)

        kvs.mark_job_as_current(self.job.id)
        kvs.cache_gc(self.job.id)

        self.site = Site(1.0, 1.0)
        block = Block(self.job.id, BLOCK_ID, [self.site])
        block.to_kvs()

        # this region contains exactly one site, at
        # longitude == 1.0 and latitude == 1.0
        params = {"REGION_VERTEX": "1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0",
                  "REGION_GRID_SPACING": "0.5", "BASE_PATH": ".",
                  "OUTPUT_DIR": "."}

        self.job_ctxt = JobContext(params, self.job.id, oq_job=self.job)

        self.em = self._store_em()
        self._store_gmvs([0.40, 0.30, 0.45, 0.35, 0.40])

        self.calculator = ScenarioDamageRiskCalculator(self.job_ctxt)

        # just stubbing out some preprocessing stuff...
        ScenarioDamageRiskCalculator.store_exposure_assets = lambda self: None
        ScenarioDamageRiskCalculator.store_fragility_model = lambda self: None
        ScenarioDamageRiskCalculator.partition = lambda self: None
Example #2
    def test_eq(self):
        # Test the __eq__ method.
        # __eq__ is a shallow test and only compares ids.
        block1 = Block(7, 0, [shapes.Site(1.0, 1.0)])
        block2 = Block(7, 0, [shapes.Site(1.0, 0.0)])

        self.assertTrue(block1 == block2)
Example #3
    def test_not_eq(self):
        # Test __eq__ with 2 Blocks that should not be equal
        block1 = Block(7, 0, [shapes.Site(1.0, 1.0)])
        block2 = Block(8, 0, [shapes.Site(1.0, 1.0)])
        self.assertFalse(block1 == block2)

        block1 = Block(7, 0, [shapes.Site(1.0, 1.0)])
        block2 = Block(7, 1, [shapes.Site(1.0, 1.0)])
        self.assertFalse(block1 == block2)
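The two tests above pin down the equality contract: only the identifying ids matter, never the site list. A minimal Block sketch consistent with that contract (hypothetical; the actual oq-engine implementation may differ) could look like this:

class Block(object):
    # Hypothetical sketch of a Block holding a job id, a block id and sites.

    def __init__(self, job_id, block_id, sites):
        self.job_id = job_id
        self.block_id = block_id
        self.sites = list(sites)

    def __eq__(self, other):
        # shallow comparison: ids only, the sites are deliberately ignored
        return (self.job_id == other.job_id
                and self.block_id == other.block_id)

    def __ne__(self, other):
        return not self == other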
Example #4
    def test_compute_bcr_in_the_classical_psha_calculator(self):
        self._compute_risk_classical_psha_setup()
        helpers.delete_profile(self.job)
        bcr_config = helpers.demo_file('benefit_cost_ratio/config.gem')
        job_profile, params, sections = engine.import_job_profile(
            bcr_config, self.job)

        # We need to adjust a few of the parameters for this test:
        job_profile.imls = [
            0.005, 0.007, 0.0098, 0.0137, 0.0192, 0.0269, 0.0376, 0.0527,
            0.0738, 0.103, 0.145, 0.203, 0.284, 0.397, 0.556, 0.778]
        params['ASSET_LIFE_EXPECTANCY'] = '50'
        job_profile.asset_life_expectancy = 50
        params['REGION_VERTEX'] = '0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 2.0, 0.0'
        job_profile.region = GEOSGeometry(shapes.polygon_ewkt_from_coords(
            params['REGION_VERTEX']))
        job_profile.save()

        job_ctxt = engine.JobContext(
            params, self.job_id, sections=sections, oq_job_profile=job_profile)

        calculator = classical_core.ClassicalRiskCalculator(job_ctxt)

        [input] = models.inputs4job(self.job.id, input_type="exposure")
        emdl = input.model()
        if not emdl:
            emdl = models.ExposureModel(
                owner=self.job.owner, input=input,
                description="c-psha test exposure model",
                category="c-psha power plants", stco_unit="watt",
                stco_type="aggregated", reco_unit="joule",
                reco_type="aggregated")
            emdl.save()

        assets = emdl.exposuredata_set.filter(asset_ref="rubcr")
        if not assets:
            asset = models.ExposureData(exposure_model=emdl, taxonomy="ID",
                                        asset_ref="rubcr", stco=1, reco=123.45,
                                        site=GEOSGeometry("POINT(1.0 1.0)"))
            asset.save()

        Block.from_kvs(self.job_id, self.block_id)
        calculator.compute_risk(self.block_id)

        result_key = kvs.tokens.bcr_block_key(self.job_id, self.block_id)
        res = kvs.get_value_json_decoded(result_key)
        expected_result = {'bcr': 0.0, 'eal_original': 0.003032,
                           'eal_retrofitted': 0.003032}

        helpers.assertDeepAlmostEqual(
            self, res, [[[1, 1], [[expected_result, "rubcr"]]]])
Example #5
    def test_block_kvs_serialization(self):
        # Test that a Block is properly serialized/deserialized from the cache.
        job_id = 7
        block_id = 0
        expected_block = Block(job_id, block_id,
                               [shapes.Site(1.0, 1.0), shapes.Site(2.0, 2.0)])
        expected_block.to_kvs()

        actual_block = Block.from_kvs(job_id, block_id)

        self.assertEqual(expected_block, actual_block)
        # The sites are not compared in Block.__eq__; we need to check those
        # also.
        self.assertEqual(expected_block.sites, actual_block.sites)
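A minimal sketch of the round trip this test exercises, reusing the Block sketch above, with a JSON encoding of the site coordinates and a plain dict standing in for the KVS client (the key scheme, the helper names and the Site attribute names are assumptions, not the real openquake.kvs API):

import json

from openquake import shapes  # import path assumed from the tests above

_KVS = {}  # stand-in for the real key/value store client


def _block_key(job_id, block_id):
    # hypothetical key scheme, for illustration only
    return "job:%s:block:%s" % (job_id, block_id)


def block_to_kvs(block):
    # store only the coordinates; Site objects are rebuilt on read
    coords = [(site.longitude, site.latitude) for site in block.sites]
    _KVS[_block_key(block.job_id, block.block_id)] = json.dumps(coords)


def block_from_kvs(job_id, block_id):
    coords = json.loads(_KVS[_block_key(job_id, block_id)])
    sites = [shapes.Site(lon, lat) for lon, lat in coords]
    return Block(job_id, block_id, sites)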
Example #6
    def test_compute_bcr(self):
        cfg_path = helpers.demo_file(
            'probabilistic_event_based_risk/config.gem')
        helpers.delete_profile(self.job)
        job_profile, params, sections = engine.import_job_profile(
            cfg_path, self.job)
        job_profile.calc_mode = 'event_based_bcr'
        job_profile.interest_rate = 0.05
        job_profile.asset_life_expectancy = 50
        job_profile.region = GEOSGeometry(shapes.polygon_ewkt_from_coords(
            '0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 2.0, 0.0'))
        job_profile.region_grid_spacing = 0.1
        job_profile.maximum_distance = 200.0
        job_profile.gmf_random_seed = None
        job_profile.save()

        params.update(dict(CALCULATION_MODE='Event Based BCR',
                           INTEREST_RATE='0.05',
                           ASSET_LIFE_EXPECTANCY='50',
                           MAXIMUM_DISTANCE='200.0',
                           REGION_VERTEX=('0.0, 0.0, 0.0, 2.0, '
                                          '2.0, 2.0, 2.0, 0.0'),
                           REGION_GRID_SPACING='0.1'))

        job_ctxt = engine.JobContext(
            params, self.job_id, sections=sections, oq_job_profile=job_profile)

        calculator = eb_core.EventBasedRiskCalculator(job_ctxt)

        self.block_id = 7
        SITE = shapes.Site(1.0, 1.0)
        block = Block(self.job_id, self.block_id, (SITE, ))
        block.to_kvs()

        location = GEOSGeometry(SITE.point.to_wkt())
        asset = models.ExposureData(exposure_model=self.emdl, taxonomy="ID",
                                    asset_ref=22.61, stco=1, reco=123.45,
                                    site=location)
        asset.save()

        calculator.compute_risk(self.block_id)

        result_key = kvs.tokens.bcr_block_key(self.job_id, self.block_id)
        result = kvs.get_value_json_decoded(result_key)
        expected_result = {'bcr': 0.0, 'eal_original': 0.0,
                           'eal_retrofitted': 0.0}
        helpers.assertDeepAlmostEqual(
            self, [[[1, 1], [[expected_result, "22.61"]]]], result)
Example #7
    def _compute_loss(self, block_id):
        """
        Calculate and store in the kvs the loss data.
        """
        block = Block.from_kvs(self.job_ctxt.job_id, block_id)

        vuln_curves = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        lrem_steps = self.job_ctxt.oq_job_profile.lrem_steps_per_interval
        loss_poes = conditional_loss_poes(self.job_ctxt.params)

        assets_getter = lambda site: BaseRiskCalculator.assets_at(
            self.job_ctxt.job_id, site)

        hazard_getter = lambda site: (
            self.job_ctxt.region.grid.point_at(site),
            self._get_db_curve(hazard_input_site(self.job_ctxt, site)),
        )

        def on_asset_complete(asset, point, loss_ratio_curve, loss_curve,
                              loss_conditionals):
            loss_key = kvs.tokens.loss_curve_key(
                self.job_ctxt.job_id, point.row, point.column,
                asset.asset_ref)

            kvs.get_client().set(loss_key, loss_curve.to_json())

            for poe, loss in loss_conditionals.items():
                key = kvs.tokens.loss_key(
                    self.job_ctxt.job_id, point.row, point.column,
                    asset.asset_ref, poe)
                kvs.get_client().set(key, loss)

            loss_ratio_key = kvs.tokens.loss_ratio_key(
                self.job_ctxt.job_id, point.row, point.column,
                asset.asset_ref)

            kvs.get_client().set(loss_ratio_key, loss_ratio_curve.to_json())

        classical.compute(
            block.sites, assets_getter, vuln_curves, hazard_getter,
            lrem_steps, loss_poes, on_asset_complete)
Example #8
    def test_split_into_blocks(self):
        # Test a typical split case.
        # We will use a block size of 3, which will
        # give us 2 blocks of 3 sites and 1 block of 2 sites.
        expected = [
            Block(self.job_id, 0, self.all_sites[:3]),
            Block(self.job_id, 1, self.all_sites[3:6]),
            Block(self.job_id, 2, self.all_sites[6:])
        ]

        actual = [
            block for block in general.split_into_blocks(
                self.job_id, self.all_sites, block_size=3)
        ]

        self.assertEqual(expected, actual)
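A minimal generator sketch that yields the blocks this test (and the two split tests further below) expect; hypothetical, the real general.split_into_blocks may differ:

def split_into_blocks(job_id, sites, block_size):
    # Hypothetical sketch: yield Blocks of at most block_size sites,
    # numbering them sequentially from zero.
    if block_size < 1:
        raise ValueError("block_size must be at least 1")
    for block_id, start in enumerate(xrange(0, len(sites), block_size)):
        yield Block(job_id, block_id, sites[start:start + block_size])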
Example #9
    def _compute_risk_classical_psha_setup(self):
        SITE = shapes.Site(1.0, 1.0)
        # deletes all keys from kvs
        kvs.get_client().flushall()

        # At the moment the hazard side does not apply exp() to the 'x'
        # values; that is done on the risk side. To adapt the calculation
        # here we apply the inverse of exp(), i.e. log(x).
        self.hazard_curve = [
            (SITE,
             {'IMLValues': [0.001, 0.080, 0.170, 0.260, 0.360,
                            0.550, 0.700],
              'PoEValues': [0.99, 0.96, 0.89, 0.82, 0.70, 0.40, 0.01],
              'statistics': 'mean'})]

        # Vitor provided this Vulnerability Function
        imls_1 = [0.03, 0.04, 0.07, 0.1, 0.12, 0.22, 0.37, 0.52]
        loss_ratios_1 = [0.001, 0.022, 0.051, 0.08, 0.1, 0.2, 0.405, 0.700]
        covs_1 = [0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1]
        self.vuln_function = vulnerability_function.VulnerabilityFunction(
            imls_1, loss_ratios_1, covs_1, "LN")

        imls_2 = [0.1, 0.2, 0.4, 0.6]
        loss_ratios_2 = [0.05, 0.08, 0.2, 0.4]
        covs_2 = [0.5, 0.3, 0.2, 0.1]
        self.vuln_function_2 = vulnerability_function.VulnerabilityFunction(
            imls_2, loss_ratios_2, covs_2, "LN")

        self.asset_1 = {"taxonomy": "ID", "assetValue": 124.27}

        self.region = shapes.RegionConstraint.from_simple(
                (0.0, 0.0), (2.0, 2.0))

        block = Block(self.job_id, self.block_id, (SITE, ))
        block.to_kvs()

        writer = hazard.HazardCurveDBWriter('test_path.xml', self.job_id)
        writer.serialize(self.hazard_curve)

        kvs.set_value_json_encoded(
                kvs.tokens.vuln_key(self.job_id),
                {"ID": self.vuln_function.to_json()})
        kvs.set_value_json_encoded(
                kvs.tokens.vuln_key(self.job_id, retrofitted=True),
                {"ID": self.vuln_function.to_json()})
Example #10
    def test_split_block_size_gt_site_list_size(self):
        # If the block size is greater than the input site list size,
        # the generator should just yield a single block containing all of the
        # sites.
        actual = [
            block for block in general.split_into_blocks(
                self.job_id, self.all_sites, block_size=9)
        ]

        self.assertEqual([Block(self.job_id, 0, self.all_sites)], actual)
Example #11
    def test_split_block_size_eq_1(self):
        # Test splitting when block_size==1.
        expected = [
            Block(self.job_id, i, [self.all_sites[i]])
            for i in xrange(len(self.all_sites))
        ]

        actual = [
            block for block in general.split_into_blocks(
                self.job_id, self.all_sites, block_size=1)
        ]

        self.assertEqual(expected, actual)
Example #12
File: core.py  Project: arbeit/oq-engine
    def _compute_bcr(self, block_id):
        """
        Calculate and store in the kvs the benefit-cost ratio data for the
        given block.

        A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
        See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
        data structure spec.
        """

        result = defaultdict(list)
        block = Block.from_kvs(self.job_ctxt.job_id, block_id)

        vulnerability_model_original = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        vulnerability_model_retrofitted = (
            vulnerability.load_vuln_model_from_kvs(
                self.job_ctxt.job_id, retrofitted=True))

        steps = self.job_ctxt.oq_job_profile.lrem_steps_per_interval

        assets_getter = lambda site: BaseRiskCalculator.assets_at(
            self.job_ctxt.job_id, site)

        hazard_getter = lambda site: (
            self._get_db_curve(hazard_input_site(self.job_ctxt, site)))

        bcr = api.bcr(
            api.classical(vulnerability_model_original, steps=steps),
            api.classical(vulnerability_model_retrofitted, steps=steps),
            float(self.job_ctxt.params["INTEREST_RATE"]),
            float(self.job_ctxt.params["ASSET_LIFE_EXPECTANCY"]))

        for asset_output in api.compute_on_sites(
            block.sites, assets_getter, hazard_getter, bcr):

            asset = asset_output.asset

            result[(asset.site.x, asset.site.y)].append(({
                "bcr": asset_output.bcr,
                "eal_original": asset_output.eal_original,
                "eal_retrofitted": asset_output.eal_retrofitted},
                asset.asset_ref))

        bcr = result.items()
        bcr_block_key = kvs.tokens.bcr_block_key(
            self.job_ctxt.job_id, block_id)

        kvs.set_value_json_encoded(bcr_block_key, bcr)
        LOGGER.debug("bcr result for block %s: %r", block_id, bcr)

        return True
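For reference, the JSON-decoded value stored under bcr_block_key is result.items(): a list of (site coordinates, per-asset results) pairs, which is exactly the shape asserted in the classical PSHA BCR test (Example #4). The values below are illustrative:

[
    [[1.0, 1.0],                         # (lon, lat) of the asset's site
     [[{"bcr": 0.0,
        "eal_original": 0.003032,
        "eal_retrofitted": 0.003032},
       "rubcr"]]],                       # (result dict, asset_ref)
]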
Example #13
File: core.py  Project: arbeit/oq-engine
    def _compute_loss(self, block_id):
        """
        Calculate and store in the kvs the loss data.
        """
        block = Block.from_kvs(self.job_ctxt.job_id, block_id)

        vulnerability_model = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        steps = self.job_ctxt.oq_job_profile.lrem_steps_per_interval

        assets_getter = lambda site: BaseRiskCalculator.assets_at(
            self.job_ctxt.job_id, site)

        hazard_getter = lambda site: (
            self._get_db_curve(hazard_input_site(self.job_ctxt, site)))

        calculator = api.conditional_losses(
            conditional_loss_poes(self.job_ctxt.params),
            api.classical(vulnerability_model, steps=steps))

        for asset_output in api.compute_on_sites(block.sites,
            assets_getter, hazard_getter, calculator):

            location = asset_output.asset.site

            point = self.job_ctxt.region.grid.point_at(
                shapes.Site(location.x, location.y))

            loss_key = kvs.tokens.loss_curve_key(
                self.job_ctxt.job_id, point.row,
                point.column, asset_output.asset.asset_ref)

            kvs.get_client().set(loss_key, asset_output.loss_curve.to_json())

            loss_ratio_key = kvs.tokens.loss_ratio_key(self.job_ctxt.job_id,
                point.row, point.column, asset_output.asset.asset_ref)

            kvs.get_client().set(loss_ratio_key,
                asset_output.loss_ratio_curve.to_json())

            for poe, loss in asset_output.conditional_losses.items():
                key = kvs.tokens.loss_key(
                    self.job_ctxt.job_id, point.row, point.column,
                    asset_output.asset.asset_ref, poe)

                kvs.get_client().set(key, loss)
Example #14
    def test_compute_risk_in_the_classical_psha_calculator(self):
        """
            tests ClassicalRiskCalculator.compute_risk by retrieving
            all the loss curves in the kvs and checks their presence
        """
        helpers.delete_profile(self.job)
        cls_risk_cfg = helpers.demo_file(
            'classical_psha_based_risk/config.gem')
        job_profile, params, sections = engine.import_job_profile(
            cls_risk_cfg, self.job)

        # We need to adjust a few of the parameters for this test:
        params['REGION_VERTEX'] = '0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 2.0, 0.0'
        job_profile.region = GEOSGeometry(shapes.polygon_ewkt_from_coords(
            params['REGION_VERTEX']))
        job_profile.save()

        job_ctxt = engine.JobContext(
            params, self.job_id, sections=sections, oq_job_profile=job_profile)

        self._compute_risk_classical_psha_setup()

        calculator = classical_core.ClassicalRiskCalculator(job_ctxt)
        calculator.vuln_curves = {"ID": self.vuln_function}

        block = Block.from_kvs(self.job_id, self.block_id)

        # computes the loss curves and puts them in kvs
        calculator.compute_risk(self.block_id)

        for point in block.grid(job_ctxt.region):
            assets = BaseRiskCalculator.assets_for_cell(
                self.job_id, point.site)
            for asset in assets:
                loss_ratio_key = kvs.tokens.loss_ratio_key(
                    self.job_id, point.row, point.column, asset.asset_ref)

                self.assertTrue(kvs.get_client().get(loss_ratio_key))

                loss_key = kvs.tokens.loss_curve_key(
                    self.job_id, point.row, point.column, asset.asset_ref)

                self.assertTrue(kvs.get_client().get(loss_key))
Example #15
    def _compute_bcr(self, block_id):
        """
        Calculate and store in the kvs the benefit-cost ratio data for the
        given block.

        A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
        See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
        data structure spec.
        """
        job_ctxt = self.job_ctxt
        job_id = job_ctxt.job_id
        block = Block.from_kvs(job_id, block_id)

        result = defaultdict(list)

        def on_asset_complete(asset, bcr, eal_original, eal_retrofitted):
            result[(asset.site.x, asset.site.y)].append(
                ({"bcr": bcr, "eal_original": eal_original,
                  "eal_retrofitted": eal_retrofitted}, asset.asset_ref))

        benefit_cost_ratio.compute(
            block.sites,
            lambda site: BaseRiskCalculator.assets_at(job_id, site),
            vulnerability.load_vuln_model_from_kvs(job_id),
            vulnerability.load_vuln_model_from_kvs(job_id, retrofitted=True),
            lambda site: self._get_db_curve(
                hazard_input_site(self.job_ctxt, site)),
            self.job_ctxt.oq_job_profile.lrem_steps_per_interval,
            float(job_ctxt.params["INTEREST_RATE"]),
            float(job_ctxt.params["ASSET_LIFE_EXPECTANCY"]),
            on_asset_complete,
        )

        bcr = result.items()
        bcr_block_key = kvs.tokens.bcr_block_key(job_ctxt.job_id, block_id)
        kvs.set_value_json_encoded(bcr_block_key, bcr)
        LOGGER.debug("bcr result for block %s: %r", block_id, bcr)
        return True