Example #1
 def write_loss_map(self, loss_poe):
     """ Iterates through all the assets and maps losses at loss_poe """
     # Make a special grid at a higher resolution
     risk_grid = shapes.Grid(self.region, float(self['RISK_CELL_SIZE']))
     path = os.path.join(self.base_path,
                         self['OUTPUT_DIR'],
                         "losses_at-%s.tiff" % loss_poe)
     output_generator = geotiff.LossMapGeoTiffFile(path, risk_grid,
             init_value=0.0, normalize=True)
     for point in self.region.grid:
         asset_key = kvs.tokens.asset_key(self.id, point.row, point.column)
         asset_list = kvs.get_client().lrange(asset_key, 0, -1)
         for asset in [json.loads(x) for x in asset_list]:
             key = kvs.tokens.loss_key(self.id, point.row, point.column,
                     asset["assetID"], loss_poe)
             loss = kvs.get(key)
             LOG.debug("Loss for asset %s at %s %s is %s" %
                 (asset["assetID"], asset['lon'], asset['lat'], loss))
             if loss:
                 loss_ratio = float(loss) / float(asset["assetValue"])
                 risk_site = shapes.Site(asset['lon'], asset['lat'])
                 risk_point = risk_grid.point_at(risk_site)
                 output_generator.write(
                         (risk_point.row, risk_point.column), loss_ratio)
     output_generator.close()
     return [path]
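
A minimal, runnable sketch of the key step above (converting an absolute loss into a loss ratio before rasterizing it), with a plain dict standing in for the KVS and the GeoTIFF writer; the key layout and values are hypothetical, not OpenQuake's:

    # dict stand-in for the KVS; the key format below is invented
    fake_kvs = {"job-1!10!20!a1!0.01": "150.0"}
    asset = {"assetID": "a1", "assetValue": 1000.0}

    loss = fake_kvs.get("job-1!10!20!a1!0.01")
    if loss:
        loss_ratio = float(loss) / float(asset["assetValue"])
        print("loss ratio for %s: %s" % (asset["assetID"], loss_ratio))  # 0.15
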
Example #2
    def _write_output_for_block(self, job_id, block_id):
        """ Given a job and a block, write out a plotted curve """
        decoder = json.JSONDecoder()
        loss_ratio_curves = []
        block = job.Block.from_kvs(block_id)
        for point in block.grid(self.region):
            asset_key = kvs.tokens.asset_key(self.id, point.row, point.column)
            asset_list = kvs.get_client().lrange(asset_key, 0, -1)
            for asset in [decoder.decode(x) for x in asset_list]:
                site = shapes.Site(asset["lon"], asset["lat"])
                key = kvs.tokens.loss_ratio_key(
                    job_id, point.row, point.column, asset["assetID"])
                loss_ratio_curve = kvs.get(key)
                if loss_ratio_curve:
                    loss_ratio_curve = shapes.Curve.from_json(loss_ratio_curve)
                    loss_ratio_curves.append((site, (loss_ratio_curve, asset)))

        LOG.debug("Serializing loss_ratio_curves")
        filename = "%s-block-%s.xml" % (self["LOSS_CURVES_OUTPUT_PREFIX"], block_id)
        path = os.path.join(self.base_path, self["OUTPUT_DIR"], filename)
        output_generator = risk_output.LossRatioCurveXMLWriter(path)
        # TODO(JMC): Take mean or max for each site
        output_generator.serialize(loss_ratio_curves)

        filename = "%s-block-%s.svg" % (self["LOSS_CURVES_OUTPUT_PREFIX"], block_id)
        curve_path = os.path.join(self.base_path, self["OUTPUT_DIR"], filename)

        plotter = curve.RiskCurvePlotter(curve_path, path, mode="loss_ratio")
        plotter.plot(autoscale_y=False)

        results = [path]
        results.extend(list(plotter.filenames()))
        return results
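
Curves cross the KVS boundary as JSON, hence the shapes.Curve.from_json calls above. The round trip can be imitated with a hypothetical stand-in class; the only assumption about the real shapes.Curve is that it wraps a list of (x, y) pairs, as Example #9 suggests:

    import json

    class CurveStandIn(object):
        """Hypothetical stand-in for shapes.Curve: a list of (x, y) pairs."""
        def __init__(self, pairs):
            self.pairs = [tuple(pair) for pair in pairs]

        def to_json(self):
            return json.dumps(self.pairs)

        @classmethod
        def from_json(cls, text):
            return cls(json.loads(text))

    stored = CurveStandIn([(0.21, 0.131), (0.24, 0.108)]).to_json()
    curve = CurveStandIn.from_json(stored)  # what kvs.get + from_json recovers
    assert curve.pairs == [(0.21, 0.131), (0.24, 0.108)]
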
Example #3
 def write_loss_map(self, loss_poe):
     """ Iterates through all the assets and maps losses at loss_poe """
     decoder = json.JSONDecoder()
     # Make a special grid at a higher resolution
     risk_grid = shapes.Grid(self.region, float(self['RISK_CELL_SIZE']))
     filename = "losses_at-%s.tiff" % (loss_poe)
     path = os.path.join(self.base_path, self['OUTPUT_DIR'], filename) 
     output_generator = geotiff.LossMapGeoTiffFile(path, risk_grid, 
             init_value=0.0, normalize=True)
     for point in self.region.grid:
         asset_key = kvs.tokens.asset_key(self.id, point.row, point.column)
         asset_list = kvs.get_client().lrange(asset_key, 0, -1)
         for asset in [decoder.decode(x) for x in asset_list]:
             key = kvs.tokens.loss_key(self.id, point.row, point.column, 
                     asset["assetID"], loss_poe)
             loss = kvs.get(key)
             LOG.debug("Loss for asset %s at %s %s is %s" % 
                 (asset["assetID"], asset['lon'], asset['lat'], loss))
             if loss:
                 loss_ratio = float(loss) / float(asset["assetValue"])
                 risk_site = shapes.Site(asset['lon'], asset['lat'])
                 risk_point = risk_grid.point_at(risk_site)
                 output_generator.write(
                         (risk_point.row, risk_point.column), loss_ratio)
     output_generator.close()
     return [path]
Example #4
    def _write_output_for_block(self, job_id, block_id):
        """ Given a job and a block, write out a plotted curve """
        loss_ratio_curves = []
        loss_curves = []
        block = job.Block.from_kvs(block_id)
        for point in block.grid(self.region):
            asset_key = kvs.tokens.asset_key(self.id, point.row, point.column)
            asset_list = kvs.get_client().lrange(asset_key, 0, -1)
            for asset in [json.loads(x) for x in asset_list]:
                site = shapes.Site(asset['lon'], asset['lat'])

                loss_curve = kvs.get(
                                kvs.tokens.loss_curve_key(job_id,
                                                          point.row,
                                                          point.column,
                                                          asset["assetID"]))
                loss_ratio_curve = kvs.get(
                                kvs.tokens.loss_ratio_key(job_id,
                                                          point.row,
                                                          point.column,
                                                          asset["assetID"]))

                if loss_curve:
                    loss_curve = shapes.Curve.from_json(loss_curve)
                    loss_curves.append((site, (loss_curve, asset)))

                if loss_ratio_curve:
                    loss_ratio_curve = shapes.Curve.from_json(loss_ratio_curve)
                    loss_ratio_curves.append((site, (loss_ratio_curve, asset)))

        results = self._serialize_and_plot(block_id,
                                           curves=loss_ratio_curves,
                                           curve_mode='loss_ratio')
        if loss_curves:
            results.extend(self._serialize_and_plot(block_id,
                                                curves=loss_curves,
                                                curve_mode='loss',
                                                curve_mode_prefix='loss_curve',
                                                render_multi=True))
        return results
Example #5
    def _write_output_for_block(self, job_id, block_id):
        """ Given a job and a block, write out a plotted curve """
        loss_ratio_curves = []
        loss_curves = []
        block = Block.from_kvs(block_id)
        for point, asset in self.grid_assets_iterator(
                block.grid(self.region)):
            site = shapes.Site(asset['lon'], asset['lat'])

            loss_curve = kvs.get(
                kvs.tokens.loss_curve_key(job_id,
                                          point.row,
                                          point.column,
                                          asset["assetID"]))
            loss_ratio_curve = kvs.get(
                kvs.tokens.loss_ratio_key(job_id,
                                          point.row,
                                          point.column,
                                          asset["assetID"]))

            if loss_curve:
                loss_curve = shapes.Curve.from_json(loss_curve)
                loss_curves.append((site, (loss_curve, asset)))

            if loss_ratio_curve:
                loss_ratio_curve = shapes.Curve.from_json(loss_ratio_curve)
                loss_ratio_curves.append((site, (loss_ratio_curve, asset)))

        results = self._serialize(block_id,
                                  curves=loss_ratio_curves,
                                  curve_mode='loss_ratio')
        if loss_curves:
            results.extend(self._serialize(block_id,
                                           curves=loss_curves,
                                           curve_mode='loss',
                                           curve_mode_prefix='loss_curve',
                                           render_multi=True))
        return results
Example #6
    def test_compute_risk_in_the_classical_psha_mixin(self):
        """
            tests ClassicalPSHABasedMixin.compute_risk by retrieving
            all the loss curves in the kvs and checks their presence
        """

        self._compute_risk_classical_psha_setup()
        # mixin "instance"
        mixin = ClassicalPSHABasedMixin()
        mixin.region = self.region
        mixin.job_id = self.job_id
        mixin.id = self.job_id
        mixin.vuln_curves = {"ID": self.vuln_function}
        mixin.params = {}

        block = Block.from_kvs(self.block_id)

        asset = {"taxonomy": "ID",
                 "assetID": 22.61, "assetValue": 1}

        self._store_asset(asset, 10, 10)

        # computes the loss curves and puts them in kvs
        self.assertTrue(mixin.compute_risk(self.block_id,
            point=shapes.GridPoint(None, 10, 20)))

        for point in block.grid(mixin.region):
            asset_key = kvs.tokens.asset_key(self.job_id, point.row,
                point.column)
            for asset in kvs.get_list_json_decoded(asset_key):
                loss_ratio_key = kvs.tokens.loss_ratio_key(
                    self.job_id, point.row, point.column, asset['assetID'])
                self.assertTrue(kvs.get(loss_ratio_key))

                loss_key = kvs.tokens.loss_curve_key(self.job_id, point.row,
                    point.column, asset['assetID'])

                self.assertTrue(kvs.get(loss_key))
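
What the test ultimately checks is that kvs.tokens builds deterministic keys from (job_id, row, column, assetID), so a second composition of the same fields finds the stored value. The exact key format is internal to OpenQuake; the delimiter below is invented purely to show the idea:

    def loss_ratio_key_standin(job_id, row, col, asset_id):
        # hypothetical key builder; the real format lives in kvs.tokens
        return "%s!loss_ratio!%s!%s!%s" % (job_id, row, col, asset_id)

    store = {}
    store[loss_ratio_key_standin("job-1", 10, 10, 22.61)] = "[0.1, 0.2]"
    # the assertion in the test reduces to this presence check
    assert store.get(loss_ratio_key_standin("job-1", 10, 10, 22.61))
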
Example #7
    def from_kvs(job_id):
        """Return an aggregate curve using the computed
        loss curves in the kvs system."""
        client = kvs.get_client(binary=False)
        keys = client.keys("%s*%s*" % (job_id,
                kvs.tokens.LOSS_CURVE_KEY_TOKEN))

        LOG.debug("Found %s stored loss curves..." % len(keys))

        aggregate_curve = AggregateLossCurve()

        for key in keys:
            aggregate_curve.append(shapes.Curve.from_json(kvs.get(key)))
        
        return aggregate_curve
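
Unlike the exact lookups elsewhere, from_kvs discovers its keys with a glob scan. Assuming Redis-style globbing for client.keys, the selection can be mimicked over a plain dict with fnmatch:

    import fnmatch

    LOSS_CURVE_KEY_TOKEN = "loss_curve"  # assumed token value, for illustration
    store = {
        "7!loss_curve!1!1": "{...}",
        "7!loss_curve!1!2": "{...}",
        "8!loss_curve!1!1": "{...}",
    }

    pattern = "%s*%s*" % (7, LOSS_CURVE_KEY_TOKEN)
    keys = [k for k in store if fnmatch.fnmatch(k, pattern)]
    assert sorted(keys) == ["7!loss_curve!1!1", "7!loss_curve!1!2"]
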
Example #8
    def _get_kvs_gmfs(self, sites, histories, realizations):
        """Aggregates GMF data from the KVS by site"""
        gmf_keys = self._sites_to_gmf_keys(sites)
        gmfs = dict((k, []) for k in gmf_keys)

        for i in range(0, histories):
            for j in range(0, realizations):
                key = kvs.tokens.stochastic_set_key(self.job_id, i, j)
                fieldset = shapes.FieldSet.from_json(kvs.get(key), self.region.grid)

                for field in fieldset:
                    for key in gmfs.keys():
                        (row, col) = key.split("!")
                        gmfs[key].append(field.get(int(row), int(col)))

        return gmfs
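
The inner collation loop indexes each site by a "row!col" string and appends one ground-motion value per field. Self-contained, with nested lists standing in for FieldSet and Field:

    gmf_keys = ["0!0", "0!1"]
    gmfs = dict((k, []) for k in gmf_keys)

    # two fields, each indexed as field[row][col]
    fieldset = [[[0.1, 0.2]], [[0.3, 0.4]]]

    for field in fieldset:
        for key in gmfs.keys():
            (row, col) = key.split("!")
            gmfs[key].append(field[int(row)][int(col)])

    assert gmfs == {"0!0": [0.1, 0.3], "0!1": [0.2, 0.4]}
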
Example #9
    def test_that_conditional_loss_is_in_kvs(self):
        asset = {"assetID": 1}
        loss_poe = 0.1
        job_id = "1"
        col = 1
        row = 2
        loss_curve = shapes.Curve([(0.21, 0.131), (0.24, 0.108),
                (0.27, 0.089), (0.30, 0.066)])

        # should set in kvs the conditional loss
        general.compute_conditional_loss(job_id, col, row, loss_curve, asset,
                loss_poe)
        loss_key = kvs.tokens.loss_key(job_id, row, col,
                asset["assetID"], loss_poe)

        self.assertTrue(kvs.get(loss_key))
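
compute_conditional_loss presumably interpolates the loss at the requested PoE from the curve's (loss, poe) pairs. Under that assumption, the value this test stores for loss_poe=0.1 can be reproduced with numpy.interp (which wants its x-axis ascending, hence the reversal):

    import numpy

    pairs = [(0.21, 0.131), (0.24, 0.108), (0.27, 0.089), (0.30, 0.066)]
    losses = [x for x, _ in reversed(pairs)]  # [0.30, 0.27, 0.24, 0.21]
    poes = [y for _, y in reversed(pairs)]    # ascending PoEs

    loss_at_poe = numpy.interp(0.1, poes, losses)
    print(round(loss_at_poe, 4))  # ~0.2526
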
Example #10
    def _get_kvs_gmfs(self, sites, histories, realizations):
        """Aggregates GMF data from the KVS by site"""
        gmf_keys = self._sites_to_gmf_keys(sites)
        gmfs = dict((k, []) for k in gmf_keys)

        for i in range(0, histories):
            for j in range(0, realizations):
                key = kvs.tokens.stochastic_set_key(self.job_ctxt.job_id, i, j)
                fieldset = shapes.FieldSet.from_json(kvs.get(key),
                                                     self.job_ctxt.region.grid)

                for field in fieldset:
                    for key in gmfs.keys():
                        (row, col) = key.split("!")
                        gmfs[key].append(field.get(int(row), int(col)))

        return gmfs
Example #11
    def asset_losses_per_site(self, loss_poe, assets_iterator):
        """
        For each site in the region of this job, returns a list of assets and
        their losses at a given probability of exceedance.

        :param loss_poe: the probability of exceedance
        :type loss_poe: float
        :param assets_iterator: an iterator over the assets, returning
            (point, asset) tuples. See
            :py:class:`openquake.risk.job.general.grid_assets_iterator`.

        :returns: A list of tuples in the form expected by the
            :py:class:`LossMapWriter.serialize` method:

            (site, [(loss, asset), ...])

            where site is a :py:class:`openquake.shapes.Site`, asset is
            the asset :py:class:`dict` and loss is a :py:class:`dict` with
            the following key:

                ***value*** - the value of the loss for the asset
        """
        result = defaultdict(list)

        for point, asset in assets_iterator:
            key = kvs.tokens.loss_key(self.job_id, point.row, point.column,
                    asset["assetID"], loss_poe)

            loss_value = kvs.get(key)
            LOG.debug("Loss for asset %s at %s %s is %s" %
                (asset["assetID"], asset['lon'], asset['lat'], loss_value))

            if loss_value:
                risk_site = shapes.Site(asset['lon'], asset['lat'])
                loss = {
                    "value": loss_value,
                }
                result[risk_site].append((loss, asset))

        return result.items()
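
The defaultdict(list) grouping is what reshapes per-asset KVS hits into the (site, [(loss, asset), ...]) pairs the writer expects. Stripped of the KVS calls, with plain (lon, lat) tuples standing in for shapes.Site, it reduces to:

    from collections import defaultdict

    hits = [  # (site, loss_value, asset), as if read back from the KVS
        ((9.15, 45.16), "150.0", {"assetID": "a1"}),
        ((9.15, 45.16), "80.0", {"assetID": "a2"}),
        ((9.20, 45.20), "12.5", {"assetID": "a3"}),
    ]

    result = defaultdict(list)
    for site, loss_value, asset in hits:
        if loss_value:
            result[site].append(({"value": loss_value}, asset))

    assert len(result[(9.15, 45.16)]) == 2
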
Example #12
    def slice_gmfs(self, block_id):
        """Load and collate GMF values for all sites in this block. """
        # TODO(JMC): Confirm this works regardless of the method of haz calc.
        histories = int(self['NUMBER_OF_SEISMICITY_HISTORIES'])
        realizations = int(self['NUMBER_OF_LOGIC_TREE_SAMPLES'])
        num_ses = histories * realizations

        block = job.Block.from_kvs(block_id)
        sites_list = block.sites
        gmfs = {}
        for site in sites_list:
            risk_point = self.region.grid.point_at(site)
            key = "%s!%s" % (risk_point.row, risk_point.column)
            gmfs[key] = []

        for i in range(0, histories):
            for j in range(0, realizations):
                key = kvs.generate_product_key(self.id,
                                               kvs.tokens.STOCHASTIC_SET_TOKEN,
                                               "%s!%s" % (i, j))
                fieldset = shapes.FieldSet.from_json(kvs.get(key),
                                                     self.region.grid)

                for field in fieldset:
                    for key in gmfs.keys():
                        (row, col) = key.split("!")
                        gmfs[key].append(field.get(int(row), int(col)))

        for key, gmf_slice in gmfs.items():
            (row, col) = key.split("!")
            key_gmf = kvs.tokens.gmfs_key(self.id, col, row)
            LOGGER.debug("GMF_SLICE for %s X %s : \n\t%s" %
                         (col, row, gmf_slice))
            timespan = float(self['INVESTIGATION_TIME'])
            gmf = {
                "IMLs": gmf_slice,
                "TSES": num_ses * timespan,
                "TimeSpan": timespan
            }
            kvs.set_value_json_encoded(key_gmf, gmf)
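
The "TSES" value stored with each slice is the total time covered by the stochastic event sets: histories * realizations * INVESTIGATION_TIME. For example:

    histories, realizations = 5, 2
    timespan = 50.0                     # INVESTIGATION_TIME, in years
    num_ses = histories * realizations  # 10 stochastic event sets
    tses = num_ses * timespan           # 500.0 years, stored as "TSES"
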
Example #13
    def test_alloc_job_key(self):
        """
        Test the generation of job keys using
        :py:func:`openquake.kvs.tokens.alloc_job_key`.
        """

        job_key_1 = JOB_KEY_FMT % 1
        job_key_2 = JOB_KEY_FMT % 2

        kvs.get_client().delete(tokens.NEXT_JOB_ID)

        # it should be empty to start with
        self.assertTrue(kvs.get(tokens.NEXT_JOB_ID) is None)

        self.assertEqual(job_key_1, tokens.alloc_job_key())

        # verify that the IDs are incrementing properly
        self.assertEqual(job_key_2, tokens.alloc_job_key())

        # now verify that these keys have been added to the CURRENT_JOBS set
        self.assertTrue(self.client.sismember(tokens.CURRENT_JOBS, job_key_1))
        self.assertTrue(self.client.sismember(tokens.CURRENT_JOBS, job_key_2))
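
The two assertions pin down alloc_job_key's contract: increment a NEXT_JOB_ID counter and register the resulting key in the CURRENT_JOBS set (with Redis this would be INCR and SADD). A dict/set stand-in of that contract, with a hypothetical key format (the test's actual JOB_KEY_FMT is not shown in this snippet):

    JOB_KEY_FMT = "::JOB::%s::"  # invented here, purely for illustration

    counters, current_jobs = {}, set()

    def alloc_job_key_standin():
        # increment the counter and register the key, as the test implies
        counters["NEXT_JOB_ID"] = counters.get("NEXT_JOB_ID", 0) + 1
        key = JOB_KEY_FMT % counters["NEXT_JOB_ID"]
        current_jobs.add(key)
        return key

    assert alloc_job_key_standin() == JOB_KEY_FMT % 1
    assert alloc_job_key_standin() == JOB_KEY_FMT % 2
    assert JOB_KEY_FMT % 1 in current_jobs
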
Example #14
    def slice_gmfs(self, block_id):
        """Load and collate GMF values for all sites in this block. """
        # TODO(JMC): Confirm this works regardless of the method of haz calc.
        histories = int(self['NUMBER_OF_SEISMICITY_HISTORIES'])
        realizations = int(self['NUMBER_OF_LOGIC_TREE_SAMPLES'])
        num_ses = histories * realizations
        
        block = job.Block.from_kvs(block_id)
        sites_list = block.sites
        gmfs = {}
        for site in sites_list:
            risk_point = self.region.grid.point_at(site)
            key = "%s!%s" % (risk_point.row, risk_point.column)
            gmfs[key] = []
            
        for i in range(0, histories):
            for j in range(0, realizations):
                key = kvs.generate_product_key(
                    self.id, kvs.tokens.STOCHASTIC_SET_TOKEN,
                    "%s!%s" % (i, j))
                fieldset = shapes.FieldSet.from_json(kvs.get(key),
                                                     self.region.grid)

                for field in fieldset:
                    for key in gmfs.keys():
                        (row, col) = key.split("!")
                        gmfs[key].append(field.get(int(row), int(col)))
                                        
        for key, gmf_slice in gmfs.items():
            (row, col) = key.split("!")
            key_gmf = kvs.generate_product_key(self.id,
                kvs.tokens.GMF_KEY_TOKEN, col, row)
            LOGGER.debug("GMF_SLICE for %s X %s : \n\t%s" %
                         (col, row, gmf_slice))
            timespan = float(self['INVESTIGATION_TIME'])
            gmf = {"IMLs": gmf_slice, "TSES": num_ses * timespan,
                   "TimeSpan": timespan}
            kvs.set_value_json_encoded(key_gmf, gmf)
Example #15
 def _has_computed_mean_curve_for_site(self, site):
     self.assertTrue(kvs.get(kvs.tokens.mean_hazard_curve_key(
             self.job_id, site)) is not None)
Example #16
    def test_quantile_hazard_maps_computation(self):
        self.params[self.poes_levels] = "0.10"
        self.params[self.quantiles_levels] = "0.25 0.50 0.75"

        curve_1 = {"site_lon": 3.0, "site_lat": 3.0,
                "curve": classical_psha._reconstruct_curve_list_from(
                [9.8784e-01, 9.8405e-01, 9.5719e-01, 9.1955e-01,
                8.5019e-01, 7.4038e-01, 5.9153e-01, 4.2626e-01, 2.9755e-01,
                2.7731e-01, 1.6218e-01, 8.8035e-02, 4.3499e-02, 1.9065e-02,
                7.0442e-03, 2.1300e-03, 4.9498e-04, 8.1768e-05, 7.3425e-06])}

        curve_2 = {"site_lon": 3.5, "site_lat": 3.5,
                "curve": classical_psha._reconstruct_curve_list_from(
                [9.8784e-01, 9.8405e-01, 9.5719e-01, 9.1955e-01,
                8.5019e-01, 7.4038e-01, 5.9153e-01, 4.2626e-01, 2.9755e-01,
                2.7731e-01, 1.6218e-01, 8.8035e-02, 4.3499e-02, 1.9065e-02,
                7.0442e-03, 2.1300e-03, 4.9498e-04, 8.1768e-05, 7.3425e-06])}

        # keys for shapes.Site(3.0, 3.0)
        key_1 = kvs.tokens.quantile_hazard_curve_key(
                self.job_id, shapes.Site(3.0, 3.0), 0.25)

        key_2 = kvs.tokens.quantile_hazard_curve_key(
                self.job_id, shapes.Site(3.0, 3.0), 0.50)

        key_3 = kvs.tokens.quantile_hazard_curve_key(
                self.job_id, shapes.Site(3.0, 3.0), 0.75)

        # keys for shapes.Site(3.5, 3.5)
        key_4 = kvs.tokens.quantile_hazard_curve_key(
                self.job_id, shapes.Site(3.5, 3.5), 0.25)

        key_5 = kvs.tokens.quantile_hazard_curve_key(
                self.job_id, shapes.Site(3.5, 3.5), 0.50)

        key_6 = kvs.tokens.quantile_hazard_curve_key(
                self.job_id, shapes.Site(3.5, 3.5), 0.75)

        # setting values in kvs
        kvs.set_value_json_encoded(key_1, curve_1)
        kvs.set_value_json_encoded(key_2, curve_1)
        kvs.set_value_json_encoded(key_3, curve_1)

        kvs.set_value_json_encoded(key_4, curve_2)
        kvs.set_value_json_encoded(key_5, curve_2)
        kvs.set_value_json_encoded(key_6, curve_2)

        classical_psha.compute_quantile_hazard_maps(self.engine)

        # asserting imls have been produced for all poes and quantiles
        self.assertTrue(kvs.get(kvs.tokens.quantile_hazard_map_key(
                self.job_id, shapes.Site(3.0, 3.0), 0.10, 0.25)))

        self.assertTrue(kvs.get(kvs.tokens.quantile_hazard_map_key(
                self.job_id, shapes.Site(3.0, 3.0), 0.10, 0.50)))

        self.assertTrue(kvs.get(kvs.tokens.quantile_hazard_map_key(
                self.job_id, shapes.Site(3.0, 3.0), 0.10, 0.75)))

        self.assertTrue(kvs.get(kvs.tokens.quantile_hazard_map_key(
                self.job_id, shapes.Site(3.5, 3.5), 0.10, 0.25)))

        self.assertTrue(kvs.get(kvs.tokens.quantile_hazard_map_key(
                self.job_id, shapes.Site(3.5, 3.5), 0.10, 0.50)))

        self.assertTrue(kvs.get(kvs.tokens.quantile_hazard_map_key(
                self.job_id, shapes.Site(3.5, 3.5), 0.10, 0.75)))
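
Per site, the quantile computation behind compute_quantile_hazard_maps plausibly reduces to taking quantiles of the PoE values across realizations at each IML; this sketch shows only that statistical core, not OpenQuake's code:

    import numpy

    # PoEs at one IML, one per hazard-curve realization
    poes_at_iml = [0.98784, 0.95719, 0.91955]

    quantile_poes = [numpy.percentile(poes_at_iml, q * 100)
                     for q in (0.25, 0.50, 0.75)]
    print(quantile_poes)  # the 0.25/0.50/0.75 quantile PoEs at this IML
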
Example #17
 def _has_computed_IML_for_site(self, site, poe):
     self.assertTrue(kvs.get(kvs.tokens.mean_hazard_map_key(
         self.job_id, site, poe)))
Example #18
    def test_quantile_hazard_maps_computation(self):
        self.params[classical_psha.POES_PARAM_NAME] = "0.10"
        self.params[classical_psha.QUANTILE_PARAM_NAME] = "0.25 0.50 0.75"

        curve_1 = [9.8784e-01, 9.8405e-01, 9.5719e-01, 9.1955e-01,
                8.5019e-01, 7.4038e-01, 5.9153e-01, 4.2626e-01, 2.9755e-01,
                2.7731e-01, 1.6218e-01, 8.8035e-02, 4.3499e-02, 1.9065e-02,
                7.0442e-03, 2.1300e-03, 4.9498e-04, 8.1768e-05, 7.3425e-06]

        curve_2 = [9.8784e-01, 9.8405e-01, 9.5719e-01, 9.1955e-01,
                8.5019e-01, 7.4038e-01, 5.9153e-01, 4.2626e-01, 2.9755e-01,
                2.7731e-01, 1.6218e-01, 8.8035e-02, 4.3499e-02, 1.9065e-02,
                7.0442e-03, 2.1300e-03, 4.9498e-04, 8.1768e-05, 7.3425e-06]

        sites = [shapes.Site(3.0, 3.0), shapes.Site(3.5, 3.5)]

        # keys for sites[0]
        key_1 = kvs.tokens.quantile_hazard_curve_key(
                self.job_id, sites[0], 0.25)

        key_2 = kvs.tokens.quantile_hazard_curve_key(
                self.job_id, sites[0], 0.50)

        key_3 = kvs.tokens.quantile_hazard_curve_key(
                self.job_id, sites[0], 0.75)

        # keys for sites[1]
        key_4 = kvs.tokens.quantile_hazard_curve_key(
                self.job_id, sites[1], 0.25)

        key_5 = kvs.tokens.quantile_hazard_curve_key(
                self.job_id, sites[1], 0.50)

        key_6 = kvs.tokens.quantile_hazard_curve_key(
                self.job_id, sites[1], 0.75)

        # setting values in kvs
        kvs.set_value_json_encoded(key_1, curve_1)
        kvs.set_value_json_encoded(key_2, curve_1)
        kvs.set_value_json_encoded(key_3, curve_1)

        kvs.set_value_json_encoded(key_4, curve_2)
        kvs.set_value_json_encoded(key_5, curve_2)
        kvs.set_value_json_encoded(key_6, curve_2)

        classical_psha.compute_quantile_hazard_maps(self.job.job_id, sites,
            [0.25, 0.50, 0.75], self.imls, [0.10])

        # asserting imls have been produced for all poes and quantiles
        self.assertTrue(kvs.get(kvs.tokens.quantile_hazard_map_key(
                self.job_id, sites[0], 0.10, 0.25)))

        self.assertTrue(kvs.get(kvs.tokens.quantile_hazard_map_key(
                self.job_id, sites[0], 0.10, 0.50)))

        self.assertTrue(kvs.get(kvs.tokens.quantile_hazard_map_key(
                self.job_id, sites[0], 0.10, 0.75)))

        self.assertTrue(kvs.get(kvs.tokens.quantile_hazard_map_key(
                self.job_id, sites[1], 0.10, 0.25)))

        self.assertTrue(kvs.get(kvs.tokens.quantile_hazard_map_key(
                self.job_id, sites[1], 0.10, 0.50)))

        self.assertTrue(kvs.get(kvs.tokens.quantile_hazard_map_key(
                self.job_id, sites[1], 0.10, 0.75)))