def from_kvs(job_id, epsilon_provider):
        """Return an aggregate curve using the GMFs and assets
        stored in the underlying kvs system."""

        vuln_model = vulnerability.load_vuln_model_from_kvs(job_id)
        aggregate_curve = AggregateLossCurve(vuln_model, epsilon_provider)

        gmfs_keys = kvs.get_keys("%s*%s*" % (
                job_id, kvs.tokens.GMF_KEY_TOKEN))

        LOG.debug("Found %s stored GMFs..." % len(gmfs_keys))
        asset_counter = 0

        for gmfs_key in gmfs_keys:
            # decode the GMF set once per key, not once per asset
            gmfs = kvs.get_value_json_decoded(gmfs_key)
            assets = _assets_keys_for_gmfs(job_id, gmfs_key)

            for asset in assets:
                asset_counter += 1
                aggregate_curve.append(gmfs, json.loads(asset))

        LOG.debug("Found %s stored assets..." % asset_counter)
        return aggregate_curve
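
Every snippet on this page ultimately goes through kvs.get_value_json_decoded, which evidently fetches the raw string stored under a key and JSON-decodes it, returning None when the key is missing (set_value_json_encoded, seen further down, is its inverse). The stand-in below is only a sketch of that contract; fake_client and the key literal are invented here, with a plain dict in place of the real memcache/redis client.

import json

fake_client = {}  # stand-in for the real KVS client

def get_value_json_decoded(key):
    """Fetch the raw value at `key` and JSON-decode it, or return None."""
    raw = fake_client.get(key)
    if raw is None:
        return None
    return json.loads(raw)

def set_value_json_encoded(key, value):
    """JSON-encode `value` and store it under `key`."""
    fake_client[key] = json.dumps(value)

set_value_json_encoded("1!VULN", {"RC": [0.1, 0.2]})
assert get_value_json_decoded("1!VULN") == {"RC": [0.1, 0.2]}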
Example #2
def from_kvs(job_id):
    """Return the job in the underlying kvs system with the given id."""
    params = kvs.get_value_json_decoded(kvs.tokens.generate_job_key(job_id))
    job_profile = profile4job(job_id)
    job = OqJob.objects.get(id=job_id)
    job = JobContext(params, job_id, oq_job_profile=job_profile,
                     oq_job=job, log_level=params["debug"])
    return job
Example #3
    def compute_risk(self, block_id, **kwargs):  # pylint: disable=W0613
        """This task computes risk for a block of sites. It requires to have
        pre-initialized in kvs:
         1) list of sites
         2) exposure portfolio (=assets)
         3) vulnerability

        """

        block = job.Block.from_kvs(block_id)

        #pylint: disable=W0201
        self.vuln_curves = \
                vulnerability.load_vuln_model_from_kvs(self.job_id)

        for point in block.grid(self.region):
            curve_token = kvs.tokens.mean_hazard_curve_key(self.job_id,
                                point.site)

            decoded_curve = kvs.get_value_json_decoded(curve_token)

            hazard_curve = Curve([(exp(float(el['x'])), el['y'])
                            for el in decoded_curve['curve']])

            asset_key = kvs.tokens.asset_key(self.id,
                            point.row, point.column)
            assets = kvs.get_client().lrange(asset_key, 0, -1)
            for asset in [json.JSONDecoder().decode(x) for x in assets]:
                LOGGER.debug("processing asset %s" % (asset))
                loss_ratio_curve = self.compute_loss_ratio_curve(
                    point, asset, hazard_curve)

                self.compute_loss_curve(point, loss_ratio_curve, asset)

        return True
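
Note the exp() call when the hazard curve is rebuilt: the decoded 'x' values are evidently stored as natural logarithms of the intensity measure levels, so they must be exponentiated before use. A round-trip sketch of that convention (the literal values below are illustrative only):

from math import exp, log

decoded_curve = {"curve": [{"x": log(0.1), "y": 0.99},
                           {"x": log(0.5), "y": 0.20}]}

pairs = [(exp(float(el["x"])), el["y"]) for el in decoded_curve["curve"]]
# pairs is [(0.1, 0.99), (0.5, 0.20)] up to floating-point rounding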
Example #4
    def compute_risk(self, block_id, **kwargs):  # pylint: disable=W0613
        """This task computes risk for a block of sites. It requires the
        following to be pre-initialized in the KVS:
         1) list of sites
         2) exposure portfolio (=assets)
         3) vulnerability

        """

        block = job.Block.from_kvs(block_id)

        #pylint: disable=W0201
        self.vuln_curves = \
                vulnerability.load_vuln_model_from_kvs(self.job_id)

        for point in block.grid(self.region):
            curve_token = kvs.tokens.mean_hazard_curve_key(
                self.job_id, point.site)

            decoded_curve = kvs.get_value_json_decoded(curve_token)

            hazard_curve = Curve([(exp(float(el['x'])), el['y'])
                                  for el in decoded_curve['curve']])

            asset_key = kvs.tokens.asset_key(self.id, point.row, point.column)
            assets = kvs.get_client().lrange(asset_key, 0, -1)
            for asset in [json.JSONDecoder().decode(x) for x in assets]:
                LOGGER.debug("processing asset %s" % (asset))
                loss_ratio_curve = self.compute_loss_ratio_curve(
                    point, asset, hazard_curve)

                self.compute_loss_curve(point, loss_ratio_curve, asset)

        return True
Example #5
def from_kvs(job_id):
    """Return the job in the underlying kvs system with the given id."""
    params = kvs.get_value_json_decoded(
        kvs.tokens.generate_job_key(job_id))
    calculation = OqCalculation.objects.get(id=job_id)
    job_profile = calculation.oq_job_profile
    job = CalculationProxy(params, job_id, oq_job_profile=job_profile,
                           oq_calculation=calculation)
    return job
Example #6
def _asset_for_gmfs(job_id, gmfs_key):
    """Return the asset related to the GMFs given."""

    row = lambda key: key.split(kvs.MEMCACHE_KEY_SEPARATOR)[2]
    column = lambda key: key.split(kvs.MEMCACHE_KEY_SEPARATOR)[3]

    key = kvs.tokens.asset_key(
            job_id, row(gmfs_key), column(gmfs_key))
    
    return kvs.get_value_json_decoded(key)
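
_asset_for_gmfs recovers the grid cell straight from the GMF key: the key is a separator-joined string whose third and fourth fields are the row and column. A sketch of that parsing, where "!" is an assumed stand-in for kvs.MEMCACHE_KEY_SEPARATOR and the key layout is illustrative:

KEY_SEPARATOR = "!"  # assumed stand-in for kvs.MEMCACHE_KEY_SEPARATOR

gmfs_key = "1!GMF!7!13"  # job_id ! token ! row ! column (illustrative)
row = gmfs_key.split(KEY_SEPARATOR)[2]     # "7"
column = gmfs_key.split(KEY_SEPARATOR)[3]  # "13"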
Example #7
    def from_kvs(job_id):
        """Return the job in the underlying kvs system with the given id."""

        logs.init_logs(
            level=FLAGS.debug, log_type=oq_config.get("logging", "backend"))

        params = kvs.get_value_json_decoded(
            kvs.tokens.generate_job_key(job_id))
        job = Job(params, job_id)
        return job
Example #8
    def serialize_hazard_map_at_poe(self, sites, poe, key_template,
                                    hm_attrib_update, nrml_file):
        """
        Serialize the hazard map for a set of sites at a given PoE.

        Depending on the parameters the serialized map will be a mean or
        quantile hazard map.

        :param sites: the sites of which the map will be serialized
        :type sites: list of :py:class:`openquake.shapes.Site`
        :param poe: the PoE at which the map will be serialized
        :type poe: :py:class:`float`
        :param key_template: a template for constructing the key used to get,
                             for each site, its map from the KVS
        :type key_template: :py:class:`string`
        :param hm_attrib_update: a dictionary containing metadata for the set
                                 of maps that will be serialized
        :type hm_attrib_update: :py:class:`dict`
        :param nrml_file: the output filename
        :type nrml_file: :py:class:`string`
        """
        nrml_path = self.job_ctxt.build_nrml_path(nrml_file)

        LOG.info("Generating NRML hazard map file for PoE %s, "
                 "%s nodes in hazard map: %s" % (poe, len(sites), nrml_file))

        map_writer = hazard_output.create_hazardmap_writer(
            self.job_ctxt.job_id, self.job_ctxt.serialize_results_to,
            nrml_path)
        hm_data = []

        for site in sites:
            key = key_template % hash(site)
            # use hazard map IML values from KVS
            hm_attrib = {
                'investigationTimeSpan': self.job_ctxt['INVESTIGATION_TIME'],
                'IMT': self.job_ctxt['INTENSITY_MEASURE_TYPE'],
                'vs30': self.job_ctxt['REFERENCE_VS30_VALUE'],
                'IML': kvs.get_value_json_decoded(key),
                'poE': poe
            }

            hm_attrib.update(hm_attrib_update)
            hm_data.append((site, hm_attrib))

        LOG.debug(">> path: %s" % nrml_path)
        # XML serialization context
        xsc = namedtuple("XSC", "blocks, cblock, i_total, i_done, i_next")(
            stats.pk_get(self.job_ctxt.job_id, "blocks"),
            stats.pk_get(self.job_ctxt.job_id, "cblock"), len(sites), 0,
            len(hm_data))
        hazard_output.SerializerContext().update(xsc)
        map_writer.serialize(hm_data)

        return nrml_path
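
The per-site lookup hinges on key_template holding a single %s slot that is filled with hash(site), so one KVS key exists per site and PoE. A minimal sketch of the pattern; the template literal and the Site stand-in are hypothetical, not the real openquake.shapes.Site:

class Site(object):
    """Tiny stand-in for openquake.shapes.Site."""
    def __init__(self, longitude, latitude):
        self.longitude, self.latitude = longitude, latitude

    def __hash__(self):
        return hash((self.longitude, self.latitude))

key_template = "1!mean_hazard_map!0.1!%s"  # hypothetical template shape
key = key_template % hash(Site(2.0, 5.0))  # one key per site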
Example #9
    def serialize_hazard_map_at_poe(self, sites, poe, key_template,
                                    hm_attrib_update, nrml_file):
        """
        Serialize the hazard map for a set of sites at a given PoE.

        Depending on the parameters the serialized map will be a mean or
        quantile hazard map.

        :param sites: the sites of which the map will be serialized
        :type sites: list of :py:class:`openquake.shapes.Site`
        :param poe: the PoE at which the map will be serialized
        :type poe: :py:class:`float`
        :param key_template: a template for constructing the key used to get,
                             for each site, its map from the KVS
        :type key_template: :py:class:`string`
        :param hm_attrib_update: a dictionary containing metadata for the set
                                 of maps that will be serialized
        :type hm_attrib_update: :py:class:`dict`
        :param nrml_file: the output filename
        :type nrml_file: :py:class:`string`
        """
        nrml_path = self.job_ctxt.build_nrml_path(nrml_file)

        LOG.info("Generating NRML hazard map file for PoE %s, "
                 "%s nodes in hazard map: %s" % (poe, len(sites), nrml_file))

        map_writer = hazard_output.create_hazardmap_writer(
            self.job_ctxt.job_id, self.job_ctxt.serialize_results_to,
            nrml_path)
        hm_data = []

        for site in sites:
            key = key_template % hash(site)
            # use hazard map IML values from KVS
            hm_attrib = {
                'investigationTimeSpan':
                    self.job_ctxt['INVESTIGATION_TIME'],
                'IMT': self.job_ctxt['INTENSITY_MEASURE_TYPE'],
                'vs30': self.job_ctxt['REFERENCE_VS30_VALUE'],
                'IML': kvs.get_value_json_decoded(key),
                'poE': poe}

            hm_attrib.update(hm_attrib_update)
            hm_data.append((site, hm_attrib))

        LOG.debug(">> path: %s" % nrml_path)
        # XML serialization context
        xsc = namedtuple("XSC", "blocks, cblock, i_total, i_done, i_next")(
                         stats.pk_get(self.job_ctxt.job_id, "blocks"),
                         stats.pk_get(self.job_ctxt.job_id, "cblock"),
                         len(sites), 0, len(hm_data))
        hazard_output.SerializerContext().update(xsc)
        map_writer.serialize(hm_data)

        return nrml_path
Example #10
    def from_kvs(cls, block_id):
        """Return the block in the underlying KVS system with the given id."""

        raw_sites = kvs.get_value_json_decoded(block_id)

        sites = []

        for raw_site in raw_sites:
            sites.append(shapes.Site(raw_site[0], raw_site[1]))

        return Block(sites, block_id)
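
Block.from_kvs implies the stored payload is a JSON list of [longitude, latitude] pairs, since element 0 of each raw site becomes the first argument of shapes.Site. The inverse writer would therefore look roughly like the sketch below; Block.to_kvs is not shown on this page, so this is an assumption about the payload shape only:

import json

def block_payload(sites):
    """Encode site objects as the JSON list that Block.from_kvs expects."""
    return json.dumps([[site.longitude, site.latitude] for site in sites])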
Example #11
    def from_kvs(cls, block_id):
        """Return the block in the underlying kvs system with the given id."""

        raw_sites = kvs.get_value_json_decoded(block_id)

        sites = []

        for raw_site in raw_sites:
            sites.append(shapes.Site(raw_site[0], raw_site[1]))

        return Block(sites, block_id)
Example #12
 def from_kvs(job_id):
     """Return the job in the underlying kvs system with the given id."""
     params = kvs.get_value_json_decoded(
         kvs.tokens.generate_job_key(job_id))
     job_profile = profile4job(job_id)
     job = OqJob.objects.get(id=job_id)
     job = JobContext(params,
                      job_id,
                      oq_job_profile=job_profile,
                      oq_job=job,
                      log_level=params['debug'])
     return job
Example #13
def load_vuln_curves_from_kvs(job_id):
    """ Get JSON decoded vulnerability curve from kvs """
    vulnerability_curves_mc = kvs.get_value_json_decoded(
        kvs.tokens.vuln_key(job_id))

    vulnerability_curves = {}

    if vulnerability_curves_mc is not None:
        for k, v in vulnerability_curves_mc.items():
            vulnerability_curves["%s" % k] = shapes.Curve.from_json(v)

    return vulnerability_curves
Example #14
def load_vuln_model_from_kvs(job_id):
    """Load the vulnerability model from kvs for the given job."""

    vulnerability_model = kvs.get_value_json_decoded(
        kvs.tokens.vuln_key(job_id))

    vulnerability_curves = {}

    if vulnerability_model is not None:
        for k, v in vulnerability_model.items():
            vulnerability_curves[k] = shapes.VulnerabilityFunction.from_json(v)

    return vulnerability_curves
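
The vulnerability model is therefore stored as one JSON object per job: a dict mapping taxonomy to a JSON-encoded vulnerability function, decoded entry by entry. The shape of that round trip, with plain json.loads standing in for shapes.VulnerabilityFunction.from_json and invented field names:

import json

stored = {"RC": json.dumps({"imls": [0.1, 0.2], "ratios": [0.05, 0.20]})}

curves = dict((k, json.loads(v)) for k, v in stored.items())
assert curves["RC"]["imls"] == [0.1, 0.2]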
Example #15
    def test_an_empty_hazard_curve_produces_an_empty_quantile_curve(self):
        hazard_curve = []
        self._store_hazard_curve_at(shapes.Site(2.0, 5.0), hazard_curve)

        self._run([shapes.Site(2.0, 5.0)], 1, [0.75])

        result = kvs.get_value_json_decoded(
            kvs.tokens.quantile_hazard_curve_key(self.job_id,
                                                 shapes.Site(2.0, 5.0), 0.75))

        # no values
        self.assertTrue(numpy.allclose([], numpy.array(result)))
Example #16
    def test_an_empty_hazard_curve_produces_an_empty_quantile_curve(self):
        hazard_curve = []
        self._store_hazard_curve_at(shapes.Site(2.0, 5.0), hazard_curve)

        self._run([shapes.Site(2.0, 5.0)], 1, [0.75])

        result = kvs.get_value_json_decoded(
                kvs.tokens.quantile_hazard_curve_key(
                self.job_id, shapes.Site(2.0, 5.0), 0.75))

        # no values
        self.assertTrue(numpy.allclose([], numpy.array(result)))
Example #17
    def _compute_loss(self, block_id):
        """Compute risk for a block of sites, that means:

        * loss ratio curves
        * loss curves
        * conditional losses
        * (partial) aggregate loss curve
        """

        self.slice_gmfs(block_id)

        self.vuln_curves = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)

        # aggregate the losses for this block
        aggregate_curve = general.AggregateLossCurve()

        for site in block.sites:
            point = self.job_ctxt.region.grid.point_at(site)

            key = kvs.tokens.gmf_set_key(
                self.job_ctxt.job_id, point.column, point.row)

            gmf = kvs.get_value_json_decoded(key)
            assets = general.BaseRiskCalculator.assets_at(
                self.job_ctxt.job_id, site)

            for asset in assets:

                # loss ratios, used both to produce the curve
                # and to aggregate the losses
                loss_ratios = self.compute_loss_ratios(asset, gmf)

                loss_ratio_curve = self.compute_loss_ratio_curve(
                    point.column, point.row, asset, gmf, loss_ratios)

                aggregate_curve.append(loss_ratios * asset.value)

                if loss_ratio_curve:
                    loss_curve = self.compute_loss_curve(
                        point.column, point.row, loss_ratio_curve, asset)

                    for loss_poe in general.conditional_loss_poes(
                        self.job_ctxt.params):

                        general.compute_conditional_loss(
                                self.job_ctxt.job_id, point.column,
                                point.row, loss_curve, asset, loss_poe)

        return aggregate_curve.losses
Example #18
    def execute(self):
        """Main hazard processing block.
        
        Loops through various random realizations, spawning tasks to compute
        GMFs."""
        results = []
        
        source_model_generator = random.Random()
        source_model_generator.seed(
                self.params.get('SOURCE_MODEL_LT_RANDOM_SEED', None))
        
        gmpe_generator = random.Random()
        gmpe_generator.seed(self.params.get('GMPE_LT_RANDOM_SEED', None))
        
        gmf_generator = random.Random()
        gmf_generator.seed(self.params.get('GMF_RANDOM_SEED', None))
        
        histories = int(self.params['NUMBER_OF_SEISMICITY_HISTORIES'])
        realizations = int(self.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])
        LOG.info("Going to run hazard for %s histories of %s realizations each."
                % (histories, realizations))

        for i in range(0, histories):
            pending_tasks = []
            for j in range(0, realizations):
                self.store_source_model(source_model_generator.getrandbits(32))
                self.store_gmpe_map(gmpe_generator.getrandbits(32))
                for site_list in self.site_list_generator():
                    stochastic_set_id = "%s!%s" % (i, j)
                    # pylint: disable=E1101
                    pending_tasks.append(
                        tasks.compute_ground_motion_fields.delay(
                            self.id,
                            site_list,
                            stochastic_set_id, gmf_generator.getrandbits(32)))
        
            for task in pending_tasks:
                task.wait()
                if task.status != 'SUCCESS': 
                    raise Exception(task.result)
                    
            # if self.params['OUTPUT_GMF_FILES']
            for j in range(0, realizations):
                stochastic_set_id = "%s!%s" % (i, j)
                stochastic_set_key = kvs.generate_product_key(
                    self.id, kvs.tokens.STOCHASTIC_SET_TOKEN, 
                    stochastic_set_id)
                print "Writing output for ses %s" % stochastic_set_key
                ses = kvs.get_value_json_decoded(stochastic_set_key)
                if ses:
                    results.extend(self.write_gmf_files(ses))
        return results
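
The seeding pattern above is what makes these runs reproducible: each logic-tree element gets its own random.Random seeded from a configured master seed, and every spawned task draws a fresh 32-bit seed from it. A self-contained sketch of that idea:

import random

def realization_seeds(master_seed, realizations):
    """Derive one reproducible 32-bit seed per realization."""
    rng = random.Random()
    rng.seed(master_seed)
    return [rng.getrandbits(32) for _ in range(realizations)]

# same master seed, same task seeds -- reruns reproduce the same GMFs
assert realization_seeds(42, 3) == realization_seeds(42, 3)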
Example #19
    def _compute_loss(self, block_id):
        """Compute risk for a block of sites, that means:

        * loss ratio curves
        * loss curves
        * conditional losses
        * (partial) aggregate loss curve
        """

        self.slice_gmfs(block_id)

        self.vuln_curves = vulnerability.load_vuln_model_from_kvs(
            self.job_ctxt.job_id)

        block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)

        # aggregate the losses for this block
        aggregate_curve = general.AggregateLossCurve()

        for site in block.sites:
            point = self.job_ctxt.region.grid.point_at(site)

            key = kvs.tokens.gmf_set_key(self.job_ctxt.job_id, point.column,
                                         point.row)

            gmf = kvs.get_value_json_decoded(key)
            assets = general.BaseRiskCalculator.assets_at(
                self.job_ctxt.job_id, site)

            for asset in assets:

                # loss ratios, used both to produce the curve
                # and to aggregate the losses
                loss_ratios = self.compute_loss_ratios(asset, gmf)

                loss_ratio_curve = self.compute_loss_ratio_curve(
                    point.column, point.row, asset, gmf, loss_ratios)

                aggregate_curve.append(loss_ratios * asset.value)

                if loss_ratio_curve:
                    loss_curve = self.compute_loss_curve(
                        point.column, point.row, loss_ratio_curve, asset)

                    for loss_poe in general.conditional_loss_poes(
                            self.job_ctxt.params):

                        general.compute_conditional_loss(
                            self.job_ctxt.job_id, point.column, point.row,
                            loss_curve, asset, loss_poe)

        return aggregate_curve.losses
Example #20
    def from_kvs(calculation_id, block_id):
        """Return the block in the underlying KVS system with the given id."""

        block_key = kvs.tokens.risk_block_key(calculation_id, block_id)

        raw_sites = kvs.get_value_json_decoded(block_key)

        sites = []

        for raw_site in raw_sites:
            sites.append(shapes.Site(raw_site[0], raw_site[1]))

        return Block(calculation_id, block_id, sites)
Example #21
def load_vuln_model_from_kvs(job_id, retrofitted=False):
    """Load the vulnerability model from kvs for the given job."""

    vulnerability_model = kvs.get_value_json_decoded(
            kvs.tokens.vuln_key(job_id, retrofitted))

    vulnerability_curves = {}

    if vulnerability_model is not None:
        for k, v in vulnerability_model.items():
            vulnerability_curves[k] = shapes.VulnerabilityFunction.from_json(v)

    return vulnerability_curves
Example #22
    def from_kvs(job_id, block_id):
        """Return the block in the underlying KVS system with the given id."""

        block_key = kvs.tokens.risk_block_key(job_id, block_id)

        raw_sites = kvs.get_value_json_decoded(block_key)

        sites = []

        for raw_site in raw_sites:
            sites.append(shapes.Site(raw_site[0], raw_site[1]))

        return Block(job_id, block_id, sites)
Example #23
    def test_compute_bcr_in_the_classical_psha_calculator(self):
        self._compute_risk_classical_psha_setup()
        helpers.delete_profile(self.job)
        bcr_config = helpers.demo_file('benefit_cost_ratio/config.gem')
        job_profile, params, sections = engine.import_job_profile(
            bcr_config, self.job)

        # We need to adjust a few of the parameters for this test:
        job_profile.imls = [
            0.005, 0.007, 0.0098, 0.0137, 0.0192, 0.0269, 0.0376, 0.0527,
            0.0738, 0.103, 0.145, 0.203, 0.284, 0.397, 0.556, 0.778]
        params['ASSET_LIFE_EXPECTANCY'] = '50'
        job_profile.asset_life_expectancy = 50
        params['REGION_VERTEX'] = '0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 2.0, 0.0'
        job_profile.region = GEOSGeometry(shapes.polygon_ewkt_from_coords(
            params['REGION_VERTEX']))
        job_profile.save()

        job_ctxt = engine.JobContext(
            params, self.job_id, sections=sections, oq_job_profile=job_profile)

        calculator = classical_core.ClassicalRiskCalculator(job_ctxt)

        [input] = models.inputs4job(self.job.id, input_type="exposure")
        emdl = input.model()
        if not emdl:
            emdl = models.ExposureModel(
                owner=self.job.owner, input=input,
                description="c-psha test exposure model",
                category="c-psha power plants", stco_unit="watt",
                stco_type="aggregated", reco_unit="joule",
                reco_type="aggregated")
            emdl.save()

        assets = emdl.exposuredata_set.filter(asset_ref="rubcr")
        if not assets:
            asset = models.ExposureData(exposure_model=emdl, taxonomy="ID",
                                        asset_ref="rubcr", stco=1, reco=123.45,
                                        site=GEOSGeometry("POINT(1.0 1.0)"))
            asset.save()

        Block.from_kvs(self.job_id, self.block_id)
        calculator.compute_risk(self.block_id)

        result_key = kvs.tokens.bcr_block_key(self.job_id, self.block_id)
        res = kvs.get_value_json_decoded(result_key)
        expected_result = {'bcr': 0.0, 'eal_original': 0.003032,
                           'eal_retrofitted': 0.003032}

        helpers.assertDeepAlmostEqual(
            self, res, [[[1, 1], [[expected_result, "rubcr"]]]])
Example #24
    def _compute_bcr(self, block_id):
        """
        Calculate and store in the kvs the benefit-cost ratio data for block.

        A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
        See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
        data structure spec.
        """
        self.slice_gmfs(block_id)

        # aggregate the losses for this block
        aggregate_curve = general.AggregateLossCurve()

        points = list(
            general.Block.from_kvs(self.job_ctxt.job_id,
                                   block_id).grid(self.job_ctxt.region))
        gmf_slices = dict(
            (point.site,
             kvs.get_value_json_decoded(
                 kvs.tokens.gmf_set_key(self.job_ctxt.job_id, point.column,
                                        point.row))) for point in points)
        epsilon_provider = general.EpsilonProvider(self.job_ctxt.params)

        def get_loss_curve(point, vuln_function, asset):
            "Compute loss curve basing on GMF data"
            gmf_slice = gmf_slices[point.site]
            loss_ratios = general.compute_loss_ratios(vuln_function, gmf_slice,
                                                      epsilon_provider, asset)
            loss_ratio_curve = general.compute_loss_ratio_curve(
                vuln_function,
                gmf_slice,
                epsilon_provider,
                asset,
                self.job_ctxt.oq_job_profile.loss_histogram_bins,
                loss_ratios=loss_ratios)

            aggregate_curve.append(loss_ratios * asset.value)

            return loss_ratio_curve.rescale_abscissae(asset.value)

        result = general.compute_bcr_for_block(
            self.job_ctxt.job_id, points, get_loss_curve,
            float(self.job_ctxt.params['INTEREST_RATE']),
            float(self.job_ctxt.params['ASSET_LIFE_EXPECTANCY']))

        bcr_block_key = kvs.tokens.bcr_block_key(self.job_ctxt.job_id,
                                                 block_id)
        kvs.set_value_json_encoded(bcr_block_key, result)
        LOGGER.debug('bcr result for block %s: %r', block_id, result)

        return aggregate_curve.losses
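
compute_bcr_for_block itself is not reproduced on this page. The conventional benefit-cost ratio it presumably implements discounts the annual saving (eal_original - eal_retrofitted) over the asset's remaining life at the given interest rate, then divides by the retrofitting cost; treat the sketch below as that textbook formula, not as the verified OpenQuake implementation:

from math import exp

def bcr(eal_original, eal_retrofitted, interest_rate,
        asset_life_expectancy, retrofitting_cost):
    """Benefit-cost ratio of a retrofit (assumed textbook formula)."""
    annuity = (1 - exp(-interest_rate * asset_life_expectancy)) / interest_rate
    return (eal_original - eal_retrofitted) * annuity / retrofitting_cost

# bcr(0.005, 0.002, 0.05, 50, 0.1) is roughly 0.55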
Example #25
    def _compute_loss(self, block_id):
        """Compute risk for a block of sites, that means:

        * loss ratio curves
        * loss curves
        * conditional losses
        * (partial) aggregate loss curve
        """

        self.slice_gmfs(block_id)

        self.vuln_curves = vulnerability.load_vuln_model_from_kvs(
            self.calc_proxy.job_id)

        block = general.Block.from_kvs(self.calc_proxy.job_id, block_id)

        # aggregate the losses for this block
        aggregate_curve = general.AggregateLossCurve()

        for point in block.grid(self.calc_proxy.region):
            key = kvs.tokens.gmf_set_key(self.calc_proxy.job_id, point.column,
                                         point.row)
            gmf_slice = kvs.get_value_json_decoded(key)

            asset_key = kvs.tokens.asset_key(
                self.calc_proxy.job_id, point.row, point.column)

            for asset in kvs.get_list_json_decoded(asset_key):
                LOGGER.debug("Processing asset %s" % (asset))

                # loss ratios, used both to produce the curve
                # and to aggregate the losses
                loss_ratios = self.compute_loss_ratios(asset, gmf_slice)

                loss_ratio_curve = self.compute_loss_ratio_curve(
                    point.column, point.row, asset, gmf_slice, loss_ratios)

                aggregate_curve.append(loss_ratios * asset["assetValue"])

                if loss_ratio_curve:
                    loss_curve = self.compute_loss_curve(
                        point.column, point.row, loss_ratio_curve, asset)

                    for loss_poe in general.conditional_loss_poes(
                        self.calc_proxy.params):

                        general.compute_conditional_loss(
                                self.calc_proxy.job_id, point.column,
                                point.row, loss_curve, asset, loss_poe)

        return aggregate_curve.losses
Example #26
    def execute(self):
        """Main hazard processing block.

        Loops through various random realizations, spawning tasks to compute
        GMFs."""
        results = []

        source_model_generator = random.Random()
        source_model_generator.seed(
            self.params.get('SOURCE_MODEL_LT_RANDOM_SEED', None))

        gmpe_generator = random.Random()
        gmpe_generator.seed(self.params.get('GMPE_LT_RANDOM_SEED', None))

        gmf_generator = random.Random()
        gmf_generator.seed(self.params.get('GMF_RANDOM_SEED', None))

        histories = int(self.params['NUMBER_OF_SEISMICITY_HISTORIES'])
        realizations = int(self.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])
        LOG.info(
            "Going to run hazard for %s histories of %s realizations each." %
            (histories, realizations))

        for i in range(0, histories):
            pending_tasks = []
            for j in range(0, realizations):
                self.store_source_model(source_model_generator.getrandbits(32))
                self.store_gmpe_map(gmpe_generator.getrandbits(32))
                stochastic_set_id = "%s!%s" % (i, j)
                pending_tasks.append(
                    tasks.compute_ground_motion_fields.delay(
                        self.id, self.sites_for_region(), stochastic_set_id,
                        gmf_generator.getrandbits(32)))

            for task in pending_tasks:
                task.wait()
                if task.status != 'SUCCESS':
                    raise Exception(task.result)

            for j in range(0, realizations):
                stochastic_set_id = "%s!%s" % (i, j)
                stochastic_set_key = kvs.generate_product_key(
                    self.id, kvs.tokens.STOCHASTIC_SET_TOKEN,
                    stochastic_set_id)
                print "Writing output for ses %s" % stochastic_set_key
                ses = kvs.get_value_json_decoded(stochastic_set_key)
                if ses:
                    results.extend(self.write_gmf_files(ses))
        return results
Example #27
    def execute(self):
        """Main hazard processing block.

        Loops through various random realizations, spawning tasks to compute
        GMFs."""
        source_model_generator = random.Random()
        source_model_generator.seed(
            self.job_ctxt['SOURCE_MODEL_LT_RANDOM_SEED'])

        gmpe_generator = random.Random()
        gmpe_generator.seed(self.job_ctxt['GMPE_LT_RANDOM_SEED'])

        gmf_generator = random.Random()
        gmf_generator.seed(self.job_ctxt['GMF_RANDOM_SEED'])

        histories = self.job_ctxt['NUMBER_OF_SEISMICITY_HISTORIES']
        realizations = self.job_ctxt['NUMBER_OF_LOGIC_TREE_SAMPLES']
        self.initialize_pr_data(num_calculations=histories * realizations)

        LOG.info(
            "Going to run hazard for %s histories of %s realizations each." %
            (histories, realizations))

        for i in range(0, histories):
            pending_tasks = []
            for j in range(0, realizations):
                self.store_source_model(source_model_generator.getrandbits(32))
                self.store_gmpe_map(gmpe_generator.getrandbits(32))
                pending_tasks.append(
                    compute_ground_motion_fields.delay(
                        self.job_ctxt.job_id,
                        self.job_ctxt.sites_to_compute(),
                        i,
                        realization=j,
                        seed=gmf_generator.getrandbits(32)))

            for each_task in pending_tasks:
                each_task.wait()
                if each_task.status != 'SUCCESS':
                    raise Exception(each_task.result)
                logs.log_percent_complete(self.job_ctxt.job_id, "hazard")

            for j in range(0, realizations):
                stochastic_set_key = kvs.tokens.stochastic_set_key(
                    self.job_ctxt.job_id, i, j)
                LOG.info("Writing output for ses %s" % stochastic_set_key)
                ses = kvs.get_value_json_decoded(stochastic_set_key)
                if ses:
                    self.serialize_gmf(ses)
Example #28
    def compute_risk(self, block_id, **kwargs):  # pylint: disable=W0613
        """This task computes risk for a block of sites. It requires to have
        pre-initialized in kvs:
         1) list of sites
         2) gmfs
         3) exposure portfolio (=assets)
         4) vulnerability

        TODO(fab): make conditional_loss_poe (set of probabilities of
        exceedance for which the loss computation is done)
        a list of floats, and read it from the job configuration.
        """

        conditional_loss_poes = [
            float(x)
            for x in self.params.get('CONDITIONAL_LOSS_POE', "0.01").split()
        ]
        self.slice_gmfs(block_id)

        #pylint: disable=W0201
        self.vuln_curves = \
                vulnerability.load_vuln_model_from_kvs(self.job_id)

        # TODO(jmc): DON'T assume that the hazard and risk grids are the same
        block = job.Block.from_kvs(block_id)

        for point in block.grid(self.region):
            key = kvs.generate_product_key(self.job_id,
                                           kvs.tokens.GMF_KEY_TOKEN,
                                           point.column, point.row)
            gmf_slice = kvs.get_value_json_decoded(key)

            asset_key = kvs.tokens.asset_key(self.id, point.row, point.column)
            asset_list = kvs.get_client().lrange(asset_key, 0, -1)
            for asset in [json.JSONDecoder().decode(x) for x in asset_list]:
                LOGGER.debug("processing asset %s" % (asset))
                loss_ratio_curve = self.compute_loss_ratio_curve(
                    point.column, point.row, asset, gmf_slice)
                if loss_ratio_curve is not None:

                    # compute loss curve
                    loss_curve = self.compute_loss_curve(
                        point.column, point.row, loss_ratio_curve, asset)

                    for loss_poe in conditional_loss_poes:
                        self.compute_conditional_loss(point.column, point.row,
                                                      loss_curve, asset,
                                                      loss_poe)
        return True
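
compute_conditional_loss is not shown on this page either; conceptually it reads the loss at the target PoE off the loss curve, interpolating between curve points. A sketch under the usual assumption that losses ascend while their PoEs descend:

import numpy

def conditional_loss(losses, poes, target_poe):
    """Interpolate the loss at `target_poe` on a (losses, poes) curve."""
    # numpy.interp wants ascending x values, so reverse both sequences
    return float(numpy.interp(target_poe, poes[::-1], losses[::-1]))

# conditional_loss([0.0, 10.0, 20.0], [0.9, 0.5, 0.1], 0.3) == 15.0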
Example #29
    def _compute_bcr(self, block_id):
        """
        Calculate and store in the kvs the benefit-cost ratio data for block.

        A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
        See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
        data structure spec.
        """
        self.slice_gmfs(block_id)

        # aggregate the losses for this block
        aggregate_curve = general.AggregateLossCurve()

        points = list(general.Block.from_kvs(
            self.job_ctxt.job_id, block_id).grid(self.job_ctxt.region))
        gmf_slices = dict(
            (point.site, kvs.get_value_json_decoded(
                 kvs.tokens.gmf_set_key(self.job_ctxt.job_id, point.column,
                                        point.row)
            ))
            for point in points
        )
        epsilon_provider = general.EpsilonProvider(self.job_ctxt.params)

        def get_loss_curve(point, vuln_function, asset):
            "Compute loss curve basing on GMF data"
            gmf_slice = gmf_slices[point.site]
            loss_ratios = general.compute_loss_ratios(
                vuln_function, gmf_slice, epsilon_provider, asset)
            loss_ratio_curve = general.compute_loss_ratio_curve(
                vuln_function, gmf_slice, epsilon_provider, asset,
                self.job_ctxt.oq_job_profile.loss_histogram_bins,
                loss_ratios=loss_ratios)

            aggregate_curve.append(loss_ratios * asset.value)

            return loss_ratio_curve.rescale_abscissae(asset.value)

        result = general.compute_bcr_for_block(self.job_ctxt.job_id, points,
            get_loss_curve, float(self.job_ctxt.params['INTEREST_RATE']),
            float(self.job_ctxt.params['ASSET_LIFE_EXPECTANCY'])
        )

        bcr_block_key = kvs.tokens.bcr_block_key(self.job_ctxt.job_id,
                                                 block_id)
        kvs.set_value_json_encoded(bcr_block_key, result)
        LOGGER.debug('bcr result for block %s: %r', block_id, result)

        return aggregate_curve.losses
Example #30
    def asset_bcr_per_site(self):
        """
        Fetch and return Benefit-Cost Ratio results computed by workers.

        :return:
            List of two-item tuples: site object and lists of BCR values per
            asset in that site. See :func:`compute_bcr_for_block`.
        """
        data = []
        for block_id in self.job_ctxt.blocks_keys:
            key = kvs.tokens.bcr_block_key(self.job_ctxt.job_id, block_id)
            block_data = kvs.get_value_json_decoded(key)
            data += [(shapes.Site(latitude=lat, longitude=lon), payload)
                     for ((lon, lat), payload) in block_data]
        return data
Example #31
    def asset_bcr_per_site(self):
        """
        Fetch and return Benefit-Cost Ratio results computed by workers.

        :return:
            List of two-item tuples: site object and lists of BCR values per
            asset in that site. See :func:`compute_bcr_for_block`.
        """
        data = []
        for block_id in self.calc_proxy.blocks_keys:
            key = kvs.tokens.bcr_block_key(self.calc_proxy.job_id, block_id)
            block_data = kvs.get_value_json_decoded(key)
            data += [(shapes.Site(latitude=lat, longitude=lon), payload)
                     for ((lat, lon), payload) in block_data]
        return data
Example #32
    def test_compute_bcr(self):
        cfg_path = helpers.demo_file(
            'probabilistic_event_based_risk/config.gem')
        helpers.delete_profile(self.job)
        job_profile, params, sections = engine.import_job_profile(
            cfg_path, self.job)
        job_profile.calc_mode = 'event_based_bcr'
        job_profile.interest_rate = 0.05
        job_profile.asset_life_expectancy = 50
        job_profile.region = GEOSGeometry(shapes.polygon_ewkt_from_coords(
            '0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 2.0, 0.0'))
        job_profile.region_grid_spacing = 0.1
        job_profile.maximum_distance = 200.0
        job_profile.gmf_random_seed = None
        job_profile.save()

        params.update(dict(CALCULATION_MODE='Event Based BCR',
                           INTEREST_RATE='0.05',
                           ASSET_LIFE_EXPECTANCY='50',
                           MAXIMUM_DISTANCE='200.0',
                           REGION_VERTEX=('0.0, 0.0, 0.0, 2.0, '
                                          '2.0, 2.0, 2.0, 0.0'),
                           REGION_GRID_SPACING='0.1'))

        job_ctxt = engine.JobContext(
            params, self.job_id, sections=sections, oq_job_profile=job_profile)

        calculator = eb_core.EventBasedRiskCalculator(job_ctxt)

        self.block_id = 7
        SITE = shapes.Site(1.0, 1.0)
        block = Block(self.job_id, self.block_id, (SITE, ))
        block.to_kvs()

        location = GEOSGeometry(SITE.point.to_wkt())
        asset = models.ExposureData(exposure_model=self.emdl, taxonomy="ID",
                                    asset_ref=22.61, stco=1, reco=123.45,
                                    site=location)
        asset.save()

        calculator.compute_risk(self.block_id)

        result_key = kvs.tokens.bcr_block_key(self.job_id, self.block_id)
        result = kvs.get_value_json_decoded(result_key)
        expected_result = {'bcr': 0.0, 'eal_original': 0.0,
                           'eal_retrofitted': 0.0}
        helpers.assertDeepAlmostEqual(
            self, [[[1, 1], [[expected_result, "22.61"]]]], result)
Example #33
    def test_an_empty_hazard_curve_produces_an_empty_mean_curve(self):
        hazard_curve = {"site_lon": 2.0, "site_lat": 5.0, "curve": []}
        self._store_hazard_curve_at(shapes.Site(2.0, 5.0), hazard_curve)

        self._run([shapes.Site(2.0, 5.0)])

        result = kvs.get_value_json_decoded(
                kvs.tokens.mean_hazard_curve_key(
                self.job_id, shapes.Site(2.0, 5.0)))

        # site is correct
        self.assertEqual(2.0, result["site_lon"])
        self.assertEqual(5.0, result["site_lat"])

        # no values
        self.assertTrue(numpy.allclose([], numpy.array(result["curve"])))
Example #35
    def compute_loss_curve(self, gridpoint, loss_ratio_curve):
        """Return the loss curve based on loss ratio and exposure."""
        
        if loss_ratio_curve is None:
            return None

        kvs_key_exposure = kvs.generate_product_key(self.job_id,
            kvs.tokens.EXPOSURE_KEY_TOKEN, self.block_id, gridpoint)

        asset = kvs.get_value_json_decoded(kvs_key_exposure)

        if asset is None:
            return None

        return classical_psha_based.compute_loss_curve(
            loss_ratio_curve, asset['AssetValue'])
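
compute_loss_curve here just rescales the loss ratio curve by the exposed value: abscissae (loss ratios) become absolute losses while the ordinates (PoEs) are untouched, the same rescale_abscissae operation visible in Examples #24 and #29. A sketch on plain (ratio, poe) pairs, standing in for the real Curve type:

def loss_curve_from_ratios(loss_ratio_pairs, asset_value):
    """Scale (loss_ratio, poe) pairs by the asset value -> (loss, poe)."""
    return [(ratio * asset_value, poe) for ratio, poe in loss_ratio_pairs]

# loss_curve_from_ratios([(0.1, 0.9), (0.4, 0.2)], 1000.0)
# -> [(100.0, 0.9), (400.0, 0.2)]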
Example #36
    def execute(self):
        """Main hazard processing block.

        Loops through various random realizations, spawning tasks to compute
        GMFs."""
        source_model_generator = random.Random()
        source_model_generator.seed(
            self.job_ctxt['SOURCE_MODEL_LT_RANDOM_SEED'])

        gmpe_generator = random.Random()
        gmpe_generator.seed(self.job_ctxt['GMPE_LT_RANDOM_SEED'])

        gmf_generator = random.Random()
        gmf_generator.seed(self.job_ctxt['GMF_RANDOM_SEED'])

        histories = self.job_ctxt['NUMBER_OF_SEISMICITY_HISTORIES']
        realizations = self.job_ctxt['NUMBER_OF_LOGIC_TREE_SAMPLES']
        self.initialize_pr_data(num_calculations=histories * realizations)

        LOG.info(
            "Going to run hazard for %s histories of %s realizations each."
            % (histories, realizations))

        for i in range(0, histories):
            pending_tasks = []
            for j in range(0, realizations):
                self.store_source_model(source_model_generator.getrandbits(32))
                self.store_gmpe_map(gmpe_generator.getrandbits(32))
                pending_tasks.append(
                    compute_ground_motion_fields.delay(
                        self.job_ctxt.job_id,
                        self.job_ctxt.sites_to_compute(),
                        i, realization=j, seed=gmf_generator.getrandbits(32)))

            for each_task in pending_tasks:
                each_task.wait()
                if each_task.status != 'SUCCESS':
                    raise Exception(each_task.result)
                logs.log_percent_complete(self.job_ctxt.job_id, "hazard")

            for j in range(0, realizations):
                stochastic_set_key = kvs.tokens.stochastic_set_key(
                    self.job_ctxt.job_id, i, j)
                LOG.info("Writing output for ses %s" % stochastic_set_key)
                ses = kvs.get_value_json_decoded(stochastic_set_key)
                if ses:
                    self.serialize_gmf(ses)
Example #37
    def from_kvs(job_id):
        """Return an aggregate curve using the GMFs and assets
        stored in the underlying kvs system."""
        
        vuln_model = vulnerability.load_vuln_model_from_kvs(job_id)
        aggregate_curve = AggregateLossCurve(vuln_model)
        
        client = kvs.get_client(binary=False)
        gmfs_keys = client.keys("%s*%s*" % (job_id, kvs.tokens.GMF_KEY_TOKEN))
        LOG.debug("Found %s stored GMFs..." % len(gmfs_keys))

        for gmfs_key in gmfs_keys: # O(2*n)
            asset = _asset_for_gmfs(job_id, gmfs_key)
            gmfs = kvs.get_value_json_decoded(gmfs_key)
            aggregate_curve.append(gmfs, asset)
        
        return aggregate_curve
Example #38
    def compute_risk(self, block_id, **kwargs):  # pylint: disable=W0613
        """This task computes risk for a block of sites. It requires to have
        pre-initialized in kvs:
         1) list of sites
         2) gmfs
         3) exposure portfolio (=assets)
         4) vulnerability

        TODO(fab): make conditional_loss_poe (set of probabilities of
        exceedance for which the loss computation is done)
        a list of floats, and read it from the job configuration.
        """

        conditional_loss_poes = [float(x) for x in self.params.get(
                    'CONDITIONAL_LOSS_POE', "0.01").split()]
        self.slice_gmfs(block_id)

        #pylint: disable=W0201
        self.vuln_curves = \
                vulnerability.load_vuln_model_from_kvs(self.job_id)

        # TODO(jmc): DON'T assume that the hazard and risk grids are the same
        block = job.Block.from_kvs(block_id)

        for point in block.grid(self.region):
            key = kvs.generate_product_key(self.job_id,
                kvs.tokens.GMF_KEY_TOKEN, point.column, point.row)
            gmf_slice = kvs.get_value_json_decoded(key)

            asset_key = kvs.tokens.asset_key(self.id, point.row, point.column)
            asset_list = kvs.get_client().lrange(asset_key, 0, -1)
            for asset in [json.JSONDecoder().decode(x) for x in asset_list]:
                LOGGER.debug("processing asset %s" % (asset))
                loss_ratio_curve = self.compute_loss_ratio_curve(
                        point.column, point.row, asset, gmf_slice)
                if loss_ratio_curve is not None:

                    # compute loss curve
                    loss_curve = self.compute_loss_curve(
                            point.column, point.row,
                            loss_ratio_curve, asset)

                    for loss_poe in conditional_loss_poes:
                        self.compute_conditional_loss(point.column, point.row,
                                loss_curve, asset, loss_poe)
        return True
Example #39
    def serialize_hazard_map_at_poe(self, sites, poe, key_template, hm_attrib_update, nrml_file):
        """
        Serialize the hazard map for a set of sites at a given PoE.

        Depending on the parameters the serialized map will be a mean or
        quantile hazard map.

        :param sites: the sites of which the map will be serialized
        :type sites: list of :py:class:`openquake.shapes.Site`
        :param poe: the PoE at which the map will be serialized
        :type poe: :py:class:`float`
        :param key_template: a template for constructing the key used to get,
                             for each site, its map from the KVS
        :type key_template: :py:class:`string`
        :param hm_attrib_update: a dictionary containing metadata for the set
                                 of maps that will be serialized
        :type hm_attrib_update: :py:class:`dict`
        :param nrml_file: the output filename
        :type nrml_file: :py:class:`string`
        """
        nrml_path = self.build_nrml_path(nrml_file)

        LOG.info(
            "Generating NRML hazard map file for PoE %s, " "%s nodes in hazard map: %s" % (poe, len(sites), nrml_file)
        )

        map_writer = hazard_output.create_hazardmap_writer(self.job_id, self.serialize_results_to, nrml_path)
        hm_data = []

        for site in sites:
            key = key_template % hash(site)
            # use hazard map IML values from KVS
            hm_attrib = {
                "investigationTimeSpan": self["INVESTIGATION_TIME"],
                "IMT": self["INTENSITY_MEASURE_TYPE"],
                "vs30": self["REFERENCE_VS30_VALUE"],
                "IML": kvs.get_value_json_decoded(key),
                "poE": poe,
            }

            hm_attrib.update(hm_attrib_update)
            hm_data.append((site, hm_attrib))

        map_writer.serialize(hm_data)

        return nrml_path
Example #40
    def execute(self):
        """Main hazard processing block.

        Loops through various random realizations, spawning tasks to compute
        GMFs."""
        source_model_generator = random.Random()
        source_model_generator.seed(
                self.params.get('SOURCE_MODEL_LT_RANDOM_SEED', None))

        gmpe_generator = random.Random()
        gmpe_generator.seed(self.params.get('GMPE_LT_RANDOM_SEED', None))

        gmf_generator = random.Random()
        gmf_generator.seed(self.params.get('GMF_RANDOM_SEED', None))

        histories = int(self.params['NUMBER_OF_SEISMICITY_HISTORIES'])
        realizations = int(self.params['NUMBER_OF_LOGIC_TREE_SAMPLES'])
        LOG.info(
            "Going to run hazard for %s histories of %s realizations each."
            % (histories, realizations))

        for i in range(0, histories):
            pending_tasks = []
            for j in range(0, realizations):
                self.store_source_model(source_model_generator.getrandbits(32))
                self.store_gmpe_map(gmpe_generator.getrandbits(32))
                pending_tasks.append(
                    tasks.compute_ground_motion_fields.delay(
                        self.job_id, self.sites_for_region(),
                        i, j, gmf_generator.getrandbits(32)))

            for task in pending_tasks:
                task.wait()
                if task.status != 'SUCCESS':
                    raise Exception(task.result)

            for j in range(0, realizations):
                stochastic_set_key = kvs.tokens.stochastic_set_key(self.job_id,
                                                                   i, j)
                print "Writing output for ses %s" % stochastic_set_key
                ses = kvs.get_value_json_decoded(stochastic_set_key)
                if ses:
                    self.serialize_gmf(ses)
Example #41
def compute_mean_hazard_maps(job_id, sites, imls, poes):
    """Compute mean hazard maps using as input all the
    pre computed mean hazard curves.
    """

    LOG.debug("[MEAN_HAZARD_MAPS] List of POEs is %s" % poes)

    keys = []
    for site in sites:
        mean_poes = kvs.get_value_json_decoded(
            kvs.tokens.mean_hazard_curve_key(job_id, site))
        interpolate = build_interpolator(mean_poes, imls, site)

        for poe in poes:
            key = kvs.tokens.mean_hazard_map_key(job_id, site, poe)
            keys.append(key)

            kvs.set_value_json_encoded(key, interpolate(poe))

    return keys
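
build_interpolator is not among the snippets. From its call sites it receives the decoded curve value and the configured IMLs and returns a callable mapping a target PoE to a hazard-map datum for that site; Example #47 reads the stored value back via ["IML"], so the datum is presumably a small dict. A sketch under those assumptions, taking the curve ordinates as a plain sequence aligned with imls and decreasing as the IMLs increase:

import numpy

def build_interpolator(poes, imls, site):
    """Return a callable PoE -> hazard-map datum for one site (sketch)."""
    def interpolate(poe):
        # numpy.interp wants ascending x values, so reverse both sequences
        iml = float(numpy.interp(poe, poes[::-1], imls[::-1]))
        return {"site_lon": site.longitude, "site_lat": site.latitude,
                "IML": iml}
    return interpolate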
Example #42
    def compute_loss_ratio_curve(self, gridpoint):
        """ Returns the loss ratio curve for a single gridpoint"""

        # check in kvs if hazard and exposure for gridpoint are there
        kvs_key_hazard = kvs.generate_product_key(self.job_id, 
            kvs.tokens.HAZARD_CURVE_KEY_TOKEN, self.block_id, gridpoint)
       
        hazard_curve_json = kvs.get_client(binary=False).get(kvs_key_hazard)
        LOGGER.debug("hazard curve as JSON: %s" % hazard_curve_json)

        if hazard_curve_json is None:
            LOGGER.debug("no hazard curve found")
            return None

        hazard_curve = shapes.EMPTY_CURVE
        hazard_curve.from_json(hazard_curve_json)

        LOGGER.debug("hazard curve at key %s is %s" % (kvs_key_hazard,
            hazard_curve))

        kvs_key_exposure = kvs.generate_product_key(self.job_id, 
            kvs.tokens.EXPOSURE_KEY_TOKEN, self.block_id, gridpoint)
        
        asset = kvs.get_value_json_decoded(kvs_key_exposure)

        LOGGER.debug("asset at key %s is %s" % (kvs_key_exposure, asset))

        if asset is None:
            LOGGER.debug("no asset found")
            return None

        LOGGER.debug("compute method: vuln curves are")
        for k, v in self.vulnerability_curves.items(): #pylint: disable=E1101
            LOGGER.debug("%s: %s" % (k, v.values))

        #pylint: disable=E1101
        vulnerability_curve = \
            self.vulnerability_curves[asset['VulnerabilityFunction']]

        # selected vuln function is Curve
        return classical_psha_based.compute_loss_ratio_curve(
            vulnerability_curve, hazard_curve)
Example #43
    def serialize_hazard_curve(self, nrml_file, key_template, hc_attrib_update,
                               sites):
        """
        Serialize the hazard curves of a set of sites.

        Depending on the parameters the serialized curve will be a plain, mean
        or quantile hazard curve.

        :param nrml_file: the output filename
        :type nrml_file: :py:class:`string`
        :param key_template: a template for constructing the key to get, for
                             each site, its curve from the KVS
        :type key_template: :py:class:`string`
        :param hc_attrib_update: a dictionary containing metadata for the set
                                 of curves that will be serialized
        :type hc_attrib_update: :py:class:`dict`
        :param sites: the sites of which the curve will be serialized
        :type sites: list of :py:class:`openquake.shapes.Site`
        """
        nrml_path = self.build_nrml_path(nrml_file)

        curve_writer = hazard_output.create_hazardcurve_writer(
            self.job_id, self.serialize_results_to, nrml_path)
        hc_data = []

        for site in sites:
            # Use hazard curve ordinate values (PoE) from KVS and abscissae
            # from the IML list in config.
            hc_attrib = {
                'investigationTimeSpan': self['INVESTIGATION_TIME'],
                'IMLValues': self.imls,
                'IMT': self['INTENSITY_MEASURE_TYPE'],

                'PoEValues': kvs.get_value_json_decoded(key_template
                                                        % hash(site))}

            hc_attrib.update(hc_attrib_update)
            hc_data.append((site, hc_attrib))

        curve_writer.serialize(hc_data)

        return nrml_path
Example #44
def compute_mean_hazard_maps(job_id, sites, imls, poes):
    """Compute mean hazard maps using as input all the
    pre computed mean hazard curves.
    """

    LOG.debug("[MEAN_HAZARD_MAPS] List of POEs is %s" % poes)

    keys = []
    for site in sites:
        mean_poes = kvs.get_value_json_decoded(
            kvs.tokens.mean_hazard_curve_key(job_id, site))
        interpolate = build_interpolator(mean_poes, imls, site)

        for poe in poes:
            key = kvs.tokens.mean_hazard_map_key(job_id, site, poe)
            keys.append(key)

            kvs.set_value_json_encoded(key, interpolate(poe))

    return keys
Example #45
def compute_quantile_hazard_maps(job_id, sites, quantiles, imls, poes):
    """Compute quantile hazard maps using as input all the
    pre computed quantile hazard curves.
    """

    LOG.debug("[QUANTILE_HAZARD_MAPS] List of POEs is %s" % poes)
    LOG.debug("[QUANTILE_HAZARD_MAPS] List of quantiles is %s" % quantiles)

    keys = []
    for quantile in quantiles:
        for site in sites:
            quantile_poes = kvs.get_value_json_decoded(
                kvs.tokens.quantile_hazard_curve_key(job_id, site, quantile))

            interpolate = build_interpolator(quantile_poes, imls, site)

            for poe in poes:
                key = kvs.tokens.quantile_hazard_map_key(
                    job_id, site, poe, quantile)
                keys.append(key)

                kvs.set_value_json_encoded(key, interpolate(poe))

    return keys
Example #46
def map2db(job_ctxt, sites, poes, quantile=None):
    """Write (mean|quantile) hazard map data to database.

    :param job_ctxt: the `JobContext` instance to use.
    :param sites: the sites for which the maps will be serialized
    :type sites: list of :py:class:`openquake.shapes.Site`
    :param poes: the PoEs at which the maps will be serialized
    :type poes: list of :py:class:`float`
    :param float quantile: the quantile at which the maps will be serialized
    """
    rtype = "mean" if quantile is None else "quantile"
    for poe in poes:
        datum = (poe, ) if quantile is None else (poe, quantile)
        key_template, path, hm_meta = hms_meta(job_ctxt, rtype, datum)

        LOG.info("Generating hazard map file for PoE %s, "
                 "%s nodes" % (poe, len(sites)))

        map_writer = hzrd_out.HazardMapDBWriter(path, job_ctxt.job_id)
        hm_data = []

        for site in sites:
            key = key_template % hash(site)
            # use hazard map IML values from KVS
            hm_attrib = {
                'investigationTimeSpan': job_ctxt['INVESTIGATION_TIME'],
                'IMT': job_ctxt['INTENSITY_MEASURE_TYPE'],
                'vs30': job_ctxt['REFERENCE_VS30_VALUE'],
                'IML': kvs.get_value_json_decoded(key),
                'poE': poe
            }

            hm_attrib.update(hm_meta)
            hm_data.append((site, hm_attrib))

        LOG.debug(">> path: %s" % path)
        map_writer.serialize(hm_data)
Example #47
    def bb_hazard_map_values_are_correctly_stored_in_kvs(self):
        """Hazard map values are correct and stored in kvs.

        This test verifies that the hazard map values we are going
        to store in .tiff and .xml formats are correct.
        """

        self.engine.launch()

        pattern = "%s*%s*%s*" % (kvs.tokens.MEAN_HAZARD_MAP_KEY_TOKEN,
                                 self.engine.id, POE)

        map_values = kvs.mget_decoded(pattern)

        self.assertEqual(len(self.expected_results), len(map_values))

        for expected_result in self.expected_results:
            key = kvs.tokens.mean_hazard_map_key(self.engine.id,
                                                 expected_result[0], POE)

            computed_value = float(kvs.get_value_json_decoded(key)["IML"])

            self.assertTrue(
                numpy.allclose(computed_value, expected_result[1], atol=0.001))
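numpy.allclose() compares scalars just as happily as arrays, which is what makes the per-key assertion above work:

import numpy

numpy.allclose(0.2004, 0.2, atol=0.001)  # True: within the 0.001 tolerance
numpy.allclose(0.2020, 0.2, atol=0.001)  # False: off by 0.002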
Example #49
    def write_hazardcurve_file(self, curve_keys):
        """Generate a NRML file with hazard curves for a collection of
        hazard curves from KVS, identified through their KVS keys.

        curve_keys is a list of KVS keys of the hazard curves to be
        serialized.

        The hazard curve file can be written
        (1) for a set of hazard curves belonging to the same realization
            (= endBranchLabel) and a set of sites.
        (2) for a mean hazard curve at a set of sites
        (3) for a quantile hazard curve at a set of sites

        Mixing of these three cases is not allowed, i.e., all hazard curves
        from the set of curve_keys have to be either for the same realization,
        mean, or quantile.
        """

        if _is_mean_hazard_curve_key(curve_keys[0]):
            hc_attrib_update = {'statistics': 'mean'}
            filename_part = 'mean'
            curve_mode = 'mean'

        elif _is_quantile_hazard_curve_key(curve_keys[0]):

            # get quantile value from KVS key
            quantile_value = tokens.quantile_value_from_hazard_curve_key(
                curve_keys[0])
            hc_attrib_update = {
                'statistics': 'quantile',
                'quantileValue': quantile_value
            }
            filename_part = "quantile-%.2f" % quantile_value
            curve_mode = 'quantile'

        elif _is_realization_hazard_curve_key(curve_keys[0]):
            realization_reference_str = \
                tokens.realization_value_from_hazard_curve_key(curve_keys[0])
            hc_attrib_update = {'endBranchLabel': realization_reference_str}
            filename_part = realization_reference_str
            curve_mode = 'realization'

        else:
            error_msg = "no valid hazard curve type found in KVS key"
            raise RuntimeError(error_msg)

        nrml_file = "%s-%s.xml" % (HAZARD_CURVE_FILENAME_PREFIX, filename_part)

        nrml_path = os.path.join(self['BASE_PATH'], self['OUTPUT_DIR'],
                                 nrml_file)
        iml_list = [
            float(param)
            for param in self.params['INTENSITY_MEASURE_LEVELS'].split(",")
        ]

        LOG.debug("Generating NRML hazard curve file for mode %s, "\
            "%s hazard curves: %s" % (curve_mode, len(curve_keys), nrml_file))
        LOG.debug("IML: %s" % iml_list)

        xmlwriter = hazard_output.HazardCurveXMLWriter(nrml_path)
        hc_data = []

        for hc_key in curve_keys:

            if curve_mode == 'mean' and not _is_mean_hazard_curve_key(hc_key):
                error_msg = "non-mean hazard curve key found in mean mode"
                raise RuntimeError(error_msg)

            elif curve_mode == 'quantile':
                if not _is_quantile_hazard_curve_key(hc_key):
                    error_msg = "non-quantile hazard curve key found in "\
                                "quantile mode"
                    raise RuntimeError(error_msg)

                elif tokens.quantile_value_from_hazard_curve_key(hc_key) != \
                    quantile_value:
                    error_msg = "quantile value must be the same for all "\
                                "hazard curves in an instance file"
                    raise ValueError(error_msg)

            elif curve_mode == 'realization':
                if not _is_realization_hazard_curve_key(hc_key):
                    error_msg = "non-realization hazard curve key found in "\
                                "realization mode"
                    raise RuntimeError(error_msg)
                elif tokens.realization_value_from_hazard_curve_key(
                        hc_key) != realization_reference_str:
                    error_msg = "realization value must be the same for all "\
                                "hazard curves in an instance file"
                    raise ValueError(error_msg)

            hc = kvs.get_value_json_decoded(hc_key)

            site_obj = shapes.Site(float(hc['site_lon']),
                                   float(hc['site_lat']))

            # use hazard curve ordinate values (PoE) from KVS
            # NOTE(fab): At the moment, the IMLs are stored along with the
            # PoEs in KVS. However, we are using the IML list from config.
            # The IMLs from KVS are ignored. Note that IMLs from KVS are
            # in logarithmic form, but the ones from config are not.
            # The way of storing the HC data in KVS is not very
            # efficient; we should store the abscissae and ordinates
            # separately as lists instead of pairing them up.
            curve_poe = []
            for curve_pair in hc['curve']:
                curve_poe.append(float(curve_pair['y']))

            hc_attrib = {
                'investigationTimeSpan': self.params['INVESTIGATION_TIME'],
                'IMLValues': iml_list,
                'IMT': self.params['INTENSITY_MEASURE_TYPE'],
                'PoEValues': curve_poe
            }

            hc_attrib.update(hc_attrib_update)
            hc_data.append((site_obj, hc_attrib))

        xmlwriter.serialize(hc_data)
        return nrml_path
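The KVS payload decoded here has the shape visible in the test data further down in this collection: site coordinates plus a list of x/y pairs. Extracting the PoE ordinates, as the loop above does, reduces to a list comprehension:

hc = {"site_lon": 2.0, "site_lat": 5.0,
      "curve": [{"x": 0, "y": 0.98161}, {"x": 0, "y": 0.97837}]}
curve_poe = [float(pair["y"]) for pair in hc["curve"]]  # [0.98161, 0.97837]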
Example #50
    def serialize_hazard_curve(self, nrml_file, key_template, hc_attrib_update,
                               sites):
        """
        Serialize the hazard curves of a set of sites.

        Depending on the parameters the serialized curve will be a plain, mean
        or quantile hazard curve.

        :param nrml_file: the output filename
        :type nrml_file: :py:class:`string`
        :param key_template: a template for constructing the key to get, for
                             each site, its curve from the KVS
        :type key_template: :py:class:`string`
        :param hc_attrib_update: a dictionary containing metadata for the set
                                 of curves that will be serialized
        :type hc_attrib_update: :py:class:`dict`
        :param sites: the sites of which the curve will be serialized
        :type sites: list of :py:class:`openquake.shapes.Site`
        """
        def pause_generator(value):
            """
            Returns the initial value when called for the first time and
            the doubled value upon each subsequent invocation.

            N.B.: the maximum value returned will never exceed 90 (seconds).
            """
            yield value
            while True:
                if value < 45:
                    value *= 2
                yield value

        # XML serialization context
        xsc = namedtuple("XSC", "blocks, cblock, i_total, i_done, i_next")(
            stats.pk_get(self.job_ctxt.job_id, "blocks"),
            stats.pk_get(self.job_ctxt.job_id, "cblock"), len(sites), 0, 0)

        nrml_path = self.job_ctxt.build_nrml_path(nrml_file)

        curve_writer = hazard_output.create_hazardcurve_writer(
            self.job_ctxt.job_id, self.job_ctxt.serialize_results_to,
            nrml_path)

        sites = set(sites)
        accounted_for = set()
        min_pause = 0.1
        pgen = pause_generator(min_pause)
        pause = pgen.next()

        while accounted_for != sites:
            failures = stats.failure_counters(self.job_ctxt.job_id, "h")
            if failures:
                raise RuntimeError("hazard failures (%s), aborting" % failures)
            hc_data = []
            # Sleep a little before checking the availability of additional
            # hazard curve results.
            time.sleep(pause)
            results_found = 0
            for site in sites:
                if site in accounted_for:
                    continue
                value = kvs.get_value_json_decoded(key_template % hash(site))
                if value is None:
                    # No value yet, proceed to next site.
                    continue
                # Use hazard curve ordinate values (PoE) from KVS and abscissae
                # from the IML list in config.
                hc_attrib = {
                    'investigationTimeSpan':
                    self.job_ctxt['INVESTIGATION_TIME'],
                    'IMLValues': self.job_ctxt.imls,
                    'IMT': self.job_ctxt['INTENSITY_MEASURE_TYPE'],
                    'PoEValues': value
                }
                hc_attrib.update(hc_attrib_update)
                hc_data.append((site, hc_attrib))
                accounted_for.add(site)
                results_found += 1
            if not results_found:
                # No results found, increase the sleep pause.
                pause = pgen.next()
            else:
                hazard_output.SerializerContext().update(
                    xsc._replace(i_next=len(hc_data)))
                curve_writer.serialize(hc_data)
                xsc = xsc._replace(i_done=xsc.i_done + len(hc_data))
                pause *= 0.8
                pause = min_pause if pause < min_pause else pause

        return nrml_path
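The back-off schedule is easy to see in isolation: starting from the 0.1 s minimum pause, the value doubles until it passes 45 and then stays at 51.2 s, comfortably under the 90 s bound the docstring promises:

def pause_generator(value):
    yield value
    while True:
        if value < 45:
            value *= 2
        yield value

pgen = pause_generator(0.1)
[round(next(pgen), 1) for _ in range(11)]
# [0.1, 0.2, 0.4, 0.8, 1.6, 3.2, 6.4, 12.8, 25.6, 51.2, 51.2]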
Example #51
    def from_kvs(job_id):
        """Return the job in the underlying kvs system with the given id."""

        params = kvs.get_value_json_decoded(kvs.generate_job_key(job_id))
        return Job(params, job_id)
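get_value_json_decoded() and its set_value_json_encoded() counterpart wrap a JSON round-trip through the store. In miniature, with a plain dict standing in for the KVS backend:

import json

store = {}

def set_value_json_encoded(key, value):
    store[key] = json.dumps(value)

def get_value_json_decoded(key):
    raw = store.get(key)
    return json.loads(raw) if raw is not None else None

set_value_json_encoded("job!1", {"debug": "warn"})
get_value_json_decoded("job!1")  # {'debug': 'warn'}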
Example #52
    def _get_iml_at(self, site, poe):
        return kvs.get_value_json_decoded(
            kvs.tokens.mean_hazard_map_key(self.job_id, site, poe))
Example #53
    def test_reads_and_stores_the_quantile_curve_in_kvs(self):
        hazard_curve_1 = [
            0.98161,
            0.97837,
            0.95579,
            0.92555,
            0.87052,
            0.78214,
            0.65708,
            0.50526,
            0.37044,
            0.3474,
            0.20502,
            0.10506,
            0.046531,
            0.017548,
            0.0054791,
            0.0013377,
            0.00022489,
            2.2345e-05,
            4.2696e-07,
        ]
        hazard_curve_2 = [
            0.97309,
            0.96857,
            0.93853,
            0.90089,
            0.83673,
            0.74057,
            0.61272,
            0.46467,
            0.33694,
            0.31536,
            0.1834,
            0.092412,
            0.040202,
            0.0149,
            0.0045924,
            0.0011126,
            0.00018647,
            1.8882e-05,
            4.7123e-07,
        ]
        hazard_curve_3 = [
            0.99178,
            0.98892,
            0.96903,
            0.9403,
            0.88405,
            0.78782,
            0.64627,
            0.47537,
            0.33168,
            0.30827,
            0.17279,
            0.08836,
            0.042766,
            0.019643,
            0.0081923,
            0.0029157,
            0.00079955,
            0.00015233,
            1.5582e-05,
        ]
        hazard_curve_4 = [
            0.98885,
            0.98505,
            0.95972,
            0.92494,
            0.8603,
            0.75574,
            0.61009,
            0.44217,
            0.30543,
            0.28345,
            0.1576,
            0.080225,
            0.038681,
            0.017637,
            0.0072685,
            0.0025474,
            0.00068347,
            0.00012596,
            1.2853e-05,
        ]
        hazard_curve_5 = [
            0.99178,
            0.98892,
            0.96903,
            0.9403,
            0.88405,
            0.78782,
            0.64627,
            0.47537,
            0.33168,
            0.30827,
            0.17279,
            0.08836,
            0.042766,
            0.019643,
            0.0081923,
            0.0029157,
            0.00079955,
            0.00015233,
            1.5582e-05,
        ]

        self._store_hazard_curve_at(shapes.Site(2.0, 5.0), hazard_curve_1, 0)
        self._store_hazard_curve_at(shapes.Site(2.0, 5.0), hazard_curve_2, 1)
        self._store_hazard_curve_at(shapes.Site(2.0, 5.0), hazard_curve_3, 2)
        self._store_hazard_curve_at(shapes.Site(2.0, 5.0), hazard_curve_4, 3)
        self._store_hazard_curve_at(shapes.Site(2.0, 5.0), hazard_curve_5, 4)

        self._run([shapes.Site(2.0, 5.0)], 5, [0.75])

        result = kvs.get_value_json_decoded(
            kvs.tokens.quantile_hazard_curve_key(self.job_id,
                                                 shapes.Site(2.0, 5.0), 0.75))

        # values are correct
        self.assertTrue(numpy.allclose(self.expected_curve, result,
                                       atol=0.005))
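The statistic under test is a 0.75 quantile taken per ordinate across the five realization curves. How the engine interpolates between order statistics is not shown here, but numpy.percentile() illustrates the idea:

import numpy

# One row per realization curve, one column per IML (ordinates abridged
# from the five curves above).
curves = numpy.array([[0.98161, 0.50526],
                      [0.97309, 0.46467],
                      [0.99178, 0.47537],
                      [0.98885, 0.44217],
                      [0.99178, 0.47537]])
numpy.percentile(curves, 75, axis=0)  # one 0.75-quantile value per column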
Example #54
    def test_reads_and_stores_the_quantile_curve_in_kvs(self):
        self.params[self.quantiles_levels] = "0.75"

        hazard_curve_1 = {"site_lon": 2.0, "site_lat": 5.0, "curve": [
                {"y": 9.8161000e-01, "x": 0}, {"y": 9.7837000e-01, "x": 0},
                {"y": 9.5579000e-01, "x": 0}, {"y": 9.2555000e-01, "x": 0},
                {"y": 8.7052000e-01, "x": 0}, {"y": 7.8214000e-01, "x": 0},
                {"y": 6.5708000e-01, "x": 0}, {"y": 5.0526000e-01, "x": 0},
                {"y": 3.7044000e-01, "x": 0}, {"y": 3.4740000e-01, "x": 0},
                {"y": 2.0502000e-01, "x": 0}, {"y": 1.0506000e-01, "x": 0},
                {"y": 4.6531000e-02, "x": 0}, {"y": 1.7548000e-02, "x": 0},
                {"y": 5.4791000e-03, "x": 0}, {"y": 1.3377000e-03, "x": 0},
                {"y": 2.2489000e-04, "x": 0}, {"y": 2.2345000e-05, "x": 0},
                {"y": 4.2696000e-07, "x": 0}]}

        hazard_curve_2 = {"site_lon": 2.0, "site_lat": 5.0, "curve": [
                {"y": 9.7309000e-01, "x": 0}, {"y": 9.6857000e-01, "x": 0},
                {"y": 9.3853000e-01, "x": 0}, {"y": 9.0089000e-01, "x": 0},
                {"y": 8.3673000e-01, "x": 0}, {"y": 7.4057000e-01, "x": 0},
                {"y": 6.1272000e-01, "x": 0}, {"y": 4.6467000e-01, "x": 0},
                {"y": 3.3694000e-01, "x": 0}, {"y": 3.1536000e-01, "x": 0},
                {"y": 1.8340000e-01, "x": 0}, {"y": 9.2412000e-02, "x": 0},
                {"y": 4.0202000e-02, "x": 0}, {"y": 1.4900000e-02, "x": 0},
                {"y": 4.5924000e-03, "x": 0}, {"y": 1.1126000e-03, "x": 0},
                {"y": 1.8647000e-04, "x": 0}, {"y": 1.8882000e-05, "x": 0},
                {"y": 4.7123000e-07, "x": 0}]}

        hazard_curve_3 = {"site_lon": 2.0, "site_lat": 5.0, "curve": [
                {"y": 9.9178000e-01, "x": 0}, {"y": 9.8892000e-01, "x": 0},
                {"y": 9.6903000e-01, "x": 0}, {"y": 9.4030000e-01, "x": 0},
                {"y": 8.8405000e-01, "x": 0}, {"y": 7.8782000e-01, "x": 0},
                {"y": 6.4627000e-01, "x": 0}, {"y": 4.7537000e-01, "x": 0},
                {"y": 3.3168000e-01, "x": 0}, {"y": 3.0827000e-01, "x": 0},
                {"y": 1.7279000e-01, "x": 0}, {"y": 8.8360000e-02, "x": 0},
                {"y": 4.2766000e-02, "x": 0}, {"y": 1.9643000e-02, "x": 0},
                {"y": 8.1923000e-03, "x": 0}, {"y": 2.9157000e-03, "x": 0},
                {"y": 7.9955000e-04, "x": 0}, {"y": 1.5233000e-04, "x": 0},
                {"y": 1.5582000e-05, "x": 0}]}

        hazard_curve_4 = {"site_lon": 2.0, "site_lat": 5.0, "curve": [
                {"y": 9.8885000e-01, "x": 0}, {"y": 9.8505000e-01, "x": 0},
                {"y": 9.5972000e-01, "x": 0}, {"y": 9.2494000e-01, "x": 0},
                {"y": 8.6030000e-01, "x": 0}, {"y": 7.5574000e-01, "x": 0},
                {"y": 6.1009000e-01, "x": 0}, {"y": 4.4217000e-01, "x": 0},
                {"y": 3.0543000e-01, "x": 0}, {"y": 2.8345000e-01, "x": 0},
                {"y": 1.5760000e-01, "x": 0}, {"y": 8.0225000e-02, "x": 0},
                {"y": 3.8681000e-02, "x": 0}, {"y": 1.7637000e-02, "x": 0},
                {"y": 7.2685000e-03, "x": 0}, {"y": 2.5474000e-03, "x": 0},
                {"y": 6.8347000e-04, "x": 0}, {"y": 1.2596000e-04, "x": 0},
                {"y": 1.2853000e-05, "x": 0}]}

        hazard_curve_5 = {"site_lon": 2.0, "site_lat": 5.0, "curve": [
                {"y": 9.9178000e-01, "x": 0}, {"y": 9.8892000e-01, "x": 0},
                {"y": 9.6903000e-01, "x": 0}, {"y": 9.4030000e-01, "x": 0},
                {"y": 8.8405000e-01, "x": 0}, {"y": 7.8782000e-01, "x": 0},
                {"y": 6.4627000e-01, "x": 0}, {"y": 4.7537000e-01, "x": 0},
                {"y": 3.3168000e-01, "x": 0}, {"y": 3.0827000e-01, "x": 0},
                {"y": 1.7279000e-01, "x": 0}, {"y": 8.8360000e-02, "x": 0},
                {"y": 4.2766000e-02, "x": 0}, {"y": 1.9643000e-02, "x": 0},
                {"y": 8.1923000e-03, "x": 0}, {"y": 2.9157000e-03, "x": 0},
                {"y": 7.9955000e-04, "x": 0}, {"y": 1.5233000e-04, "x": 0},
                {"y": 1.5582000e-05, "x": 0}]}

        self._store_hazard_curve_at(shapes.Site(2.0, 5.0), hazard_curve_1, 1)
        self._store_hazard_curve_at(shapes.Site(2.0, 5.0), hazard_curve_2, 2)
        self._store_hazard_curve_at(shapes.Site(2.0, 5.0), hazard_curve_3, 3)
        self._store_hazard_curve_at(shapes.Site(2.0, 5.0), hazard_curve_4, 4)
        self._store_hazard_curve_at(shapes.Site(2.0, 5.0), hazard_curve_5, 5)

        self._run([shapes.Site(2.0, 5.0)])

        result = kvs.get_value_json_decoded(
                kvs.tokens.quantile_hazard_curve_key(
                self.job_id, shapes.Site(2.0, 5.0), 0.75))

        # site is correct
        self.assertEqual(2.0, result["site_lon"])
        self.assertEqual(5.0, result["site_lat"])

        # values are correct
        self.assertTrue(numpy.allclose(self.expected_curve,
                classical_psha._extract_y_values_from(result["curve"]),
                atol=0.005))
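classical_psha._extract_y_values_from() is not reproduced in this collection; judging from its use here, a faithful stand-in would be:

def _extract_y_values_from(curve):
    # Hypothetical reconstruction: pull the PoE ordinates out of the
    # [{"x": ..., "y": ...}, ...] pairs stored in KVS.
    return [float(pair["y"]) for pair in curve]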
Example #55
def write_out_ses(job_file, stochastic_set_key):
    """ Write out Stochastic Event Set """
    hazengine = job.Job.from_file(job_file)
    with mixins.Mixin(hazengine, hazjob.HazJobMixin, key="hazard"):
        ses = kvs.get_value_json_decoded(stochastic_set_key)
        hazengine.write_gmf_files(ses)
Example #56
    def write_hazardmap_file(self, map_keys):
        """Generate a NRML file with a hazard map for a collection of
        hazard map nodes from KVS, identified through their KVS keys.

        map_keys is a list of KVS keys of the hazard map nodes to be
        serialized.

        The hazard map file can be written
        (1) for a mean hazard map at a set of sites
        (2) for a quantile hazard map at a set of sites

        Mixing of these two cases is not allowed, i.e., all hazard maps
        from the set of map_keys have to be either mean or quantile.
        """
        poe_list = [
            float(x)
            for x in self.params[classical_psha.POES_PARAM_NAME].split()
        ]
        if len(poe_list) == 0:
            return None

        if _is_mean_hazmap_key(map_keys[0]):
            hm_attrib_update = {'statistics': 'mean'}
            filename_part = 'mean'
            map_mode = 'mean'

        elif _is_quantile_hazmap_key(map_keys[0]):

            # get quantile value from KVS key
            quantile_value = tokens.quantile_value_from_hazard_map_key(
                map_keys[0])
            hm_attrib_update = {
                'statistics': 'quantile',
                'quantileValue': quantile_value
            }
            filename_part = "quantile-%.2f" % quantile_value
            map_mode = 'quantile'

        else:
            error_msg = "no valid hazard map type found in KVS key"
            raise RuntimeError(error_msg)

        files = []
        # path to the output directory
        output_path = os.path.join(self['BASE_PATH'], self['OUTPUT_DIR'])
        for poe in poe_list:

            nrml_file = "%s-%s-%s.xml" % (HAZARD_MAP_FILENAME_PREFIX, str(poe),
                                          filename_part)

            nrml_path = os.path.join(output_path, nrml_file)

            LOG.debug("Generating NRML hazard map file for PoE %s, mode %s, "\
                "%s nodes in hazard map: %s" % (
                poe, map_mode, len(map_keys), nrml_file))

            xmlwriter = hazard_output.HazardMapXMLWriter(nrml_path)
            hm_data = []

            for hm_key in map_keys:

                if tokens.poe_value_from_hazard_map_key(hm_key) != poe:
                    continue

                elif map_mode == 'mean' and not _is_mean_hazmap_key(hm_key):
                    error_msg = "non-mean hazard map key found in mean mode"
                    raise RuntimeError(error_msg)

                elif map_mode == 'quantile':
                    if not _is_quantile_hazmap_key(hm_key):
                        error_msg = "non-quantile hazard map key found in "\
                                    "quantile mode"
                        raise RuntimeError(error_msg)

                    elif tokens.quantile_value_from_hazard_map_key(hm_key) != \
                        quantile_value:
                        error_msg = "quantile value must be the same for all "\
                                    "hazard map nodes in an instance file"
                        raise ValueError(error_msg)

                hm = kvs.get_value_json_decoded(hm_key)

                site_obj = shapes.Site(float(hm['site_lon']),
                                       float(hm['site_lat']))

                # use hazard map IML and vs30 values from KVS
                hm_attrib = {
                    'investigationTimeSpan': self.params['INVESTIGATION_TIME'],
                    'IMT': self.params['INTENSITY_MEASURE_TYPE'],
                    'IML': hm['IML'],
                    'vs30': hm['vs30'],
                    'poE': poe
                }

                hm_attrib.update(hm_attrib_update)
                hm_data.append((site_obj, hm_attrib))

            hm_geotiff_name = '%s-%s-%s.tiff' % (HAZARD_MAP_FILENAME_PREFIX,
                                                 str(poe), filename_part)
            geotiff_path = os.path.join(output_path, hm_geotiff_name)

            self._write_hazard_map_geotiff(geotiff_path, hm_data)
            xmlwriter.serialize(hm_data)

            files.append(nrml_path)
            files.append(geotiff_path)

        return files
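For a concrete feel of the artifacts produced: with a prefix of, say, "hazardmap" (the actual value of HAZARD_MAP_FILENAME_PREFIX is an assumption), PoE 0.1 and quantile 0.75, the two filenames come out as:

prefix = "hazardmap"  # assumed value of HAZARD_MAP_FILENAME_PREFIX
"%s-%s-%s.xml" % (prefix, str(0.1), "quantile-%.2f" % 0.75)
# 'hazardmap-0.1-quantile-0.75.xml'
"%s-%s-%s.tiff" % (prefix, str(0.1), "quantile-%.2f" % 0.75)
# 'hazardmap-0.1-quantile-0.75.tiff'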
Example #57
    def ath(self, sites, rtype, datum=None):
        """
        Write calculation results to the database.

        :param sites: the sites for which to write calculation results.
        :type sites: list of :py:class:`openquake.shapes.Site`
        :param str rtype: hazard curve type, one of: curve, mean, quantile
        :param datum: one of: realization, None, quantile
        """
        def pause_generator(value):
            """
            Returns the initial value when called for the first time and
            the doubled value upon each subsequent invocation.

            N.B.: the maximum value returned will never exceed 90 (seconds).
            """
            yield value
            while True:
                if value < 45:
                    value *= 2
                yield value

        sites = set(sites)
        accounted_for = set()
        min_pause = 0.1
        pgen = pause_generator(min_pause)
        pause = pgen.next()

        key_template, nrml_path, hc_meta = psha_exp.hcs_meta(
            self.job_ctxt, rtype, datum)

        curve_writer = hazard_output.HazardCurveDBWriter(
            nrml_path, self.job_ctxt.job_id)

        while accounted_for != sites:
            failures = stats.failure_counters(self.job_ctxt.job_id, "h")
            if failures:
                raise RuntimeError("hazard failures (%s), aborting" % failures)
            hc_data = []
            # Sleep a little before checking the availability of additional
            # hazard curve results.
            time.sleep(pause)
            results_found = 0
            for site in sites:
                if site in accounted_for:
                    continue
                value = kvs.get_value_json_decoded(key_template % hash(site))
                if value is None:
                    # No value yet, proceed to next site.
                    continue
                # Use hazard curve ordinate values (PoE) from KVS and abscissae
                # from the IML list in config.
                hc_attrib = {
                    'investigationTimeSpan':
                    self.job_ctxt['INVESTIGATION_TIME'],
                    'IMLValues': self.job_ctxt.imls,
                    'IMT': self.job_ctxt['INTENSITY_MEASURE_TYPE'],
                    'PoEValues': value
                }
                hc_attrib.update(hc_meta)
                hc_data.append((site, hc_attrib))
                accounted_for.add(site)
                results_found += 1
            if not results_found:
                # No results found, increase the sleep pause.
                pause = pgen.next()
            else:
                curve_writer.serialize(hc_data)
                pause *= 0.8
                pause = min_pause if pause < min_pause else pause
            logs.log_percent_complete(self.job_ctxt.job_id, "hazard")

        return nrml_path
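The polling pattern in ath() in miniature: sweep the outstanding sites, account for those whose value has appeared, and stop once every site is covered. KVS access, DB writing and the failure counters are all stubbed with a plain dict here:

store = {"site-a": None, "site-b": [0.9, 0.5]}  # None = not yet computed
sites = set(store)
accounted_for = set()
while accounted_for != sites:
    found = 0
    for site in sites - accounted_for:
        if store[site] is not None:
            accounted_for.add(site)
            found += 1
    if not found:
        store["site-a"] = [0.8, 0.4]  # simulate a worker finishing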