def compute_mean_hazard_curves(job_id, sites):
    """Compute and store in kvs a mean hazard curve for each site.

    The mean is taken over all the pre-computed curves for the
    different realizations of the site.

    :returns: the list of kvs keys that were written.
    """
    keys = []
    for site in sites:
        realization_curves = curves_at(job_id, site)
        y_value_sets = [_extract_y_values_from(curve)
                        for curve in realization_curves]
        mean_poes = compute_mean_curve(y_value_sets)

        # All realization curves share the same abscissae; take them
        # from any one of the curves.
        sample_curve = realization_curves.pop()
        abscissae = [point["x"] for point in sample_curve]

        mean_curve = {
            "site_lon": site.longitude,
            "site_lat": site.latitude,
            "curve": _reconstruct_curve_list_from(mean_poes, abscissae),
        }

        key = kvs.tokens.mean_hazard_curve_key(job_id, site)
        keys.append(key)
        kvs.set_value_json_encoded(key, mean_curve)
    return keys
def to_kvs(self):
    """Store this job into kvs."""
    self._slurp_files()
    # Persist the parameters plus the configured log level (as 'debug').
    data = dict(self.params, debug=self.log_level)
    kvs.set_value_json_encoded(
        kvs.tokens.generate_job_key(self.job_id), data)
def _compute_bcr(self, block_id):
    """
    Calculate and store in the kvs the benefit-cost ratio data for block.

    A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
    See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
    data structure spec.
    """
    ctxt = self.job_ctxt
    grid_points = list(
        general.Block.from_kvs(ctxt.job_id, block_id).grid(ctxt.region))

    # One hazard curve per site, fetched from the DB once up front.
    site_curves = dict((point.site, self._get_db_curve(point.site))
                       for point in grid_points)

    def get_loss_curve(point, vuln_function, asset):
        "Compute loss curve basing on hazard curve"
        profile = self.job_ctxt.oq_job_profile
        ratio_curve = compute_loss_ratio_curve(
            vuln_function, site_curves[point.site],
            profile.lrem_steps_per_interval)
        return compute_loss_curve(ratio_curve, asset.value)

    bcr = general.compute_bcr_for_block(
        ctxt.job_id, grid_points, get_loss_curve,
        float(ctxt.params['INTEREST_RATE']),
        float(ctxt.params['ASSET_LIFE_EXPECTANCY']))

    key = kvs.tokens.bcr_block_key(ctxt.job_id, block_id)
    kvs.set_value_json_encoded(key, bcr)
    LOGGER.debug('bcr result for block %s: %r', block_id, bcr)
    return True
def compute_quantile_hazard_curves(job, sites):
    """Compute a quantile hazard curve for each site in the list
    using as input all the pre-computed curves for different realizations.

    The QUANTILE_LEVELS parameter in the configuration file specifies
    all the values used in the computation.
    """
    keys = []
    quantiles = _extract_values_from_config(job, QUANTILE_PARAM_NAME)
    LOG.debug("[QUANTILE_HAZARD_CURVES] List of quantiles is %s" % quantiles)

    for site in sites:
        for quantile in quantiles:
            # NOTE(review): the realization curves are re-read from kvs
            # for every quantile of the same site.
            realization_curves = curves_at(job.id, site)
            y_values = [_extract_y_values_from(curve)
                        for curve in realization_curves]
            quantile_poes = compute_quantile_curve(y_values, quantile)

            quantile_curve = {
                "site_lat": site.latitude,
                "site_lon": site.longitude,
                "curve": _reconstruct_curve_list_from(quantile_poes),
            }

            key = kvs.tokens.quantile_hazard_curve_key(
                job.id, site, quantile)
            keys.append(key)
            kvs.set_value_json_encoded(key, quantile_curve)
    return keys
def _compute_bcr(self, block_id):
    """
    Calculate and store in the kvs the benefit-cost ratio data for block.

    A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
    See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
    data structure spec.
    """
    job_ctxt = self.job_ctxt
    block = general.Block.from_kvs(job_ctxt.job_id, block_id)
    points = list(block.grid(job_ctxt.region))

    # Pre-fetch one hazard curve per site from the database.
    hazard_curves = {}
    for point in points:
        hazard_curves[point.site] = self._get_db_curve(point.site)

    def get_loss_curve(point, vuln_function, asset):
        "Compute loss curve basing on hazard curve"
        job_profile = self.job_ctxt.oq_job_profile
        loss_ratio_curve = compute_loss_ratio_curve(
            vuln_function, hazard_curves[point.site],
            job_profile.lrem_steps_per_interval)
        return compute_loss_curve(loss_ratio_curve, asset.value)

    bcr = general.compute_bcr_for_block(
        job_ctxt.job_id, points, get_loss_curve,
        float(job_ctxt.params['INTEREST_RATE']),
        float(job_ctxt.params['ASSET_LIFE_EXPECTANCY']))

    bcr_block_key = kvs.tokens.bcr_block_key(job_ctxt.job_id, block_id)
    kvs.set_value_json_encoded(bcr_block_key, bcr)
    LOGGER.debug('bcr result for block %s: %r', block_id, bcr)
    return True
def compute_quantile_hazard_curves(job, sites):
    """Compute a quantile hazard curve for each site in the list
    using as input all the pre-computed curves for different realizations.

    The QUANTILE_LEVELS parameter in the configuration file specifies
    all the values used in the computation.
    """
    keys = []
    quantiles = _extract_quantiles_from_config(job)
    LOG.debug("List of QUANTILES is %s" % quantiles)

    for site in sites:
        for quantile in quantiles:
            # Build the quantile curve from the realization curves
            # stored in kvs for this site.
            poes = compute_quantile_curve(curves_at(job.id, site), quantile)
            quantile_curve = {
                "site_lat": site.latitude,
                "site_lon": site.longitude,
                "curve": _reconstruct_curve_list_from(poes),
            }

            key = kvs.tokens.quantile_hazard_curve_key(job.id, site, quantile)
            keys.append(key)
            LOG.debug("QUANTILE curve at %s is %s" % (key, quantile_curve))
            kvs.set_value_json_encoded(key, quantile_curve)
    return keys
def to_kvs(self, write_cfg=True):
    """Store this job into kvs.

    :param write_cfg: when True also (re)write the super config.
    """
    self._slurp_files()
    if write_cfg:
        self._write_super_config()
    job_key = kvs.tokens.generate_job_key(self.job_id)
    kvs.set_value_json_encoded(job_key, self.params)
def to_kvs(self, write_cfg=True):
    """Store this job into kvs.

    :param write_cfg: when True also (re)write the super config.
    """
    self._slurp_files()
    if write_cfg:
        self._write_super_config()
    kvs.set_value_json_encoded(
        kvs.generate_job_key(self.job_id), self.params)
def to_kvs(self):
    """Store this block into the underlying kvs system.

    The coordinates of the block's sites are JSON-encoded and stored
    under the block id.
    """
    # Idiomatic: a comprehension instead of a manual append loop.
    raw_sites = [site.coords for site in self.sites]
    kvs.set_value_json_encoded(self.id, raw_sites)
def to_kvs(self):
    """Store this block into the underlying KVS system.

    The coordinates of the block's sites are JSON-encoded and stored
    under the block id.
    """
    # Idiomatic: a comprehension instead of a manual append loop.
    raw_sites = [site.coords for site in self.sites]
    kvs.set_value_json_encoded(self.id, raw_sites)
def _compute_bcr(self, block_id):
    """
    Calculate and store in the kvs the benefit-cost ratio data for block.

    A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
    See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
    data structure spec.
    """
    result = defaultdict(list)
    seed, correlation_type = self._get_correlation_type()
    block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)
    loss_histogram_bins = self.job_ctxt.oq_job_profile.loss_histogram_bins

    vuln_model_original = vulnerability.load_vuln_model_from_kvs(
        self.job_ctxt.job_id)
    vuln_model_retrofitted = vulnerability.load_vuln_model_from_kvs(
        self.job_ctxt.job_id, retrofitted=True)

    assets_getter = lambda site: general.BaseRiskCalculator.assets_at(
        self.job_ctxt.job_id, site)

    def hazard_getter(site):
        # Ground-motion values plus the time parameters of the run.
        gmvs = self._get_gmvs_at(
            general.hazard_input_site(self.job_ctxt, site))
        return {"IMLs": gmvs, "TSES": self._tses(),
                "TimeSpan": self._time_span()}

    bcr = api.bcr(
        api.probabilistic_event_based(
            vuln_model_original, loss_histogram_bins, seed,
            correlation_type),
        api.probabilistic_event_based(
            vuln_model_retrofitted, loss_histogram_bins, seed,
            correlation_type),
        float(self.job_ctxt.params["INTEREST_RATE"]),
        float(self.job_ctxt.params["ASSET_LIFE_EXPECTANCY"]))

    for asset_output in api.compute_on_sites(
            block.sites, assets_getter, hazard_getter, bcr):
        asset = asset_output.asset
        result[(asset.site.x, asset.site.y)].append((
            {"bcr": asset_output.bcr,
             "eal_original": asset_output.eal_original,
             "eal_retrofitted": asset_output.eal_retrofitted},
            asset.asset_ref))

    bcr_block_key = kvs.tokens.bcr_block_key(
        self.job_ctxt.job_id, block_id)
    result = result.items()
    kvs.set_value_json_encoded(bcr_block_key, result)
    LOGGER.debug("bcr result for block %s: %r", block_id, result)
def slice_gmfs(self, block_id):
    """Load and collate GMF values for all sites in this block."""
    block = general.Block.from_kvs(block_id)
    gmfs = self._get_db_gmfs(block.sites, self.job_id)
    for key, gmf_slice in gmfs.items():
        # Keys are encoded as "row!col".
        row, col = key.split("!")
        gmf_key = kvs.tokens.gmf_set_key(self.job_id, col, row)
        LOGGER.debug("GMF_SLICE for %s X %s : \n\t%s" % (col, row, gmf_slice))
        payload = {
            "IMLs": gmf_slice,
            "TSES": self._tses(),
            "TimeSpan": self._time_span(),
        }
        kvs.set_value_json_encoded(gmf_key, payload)
def to_kvs(self):
    """Store this block into the underlying KVS system.

    The coordinates of the block's sites are JSON-encoded and stored
    under the key built from the job id and block id.
    """
    # Idiomatic: a comprehension instead of a manual append loop.
    raw_sites = [site.coords for site in self.sites]
    block_key = kvs.tokens.risk_block_key(self.job_id, self.block_id)
    kvs.set_value_json_encoded(block_key, raw_sites)
def _store_iml_for(curve, key, job, poe):
    """Store an interpolated IML in kvs along with all the needed
    metadata (site coordinates and reference vs30)."""
    im_level = {
        "site_lon": curve["site_lon"],
        "site_lat": curve["site_lat"],
        "vs30": float(job.params["REFERENCE_VS30_VALUE"]),
        "IML": _get_iml_from(curve, job, poe),
    }
    kvs.set_value_json_encoded(key, im_level)
def to_kvs(self):
    """Store this block into the underlying KVS system.

    The coordinates of the block's sites are JSON-encoded and stored
    under the key built from the calculation id and block id.
    """
    # Idiomatic: a comprehension instead of a manual append loop.
    raw_sites = [site.coords for site in self.sites]
    block_key = kvs.tokens.risk_block_key(self.calculation_id,
                                          self.block_id)
    kvs.set_value_json_encoded(block_key, raw_sites)
def _compute_bcr(self, block_id):
    """
    Calculate and store in the kvs the benefit-cost ratio data for block.

    A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
    See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
    data structure spec.
    """
    # aggregate the losses for this block
    aggregate_curve = general.AggregateLossCurve()
    block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)
    epsilon_provider = general.EpsilonProvider(self.job_ctxt.params)

    def get_loss_curve(site, vuln_function, asset):
        "Compute loss curve basing on GMF data"
        gmvs = self._get_gmvs_at(
            general.hazard_input_site(self.job_ctxt, site))
        gmf_slice = {"IMLs": gmvs, "TSES": self._tses(),
                     "TimeSpan": self._time_span()}
        loss_ratios = general.compute_loss_ratios(
            vuln_function, gmf_slice, epsilon_provider, asset)
        loss_ratio_curve = general.compute_loss_ratio_curve(
            vuln_function, gmf_slice, epsilon_provider, asset,
            self.job_ctxt.oq_job_profile.loss_histogram_bins,
            loss_ratios=loss_ratios)
        # Track the absolute losses for the block-level aggregate curve.
        aggregate_curve.append(loss_ratios * asset.value)
        return loss_ratio_curve.rescale_abscissae(asset.value)

    result = general.compute_bcr_for_block(
        self.job_ctxt, block.sites, get_loss_curve,
        float(self.job_ctxt.params['INTEREST_RATE']),
        float(self.job_ctxt.params['ASSET_LIFE_EXPECTANCY']))

    bcr_block_key = kvs.tokens.bcr_block_key(
        self.job_ctxt.job_id, block_id)
    kvs.set_value_json_encoded(bcr_block_key, result)
    LOGGER.debug('bcr result for block %s: %r', block_id, result)
    return aggregate_curve.losses
def _compute_bcr(self, block_id):
    """
    Calculate and store in the kvs the benefit-cost ratio data for block.

    A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
    See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
    data structure spec.
    """
    self.slice_gmfs(block_id)

    # aggregate the losses for this block
    aggregate_curve = general.AggregateLossCurve()

    points = list(general.Block.from_kvs(
        self.job_ctxt.job_id, block_id).grid(self.job_ctxt.region))

    # Pre-load the GMF slice of every grid point from kvs.
    gmf_slices = dict(
        (point.site,
         kvs.get_value_json_decoded(
             kvs.tokens.gmf_set_key(
                 self.job_ctxt.job_id, point.column, point.row)))
        for point in points)

    epsilon_provider = general.EpsilonProvider(self.job_ctxt.params)

    def get_loss_curve(point, vuln_function, asset):
        "Compute loss curve basing on GMF data"
        gmf_slice = gmf_slices[point.site]
        loss_ratios = general.compute_loss_ratios(
            vuln_function, gmf_slice, epsilon_provider, asset)
        loss_ratio_curve = general.compute_loss_ratio_curve(
            vuln_function, gmf_slice, epsilon_provider, asset,
            self.job_ctxt.oq_job_profile.loss_histogram_bins,
            loss_ratios=loss_ratios)
        aggregate_curve.append(loss_ratios * asset.value)
        return loss_ratio_curve.rescale_abscissae(asset.value)

    result = general.compute_bcr_for_block(
        self.job_ctxt.job_id, points, get_loss_curve,
        float(self.job_ctxt.params['INTEREST_RATE']),
        float(self.job_ctxt.params['ASSET_LIFE_EXPECTANCY']))

    bcr_block_key = kvs.tokens.bcr_block_key(
        self.job_ctxt.job_id, block_id)
    kvs.set_value_json_encoded(bcr_block_key, result)
    LOGGER.debug('bcr result for block %s: %r', block_id, result)
    return aggregate_curve.losses
def _compute_bcr(self, block_id):
    """
    Calculate and store in the kvs the benefit-cost ratio data for block.

    A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
    See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
    data structure spec.
    """
    result = defaultdict(list)
    block = Block.from_kvs(self.job_ctxt.job_id, block_id)

    original_model = vulnerability.load_vuln_model_from_kvs(
        self.job_ctxt.job_id)
    retrofitted_model = vulnerability.load_vuln_model_from_kvs(
        self.job_ctxt.job_id, retrofitted=True)
    steps = self.job_ctxt.oq_job_profile.lrem_steps_per_interval

    assets_getter = lambda site: BaseRiskCalculator.assets_at(
        self.job_ctxt.job_id, site)
    hazard_getter = lambda site: self._get_db_curve(
        hazard_input_site(self.job_ctxt, site))

    bcr = api.bcr(
        api.classical(original_model, steps=steps),
        api.classical(retrofitted_model, steps=steps),
        float(self.job_ctxt.params["INTEREST_RATE"]),
        float(self.job_ctxt.params["ASSET_LIFE_EXPECTANCY"]))

    for asset_output in api.compute_on_sites(
            block.sites, assets_getter, hazard_getter, bcr):
        asset = asset_output.asset
        result[(asset.site.x, asset.site.y)].append((
            {"bcr": asset_output.bcr,
             "eal_original": asset_output.eal_original,
             "eal_retrofitted": asset_output.eal_retrofitted},
            asset.asset_ref))

    bcr = result.items()
    bcr_block_key = kvs.tokens.bcr_block_key(
        self.job_ctxt.job_id, block_id)
    kvs.set_value_json_encoded(bcr_block_key, bcr)
    LOGGER.debug("bcr result for block %s: %r", block_id, bcr)
    return True
def _compute_bcr(self, block_id):
    """
    Calculate and store in the kvs the benefit-cost ratio data for block.

    A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
    See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
    data structure spec.
    """
    self.slice_gmfs(block_id)

    # aggregate the losses for this block
    aggregate_curve = general.AggregateLossCurve()

    job_ctxt = self.job_ctxt
    points = list(general.Block.from_kvs(
        job_ctxt.job_id, block_id).grid(job_ctxt.region))

    # Pre-load the GMF slice of every grid point from kvs.
    gmf_slices = {}
    for point in points:
        gmf_key = kvs.tokens.gmf_set_key(
            job_ctxt.job_id, point.column, point.row)
        gmf_slices[point.site] = kvs.get_value_json_decoded(gmf_key)

    epsilon_provider = general.EpsilonProvider(job_ctxt.params)

    def get_loss_curve(point, vuln_function, asset):
        "Compute loss curve basing on GMF data"
        gmf_slice = gmf_slices[point.site]
        loss_ratios = general.compute_loss_ratios(
            vuln_function, gmf_slice, epsilon_provider, asset)
        loss_ratio_curve = general.compute_loss_ratio_curve(
            vuln_function, gmf_slice, epsilon_provider, asset,
            self.job_ctxt.oq_job_profile.loss_histogram_bins,
            loss_ratios=loss_ratios)
        aggregate_curve.append(loss_ratios * asset.value)
        return loss_ratio_curve.rescale_abscissae(asset.value)

    result = general.compute_bcr_for_block(
        job_ctxt.job_id, points, get_loss_curve,
        float(job_ctxt.params['INTEREST_RATE']),
        float(job_ctxt.params['ASSET_LIFE_EXPECTANCY']))

    bcr_block_key = kvs.tokens.bcr_block_key(job_ctxt.job_id, block_id)
    kvs.set_value_json_encoded(bcr_block_key, result)
    LOGGER.debug('bcr result for block %s: %r', block_id, result)
    return aggregate_curve.losses
def compute_mean_hazard_curves(job_id, sites, realizations):
    """Compute a mean hazard curve for each site in the list
    using as input all the pre-computed curves for different realizations.

    :returns: the list of kvs keys that were written.
    """
    keys = []
    for site in sites:
        mean_poes = compute_mean_curve(
            poes_at(job_id, site, realizations))
        key = kvs.tokens.mean_hazard_curve_key(job_id, site)
        keys.append(key)
        kvs.set_value_json_encoded(key, mean_poes)
    return keys
def _compute_risk_classical_psha_setup(self): SITE = shapes.Site(1.0, 1.0) # deletes all keys from kvs kvs.get_client().flushall() self.job = self.setup_classic_job() # at the moment the hazard part doesn't do exp on the 'x' # so it's done on the risk part. To adapt the calculation # we do the reverse of the exp, i.e. log(x) self.hazard_curve = [ (SITE, {'IMLValues': [0.001, 0.080, 0.170, 0.260, 0.360, 0.550, 0.700], 'PoEValues': [0.99, 0.96, 0.89, 0.82, 0.70, 0.40, 0.01], 'statistics': 'mean'})] # Vitor provided this Vulnerability Function imls_1 = [0.03, 0.04, 0.07, 0.1, 0.12, 0.22, 0.37, 0.52] loss_ratios_1 = [0.001, 0.022, 0.051, 0.08, 0.1, 0.2, 0.405, 0.700] covs_1 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] self.vuln_function = shapes.VulnerabilityFunction(imls_1, loss_ratios_1, covs_1) imls_2 = [0.1, 0.2, 0.4, 0.6] loss_ratios_2 = [0.05, 0.08, 0.2, 0.4] covs_2 = [0.5, 0.3, 0.2, 0.1] self.vuln_function_2 = shapes.VulnerabilityFunction(imls_2, loss_ratios_2, covs_2) self.job_id = self.job.id self.asset_1 = {"taxonomy": "ID", "assetValue": 124.27} self.region = shapes.RegionConstraint.from_simple( (0.0, 0.0), (2.0, 2.0)) self.block_id = kvs.tokens.risk_block_key(self.job_id, 7) block = Block((SITE, SITE), self.block_id) block.to_kvs() writer = hazard.HazardCurveDBWriter('test_path.xml', self.job_id) writer.serialize(self.hazard_curve) kvs.set_value_json_encoded( kvs.tokens.vuln_key(self.job_id), {"ID": self.vuln_function.to_json()})
def compute_mean_hazard_curves(job_id, sites):
    """Compute a mean hazard curve for each site in the list
    using as input all the pre-computed curves for different realizations.

    :returns: the list of kvs keys that were written.
    """
    keys = []
    for site in sites:
        mean_poes = compute_mean_curve(curves_at(job_id, site))
        mean_curve = {
            "site_lon": site.longitude,
            "site_lat": site.latitude,
            "curve": _reconstruct_curve_list_from(mean_poes),
        }
        key = kvs.tokens.mean_hazard_curve_key(job_id, site)
        keys.append(key)
        kvs.set_value_json_encoded(key, mean_curve)
    return keys
def load_vulnerability_model(job_id, path):
    """Load and store the vulnerability model defined in the given NRML
    file in the underlying kvs system.

    :param job_id: id used to build the kvs key.
    :param path: path of the NRML vulnerability file.
    """
    parser = VulnerabilityModelFile(path)
    # Build the model in a single pass instead of a loop-and-assign.
    vulnerability_model = dict(
        (vuln_curve["ID"],
         shapes.VulnerabilityFunction(
             vuln_curve['IML'], vuln_curve['lossRatio'],
             vuln_curve['coefficientsVariation']).to_json())
        for vuln_curve in parser)
    kvs.set_value_json_encoded(kvs.tokens.vuln_key(job_id),
                               vulnerability_model)
def load_vulnerability_model(job_id, path, retrofitted=False):
    """Load and store the vulnerability model defined in the given NRML
    file in the underlying kvs system.

    :param job_id: id used to build the kvs key.
    :param path: path of the NRML vulnerability file.
    :param retrofitted: when True store under the "retrofitted" key.
    """
    parser = VulnerabilityModelFile(path)
    # Build the model in a single pass instead of a loop-and-assign.
    vulnerability_model = dict(
        (vuln_curve["ID"],
         shapes.VulnerabilityFunction(
             vuln_curve['IML'], vuln_curve['lossRatio'],
             vuln_curve['coefficientsVariation']).to_json())
        for vuln_curve in parser)
    kvs.set_value_json_encoded(kvs.tokens.vuln_key(job_id, retrofitted),
                               vulnerability_model)
def slice_gmfs(self, block_id):
    """Load and collate GMF values for all sites in this block."""
    job_id = self.job_ctxt.job_id
    block = general.Block.from_kvs(job_id, block_id)
    for key, gmf_slice in self._get_db_gmfs(block.sites, job_id).items():
        # Keys are encoded as "row!col".
        row, col = key.split("!")
        key_gmf = kvs.tokens.gmf_set_key(job_id, col, row)
        LOGGER.debug("GMF_SLICE for %s X %s : \n\t%s" % (col, row, gmf_slice))
        kvs.set_value_json_encoded(key_gmf, {
            "IMLs": gmf_slice,
            "TSES": self._tses(),
            "TimeSpan": self._time_span(),
        })
def _compute_bcr(self, block_id):
    """
    Calculate and store in the kvs the benefit-cost ratio data for block.

    A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
    See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
    data structure spec.
    """
    block = general.Block.from_kvs(self.job_ctxt.job_id, block_id)
    seed, correlation_type = self._get_correlation_type()

    def hazard_getter(site):
        "Compute loss curve basing on GMF data"
        gmvs = self._get_gmvs_at(
            general.hazard_input_site(self.job_ctxt, site))
        return {"IMLs": gmvs, "TSES": self._tses(),
                "TimeSpan": self._time_span()}

    result = defaultdict(list)

    def on_asset_complete(asset, bcr, eal_original, eal_retrofitted):
        # Group per-asset outputs by site coordinates.
        result[(asset.site.x, asset.site.y)].append(
            ({'bcr': bcr, 'eal_original': eal_original,
              'eal_retrofitted': eal_retrofitted}, asset.asset_ref))

    job_id = self.job_ctxt.job_id
    benefit_cost_ratio.compute_probabilistic(
        block.sites,
        lambda site: general.BaseRiskCalculator.assets_at(job_id, site),
        vulnerability.load_vuln_model_from_kvs(job_id),
        vulnerability.load_vuln_model_from_kvs(job_id, retrofitted=True),
        hazard_getter,
        float(self.job_ctxt.params['INTEREST_RATE']),
        float(self.job_ctxt.params['ASSET_LIFE_EXPECTANCY']),
        self.job_ctxt.oq_job_profile.loss_histogram_bins,
        seed, correlation_type, on_asset_complete)

    bcr_block_key = kvs.tokens.bcr_block_key(
        self.job_ctxt.job_id, block_id)
    result = result.items()
    kvs.set_value_json_encoded(bcr_block_key, result)
    LOGGER.debug('bcr result for block %s: %r', block_id, result)
def _prepare_bcr_result(self):
    """Seed kvs with fixture BCR results for blocks 19 and 20."""
    self.job.blocks_keys = [19, 20]
    block_19 = [
        ((-1.1, 19.0), [
            ({'bcr': 35.1, 'eal_original': 12.34, 'eal_retrofitted': 4},
             'assetID-191'),
            ({'bcr': 35.2, 'eal_original': 2.5, 'eal_retrofitted': 2.2},
             'assetID-192'),
        ])
    ]
    block_20 = [
        ((2.3, 20.0), [
            ({'bcr': 35.1, 'eal_original': 1.23, 'eal_retrofitted': 0.3},
             'assetID-201'),
            ({'bcr': 35.2, 'eal_original': 4, 'eal_retrofitted': 0.4},
             'assetID-202'),
        ])
    ]
    kvs.set_value_json_encoded(
        kvs.tokens.bcr_block_key(self.job_id, 19), block_19)
    kvs.set_value_json_encoded(
        kvs.tokens.bcr_block_key(self.job_id, 20), block_20)
def write_vuln_curves_to_kvs(job_id, vulnerability_curves):
    """JSON encode vulnerability curve and write to KVS"""
    stored = kvs.set_value_json_encoded(
        kvs.tokens.vuln_key(job_id), vulnerability_curves)
    if stored is not True:
        raise RuntimeError(
            "Vulnerability module: cannot write "
            "vulnerability functions to the kvs")
def _compute_bcr(self, block_id):
    """
    Calculate and store in the kvs the benefit-cost ratio data for block.

    A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
    See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
    data structure spec.
    """
    # aggregate the losses for this block
    aggregate_curve = general.AggregateLossCurve()

    job_ctxt = self.job_ctxt
    block = general.Block.from_kvs(job_ctxt.job_id, block_id)
    epsilon_provider = general.EpsilonProvider(job_ctxt.params)

    def get_loss_curve(site, vuln_function, asset):
        "Compute loss curve basing on GMF data"
        gmvs = self._get_gmvs_at(
            general.hazard_input_site(self.job_ctxt, site))
        gmf_slice = {"IMLs": gmvs, "TSES": self._tses(),
                     "TimeSpan": self._time_span()}
        loss_ratios = general.compute_loss_ratios(
            vuln_function, gmf_slice, epsilon_provider, asset)
        loss_ratio_curve = general.compute_loss_ratio_curve(
            vuln_function, gmf_slice, epsilon_provider, asset,
            self.job_ctxt.oq_job_profile.loss_histogram_bins,
            loss_ratios=loss_ratios)
        # Track absolute losses for the block-level aggregate curve.
        aggregate_curve.append(loss_ratios * asset.value)
        return loss_ratio_curve.rescale_abscissae(asset.value)

    result = general.compute_bcr_for_block(
        self.job_ctxt, block.sites, get_loss_curve,
        float(job_ctxt.params['INTEREST_RATE']),
        float(job_ctxt.params['ASSET_LIFE_EXPECTANCY']))

    bcr_block_key = kvs.tokens.bcr_block_key(job_ctxt.job_id, block_id)
    kvs.set_value_json_encoded(bcr_block_key, result)
    LOGGER.debug('bcr result for block %s: %r', block_id, result)
    return aggregate_curve.losses
def compute_mean_hazard_maps(job_id, sites, imls, poes):
    """Compute mean hazard maps using as input all the pre computed
    mean hazard curves.
    """
    LOG.debug("[MEAN_HAZARD_MAPS] List of POEs is %s" % poes)
    keys = []
    for site in sites:
        curve_key = kvs.tokens.mean_hazard_curve_key(job_id, site)
        mean_poes = kvs.get_value_json_decoded(curve_key)
        interpolate = build_interpolator(mean_poes, imls, site)
        for poe in poes:
            map_key = kvs.tokens.mean_hazard_map_key(job_id, site, poe)
            keys.append(map_key)
            kvs.set_value_json_encoded(map_key, interpolate(poe))
    return keys
def setUp(self):
    # Minimal job parameters for the fixture job context.
    self.params = {}
    self.params["OUTPUT_DIR"] = helpers.OUTPUT_DIR
    self.params["BASE_PATH"] = "."
    self.params["INVESTIGATION_TIME"] = 50.0

    self.job_ctxt = helpers.create_job(
        self.params, base_path=".", job_id=self.job.id,
        oq_job=self.job, oq_job_profile=models.profile4job(self.job.id))
    self.job_id = self.job_ctxt.job_id
    self.job_ctxt.to_kvs()

    # Vulnerability function fixture: 100 IML levels, the matching loss
    # ratios, all-zero coefficients of variation, "LN" distribution.
    self.vulnerability_function2 = vulnerability_function.VulnerabilityFunction(
        [0.0, 0.04, 0.08, 0.12, 0.16, 0.2, 0.24, 0.28, 0.32, 0.36,
         0.4, 0.44, 0.48, 0.53, 0.57, 0.61, 0.65, 0.69, 0.73, 0.77,
         0.81, 0.85, 0.89, 0.93, 0.97, 1.01, 1.05, 1.09, 1.13, 1.17,
         1.21, 1.25, 1.29, 1.33, 1.37, 1.41, 1.45, 1.49, 1.54, 1.58,
         1.62, 1.66, 1.7, 1.74, 1.78, 1.82, 1.86, 1.9, 1.94, 1.98,
         2.02, 2.06, 2.1, 2.14, 2.18, 2.22, 2.26, 2.3, 2.34, 2.38,
         2.42, 2.46, 2.51, 2.55, 2.59, 2.63, 2.67, 2.71, 2.75, 2.79,
         2.83, 2.87, 2.91, 2.95, 2.99, 3.03, 3.07, 3.11, 3.15, 3.19,
         3.23, 3.27, 3.31, 3.35, 3.39, 3.43, 3.47, 3.52, 3.56, 3.6,
         3.64, 3.68, 3.72, 3.76, 3.8, 3.84, 3.88, 3.92, 3.96, 4.0],
        [0.0, 0.0, 0.0, 0.01, 0.04, 0.07, 0.11, 0.15, 0.2, 0.25,
         0.3, 0.35, 0.39, 0.43, 0.47, 0.51, 0.55, 0.58, 0.61, 0.64,
         0.67, 0.69, 0.71, 0.73, 0.75, 0.77, 0.79, 0.8, 0.81, 0.83,
         0.84, 0.85, 0.86, 0.87, 0.88, 0.89, 0.89, 0.9, 0.91, 0.91,
         0.92, 0.92, 0.93, 0.93, 0.94, 0.94, 0.94, 0.95, 0.95, 0.95,
         0.95, 0.96, 0.96, 0.96, 0.96, 0.97, 0.97, 0.97, 0.97, 0.97,
         0.97, 0.98, 0.98, 0.98, 0.98, 0.98, 0.98, 0.98, 0.98, 0.98,
         0.98, 0.99, 0.99, 0.99, 0.99, 0.99, 0.99, 0.99, 0.99, 0.99,
         0.99, 0.99, 0.99, 0.99, 0.99, 0.99, 0.99, 0.99, 0.99, 0.99,
         0.99, 0.99, 0.99, 0.99, 0.99, 1.0, 1.0, 1.0, 1.0, 1.0],
        [0.0] * 100, "LN")

    # deleting keys in kvs
    kvs.get_client().flushall()

    # Store the same function as both the original and the
    # retrofitted vulnerability model, under taxonomy "ID".
    kvs.set_value_json_encoded(
        kvs.tokens.vuln_key(self.job_id),
        {"ID": self.vulnerability_function2.to_json()})
    kvs.set_value_json_encoded(
        kvs.tokens.vuln_key(self.job_id, retrofitted=True),
        {"ID": self.vulnerability_function2.to_json()})
def compute_mean_hazard_maps(job_id, sites, imls, poes):
    """Compute mean hazard maps using as input all the pre computed
    mean hazard curves.
    """
    LOG.debug("[MEAN_HAZARD_MAPS] List of POEs is %s" % poes)
    keys = []
    for site in sites:
        # Fetch the stored mean curve and build an interpolator on it.
        mean_poes = kvs.get_value_json_decoded(
            kvs.tokens.mean_hazard_curve_key(job_id, site))
        interpolate = build_interpolator(mean_poes, imls, site)
        for poe in poes:
            key = kvs.tokens.mean_hazard_map_key(job_id, site, poe)
            keys.append(key)
            kvs.set_value_json_encoded(key, interpolate(poe))
    return keys
def compute_quantile_hazard_curves(job_id, sites, realizations, quantiles):
    """Compute a quantile hazard curve for each site in the list
    using as input all the pre-computed curves for different realizations.
    """
    LOG.debug("[QUANTILE_HAZARD_CURVES] List of quantiles is %s" % quantiles)
    keys = []
    for site in sites:
        poes = poes_at(job_id, site, realizations)
        for quantile in quantiles:
            curve = compute_quantile_curve(poes, quantile)
            key = kvs.tokens.quantile_hazard_curve_key(job_id, site, quantile)
            keys.append(key)
            kvs.set_value_json_encoded(key, curve)
    return keys
def slice_gmfs(self, block_id):
    """Load and collate GMF values for all sites in this block."""
    # TODO(JMC): Confirm this works regardless of the method of haz calc.
    histories = int(self['NUMBER_OF_SEISMICITY_HISTORIES'])
    realizations = int(self['NUMBER_OF_LOGIC_TREE_SAMPLES'])
    num_ses = histories * realizations

    block = general.Block.from_kvs(block_id)
    gmfs = self._get_db_gmfs(block.sites, self.job_id)
    for key, gmf_slice in gmfs.items():
        row, col = key.split("!")
        key_gmf = kvs.tokens.gmf_set_key(self.job_id, col, row)
        LOGGER.debug("GMF_SLICE for %s X %s : \n\t%s" % (
            col, row, gmf_slice))
        timespan = float(self['INVESTIGATION_TIME'])
        kvs.set_value_json_encoded(key_gmf, {
            "IMLs": gmf_slice,
            "TSES": num_ses * timespan,
            "TimeSpan": timespan,
        })
def compute_quantile_hazard_curves(job_id, sites, realizations, quantiles):
    """Compute a quantile hazard curve for each site in the list
    using as input all the pre-computed curves for different realizations.
    """
    LOG.debug("[QUANTILE_HAZARD_CURVES] List of quantiles is %s" % quantiles)
    keys = []
    for site in sites:
        # The per-realization PoEs are read once per site.
        realization_poes = poes_at(job_id, site, realizations)
        for quantile in quantiles:
            quantile_poes = compute_quantile_curve(
                realization_poes, quantile)
            key = kvs.tokens.quantile_hazard_curve_key(
                job_id, site, quantile)
            keys.append(key)
            kvs.set_value_json_encoded(key, quantile_poes)
    return keys
def slice_gmfs(self, block_id):
    """Load and collate GMF values for all sites in this block."""
    # TODO(JMC): Confirm this works regardless of the method of haz calc.
    histories = int(self['NUMBER_OF_SEISMICITY_HISTORIES'])
    realizations = int(self['NUMBER_OF_LOGIC_TREE_SAMPLES'])
    num_ses = histories * realizations

    block = job.Block.from_kvs(block_id)

    # Map "row!col" -> list of ground-motion values for that grid point.
    gmfs = {}
    for site in block.sites:
        grid_point = self.region.grid.point_at(site)
        gmfs["%s!%s" % (grid_point.row, grid_point.column)] = []

    # Collect the values from every stochastic field set realization.
    for i in range(histories):
        for j in range(realizations):
            set_key = kvs.generate_product_key(
                self.id, kvs.tokens.STOCHASTIC_SET_TOKEN,
                "%s!%s" % (i, j))
            fieldset = shapes.FieldSet.from_json(
                kvs.get(set_key), self.region.grid)
            for field in fieldset:
                for slice_key in gmfs.keys():
                    row, col = slice_key.split("!")
                    gmfs[slice_key].append(field.get(int(row), int(col)))

    for slice_key, gmf_slice in gmfs.items():
        row, col = slice_key.split("!")
        key_gmf = kvs.tokens.gmfs_key(self.id, col, row)
        LOGGER.debug("GMF_SLICE for %s X %s : \n\t%s" % (
            col, row, gmf_slice))
        timespan = float(self['INVESTIGATION_TIME'])
        kvs.set_value_json_encoded(key_gmf, {
            "IMLs": gmf_slice,
            "TSES": num_ses * timespan,
            "TimeSpan": timespan,
        })
def compute_quantile_hazard_maps(job_id, sites, quantiles, imls, poes):
    """Compute quantile hazard maps using as input all the pre computed
    quantile hazard curves.
    """
    LOG.debug("[QUANTILE_HAZARD_MAPS] List of POEs is %s" % poes)
    LOG.debug("[QUANTILE_HAZARD_MAPS] List of quantiles is %s" % quantiles)

    keys = []
    for quantile in quantiles:
        for site in sites:
            curve_key = kvs.tokens.quantile_hazard_curve_key(
                job_id, site, quantile)
            quantile_poes = kvs.get_value_json_decoded(curve_key)
            interpolate = build_interpolator(quantile_poes, imls, site)
            for poe in poes:
                map_key = kvs.tokens.quantile_hazard_map_key(
                    job_id, site, poe, quantile)
                keys.append(map_key)
                kvs.set_value_json_encoded(map_key, interpolate(poe))
    return keys
def slice_gmfs(self, block_id):
    """Load and collate GMF values for all sites in this block."""
    # TODO(JMC): Confirm this works regardless of the method of haz calc.
    histories = int(self['NUMBER_OF_SEISMICITY_HISTORIES'])
    realizations = int(self['NUMBER_OF_LOGIC_TREE_SAMPLES'])
    num_ses = histories * realizations

    block = job.Block.from_kvs(block_id)

    # Map "row!col" -> list of ground-motion values for that grid point.
    gmfs = {}
    for site in block.sites:
        grid_point = self.region.grid.point_at(site)
        gmfs["%s!%s" % (grid_point.row, grid_point.column)] = []

    # Collect the values from every stochastic field set realization.
    for i in range(histories):
        for j in range(realizations):
            set_key = kvs.generate_product_key(
                self.id, kvs.tokens.STOCHASTIC_SET_TOKEN,
                "%s!%s" % (i, j))
            fieldset = shapes.FieldSet.from_json(
                kvs.get(set_key), self.region.grid)
            for field in fieldset:
                for slice_key in gmfs.keys():
                    row, col = slice_key.split("!")
                    gmfs[slice_key].append(field.get(int(row), int(col)))

    for slice_key, gmf_slice in gmfs.items():
        row, col = slice_key.split("!")
        key_gmf = kvs.generate_product_key(
            self.id, kvs.tokens.GMF_KEY_TOKEN, col, row)
        LOGGER.debug("GMF_SLICE for %s X %s : \n\t%s" % (
            col, row, gmf_slice))
        timespan = float(self['INVESTIGATION_TIME'])
        kvs.set_value_json_encoded(key_gmf, {
            "IMLs": gmf_slice,
            "TSES": num_ses * timespan,
            "TimeSpan": timespan,
        })
def load_vulnerability_model(job_id, path):
    """Load and store the vulnerability model defined in the given NRML
    file in the underlying kvs system.

    :param job_id: id used to build the kvs key.
    :param path: path of the NRML vulnerability file.
    """
    vulnerability_model = {}
    parser = VulnerabilityModelFile(path)
    for vulnerability_curve in parser:
        data = zip(vulnerability_curve["lossRatio"],
                   vulnerability_curve["coefficientsVariation"])
        # Pair each IML with its (loss ratio, cov) tuple via zip instead
        # of indexing `data` through enumerate: clearer, and it also
        # works when `data` is an iterator (Python 3 `zip`).
        curve = list(zip(vulnerability_curve["IML"], data))
        vulnerability_model[vulnerability_curve["ID"]] = \
            shapes.VulnerabilityFunction(curve).to_json()
    kvs.set_value_json_encoded(kvs.tokens.vuln_key(job_id),
                               vulnerability_model)
def _compute_bcr(self, block_id):
    """
    Calculate and store in the kvs the benefit-cost ratio data for block.

    A value is stored with key :func:`openquake.kvs.tokens.bcr_block_key`.
    See :func:`openquake.risk.job.general.compute_bcr_for_block` for result
    data structure spec.
    """
    job_ctxt = self.job_ctxt
    job_id = job_ctxt.job_id
    block = Block.from_kvs(job_id, block_id)

    result = defaultdict(list)

    def on_asset_complete(asset, bcr, eal_original, eal_retrofitted):
        # Group per-asset outputs by site coordinates.
        result[(asset.site.x, asset.site.y)].append(
            ({"bcr": bcr, "eal_original": eal_original,
              "eal_retrofitted": eal_retrofitted}, asset.asset_ref))

    benefit_cost_ratio.compute(
        block.sites,
        lambda site: BaseRiskCalculator.assets_at(job_id, site),
        vulnerability.load_vuln_model_from_kvs(job_id),
        vulnerability.load_vuln_model_from_kvs(job_id, retrofitted=True),
        lambda site: self._get_db_curve(
            hazard_input_site(self.job_ctxt, site)),
        self.job_ctxt.oq_job_profile.lrem_steps_per_interval,
        float(job_ctxt.params["INTEREST_RATE"]),
        float(job_ctxt.params["ASSET_LIFE_EXPECTANCY"]),
        on_asset_complete)

    bcr = result.items()
    bcr_block_key = kvs.tokens.bcr_block_key(job_ctxt.job_id, block_id)
    kvs.set_value_json_encoded(bcr_block_key, bcr)
    LOGGER.debug("bcr result for block %s: %r", block_id, bcr)
    return True
def compute_mean_hazard_curves(job_id, sites):
    """Compute a mean hazard curve for each site in the list
    using as input all the pre-computed curves for different realizations.

    Returns the kvs keys under which the mean curves were stored.
    """
    result_keys = []

    for site in sites:
        realization_curves = curves_at(job_id, site)

        # average the PoE values across all realizations
        poes_per_realization = [_extract_y_values_from(curve)
                                for curve in realization_curves]
        mean_poes = compute_mean_curve(poes_per_realization)

        # every realization shares the same IMLs; take them from any curve
        sample_curve = realization_curves.pop()
        imls = [point["x"] for point in sample_curve]

        mean_curve = {
            "site_lon": site.longitude,
            "site_lat": site.latitude,
            "curve": _reconstruct_curve_list_from(mean_poes, imls),
        }

        key = kvs.tokens.mean_hazard_curve_key(job_id, site)
        result_keys.append(key)
        kvs.set_value_json_encoded(key, mean_curve)

    return result_keys
def compute_quantile_hazard_maps(job_id, sites, quantiles, imls, poes):
    """Compute quantile hazard maps using as input all the
    pre computed quantile hazard curves.

    Returns the kvs keys under which the map values were stored.
    """
    LOG.debug("[QUANTILE_HAZARD_MAPS] List of POEs is %s" % poes)
    LOG.debug("[QUANTILE_HAZARD_MAPS] List of quantiles is %s" % quantiles)

    result_keys = []
    for quantile in quantiles:
        for site in sites:
            # fetch the pre-computed quantile curve for this site
            curve_key = kvs.tokens.quantile_hazard_curve_key(
                job_id, site, quantile)
            quantile_poes = kvs.get_value_json_decoded(curve_key)

            interpolate = build_interpolator(quantile_poes, imls, site)

            # one interpolated IML per requested probability of exceedance
            for poe in poes:
                map_key = kvs.tokens.quantile_hazard_map_key(
                    job_id, site, poe, quantile)
                result_keys.append(map_key)
                kvs.set_value_json_encoded(map_key, interpolate(poe))

    return result_keys
def _store_curve_at(self, site, mean_curve):
    """Store the given mean hazard curve in the kvs for the site."""
    key = kvs.tokens.mean_hazard_curve_key(self.job_id, site)
    kvs.set_value_json_encoded(key, mean_curve)
def test_quantile_hazard_maps_computation(self):
    """Quantile hazard maps are produced for every site/quantile pair
    at the requested probability of exceedance."""
    self.params[hazard_general.POES_PARAM_NAME] = "0.10"
    self.params[hazard_general.QUANTILE_PARAM_NAME] = "0.25 0.50 0.75"

    # both sites share the same fixture curve
    curve = [
        9.8784e-01, 9.8405e-01, 9.5719e-01, 9.1955e-01,
        8.5019e-01, 7.4038e-01, 5.9153e-01, 4.2626e-01,
        2.9755e-01, 2.7731e-01, 1.6218e-01, 8.8035e-02,
        4.3499e-02, 1.9065e-02, 7.0442e-03, 2.1300e-03,
        4.9498e-04, 8.1768e-05, 7.3425e-06
    ]

    sites = [shapes.Site(3.0, 3.0), shapes.Site(3.5, 3.5)]
    quantiles = [0.25, 0.50, 0.75]

    # seed the kvs with a quantile curve for every site/quantile pair
    for site in sites:
        for quantile in quantiles:
            key = kvs.tokens.quantile_hazard_curve_key(
                self.job_id, site, quantile)
            kvs.set_value_json_encoded(key, curve)

    hazard_general.compute_quantile_hazard_maps(
        self.job_ctxt.job_id, sites, quantiles, self.imls, [0.10])

    # asserting imls have been produced for all poes and quantiles
    for site in sites:
        for quantile in quantiles:
            self.assertTrue(kvs.get_client().get(
                kvs.tokens.quantile_hazard_map_key(
                    self.job_id, site, 0.10, quantile)))
def _store_gmfs(self, gmfs, row, column):
    """Store the given ground motion fields in the kvs, keyed by cell."""
    kvs.set_value_json_encoded(
        kvs.tokens.gmfs_key(self.job_id, column, row), gmfs)
def _store_hazard_curve_at(self, site, curve, realization=1):
    """Store a hazard curve in the kvs for the given site/realization."""
    key = kvs.tokens.hazard_curve_key(self.job_id, realization, site)
    kvs.set_value_json_encoded(key, curve)
def test_quantile_hazard_maps_computation(self):
    """Quantile hazard maps are produced for every site/quantile pair
    at the requested probability of exceedance."""
    self.params[self.poes_levels] = "0.10"
    self.params[self.quantiles_levels] = "0.25 0.50 0.75"

    # both sites share the same fixture PoE values
    poes = [
        9.8784e-01, 9.8405e-01, 9.5719e-01, 9.1955e-01,
        8.5019e-01, 7.4038e-01, 5.9153e-01, 4.2626e-01,
        2.9755e-01, 2.7731e-01, 1.6218e-01, 8.8035e-02,
        4.3499e-02, 1.9065e-02, 7.0442e-03, 2.1300e-03,
        4.9498e-04, 8.1768e-05, 7.3425e-06
    ]

    coords = [3.0, 3.5]
    quantiles = [0.25, 0.50, 0.75]

    # seed the kvs with a quantile curve for every site/quantile pair
    for coord in coords:
        site = shapes.Site(coord, coord)
        curve = {"site_lon": coord, "site_lat": coord,
                 "curve": classical_psha._reconstruct_curve_list_from(poes)}

        for quantile in quantiles:
            key = kvs.tokens.quantile_hazard_curve_key(
                self.job_id, site, quantile)
            kvs.set_value_json_encoded(key, curve)

    classical_psha.compute_quantile_hazard_maps(self.engine)

    # asserting imls have been produced for all poes and quantiles
    for coord in coords:
        site = shapes.Site(coord, coord)

        for quantile in quantiles:
            self.assertTrue(kvs.get(kvs.tokens.quantile_hazard_map_key(
                self.job_id, site, 0.10, quantile)))
def _store_hazard_curve_at(self, site, curve, realization=0):
    """Store the PoE values of a hazard curve for the site/realization."""
    key = kvs.tokens.hazard_curve_poes_key(self.job_id, realization, site)
    kvs.set_value_json_encoded(key, curve)
def setUp(self):
    """Build the vulnerability functions, GMF sets and assets used by the
    tests, flush the kvs and seed it with the fixture data."""
    # small 8-point vulnerability function:
    # (IML, (mean loss ratio, coefficient of variation))
    self.vuln_function_1 = shapes.VulnerabilityFunction([
        (0.01, (0.001, 0.00)), (0.04, (0.022, 0.00)),
        (0.07, (0.051, 0.00)), (0.10, (0.080, 0.00)),
        (0.12, (0.100, 0.00)), (0.22, (0.200, 0.00)),
        (0.37, (0.405, 0.00)), (0.52, (0.700, 0.00))])

    self.gmfs = GMFs

    # expected cumulative histogram fixture used by the tests
    self.cum_histogram = numpy.array([112, 46, 26, 18, 14, 12, 8, 7, 7,
        6, 5, 4, 4, 4, 4, 4, 2, 1, 1, 1, 1, 1, 1, 1])

    # dense vulnerability function covering IMLs from 0.00 to 4.00
    self.vuln_function_2 = shapes.VulnerabilityFunction([
        (0.00, (0.00, 0.00)), (0.04, (0.00, 0.00)),
        (0.08, (0.00, 0.00)), (0.12, (0.01, 0.00)),
        (0.16, (0.04, 0.00)), (0.20, (0.07, 0.00)),
        (0.24, (0.11, 0.00)), (0.28, (0.15, 0.00)),
        (0.32, (0.20, 0.00)), (0.36, (0.25, 0.00)),
        (0.40, (0.30, 0.00)), (0.44, (0.35, 0.00)),
        (0.48, (0.39, 0.00)), (0.53, (0.43, 0.00)),
        (0.57, (0.47, 0.00)), (0.61, (0.51, 0.00)),
        (0.65, (0.55, 0.00)), (0.69, (0.58, 0.00)),
        (0.73, (0.61, 0.00)), (0.77, (0.64, 0.00)),
        (0.81, (0.67, 0.00)), (0.85, (0.69, 0.00)),
        (0.89, (0.71, 0.00)), (0.93, (0.73, 0.00)),
        (0.97, (0.75, 0.00)), (1.01, (0.77, 0.00)),
        (1.05, (0.79, 0.00)), (1.09, (0.80, 0.00)),
        (1.13, (0.81, 0.00)), (1.17, (0.83, 0.00)),
        (1.21, (0.84, 0.00)), (1.25, (0.85, 0.00)),
        (1.29, (0.86, 0.00)), (1.33, (0.87, 0.00)),
        (1.37, (0.88, 0.00)), (1.41, (0.89, 0.00)),
        (1.45, (0.89, 0.00)), (1.49, (0.90, 0.00)),
        (1.54, (0.91, 0.00)), (1.58, (0.91, 0.00)),
        (1.62, (0.92, 0.00)), (1.66, (0.92, 0.00)),
        (1.70, (0.93, 0.00)), (1.74, (0.93, 0.00)),
        (1.78, (0.94, 0.00)), (1.82, (0.94, 0.00)),
        (1.86, (0.94, 0.00)), (1.90, (0.95, 0.00)),
        (1.94, (0.95, 0.00)), (1.98, (0.95, 0.00)),
        (2.02, (0.95, 0.00)), (2.06, (0.96, 0.00)),
        (2.10, (0.96, 0.00)), (2.14, (0.96, 0.00)),
        (2.18, (0.96, 0.00)), (2.22, (0.97, 0.00)),
        (2.26, (0.97, 0.00)), (2.30, (0.97, 0.00)),
        (2.34, (0.97, 0.00)), (2.38, (0.97, 0.00)),
        (2.42, (0.97, 0.00)), (2.46, (0.98, 0.00)),
        (2.51, (0.98, 0.00)), (2.55, (0.98, 0.00)),
        (2.59, (0.98, 0.00)), (2.63, (0.98, 0.00)),
        (2.67, (0.98, 0.00)), (2.71, (0.98, 0.00)),
        (2.75, (0.98, 0.00)), (2.79, (0.98, 0.00)),
        (2.83, (0.98, 0.00)), (2.87, (0.99, 0.00)),
        (2.91, (0.99, 0.00)), (2.95, (0.99, 0.00)),
        (2.99, (0.99, 0.00)), (3.03, (0.99, 0.00)),
        (3.07, (0.99, 0.00)), (3.11, (0.99, 0.00)),
        (3.15, (0.99, 0.00)), (3.19, (0.99, 0.00)),
        (3.23, (0.99, 0.00)), (3.27, (0.99, 0.00)),
        (3.31, (0.99, 0.00)), (3.35, (0.99, 0.00)),
        (3.39, (0.99, 0.00)), (3.43, (0.99, 0.00)),
        (3.47, (0.99, 0.00)), (3.52, (0.99, 0.00)),
        (3.56, (0.99, 0.00)), (3.60, (0.99, 0.00)),
        (3.64, (0.99, 0.00)), (3.68, (0.99, 0.00)),
        (3.72, (0.99, 0.00)), (3.76, (0.99, 0.00)),
        (3.80, (0.99, 0.00)), (3.84, (1.00, 0.00)),
        (3.88, (1.00, 0.00)), (3.92, (1.00, 0.00)),
        (3.96, (1.00, 0.00)), (4.00, (1.00, 0.00))])

    self.job_id = 1234

    # one GMF set and one asset per grid cell (six cells in total);
    # TSES = time representative of the stochastic event set,
    # TimeSpan = investigation time span
    self.gmfs_1 = {"IMLs": (0.1439, 0.1821, 0.5343, 0.171, 0.2177,
        0.6039, 0.0618, 0.186, 0.5512, 1.2602, 0.2824, 0.2693,
        0.1705, 0.8453, 0.6355, 0.0721, 0.2475, 0.1601, 0.3544,
        0.1756), "TSES": 200, "TimeSpan": 50}

    self.asset_1 = {"vulnerabilityFunctionReference": "ID",
        "assetValue": 22.61}

    self.gmfs_2 = {"IMLs": (0.1507, 0.2656, 0.5422, 0.3685, 0.3172,
        0.6604, 0.1182, 0.1545, 0.7613, 0.5246, 0.2428, 0.2882,
        0.2179, 1.2939, 0.6042, 0.1418, 0.3637, 0.222, 0.3613,
        0.113), "TSES": 200, "TimeSpan": 50}

    self.asset_2 = {"vulnerabilityFunctionReference": "ID",
        "assetValue": 124.27}

    self.gmfs_3 = {"IMLs": (0.156, 0.3158, 0.3968, 0.2827, 0.1915,
        0.5862, 0.1438, 0.2114, 0.5101, 1.0097, 0.226, 0.3443,
        0.1693, 1.0754, 0.3533, 0.1461, 0.347, 0.2665, 0.2977,
        0.2925), "TSES": 200, "TimeSpan": 50}

    self.asset_3 = {"vulnerabilityFunctionReference": "ID",
        "assetValue": 42.93}

    self.gmfs_4 = {"IMLs": (0.1311, 0.3566, 0.4895, 0.3647, 0.2313,
        0.9297, 0.2337, 0.2862, 0.5278, 0.6603, 0.3537, 0.2997,
        0.1097, 1.1875, 0.4752, 0.1575, 0.4009, 0.2519, 0.2653,
        0.1394), "TSES": 200, "TimeSpan": 50}

    self.asset_4 = {"vulnerabilityFunctionReference": "ID",
        "assetValue": 29.37}

    self.gmfs_5 = {"IMLs": (0.0879, 0.2895, 0.465, 0.2463, 0.1862,
        0.763, 0.2189, 0.3324, 0.3215, 0.6406, 0.5014, 0.3877,
        0.1318, 1.0545, 0.3035, 0.1118, 0.2981, 0.3492, 0.2406,
        0.1043), "TSES": 200, "TimeSpan": 50}

    self.asset_5 = {"vulnerabilityFunctionReference": "ID",
        "assetValue": 40.68}

    self.gmfs_6 = {"IMLs": (0.0872, 0.2288, 0.5655, 0.2118, 0.2,
        0.6633, 0.2095, 0.6537, 0.3838, 0.781, 0.3054, 0.5375,
        0.1361, 0.8838, 0.3726, 0.0845, 0.1942, 0.4629, 0.1354,
        0.1109), "TSES": 200, "TimeSpan": 50}

    self.asset_6 = {"vulnerabilityFunctionReference": "ID",
        "assetValue": 178.47}

    # deleting keys in kvs, so every test starts from a clean store
    kvs.get_client(binary=False).flushall()

    kvs.set_value_json_encoded(
        kvs.tokens.vuln_key(self.job_id),
        {"ID": self.vuln_function_2.to_json()})

    # store the gmfs
    self._store_gmfs(self.gmfs_1, 1, 1)
    self._store_gmfs(self.gmfs_2, 1, 2)
    self._store_gmfs(self.gmfs_3, 1, 3)
    self._store_gmfs(self.gmfs_4, 1, 4)
    self._store_gmfs(self.gmfs_5, 1, 5)
    self._store_gmfs(self.gmfs_6, 1, 6)

    # store the assets
    self._store_asset(self.asset_1, 1, 1)
    self._store_asset(self.asset_2, 1, 2)
    self._store_asset(self.asset_3, 1, 3)
    self._store_asset(self.asset_4, 1, 4)
    self._store_asset(self.asset_5, 1, 5)
    self._store_asset(self.asset_6, 1, 6)

    # minimal job parameters needed by the aggregate loss curve code
    self.params = {}
    self.params["OUTPUT_DIR"] = test.OUTPUT_DIR
    self.params["AGGREGATE_LOSS_CURVE"] = 1
    self.params["BASE_PATH"] = "."
    self.params["INVESTIGATION_TIME"] = 50.0

    self.job = job.Job(self.params, self.job_id, base_path=".")
    self.job.to_kvs()

    # deleting old file
    self._delete_test_file()
def _store_hazard_curve_at(self, site, curve, realization=1):
    """Store a hazard curve in the kvs for the given site/realization."""
    key = kvs.tokens.hazard_curve_key(
        self.job_id, realization, site.longitude, site.latitude)
    kvs.set_value_json_encoded(key, curve)