def execute(self): """Loop over realizations (logic tree samples), split the geometry of interest into blocks of sites, and distribute Celery tasks to carry out the UHS computation. """ job_ctxt = self.job_ctxt all_sites = job_ctxt.sites_to_compute() site_block_size = config.hazard_block_size() job_profile = job_ctxt.oq_job_profile src_model_rnd = random.Random(job_profile.source_model_lt_random_seed) gmpe_rnd = random.Random(job_profile.gmpe_lt_random_seed) for rlz in xrange(job_ctxt.oq_job_profile.realizations): # Sample the gmpe and source models: general.store_source_model( job_ctxt.job_id, src_model_rnd.getrandbits(32), job_ctxt.params, self.lt_processor) general.store_gmpe_map( job_ctxt.job_id, gmpe_rnd.getrandbits(32), self.lt_processor) for site_block in block_splitter(all_sites, site_block_size): tf_args = dict(job_id=job_ctxt.job_id, realization=rlz) num_tasks_completed = completed_task_count(job_ctxt.job_id) ath_args = dict(job_id=job_ctxt.job_id, num_tasks=len(site_block), start_count=num_tasks_completed) utils_tasks.distribute( compute_uhs_task, ('site', site_block), tf_args=tf_args, ath=uhs_task_handler, ath_args=ath_args)
def execute(self, kvs_keys_purged=None):  # pylint: disable=W0221
    """
    Trigger the calculation and serialization of hazard curves, mean hazard
    curves/maps and quantile curves.

    :param kvs_keys_purged: a list only passed by tests that check the
        kvs keys used/purged in the course of the job.
    :returns: the keys used in the course of the calculation (for the sake
        of testability).
    """
    sites = self.job_ctxt.sites_to_compute()
    realizations = self.job_ctxt["NUMBER_OF_LOGIC_TREE_SAMPLES"]

    LOG.info("Going to run classical PSHA hazard for %s realizations "
             "and %s sites" % (realizations, len(sites)))
    stats.pk_set(self.job_ctxt.job_id, "hcls_sites", len(sites))
    stats.pk_set(self.job_ctxt.job_id, "hcls_realizations", realizations)

    block_size = config.hazard_block_size()
    stats.pk_set(self.job_ctxt.job_id, "block_size", block_size)

    blocks = range(0, len(sites), block_size)
    stats.pk_set(self.job_ctxt.job_id, "blocks", len(blocks))
    stats.pk_set(self.job_ctxt.job_id, "cblock", 0)

    for start in blocks:
        stats.pk_inc(self.job_ctxt.job_id, "cblock")
        end = start + block_size
        data = sites[start:end]

        LOG.debug("> curves!")
        self.do_curves(
            data, realizations,
            serializer=self.serialize_hazard_curve_of_realization)

        LOG.debug("> means!")
        # mean curves
        self.do_means(
            data, realizations,
            curve_serializer=self.serialize_mean_hazard_curves,
            map_func=general.compute_mean_hazard_maps,
            map_serializer=self.serialize_mean_hazard_map)

        LOG.debug("> quantiles!")
        # quantile curves
        quantiles = self.quantile_levels
        self.do_quantiles(
            data, realizations, quantiles,
            curve_serializer=self.serialize_quantile_hazard_curves,
            map_func=general.compute_quantile_hazard_maps,
            map_serializer=self.serialize_quantile_hazard_map)

        # Done with this block, purge intermediate results from kvs.
        release_data_from_kvs(self.job_ctxt.job_id, data, realizations,
                              quantiles, self.poes_hazard_maps,
                              kvs_keys_purged)
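# Quick illustration of the blocking pattern used in the classical
# `execute` above: stepping through the site list in `block_size` strides
# slices it into per-block work units, with a short final block when the
# site count is not an exact multiple. Plain integers stand in for Site
# objects; the numbers here are illustrative only.
sites = list(range(10))
block_size = 4
blocks = [sites[start:start + block_size]
          for start in range(0, len(sites), block_size)]
assert blocks == [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9]]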
def test_not_configured_default_overriden(self):
    """
    The hazard block size was not set in openquake.cfg; the default
    specified by the caller is returned.
    """
    with patch("openquake.utils.config.get") as mget:
        mget.return_value = None
        self.assertEqual(333, config.hazard_block_size(333))
def test_not_configured(self):
    """
    The hazard block size was not set in openquake.cfg; the default is
    returned.
    """
    with patch("openquake.utils.config.get") as mget:
        mget.return_value = None
        self.assertEqual(8192, config.hazard_block_size())
def test_configured(self):
    """The hazard block size *was* configured in openquake.cfg"""
    content = '''
        [hazard]
        block_size=33'''
    local_path = self.touch(content=textwrap.dedent(content))
    os.environ["OQ_LOCAL_CFG_PATH"] = local_path
    config.Config().cfg.clear()
    config.Config()._load_from_file()
    self.assertEqual(33, config.hazard_block_size())
def execute(self, kvs_keys_purged=None):
    """
    Trigger the calculation and serialization of hazard curves, mean hazard
    curves/maps and quantile curves.

    :param kvs_keys_purged: a list only passed by tests that check the
        kvs keys used/purged in the course of the calculation.
    :returns: the keys used in the course of the calculation (for the sake
        of testability).
    """
    sites = self.sites_to_compute()
    realizations = self["NUMBER_OF_LOGIC_TREE_SAMPLES"]

    LOG.info("Going to run classical PSHA hazard for %s realizations "
             "and %s sites" % (realizations, len(sites)))
    stats.set_total(self.job_id, "classical:execute:sites", len(sites))
    stats.set_total(self.job_id, "classical:execute:realizations",
                    realizations)

    block_size = config.hazard_block_size()
    for start in xrange(0, len(sites), block_size):
        end = start + block_size
        data = sites[start:end]

        self.do_curves(
            data, realizations,
            serializer=self.serialize_hazard_curve_of_realization)

        # mean curves
        self.do_means(
            data, realizations,
            curve_serializer=self.serialize_mean_hazard_curves,
            map_func=classical_psha.compute_mean_hazard_maps,
            map_serializer=self.serialize_mean_hazard_map)

        # quantile curves
        quantiles = self.quantile_levels
        self.do_quantiles(
            data, realizations, quantiles,
            curve_serializer=self.serialize_quantile_hazard_curves,
            map_func=classical_psha.compute_quantile_hazard_maps,
            map_serializer=self.serialize_quantile_hazard_map)

        # Done with this chunk, purge intermediate results from kvs.
        release_data_from_kvs(self.job_id, data, realizations, quantiles,
                              self.poes_hazard_maps, kvs_keys_purged)
def execute(self): """Loop over realizations (logic tree samples), split the geometry of interest into blocks of sites, and distribute Celery tasks to carry out the UHS computation. """ job_ctxt = self.job_ctxt all_sites = job_ctxt.sites_to_compute() site_block_size = config.hazard_block_size() job_profile = job_ctxt.oq_job_profile self.initialize_pr_data( sites=all_sites, realizations=job_ctxt.oq_job_profile.realizations) src_model_rnd = random.Random(job_profile.source_model_lt_random_seed) gmpe_rnd = random.Random(job_profile.gmpe_lt_random_seed) for rlz in xrange(job_ctxt.oq_job_profile.realizations): # Sample the gmpe and source models: general.store_source_model( job_ctxt.job_id, src_model_rnd.getrandbits(32), job_ctxt.params, self.lt_processor) general.store_gmpe_map( job_ctxt.job_id, gmpe_rnd.getrandbits(32), self.lt_processor) for site_block in block_splitter(all_sites, site_block_size): tf_args = dict(job_id=job_ctxt.job_id, realization=rlz) num_tasks_completed = completed_task_count(job_ctxt.job_id) ath_args = dict(job_id=job_ctxt.job_id, num_tasks=len(site_block), start_count=num_tasks_completed) utils_tasks.distribute( compute_uhs_task, ('site', site_block), tf_args=tf_args, ath=uhs_task_handler, ath_args=ath_args)
def test_configured(self):
    """The hazard block size *was* configured in openquake.cfg"""
    with patch("openquake.utils.config.get") as mget:
        mget.return_value = "33"
        self.assertEqual(33, config.hazard_block_size())
def test_not_configured_default_overriden(self):
    """
    The hazard block size was not set in openquake.cfg; the default
    specified by the caller is returned.
    """
    self.assertEqual(333, config.hazard_block_size(333))
def test_not_configured(self):
    """
    The hazard block size was not set in openquake.cfg; the default is
    returned.
    """
    self.assertEqual(8192, config.hazard_block_size())
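# The tests above pin down the contract of `config.hazard_block_size`: it
# reads `block_size` from the [hazard] section of openquake.cfg via the
# module-level `openquake.utils.config.get` (the function the tests patch),
# falls back to a caller-supplied default, and defaults to 8192 when
# neither is given. A minimal sketch consistent with that contract -- a
# hypothetical reconstruction, not the actual implementation:
def hazard_block_size(default=8192):
    """Return the configured hazard block size or the given `default`."""
    configured = get("hazard", "block_size")
    return int(configured) if configured is not None else default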