def execute(self):
    """Loop over realizations (logic tree samples), split the geometry of
    interest into blocks of sites, and distribute Celery tasks to carry out
    the UHS computation.
    """
    job_ctxt = self.job_ctxt
    all_sites = job_ctxt.sites_to_compute()
    site_block_size = config.hazard_block_size()
    job_profile = job_ctxt.oq_job_profile

    # Initialize progress-reporting counters before any tasks run, so the
    # `completed_task_count` reads below (used for async-task-handler
    # bookkeeping) start from a known baseline.  This matches the other
    # `execute` implementation in this file.
    self.initialize_pr_data(
        sites=all_sites, realizations=job_profile.realizations)

    # Seed one RNG per logic tree so source-model and GMPE sampling are
    # independently reproducible.
    src_model_rnd = random.Random(job_profile.source_model_lt_random_seed)
    gmpe_rnd = random.Random(job_profile.gmpe_lt_random_seed)

    for rlz in xrange(job_profile.realizations):
        # Sample the gmpe and source models:
        general.store_source_model(
            job_ctxt.job_id, src_model_rnd.getrandbits(32),
            job_ctxt.params, self.lt_processor)
        general.store_gmpe_map(
            job_ctxt.job_id, gmpe_rnd.getrandbits(32), self.lt_processor)

        for site_block in block_splitter(all_sites, site_block_size):
            tf_args = dict(job_id=job_ctxt.job_id, realization=rlz)

            # The async task handler needs to know how many tasks were
            # already complete so it can track this batch's progress.
            num_tasks_completed = completed_task_count(job_ctxt.job_id)
            ath_args = dict(job_id=job_ctxt.job_id,
                            num_tasks=len(site_block),
                            start_count=num_tasks_completed)

            utils_tasks.distribute(
                compute_uhs_task, ('site', site_block), tf_args=tf_args,
                ath=uhs_task_handler, ath_args=ath_args)
def execute(self):
    """Run the UHS calculation over every realization (logic tree sample).

    The sites of interest are split into blocks and a Celery task is
    distributed for each block of each realization.
    """
    ctxt = self.job_ctxt
    profile = ctxt.oq_job_profile
    sites = ctxt.sites_to_compute()
    block_size = config.hazard_block_size()

    # Set up progress-reporting counters before any work is distributed.
    self.initialize_pr_data(sites=sites, realizations=profile.realizations)

    # Independent RNGs for the two logic trees, each with its own seed.
    source_rnd = random.Random(profile.source_model_lt_random_seed)
    gmpe_rnd = random.Random(profile.gmpe_lt_random_seed)

    for realization in xrange(profile.realizations):
        # Sample the source model and gmpe logic trees for this realization:
        general.store_source_model(
            ctxt.job_id, source_rnd.getrandbits(32), ctxt.params,
            self.lt_processor)
        general.store_gmpe_map(
            ctxt.job_id, gmpe_rnd.getrandbits(32), self.lt_processor)

        for block in block_splitter(sites, block_size):
            task_args = dict(job_id=ctxt.job_id, realization=realization)
            # Tell the async task handler how many tasks were already
            # done so it can track this batch separately.
            handler_args = dict(
                job_id=ctxt.job_id, num_tasks=len(block),
                start_count=completed_task_count(ctxt.job_id))
            utils_tasks.distribute(
                compute_uhs_task, ('site', block), tf_args=task_args,
                ath=uhs_task_handler, ath_args=handler_args)
def test_complete_task_count_success_and_fail(self):
    """`completed_task_count` sums the success and failure counters."""
    for counter in ("nhzrd_done", "nhzrd_failed"):
        stats.pk_inc(self.job_id, counter)
    self.assertEqual(2, completed_task_count(self.job_id))
def test_complete_task_count_failures(self):
    """A failure counter alone is reflected in `completed_task_count`."""
    stats.pk_inc(self.job_id, "nhzrd_failed")
    actual = completed_task_count(self.job_id)
    self.assertEqual(1, actual)
def test_complete_task_count_success(self):
    """A success counter alone is reflected in `completed_task_count`."""
    stats.pk_inc(self.job_id, "nhzrd_done")
    actual = completed_task_count(self.job_id)
    self.assertEqual(1, actual)
def test_completed_task_count_no_stats(self):
    """With no counters set, `completed_task_count` just returns 0."""
    actual = completed_task_count(self.job_id)
    self.assertEqual(0, actual)
def test_complete_task_count_success_and_fail(self):
    """`completed_task_count` adds the success and failure counters."""
    for counter_name in ('compute_uhs_task', 'compute_uhs_task-failures'):
        stats.incr_counter(self.job_id, 'h', counter_name)
    self.assertEqual(2, completed_task_count(self.job_id))
def test_complete_task_count_failures(self):
    """A failure counter alone counts towards `completed_task_count`."""
    stats.incr_counter(self.job_id, 'h', 'compute_uhs_task-failures')
    actual = completed_task_count(self.job_id)
    self.assertEqual(1, actual)
def test_complete_task_count_success(self):
    """A success counter alone counts towards `completed_task_count`."""
    stats.incr_counter(self.job_id, 'h', 'compute_uhs_task')
    actual = completed_task_count(self.job_id)
    self.assertEqual(1, actual)