Beispiel #1
0
    def execute(self):
        """Loop over realizations (logic tree samples), split the geometry of
        interest into blocks of sites, and distribute Celery tasks to carry out
        the UHS computation.

        For each realization, a source model and a GMPE map are sampled (and
        stored keyed by job id) before the per-site-block tasks are
        distributed, so every task of that realization sees the same sample.
        """
        job_ctxt = self.job_ctxt
        all_sites = job_ctxt.sites_to_compute()
        # Number of sites handed to each distributed task batch:
        site_block_size = config.hazard_block_size()
        job_profile = job_ctxt.oq_job_profile

        # Separate RNGs seeded from the job profile, so logic-tree sampling
        # is reproducible for a given profile and independent between the
        # source-model and GMPE trees:
        src_model_rnd = random.Random(job_profile.source_model_lt_random_seed)
        gmpe_rnd = random.Random(job_profile.gmpe_lt_random_seed)

        for rlz in xrange(job_ctxt.oq_job_profile.realizations):

            # Sample the gmpe and source models:
            general.store_source_model(
                job_ctxt.job_id, src_model_rnd.getrandbits(32),
                job_ctxt.params, self.lt_processor)
            general.store_gmpe_map(
                job_ctxt.job_id, gmpe_rnd.getrandbits(32), self.lt_processor)

            for site_block in block_splitter(all_sites, site_block_size):

                # Task-function args, forwarded to each compute_uhs_task:
                tf_args = dict(job_id=job_ctxt.job_id, realization=rlz)

                # Snapshot of progress so far; the async task handler uses it
                # as the baseline when counting this block's completions.
                num_tasks_completed = completed_task_count(job_ctxt.job_id)

                # num_tasks=len(site_block) implies one task per site —
                # NOTE(review): confirm against utils_tasks.distribute.
                ath_args = dict(job_id=job_ctxt.job_id,
                                num_tasks=len(site_block),
                                start_count=num_tasks_completed)

                utils_tasks.distribute(
                    compute_uhs_task, ('site', site_block), tf_args=tf_args,
                    ath=uhs_task_handler, ath_args=ath_args)
 def test_block_splitter(self):
     """10 items split into 3-element blocks: three full blocks plus a
     final partial block holding the remainder."""
     expected = [
         [0, 1, 2],
         [3, 4, 5],
         [6, 7, 8],
         [9],
     ]
     # list(...) instead of a copy-comprehension over the generator:
     actual = list(block_splitter(self.DATA, 3))
     self.assertEqual(expected, actual)
Beispiel #3
0
 def test_block_splitter(self):
     """10 items split into 3-element blocks: three full blocks plus a
     final partial block holding the remainder."""
     expected = [
         [0, 1, 2],
         [3, 4, 5],
         [6, 7, 8],
         [9],
     ]
     # list(...) instead of a copy-comprehension over the generator:
     actual = list(block_splitter(self.DATA, 3))
     self.assertEqual(expected, actual)
Beispiel #4
0
 def test_block_splitter_with_iter(self):
     """Splitting works on a plain iterator, i.e. input with no len()."""
     data = iter(range(10))
     expected = [
         [0, 1, 2],
         [3, 4, 5],
         [6, 7, 8],
         [9],
     ]
     # list(...) instead of a copy-comprehension over the generator:
     actual = list(block_splitter(data, 3))
     self.assertEqual(expected, actual)
Beispiel #5
0
 def test_block_splitter_with_generator(self):
     """Splitting works on a lazy, non-list sequence.

     NOTE: despite the original comment, ``xrange`` is not a generator and
     does have a length; it is merely a lazy sequence type.
     """
     data = xrange(10)
     expected = [
         [0, 1, 2],
         [3, 4, 5],
         [6, 7, 8],
         [9],
     ]
     # list(...) instead of a copy-comprehension over the generator:
     actual = list(block_splitter(data, 3))
     self.assertEqual(expected, actual)
Beispiel #6
0
    def execute(self):
        """Loop over realizations (logic tree samples), split the geometry of
        interest into blocks of sites, and distribute Celery tasks to carry out
        the UHS computation.

        For each realization, a source model and a GMPE map are sampled (and
        stored keyed by job id) before the per-site-block tasks are
        distributed, so every task of that realization sees the same sample.
        """
        job_ctxt = self.job_ctxt
        all_sites = job_ctxt.sites_to_compute()
        # Number of sites handed to each distributed task batch:
        site_block_size = config.hazard_block_size()
        job_profile = job_ctxt.oq_job_profile

        # Presumably initializes progress-reporting counters for the whole
        # job (all sites x all realizations) — NOTE(review): confirm against
        # initialize_pr_data's definition.
        self.initialize_pr_data(
            sites=all_sites, realizations=job_ctxt.oq_job_profile.realizations)

        # Separate RNGs seeded from the job profile, so logic-tree sampling
        # is reproducible for a given profile and independent between the
        # source-model and GMPE trees:
        src_model_rnd = random.Random(job_profile.source_model_lt_random_seed)
        gmpe_rnd = random.Random(job_profile.gmpe_lt_random_seed)

        for rlz in xrange(job_ctxt.oq_job_profile.realizations):

            # Sample the gmpe and source models:
            general.store_source_model(
                job_ctxt.job_id, src_model_rnd.getrandbits(32),
                job_ctxt.params, self.lt_processor)
            general.store_gmpe_map(
                job_ctxt.job_id, gmpe_rnd.getrandbits(32), self.lt_processor)

            for site_block in block_splitter(all_sites, site_block_size):

                # Task-function args, forwarded to each compute_uhs_task:
                tf_args = dict(job_id=job_ctxt.job_id, realization=rlz)

                # Snapshot of progress so far; the async task handler uses it
                # as the baseline when counting this block's completions.
                num_tasks_completed = completed_task_count(job_ctxt.job_id)

                # num_tasks=len(site_block) implies one task per site —
                # NOTE(review): confirm against utils_tasks.distribute.
                ath_args = dict(job_id=job_ctxt.job_id,
                                num_tasks=len(site_block),
                                start_count=num_tasks_completed)

                utils_tasks.distribute(
                    compute_uhs_task, ('site', site_block), tf_args=tf_args,
                    ath=uhs_task_handler, ath_args=ath_args)
Beispiel #7
0
def do_hazard_map_post_process(job):
    """
    Create and distribute tasks for processing hazard curves into hazard maps.

    :param job:
        A :class:`openquake.db.models.OqJob` which has some hazard curves
        associated with it.
    """
    logs.LOG.debug('> Post-processing - Hazard Maps')
    # Limit the number of concurrent tasks to the configured concurrency
    # level:
    block_size = int(config.get('hazard', 'concurrent_tasks'))

    poes = job.hazard_calculation.poes_hazard_maps

    # Stats for debug logging.  Lazy %-args (instead of eager '%'
    # interpolation) so the formatting is skipped when DEBUG is disabled:
    hazard_curve_ids = models.HazardCurve.objects.filter(
        output__oq_job=job).values_list('id', flat=True)
    logs.LOG.debug('num haz curves: %s', len(hazard_curve_ids))

    total_blocks = int(math.ceil(len(hazard_curve_ids) / float(block_size)))

    for i, block in enumerate(block_splitter(hazard_curve_ids, block_size)):
        logs.LOG.debug('> Hazard post-processing block, %s of %s',
                       i + 1, total_blocks)

        # One subtask per hazard curve in this block:
        tasks = [hazard_curves_to_hazard_map_task.subtask(
                     (job.id, hazard_curve_id, poes))
                 for hazard_curve_id in block]
        results = TaskSet(tasks=tasks).apply_async()

        # Surface any exception raised by a worker task:
        utils_tasks._check_exception(results)

        logs.LOG.debug('< Done Hazard Map post-processing block, %s of %s',
                       i + 1, total_blocks)
    logs.LOG.debug('< Done post-processing - Hazard Maps')
 def test_block_splitter_block_size_lt_zero(self):
     """A negative block size is rejected with ValueError.

     The error only surfaces on the first ``next()`` call, since
     ``block_splitter`` is a generator.
     """
     splitter = block_splitter(self.DATA, -1)
     self.assertRaises(ValueError, splitter.next)
 def test_block_splitter_zero_block_size(self):
     """A zero block size is rejected with ValueError.

     The error only surfaces on the first ``next()`` call, since
     ``block_splitter`` is a generator.
     """
     splitter = block_splitter(self.DATA, 0)
     self.assertRaises(ValueError, splitter.next)
 def test_block_splitter_block_size_gt_data_len(self):
     """A block size larger than the data yields one block with all items."""
     expected = [self.DATA]
     # list(...) instead of a copy-comprehension over the generator:
     actual = list(block_splitter(self.DATA, 11))
     self.assertEqual(expected, actual)
Beispiel #11
0
 def test_block_splitter_block_size_lt_zero(self):
     """A negative block size is rejected with ValueError.

     The error only surfaces on the first ``next()`` call, since
     ``block_splitter`` is a generator.
     """
     splitter = block_splitter(self.DATA, -1)
     self.assertRaises(ValueError, splitter.next)
Beispiel #12
0
 def test_block_splitter_zero_block_size(self):
     """A zero block size is rejected with ValueError.

     The error only surfaces on the first ``next()`` call, since
     ``block_splitter`` is a generator.
     """
     splitter = block_splitter(self.DATA, 0)
     self.assertRaises(ValueError, splitter.next)
Beispiel #13
0
 def test_block_splitter_block_size_gt_data_len(self):
     """A block size larger than the data yields one block with all items."""
     expected = [self.DATA]
     # list(...) instead of a copy-comprehension over the generator:
     actual = list(block_splitter(self.DATA, 11))
     self.assertEqual(expected, actual)