Example #1
def do_hazard_map_post_process(job):
    """
    Create and distribute tasks for processing hazard curves into hazard maps.

    :param job:
        A :class:`openquake.engine.db.models.OqJob` which has some hazard
        curves associated with it.
    """
    logs.LOG.debug('> Post-processing - Hazard Maps')
    block_size = int(config.get('hazard', 'concurrent_tasks'))

    poes = job.hazard_calculation.poes_hazard_maps

    # Stats for debug logging:
    hazard_curve_ids = models.HazardCurve.objects.filter(
        output__oq_job=job).values_list('id', flat=True)
    logs.LOG.debug('num haz curves: %s' % len(hazard_curve_ids))

    # Split the work into blocks so that at most ``block_size`` tasks are
    # queued at once (the configured concurrency level):
    block_gen = block_splitter(hazard_curve_ids, block_size)
    total_blocks = int(math.ceil(len(hazard_curve_ids) / float(block_size)))

    for i, block in enumerate(block_gen):
        logs.LOG.debug('> Hazard post-processing block, %s of %s'
                       % (i + 1, total_blocks))

        if openquake.engine.no_distribute():
            # just execute the post-processing using the plain function form of
            # the task
            for hazard_curve_id in block:
                hazard_curves_to_hazard_map_task(job.id, hazard_curve_id, poes)
        else:
            tasks = []
            for hazard_curve_id in block:
                tasks.append(hazard_curves_to_hazard_map_task.subtask(
                    (job.id, hazard_curve_id, poes)))
            results = TaskSet(tasks=tasks).apply_async()

            # Check the results for exceptions and re-raise if any occurred:
            utils_tasks._check_exception(results)

        logs.LOG.debug('< Done Hazard Map post-processing block, %s of %s'
                       % (i + 1, total_blocks))
    logs.LOG.debug('< Done post-processing - Hazard Maps')
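
Both examples delegate the chunking to block_splitter, imported from the engine's utilities. A minimal sketch of such a helper is given below; it is an illustrative stand-in written for this note, not the engine's own implementation:

def block_splitter(items, block_size):
    """Yield successive blocks of at most ``block_size`` items."""
    block = []
    for item in items:
        block.append(item)
        if len(block) == block_size:
            yield block
            block = []
    # Yield the final, possibly shorter, block:
    if block:
        yield block

Because it consumes any iterable lazily, the same helper works for the queryset of curve ids in Example #1 and the argument generator in Example #2.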
Example #2
def do_post_process(job):
    """
    Run the GMF to hazard curve post-processing tasks for the given ``job``.

    :param job:
        A :class:`openquake.engine.db.models.OqJob` instance.
    """
    logs.LOG.debug('> Post-processing - GMFs to Hazard Curves')
    block_size = int(config.get('hazard', 'concurrent_tasks'))
    block_gen = block_splitter(gmf_post_process_arg_gen(job), block_size)

    hc = job.hazard_calculation

    # Stats for debug logging:
    n_imts = len(hc.intensity_measure_types_and_levels)
    n_sites = len(hc.points_to_compute())
    n_rlzs = models.LtRealization.objects.filter(hazard_calculation=hc).count()
    total_blocks = int(math.ceil(
        (n_imts * n_sites * n_rlzs) / float(block_size)))

    for i, block in enumerate(block_gen):
        logs.LOG.debug('> GMF post-processing block, %s of %s'
                       % (i + 1, total_blocks))

        # Run the tasks in blocks, to avoid overqueueing:
        tasks = []
        for the_args in block:
            tasks.append(gmf_to_hazard_curve_task.subtask(the_args))
        results = TaskSet(tasks=tasks).apply_async()

        # Check the results for exceptions and re-raise if any occurred:
        utils_tasks._check_exception(results)

        logs.LOG.debug('< Done GMF post-processing block, %s of %s'
                       % (i + 1, total_blocks))
    logs.LOG.debug('< Done post-processing - GMFs to Hazard Curves')
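
The block-wise dispatch loop shared by both examples can be distilled into a small helper. The sketch below assumes the celery 2.x TaskSet API used above and reuses the block_splitter sketch from Example #1; run_in_blocks is a hypothetical name, and results.join() stands in for utils_tasks._check_exception, since join() also re-raises exceptions thrown by workers:

from celery.task.sets import TaskSet

def run_in_blocks(task, arg_gen, block_size):
    """Apply ``task`` to each argument tuple, ``block_size`` tasks at a time."""
    for block in block_splitter(arg_gen, block_size):
        subtasks = [task.subtask(args) for args in block]
        results = TaskSet(tasks=subtasks).apply_async()
        # Wait for the whole block to finish; join() re-raises any worker
        # exception, mirroring utils_tasks._check_exception above.
        results.join()

Blocking on each batch before queueing the next is what keeps the queue depth bounded at roughly ``block_size``, at the cost of some idle workers near the tail of each block.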