Example #1
def touch_result_file(job_id, path, sites, n_samples, n_periods):
    """Given a path (including the file name), create an empty HDF5 result file
    containing 1 empty data set for each site. Each dataset will be a matrix
    with the number of rows = number of samples and number of cols = number of
    UHS periods.

    :param int job_id:
        ID of the job record in the DB/KVS.
    :param str path:
        Location (including a file name) on an NFS where the empty
        result file should be created.
    :param sites:
        List of :class:`openquake.shapes.Site` objects.
    :param int n_samples:
        Number of logic tree samples (the y-dimension of each dataset).
    :param int n_periods:
        Number of UHS periods (the x-dimension of each dataset).
    """
    utils_tasks.check_job_status(job_id)
    # TODO: Generate the sites, instead of pumping them through rabbit?
    with h5py.File(path, 'w') as h5_file:
        for site in sites:
            ds_name = 'lon:%s-lat:%s' % (site.longitude, site.latitude)
            ds_shape = (n_samples, n_periods)
            h5_file.create_dataset(ds_name, dtype=numpy.float64,
                                   shape=ds_shape)
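A minimal usage sketch; the job id, path, and sites below are hypothetical,
and it assumes openquake.shapes.Site takes longitude and latitude
positionally (matching the lon/lat attributes used above):

import h5py
from openquake import shapes

job_id = 1234  # hypothetical job record id
sites = [shapes.Site(-122.0, 38.0), shapes.Site(-121.5, 37.5)]
touch_result_file(job_id, '/mnt/nfs/uhs_results.h5', sites, 5, 4)

# Each site now has an all-zeros float64 dataset of shape
# (n_samples, n_periods) == (5, 4), keyed by its lon/lat.
with h5py.File('/mnt/nfs/uhs_results.h5', 'r') as h5_file:
    print(h5_file['lon:-122.0-lat:38.0'].shape)  # (5, 4)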
Example #2
def compute_disagg_matrix_task(job_id, site, realization, poe, result_dir):
    """ Compute a complete 5D Disaggregation matrix. This task leans heavily
    on the DisaggregationCalculator (in the OpenQuake Java lib) to handle this
    computation.

    :param job_id: id of the job record in the KVS
    :type job_id: `str`
    :param site: a single site of interest
    :type site: :class:`openquake.shapes.Site` instance
    :param int realization: logic tree sample iteration number
    :param poe: Probability of Exceedance
    :type poe: `float`
    :param result_dir: location for the Java code to write the matrix in an
        HDF5 file (in a distributed environment, this should be the path of a
        mounted NFS)

    :returns: 2-tuple of (ground_motion_value, path_to_h5_matrix_file)
    """
    # check and see if the job is still valid (i.e., not complete or failed)
    check_job_status(job_id)

    log_msg = (
        "Computing full disaggregation matrix for job_id=%s, site=%s, "
        "realization=%s, PoE=%s. Matrix results will be serialized to `%s`.")
    log_msg %= (job_id, site, realization, poe, result_dir)
    LOG.info(log_msg)

    return compute_disagg_matrix(job_id, site, poe, result_dir)
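Note that realization is only used in the log message here; the underlying
compute_disagg_matrix call receives the job id, site, PoE, and result
directory. A hedged dispatch sketch, assuming the function is registered as
a celery task (no decorator is shown in these excerpts, but the
subtask(...).delay(...) call in Example #8 below points at celery):

# Hypothetical asynchronous dispatch from a coordinating process.
async_result = compute_disagg_matrix_task.delay(
    job_id, site, realization, 0.1, '/mnt/nfs/disagg')
gmv, matrix_path = async_result.get()  # blocks until the worker finishes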
Example #3
def compute_hazard_curve(job_id, sites, realization):
    """ Generate hazard curve for a given site list. """
    check_job_status(job_id)
    hazengine = job.Job.from_kvs(job_id)
    with mixins.Mixin(hazengine, hazjob.HazJobMixin):
        keys = hazengine.compute_hazard_curve(sites, realization)
        return keys
Example #4
def compute_ground_motion_fields(job_id, sites, history, realization, seed):
    """ Generate ground motion fields """
    # TODO(JMC): Use a block_id instead of a sites list
    check_job_status(job_id)
    hazengine = job.Job.from_kvs(job_id)
    with mixins.Mixin(hazengine, hazjob.HazJobMixin):
        hazengine.compute_ground_motion_fields(sites, history, realization,
                                               seed)
Example #5
def compute_quantile_curves(job_id, sites, realizations, quantiles):
    """Compute the quantile hazard curve for each site given."""

    check_job_status(job_id)
    HAZARD_LOG.info("Computing QUANTILE curves for %s sites (job_id %s)"
            % (len(sites), job_id))

    return classical_psha.compute_quantile_hazard_curves(job_id, sites,
        realizations, quantiles)
Example #6
def compute_mean_curves(job_id, sites, realizations):
    """Compute the mean hazard curve for each site given."""

    check_job_status(job_id)
    HAZARD_LOG.info("Computing MEAN curves for %s sites (job_id %s)"
            % (len(sites), job_id))

    return classical_psha.compute_mean_hazard_curves(job_id, sites,
        realizations)
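Examples #5 and #6 share one aggregation pattern: read the per-realization
curves for each site out of the KVS and write an aggregate curve back. A
hedged sketch of running both post-processing steps (sites, realizations,
and the quantile levels are placeholders):

# Hypothetical post-processing once all per-realization curves are in
# the KVS; each task passes through the classical_psha return value.
quantile_keys = compute_quantile_curves(job_id, sites, realizations,
                                        [0.25, 0.50, 0.75])
mean_keys = compute_mean_curves(job_id, sites, realizations)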
Example #7
def test_not_completed_with_true(self):
    with patch('openquake.job.Job.is_job_completed') as mock:
        mock.return_value = True
        try:
            tasks.check_job_status(31)
        except tasks.JobCompletedError as exc:
            self.assertEqual(exc.message, 31)
        else:
            self.fail("JobCompletedError wasn't raised")
        self.assertEqual(mock.call_args_list, [((31, ), {})])
Example #8
def compute_hazard_curve(job_id, site_list, realization, callback=None):
    """ Generate hazard curve for a given site list. """
    check_job_status(job_id)
    hazengine = job.Job.from_kvs(job_id)
    with mixins.Mixin(hazengine, hazjob.HazJobMixin):
        keys = hazengine.compute_hazard_curve(site_list, realization)

        if callback:
            subtask(callback).delay(job_id, site_list)

        return keys
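The optional callback is dispatched asynchronously with (job_id, site_list)
once the curves are computed. A hedged chaining sketch, where curves_done is
a hypothetical follow-up task and both functions are assumed to be
registered celery tasks:

# Hypothetical chaining: curves_done(job_id, site_list) runs after the
# hazard curves for site_list have been stored.
compute_hazard_curve.delay(job_id, site_list, realization,
                           callback=curves_done.subtask())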
Example #9
def generate_erf(job_id):
    """
    Stubbed ERF generator

    Takes a job_id, returns a job_id.

    Connects to the Java HazardEngine using hazardwrapper, waits for an ERF to
    be generated, and then writes it to KVS.
    """

    # TODO(JM): implement real ERF computation

    check_job_status(job_id)
    kvs.get_client().set(kvs.tokens.erf_key(job_id),
                         json.JSONEncoder().encode([job_id]))

    return job_id
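Reading the stub back needs only the calls already visible above; a sketch
assuming the KVS client returns the stored string unchanged:

import json
from openquake import kvs

# Round-trip: generate_erf stored the JSON-encoded list [job_id]
# under the job's ERF key.
raw = kvs.get_client().get(kvs.tokens.erf_key(job_id))
assert json.JSONDecoder().decode(raw) == [job_id]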
Example #10
def compute_mgm_intensity(job_id, block_id, site_id):
    """
    Compute mean ground intensity for a specific site.
    """

    check_job_status(job_id)
    kvs_client = kvs.get_client()

    mgm_key = kvs.tokens.mgm_key(job_id, block_id, site_id)
    mgm = kvs_client.get(mgm_key)

    if not mgm:
        # TODO(jm): implement hazardwrapper and make this work.
        # TODO(chris): uncomment below when hazardwrapper is done

        # Synchronous execution.
        #result = hazardwrapper.apply(args=[job_id, block_id, site_id])
        #mgm = kvs_client.get(mgm_key)
        pass

    return json.JSONDecoder().decode(mgm)
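Until hazardwrapper lands, a cache miss falls through the if-block with mgm
still empty, so the final decode(mgm) will raise; the commented-out
synchronous hazardwrapper.apply(...) call is what would repopulate the key.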
Example #11
def compute_uhs_task(job_id, realization, site, result_dir):
    """Compute Uniform Hazard Spectra for a given site of interest and 1 or
    more Probability of Exceedance values. The bulk of the computation will
    be done by utilizing the `UHSCalculator` class in the Java code.

    UHS results (for each poe) will be written as a 1D array into temporary
    HDF5 files. (The files will later be collected and 'reduced' into final
    result files.)

    :param int job_id:
        ID of the job record in the DB/KVS.
    :param realization:
        Logic tree sample number (from 1 to N, where N is the
        NUMBER_OF_LOGIC_TREE_SAMPLES param defined in the job config).
    :param site:
        The site of interest (a :class:`openquake.shapes.Site` object).
    :param result_dir:
        NFS result directory path. For each poe, a subfolder will be created to
        contain intermediate calculation results. (Each call to this task will
        generate 1 result file per poe.)
    :returns:
        A list of the resulting file names (1 per poe).
    """
    utils_tasks.check_job_status(job_id)

    the_job = Job.from_kvs(job_id)

    log_msg = (
        "Computing UHS for job_id=%s, site=%s, realization=%s."
        " UHS results will be serialized to `%s`.")
    log_msg %= (the_job.job_id, site, realization, result_dir)
    LOG.info(log_msg)

    uhs_results = compute_uhs(the_job, site)

    return write_uhs_results(result_dir, realization, site, uhs_results)
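A hedged fan-out sketch for one site; n_samples is a hypothetical count
standing in for the NUMBER_OF_LOGIC_TREE_SAMPLES job parameter, and per the
docstring realizations run from 1 to N:

# Hypothetical fan-out over logic tree samples; each call writes one
# temporary HDF5 file per PoE into result_dir and returns their names.
result_files = []
for realization in range(1, n_samples + 1):
    result_files.extend(
        compute_uhs_task(job_id, realization, site, '/mnt/nfs/uhs'))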
Example #12
def compute_risk(job_id, block_id, **kwargs):
    """ A task for computing risk, calls the mixed in compute_risk method """
    check_job_status(job_id)
    engine = job.Job.from_kvs(job_id)
    with mixins.Mixin(engine, RiskJobMixin) as mixed:
        return mixed.compute_risk(block_id, **kwargs)
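The mixins.Mixin context manager seen in Examples #3, #4, #8, and #12
temporarily grafts calculator methods onto the job instance and undoes the
change on exit. A toy illustration of the pattern, not the actual
openquake.mixins implementation:

class Mixin(object):
    """Toy sketch: temporarily mix a class into a live instance so the
    mixin's methods resolve on the target, then restore the original."""

    def __init__(self, target, mixin_cls):
        self.target = target
        self.mixin_cls = mixin_cls

    def __enter__(self):
        self._original_cls = self.target.__class__
        # Throwaway subclass puts mixin_cls ahead of the target's class
        # in the MRO for the duration of the with-block.
        self.target.__class__ = type(
            'Mixed', (self.mixin_cls, self._original_cls), {})
        return self.target

    def __exit__(self, *exc_info):
        self.target.__class__ = self._original_cls
        return False  # never swallow exceptions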
Example #13
def test_not_completed(self):
    with patch('openquake.job.Job.is_job_completed') as mock:
        mock.return_value = False
        tasks.check_job_status(42)
        self.assertEqual(mock.call_args_list, [((42, ), {})])
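Taken together, the two tests pin down the contract of check_job_status: it
calls Job.is_job_completed(job_id) exactly once and raises JobCompletedError
carrying the job id when the job is already finished. A sketch consistent
with that behavior, not necessarily the real implementation:

def check_job_status(job_id):
    """Sketch inferred from the tests above: refuse to start work for a
    job that has already completed (or failed)."""
    if Job.is_job_completed(job_id):
        raise JobCompletedError(job_id)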