Example #1
0
def touch_result_file(job_id, path, sites, realizations, n_periods):
    """Create an empty HDF5 result file at ``path``, containing one empty
    dataset per site.

    Each dataset is a matrix with number of rows = number of samples and
    number of columns = number of UHS periods.

    :param int job_id:
        ID of the job record in the DB/KVS.
    :param str path:
        Location (including a file name) on an NFS where the empty
        result file should be created.
    :param sites:
        List of :class:`openquake.shapes.Site` objects.
    :param int realizations:
        Number of logic tree samples (the y-dimension of each dataset).
    :param int n_periods:
        Number of UHS periods (the x-dimension of each dataset).
    """
    # Raises if the calculation is no longer in progress; the returned
    # proxy itself is not needed here.
    utils_tasks.get_running_calculation(job_id)
    # TODO: Generate the sites, instead of pumping them through rabbit?
    dims = (realizations, n_periods)
    with h5py.File(path, 'w') as h5_file:
        for a_site in sites:
            name = 'lon:%s-lat:%s' % (a_site.longitude, a_site.latitude)
            h5_file.create_dataset(name, dtype=numpy.float64, shape=dims)
Example #2
0
    def test_completed_failure(self):
        """get_running_calculation() must raise JobCompletedError for a
        calculation whose status is 'failed'."""
        self.calculation.status = 'failed'
        self.calculation.save()

        raised = False
        try:
            tasks.get_running_calculation(self.calculation.id)
        except tasks.JobCompletedError as exc:
            raised = True
            # The error carries the calculation id as its message.
            self.assertEqual(exc.message, self.calculation.id)
        if not raised:
            self.fail("JobCompletedError wasn't raised")
Example #3
0
def compute_mean_curves(job_id, sites, realizations):
    """Compute the mean hazard curve for each site given.

    :param int job_id:
        ID of the job record in the DB/KVS.
    :param sites:
        List of :class:`openquake.shapes.Site` objects.
    :param realizations:
        Number of logic tree samples.
    :returns:
        The result of :func:`general.compute_mean_hazard_curves`.
    """

    # We don't actually need the CalculationProxy returned by this function
    # (yet) but this does check if the calculation is still in progress.
    utils_tasks.get_running_calculation(job_id)

    # Pass lazy %-style arguments so the message is only formatted when the
    # INFO level is actually enabled.
    HAZARD_LOG.info("Computing MEAN curves for %s sites (job_id %s)",
                    len(sites), job_id)

    return general.compute_mean_hazard_curves(job_id, sites, realizations)
Example #4
0
def compute_mgm_intensity(job_id, block_id, site_id):
    """Compute mean ground intensity for a specific site.

    :param int job_id:
        ID of the job record in the DB/KVS.
    :param block_id:
        ID of the block containing the site of interest.
    :param site_id:
        ID of the site of interest.
    :returns:
        The mean ground intensity value deserialized from the JSON string
        stored in the KVS.
    """

    # We don't actually need the CalculationProxy returned by this function
    # (yet) but this does check if the calculation is still in progress.
    utils_tasks.get_running_calculation(job_id)
    kvs_client = kvs.get_client()

    mgm_key = kvs.tokens.mgm_key(job_id, block_id, site_id)
    mgm = kvs_client.get(mgm_key)

    # json.loads is the idiomatic equivalent of constructing a fresh
    # JSONDecoder on every call.
    return json.loads(mgm)
Example #5
0
def compute_disagg_matrix_task(calculation_id, site, realization, poe,
                               result_dir):
    """Compute a complete 5D Disaggregation matrix. This task leans heavily
    on the DisaggregationCalculator (in the OpenQuake Java lib) to handle
    the actual computation.

    :param calculation_id: id of the calculation record in the KVS
    :type calculation_id: `str`
    :param site: a single site of interest
    :type site: :class:`openquake.shapes.Site` instance`
    :param int realization: logic tree sample iteration number
    :param poe: Probability of Exceedence
    :type poe: `float`
    :param result_dir: location for the Java code to write the matrix in an
        HDF5 file (in a distributed environment, this should be the path of a
        mounted NFS)

    :returns: 2-tuple of (ground_motion_value, path_to_h5_matrix_file)
    """
    # Raises if the calculation is no longer in progress.
    calc_proxy = get_running_calculation(calculation_id)

    LOG.info(
        "Computing full disaggregation matrix for job_id=%s, site=%s, "
        "realization=%s, PoE=%s. Matrix results will be serialized to `%s`."
        % (calc_proxy.job_id, site, realization, poe, result_dir))

    # NOTE(review): `realization` is only used for logging here; the matrix
    # computation itself does not receive it.
    return compute_disagg_matrix(calc_proxy, site, poe, result_dir)
Example #6
0
def compute_uhs_task(job_id, realization, site):
    """Compute Uniform Hazard Spectra for a given site of interest and 1 or
    more Probability of Exceedance values. The bulk of the computation will
    be done by utilizing the `UHSCalculator` class in the Java code.

    UHS results will be written directly to the database.

    :param int job_id:
        ID of the job record in the DB/KVS.
    :param realization:
        Logic tree sample number (from 1 to N, where N is the
        NUMBER_OF_LOGIC_TREE_SAMPLES param defined in the job config.
    :param site:
        The site of interest (a :class:`openquake.shapes.Site` object).
    """
    # Raises if the calculation is no longer in progress.
    calc_proxy = utils_tasks.get_running_calculation(job_id)

    LOG.info(
        "Computing UHS for job_id=%s, site=%s, realization=%s."
        " UHS results will be serialized to the database."
        % (calc_proxy.job_id, site, realization))

    uhs_results = compute_uhs(calc_proxy, site)

    write_uhs_spectrum_data(calc_proxy, realization, site, uhs_results)
Example #7
0
    def test_get_running_calculation(self):
        """A 'pending' calculation can be fetched and its proxy matches the
        fixture data."""
        self.calculation.status = 'pending'
        self.calculation.save()

        # No 'JobCompletedError' should be raised.
        calc_proxy = tasks.get_running_calculation(self.calculation.id)

        self.assertEqual(self.params, calc_proxy.params)
        # The owner cache attribute is transient and must not affect equality.
        ignored = ('_owner_cache',)
        self.assertTrue(model_equals(
            self.job_profile, calc_proxy.oq_job_profile, ignore=ignored))
        self.assertTrue(model_equals(
            self.calculation, calc_proxy.oq_calculation, ignore=ignored))