Example #1
    def test_completed_failure(self):
        self.job.status = 'failed'
        self.job.save()

        try:
            tasks.get_running_job(self.job.id)
        except tasks.JobCompletedError as exc:
            self.assertEqual(exc.message, self.job.id)
        else:
            self.fail("JobCompletedError wasn't raised")
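The try/except/else pattern above is correct, but the same check reads more compactly with assertRaises used as a context manager (available in unittest since Python 2.7). The snippet below is an illustrative rewrite using the same names (tasks, self.job) as the example:

    def test_completed_failure_compact(self):
        self.job.status = 'failed'
        self.job.save()

        # assertRaises as a context manager captures the raised exception.
        with self.assertRaises(tasks.JobCompletedError) as ctx:
            tasks.get_running_job(self.job.id)
        self.assertEqual(ctx.exception.message, self.job.id)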
Example #2
def compute_mean_curves(job_id, sites, realizations):
    """Compute the mean hazard curve for each site given."""

    # We don't actually need the JobContext returned by this function
    # (yet) but this does check if the calculation is still in progress.
    utils_tasks.get_running_job(job_id)

    HAZARD_LOG.info("Computing MEAN curves for %s sites (job_id %s)"
                    % (len(sites), job_id))

    return general.compute_mean_hazard_curves(job_id, sites, realizations)
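A small style note on the logging call: building the message with % before passing it to .info() formats the string even when INFO logging is disabled. Assuming HAZARD_LOG is a standard logging.Logger (which the call suggests), the arguments can be handed to the logger instead so formatting is deferred until the record is actually emitted:

    HAZARD_LOG.info("Computing MEAN curves for %s sites (job_id %s)",
                    len(sites), job_id)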
Example #3
def compute_mgm_intensity(job_id, block_id, site_id):
    """Compute mean ground intensity for a specific site."""

    # We don't actually need the JobContext returned by this function
    # (yet) but this does check if the calculation is still in progress.
    utils_tasks.get_running_job(job_id)
    kvs_client = kvs.get_client()

    mgm_key = kvs.tokens.mgm_key(job_id, block_id, site_id)
    mgm = kvs_client.get(mgm_key)

    return json.JSONDecoder().decode(mgm)
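The last line instantiates a JSONDecoder just to decode a single string; json.loads does the same thing with default settings and is the more common spelling. A minimal check, using a made-up payload purely for illustration (the real value comes from the KVS):

import json

# json.loads(s) and json.JSONDecoder().decode(s) give the same result for a
# plain decode with default settings.
payload = '{"mean_intensity": 0.42}'  # hypothetical KVS value, illustration only
assert json.loads(payload) == json.JSONDecoder().decode(payload)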
Example #4
def compute_disagg_matrix_task(job_id, site, realization, poe, result_dir):
    """ Compute a complete 5D Disaggregation matrix. This task leans heavily
    on the DisaggregationCalculator (in the OpenQuake Java lib) to handle this
    computation.

    :param job_id: id of the calculation record in the KVS
    :type job_id: `str`
    :param site: a single site of interest
    :type site: :class:`openquake.shapes.Site` instance
    :param int realization: logic tree sample iteration number
    :param poe: Probability of Exceedance
    :type poe: `float`
    :param result_dir: location for the Java code to write the matrix in an
        HDF5 file (in a distributed environment, this should be the path of a
        mounted NFS)

    :returns: 2-tuple of (ground_motion_value, path_to_h5_matrix_file)
    """
    job_ctxt = get_running_job(job_id)

    log_msg = (
        "Computing full disaggregation matrix for job_id=%s, site=%s, "
        "realization=%s, PoE=%s. Matrix results will be serialized to `%s`.")
    log_msg %= (job_ctxt.job_id, site, realization, poe, result_dir)
    LOG.info(log_msg)

    return compute_disagg_matrix(job_ctxt, site, poe, result_dir)
Example #5
def compute_disagg_matrix_task(job_id, realization, poe, result_dir, site):
    """ Compute a complete 5D Disaggregation matrix. This task leans heavily
    on the DisaggregationCalculator (in the OpenQuake Java lib) to handle this
    computation.

    :param job_id: id of the calculation record in the KVS
    :type job_id: `str`
    :param int realization: logic tree sample iteration number
    :param poe: Probability of Exceedance
    :type poe: `float`
    :param result_dir: location for the Java code to write the matrix in an
        HDF5 file (in a distributed environment, this should be the path of a
        mounted NFS)
    :param site: a single site of interest
    :type site: :class:`openquake.shapes.Site` instance

    :returns: 2-tuple of (ground_motion_value, path_to_h5_matrix_file)
    """
    job_ctxt = get_running_job(job_id)

    log_msg = (
        "Computing full disaggregation matrix for job_id=%s, site=%s, "
        "realization=%s, PoE=%s. Matrix results will be serialized to `%s`.")
    log_msg %= (job_ctxt.job_id, site, realization, poe, result_dir)
    LOG.info(log_msg)

    return compute_disagg_matrix(job_ctxt, site, poe, result_dir)
Example #6
def compute_uhs_task(job_id, realization, site):
    """Compute Uniform Hazard Spectra for a given site of interest and 1 or
    more Probability of Exceedance values. The bulk of the computation will
    be done by utilizing the `UHSCalculator` class in the Java code.

    UHS results will be written directly to the database.

    :param int job_id:
        ID of the job record in the DB/KVS.
    :param realization:
        Logic tree sample number (from 1 to N, where N is the
        NUMBER_OF_LOGIC_TREE_SAMPLES param defined in the job config).
    :param site:
        The site of interest (a :class:`openquake.shapes.Site` object).
    """
    job_ctxt = utils_tasks.get_running_job(job_id)

    log_msg = (
        "Computing UHS for job_id=%s, site=%s, realization=%s."
        " UHS results will be serialized to the database.")
    log_msg %= (job_ctxt.job_id, site, realization)
    LOG.info(log_msg)

    uhs_results = compute_uhs(job_ctxt, site)

    write_uhs_spectrum_data(job_ctxt, realization, site, uhs_results)
Example #7
    def test_get_running_job(self):
        self.job.status = 'pending'
        self.job.save()

        # No 'JobCompletedError' should be raised.
        job_ctxt = tasks.get_running_job(self.job.id)

        self.assertEqual(self.params, job_ctxt.params)
        self.assertTrue(model_equals(
            self.job_profile, job_ctxt.oq_job_profile,
            ignore=('_owner_cache',)))
        self.assertTrue(model_equals(
            self.job, job_ctxt.oq_job,
            ignore=('_owner_cache',)))
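Taken together, Examples #1 and #7 pin down a small contract for get_running_job: raise JobCompletedError carrying the job id when the job has already finished or failed, otherwise return a context object describing the job. The sketch below restates that contract in isolation; JobContext's fields, the in-memory _JOBS store, and the status handling beyond 'pending' and 'failed' are illustrative stand-ins, not the real OpenQuake implementation, which looks the job up in its database/KVS.

class JobCompletedError(Exception):
    """Raised when a task is asked to work on a job that is no longer running."""


class JobContext(object):
    """Illustrative stand-in for the context object the tasks receive."""

    def __init__(self, job_id, params):
        self.job_id = job_id
        self.params = params


# Hypothetical in-memory store mapping job_id -> (status, params); the real
# code reads job state from the OpenQuake database/KVS instead.
_JOBS = {}


def get_running_job(job_id):
    """Return a JobContext for ``job_id``, or raise JobCompletedError (with
    the job id as its message) if the job is no longer in progress.
    """
    status, params = _JOBS[job_id]
    # 'failed' comes from Example #1; treating every non-'pending' status as
    # completed is an assumption made only for this sketch.
    if status != 'pending':
        raise JobCompletedError(job_id)
    return JobContext(job_id, params)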