    def test_filter(self):
        def extract_first_source(sources_sites):
            for source, _sites in sources_sites:
                yield source, None
                break
        fake_sites = [1, 2, 3]
        ses = list(
            stochastic_event_set_poissonian(
                [self.source1, self.source2],
                self.time_span, fake_sites, extract_first_source
            ))
        self.assertEqual(ses, [self.r1_1, self.r1_2, self.r1_2])

        def extract_first_rupture(ruptures_sites):
            for rupture, _sites in ruptures_sites:
                yield rupture, None
                break
        ses = list(
            stochastic_event_set_poissonian(
                [self.source1, self.source2],
                self.time_span, fake_sites,
                extract_first_source,
                extract_first_rupture
            ))
        self.assertEqual(ses, [self.r1_1])
        self.source1 = self.FakeSource(1, [self.r1_1, self.r1_0, self.r1_2],
                                       self.time_span)
        self.source2 = self.FakeSource(2, [self.r2_1], self.time_span)
    def test_no_filter(self):
        ses = list(
            stochastic_event_set_poissonian(
                [self.source1, self.source2],
                self.time_span
            ))
        self.assertEqual(ses, [self.r1_1, self.r1_2, self.r1_2, self.r2_1])
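The two callbacks above illustrate the filter contract used by stochastic_event_set_poissonian: a filter is a callable that receives an iterable of (item, sites) pairs and yields only the pairs worth keeping. Below is a minimal sketch of a distance-based filter following that contract; the distance_to method on the site objects is an illustrative assumption, not part of the library.

def make_distance_filter(max_distance):
    # Returns a filter with the same signature as the extract_* helpers
    # above: it consumes (item, sites) pairs and yields the pairs whose
    # sites lie within max_distance (a purely illustrative criterion).
    def distance_filter(items_sites):
        for item, sites in items_sites:
            close = [s for s in sites if s.distance_to(item) <= max_distance]
            if close:
                yield item, close
    return distance_filter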
Example #3
def compute_ses(job_id, src_ses_seeds, lt_rlz, ltp):
    """
    Celery task for the stochastic event set calculator.

    Samples logic trees and calls the stochastic event set calculator.

    Once stochastic event sets are calculated, results will be saved to the
    database. See :class:`openquake.engine.db.models.SESCollection`.

    Optionally (specified in the job configuration using the
    `ground_motion_fields` parameter), GMFs can be computed from each rupture
    in each stochastic event set. GMFs are also saved to the database.

    :param int job_id:
        ID of the currently running job.
    :param src_ses_seeds:
        List of triples (src_id, ses, seed), where ses is the Stochastic
        Event Set object the generated ruptures belong to
    :param lt_rlz:
        Logic Tree realization object
    :param ltp:
        A :class:`openquake.engine.input.LogicTreeProcessor` instance
    """
    hc = models.HazardCalculation.objects.get(oqjob=job_id)
    apply_uncertainties = ltp.parse_source_model_logictree_path(
        lt_rlz.sm_lt_path)

    # complete_logic_tree_ses flag
    cmplt_lt_ses = None
    if hc.complete_logic_tree_ses:
        cmplt_lt_ses = models.SES.objects.get(
            ses_collection__output__oq_job=job_id, ordinal=None)

    with EnginePerformanceMonitor("reading sources", job_id, compute_ses):
        src_ids = set(src_id for src_id, ses, seed in src_ses_seeds)
        source = dict(
            (s.id, apply_uncertainties(s.nrml))
            for s in models.ParsedSource.objects.filter(pk__in=src_ids))

    # Compute and save stochastic event sets
    # For each rupture generated, we can optionally calculate a GMF
    with EnginePerformanceMonitor("computing ses", job_id, compute_ses):
        ruptures = []
        for src_id, ses, seed in src_ses_seeds:
            src = source[src_id]
            numpy.random.seed(seed)
            rupts = stochastic.stochastic_event_set_poissonian([src], hc.investigation_time)
            # build a SESRupture with a distinct tag for each sampled rupture
            for i, r in enumerate(rupts):
                rup = models.SESRupture(
                    ses=ses,
                    rupture=r,
                    tag="rlz=%02d|ses=%04d|src=%s|i=%03d" % (lt_rlz.ordinal, ses.ordinal, src.source_id, i),
                    hypocenter=r.hypocenter.wkt2d,
                    magnitude=r.mag,
                )
                ruptures.append(rup)
        if not ruptures:
            return
        source.clear()  # save a little memory

    with EnginePerformanceMonitor("saving ses", job_id, compute_ses):
        _save_ses_ruptures(ruptures, cmplt_lt_ses)
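The tag attached to each SESRupture above packs the realization ordinal, SES ordinal, source id and rupture index into one string. Here is a self-contained sketch of writing and reading that format; the parse_tag helper is only illustrative, not an engine function.

def make_tag(rlz_ordinal, ses_ordinal, source_id, index):
    # same format string as in compute_ses above
    return 'rlz=%02d|ses=%04d|src=%s|i=%03d' % (
        rlz_ordinal, ses_ordinal, source_id, index)

def parse_tag(tag):
    # split 'key=value' fields back into a dict (illustrative helper)
    return dict(part.split('=', 1) for part in tag.split('|'))

tag = make_tag(1, 3, 'src_A', 7)
assert tag == 'rlz=01|ses=0003|src=src_A|i=007'
assert parse_tag(tag)['src'] == 'src_A'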
    def test(self):
        time_span = 15
        r1_1 = self.FakeRupture(1)
        r1_0 = self.FakeRupture(0)
        r1_2 = self.FakeRupture(2)
        r2_1 = self.FakeRupture(1)
        source1 = self.FakeSource([r1_1, r1_0, r1_2], time_span)
        source2 = self.FakeSource([r2_1], time_span)
        ses = list(stochastic_event_set_poissonian([source1, source2],
                                                   time_span))
        self.assertEqual(ses, [r1_1, r1_2, r1_2, r2_1])
    def test_source_errors(self):
        # exercise the case where an error occurs while computing on a given
        # seismic source; in this case, we expect an error to be raised which
        # signals the id of the source in question
        fail_source = self.FailSource(2, [self.r2_1], self.time_span)
        with self.assertRaises(RuntimeError) as ae:
            list(stochastic_event_set_poissonian([self.source1, fail_source],
                                                 self.time_span))

        expected_error = (
            'An error occurred with source id=2. Error: Something bad happened'
        )
        self.assertEqual(expected_error, ae.exception.message)
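The test above pins down how per-source failures are reported: the original exception is wrapped in a RuntimeError that names the offending source id. A small self-contained sketch of that wrapping pattern, using stand-in classes rather than the real source objects:

class FailingSource(object):
    # stand-in for a seismic source whose sampling blows up
    source_id = 2

    def sample(self):
        raise ValueError('Something bad happened')

def sample_all(sources):
    # yield ruptures source by source; on failure, re-raise with the
    # source id attached so the caller knows which source to inspect
    for source in sources:
        try:
            for rupture in source.sample():
                yield rupture
        except Exception as err:
            raise RuntimeError('An error occurred with source id=%s. '
                               'Error: %s' % (source.source_id, err))

try:
    list(sample_all([FailingSource()]))  # list() forces the generator
except RuntimeError as exc:
    assert 'source id=2' in str(exc)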
Example #6
def ses_and_gmfs(job_id, src_ids, lt_rlz_id, task_seed, result_grp_ordinal):
    """
    Celery task for the stochastic event set calculator.

    Samples logic trees and calls the stochastic event set calculator.

    Once stochastic event sets are calculated, results will be saved to the
    database. See :class:`openquake.engine.db.models.SESCollection`.

    Optionally (specified in the job configuration using the
    `ground_motion_fields` parameter), GMFs can be computed from each rupture
    in each stochastic event set. GMFs are also saved to the database.

    Once all of this work is complete, a signal will be sent via AMQP to let
    the control node know that the work is complete. (If there is any work left
    to be dispatched, this signal will indicate to the control node that more
    work can be enqueued.)

    :param int job_id:
        ID of the currently running job.
    :param src_ids:
        List of ids of parsed source models from which we will generate
        stochastic event sets/ruptures.
    :param lt_rlz_id:
        Id of logic tree realization model to calculate for.
    :param int task_seed:
        Value for seeding numpy/scipy in the computation of stochastic event
        sets and ground motion fields.
    :param int result_grp_ordinal:
        The result group in which the calculation results will be placed.
        This corresponds to the sequence number of the task within the
        entire calculation.
    """
    logs.LOG.debug(('> starting `stochastic_event_sets` task: job_id=%s, '
                    'lt_realization_id=%s') % (job_id, lt_rlz_id))
    numpy.random.seed(task_seed)

    hc = models.HazardCalculation.objects.get(oqjob=job_id)

    cmplt_lt_ses = None
    if hc.complete_logic_tree_ses:
        cmplt_lt_ses = models.SES.objects.get(
            ses_collection__output__oq_job=job_id,
            complete_logic_tree_ses=True)

    if hc.ground_motion_fields:
        # For ground motion field calculation, we need the points of interest
        # for the calculation.
        points_to_compute = hc.points_to_compute()

        imts = [haz_general.imt_to_hazardlib(x)
                for x in hc.intensity_measure_types]

        correl_model = None
        if hc.ground_motion_correlation_model is not None:
            correl_model = haz_general.get_correl_model(hc)

    lt_rlz = models.LtRealization.objects.get(id=lt_rlz_id)
    ltp = logictree.LogicTreeProcessor(hc.id)

    apply_uncertainties = ltp.parse_source_model_logictree_path(
            lt_rlz.sm_lt_path)
    gsims = ltp.parse_gmpe_logictree_path(lt_rlz.gsim_lt_path)

    sources = list(haz_general.gen_sources(
        src_ids, apply_uncertainties, hc.rupture_mesh_spacing,
        hc.width_of_mfd_bin, hc.area_source_discretization))

    # Compute stochastic event sets
    # For each rupture generated, we can optionally calculate a GMF
    for ses_rlz_n in xrange(1, hc.ses_per_logic_tree_path + 1):
        logs.LOG.debug('> computing stochastic event set %s of %s'
                       % (ses_rlz_n, hc.ses_per_logic_tree_path))

        # This is the container for all ruptures for this stochastic event set
        # (specified by `ordinal` and the logic tree realization).
        # NOTE: Many tasks can contribute ruptures to this SES.
        ses = models.SES.objects.get(
            ses_collection__lt_realization=lt_rlz, ordinal=ses_rlz_n)

        sources_sites = ((src, hc.site_collection) for src in sources)
        ssd_filter = filters.source_site_distance_filter(hc.maximum_distance)
        # Get the filtered sources, ignore the site collection:
        filtered_sources = (src for src, _ in ssd_filter(sources_sites))
        # Calculate stochastic event sets:
        logs.LOG.debug('> computing stochastic event sets')
        if hc.ground_motion_fields:
            gmf_cache = _create_gmf_cache(len(points_to_compute), imts)

            logs.LOG.debug('> computing also ground motion fields')
            # This will be the "container" for all computed ground motion field
            # results for this stochastic event set.
            gmf_set = models.GmfSet.objects.get(
                gmf_collection__lt_realization=lt_rlz, ses_ordinal=ses_rlz_n)

        ses_poissonian = stochastic.stochastic_event_set_poissonian(
            filtered_sources, hc.investigation_time)

        logs.LOG.debug('> looping over ruptures')
        rupture_ordinal = 0
        for rupture in ses_poissonian:
            rupture_ordinal += 1

            # Prepare and save SES ruptures to the db:
            logs.LOG.debug('> saving SES rupture to DB')
            _save_ses_rupture(
                ses, rupture, cmplt_lt_ses, result_grp_ordinal,
                rupture_ordinal)
            logs.LOG.debug('> done saving SES rupture to DB')

            # Compute ground motion fields (if requested)
            logs.LOG.debug('compute ground motion fields?  %s'
                           % hc.ground_motion_fields)
            if hc.ground_motion_fields:
                # Compute and save ground motion fields

                gmf_calc_kwargs = {
                    'rupture': rupture,
                    'sites': hc.site_collection,
                    'imts': imts,
                    'gsim': gsims[rupture.tectonic_region_type],
                    'truncation_level': hc.truncation_level,
                    'realizations': DEFAULT_GMF_REALIZATIONS,
                    'correlation_model': correl_model,
                    'rupture_site_filter':
                        filters.rupture_site_distance_filter(
                            hc.maximum_distance),
                }
                logs.LOG.debug('> computing ground motion fields')
                gmf_dict = gmf_calc.ground_motion_fields(**gmf_calc_kwargs)
                logs.LOG.debug('< done computing ground motion fields')

                # update the gmf cache:
                for k, v in gmf_dict.iteritems():
                    gmf_cache[k] = numpy.append(
                        gmf_cache[k], v, axis=1)

        logs.LOG.debug('< Done looping over ruptures')
        logs.LOG.debug('%s ruptures computed for SES realization %s of %s'
                       % (rupture_ordinal, ses_rlz_n,
                          hc.ses_per_logic_tree_path))
        logs.LOG.debug('< done computing stochastic event set %s of %s'
                       % (ses_rlz_n, hc.ses_per_logic_tree_path))

        if hc.ground_motion_fields:
            # save the GMFs to the DB
            logs.LOG.debug('> saving GMF results to DB')
            _save_gmfs(
                gmf_set, gmf_cache, points_to_compute, result_grp_ordinal)
            logs.LOG.debug('< done saving GMF results to DB')

    logs.LOG.debug('< task complete, signalling completion')
    base.signal_task_complete(job_id=job_id, num_items=len(src_ids))
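Inside the rupture loop above, gmf_cache accumulates one column per rupture and per IMT, so each entry ends up with shape (n_sites, n_ruptures). A short self-contained sketch of that update pattern; create_gmf_cache mirrors what _create_gmf_cache is assumed to produce, and the random values stand in for real GMF results.

import numpy

def create_gmf_cache(n_sites, imts):
    # one empty (n_sites, 0) array per IMT, to be grown column by column
    return dict((imt, numpy.zeros((n_sites, 0))) for imt in imts)

n_sites = 4
imts = ['PGA', 'SA(0.1)']
gmf_cache = create_gmf_cache(n_sites, imts)
for _rupture in range(3):  # pretend three ruptures were computed
    gmf_dict = dict((imt, numpy.random.rand(n_sites, 1)) for imt in imts)
    for k, v in gmf_dict.items():
        gmf_cache[k] = numpy.append(gmf_cache[k], v, axis=1)
assert gmf_cache['PGA'].shape == (4, 3)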
Example #7
def ses_and_gmfs(job_id, src_ids, ses, task_seed):
    """
    Celery task for the stochastic event set calculator.

    Samples logic trees and calls the stochastic event set calculator.

    Once stochastic event sets are calculated, results will be saved to the
    database. See :class:`openquake.engine.db.models.SESCollection`.

    Optionally (specified in the job configuration using the
    `ground_motion_fields` parameter), GMFs can be computed from each rupture
    in each stochastic event set. GMFs are also saved to the database.

    :param int job_id:
        ID of the currently running job.
    :param src_ids:
        List of ids of parsed source models from which we will generate
        stochastic event sets/ruptures.
    :param ses:
        Stochastic Event Set object; its SES collection determines the logic
        tree realization to calculate for.
    :param int task_seed:
        Value for seeding numpy/scipy in the computation of stochastic event
        sets and ground motion fields.
    """
    numpy.random.seed(task_seed)

    hc = models.HazardCalculation.objects.get(oqjob=job_id)

    # complete_logic_tree_ses flag
    cmplt_lt_ses = None
    if hc.complete_logic_tree_ses:
        cmplt_lt_ses = models.SES.objects.get(
            ses_collection__output__oq_job=job_id,
            ordinal=None)

    # preparing sources

    ltp = logictree.LogicTreeProcessor(hc.id)
    lt_rlz = ses.ses_collection.lt_realization

    apply_uncertainties = ltp.parse_source_model_logictree_path(
        lt_rlz.sm_lt_path)

    gsims = ltp.parse_gmpe_logictree_path(lt_rlz.gsim_lt_path)

    source_iter = haz_general.gen_sources(
        src_ids, apply_uncertainties, hc.rupture_mesh_spacing,
        hc.width_of_mfd_bin, hc.area_source_discretization)

    src_filter = filters.source_site_distance_filter(hc.maximum_distance)
    rup_filter = filters.rupture_site_distance_filter(hc.maximum_distance)

    with EnginePerformanceMonitor(
            'reading site collection', job_id, ses_and_gmfs):
        site_collection = hc.site_collection

    # Compute and save stochastic event sets
    # For each rupture generated, we can optionally calculate a GMF
    with EnginePerformanceMonitor('computing ses', job_id, ses_and_gmfs):
        ruptures = list(stochastic.stochastic_event_set_poissonian(
                        source_iter, hc.investigation_time, site_collection,
                        src_filter, rup_filter))
        if not ruptures:
            return

    with EnginePerformanceMonitor('saving ses', job_id, ses_and_gmfs):
        rupture_ids = _save_ses_ruptures(ses, ruptures, cmplt_lt_ses)

    if hc.ground_motion_fields:
        with EnginePerformanceMonitor(
                'computing gmfs', job_id, ses_and_gmfs):
            gmf_cache = compute_gmf_cache(
                hc, gsims, ruptures, rupture_ids)

        with EnginePerformanceMonitor('saving gmfs', job_id, ses_and_gmfs):
            _save_gmfs(ses, gmf_cache, site_collection)
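The sampling performed by stochastic_event_set_poissonian boils down to drawing, for every rupture, a Poisson-distributed number of occurrences with mean rate * investigation_time and yielding the rupture that many times. Below is a minimal sketch of that idea; FakeSource and its ruptures_and_rates attribute are illustrative stand-ins, not hazardlib classes.

import numpy

class FakeSource(object):
    def __init__(self, ruptures_and_rates):
        # list of (rupture, annual occurrence rate) pairs
        self.ruptures_and_rates = ruptures_and_rates

def sample_poissonian(sources, investigation_time):
    for source in sources:
        for rupture, rate in source.ruptures_and_rates:
            n_occ = numpy.random.poisson(rate * investigation_time)
            for _ in range(n_occ):
                yield rupture

numpy.random.seed(42)
source = FakeSource([('rup_A', 0.2), ('rup_B', 0.05)])
ses = list(sample_poissonian([source], investigation_time=50))
# each entry of `ses` is one rupture occurrence within the 50-year window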
Example #8
def compute_ses(job_id, src_ids, ses, src_seeds, ltp):
    """
    Celery task for the stochastic event set calculator.

    Samples logic trees and calls the stochastic event set calculator.

    Once stochastic event sets are calculated, results will be saved to the
    database. See :class:`openquake.engine.db.models.SESCollection`.

    Optionally (specified in the job configuration using the
    `ground_motion_fields` parameter), GMFs can be computed from each rupture
    in each stochastic event set. GMFs are also saved to the database.

    :param int job_id:
        ID of the currently running job.
    :param src_ids:
        List of ids of parsed source models from which we will generate
        stochastic event sets/ruptures.
    :param ses:
        Stochastic Event Set object
    :param src_seeds:
        List of seeds (one per source) for numpy/scipy in the computation of
        the stochastic event sets and ground motion fields
    :param ltp:
        a :class:`openquake.engine.input.LogicTreeProcessor` instance
    """
    hc = models.HazardCalculation.objects.get(oqjob=job_id)
    lt_rlz = ses.ses_collection.lt_realization
    apply_uncertainties = ltp.parse_source_model_logictree_path(
        lt_rlz.sm_lt_path)

    # complete_logic_tree_ses flag
    cmplt_lt_ses = None
    if hc.complete_logic_tree_ses:
        cmplt_lt_ses = models.SES.objects.get(
            ses_collection__output__oq_job=job_id,
            ordinal=None)

    with EnginePerformanceMonitor(
            'reading sources', job_id, compute_ses):
        sources = [apply_uncertainties(s.nrml)
                   for s in models.ParsedSource.objects.filter(pk__in=src_ids)]

    # Compute and save stochastic event sets
    # For each rupture generated, we can optionally calculate a GMF
    with EnginePerformanceMonitor('computing ses', job_id, compute_ses):
        ruptures = []
        for src_seed, src in zip(src_seeds, sources):
            # first set the seed for the specific source
            numpy.random.seed(src_seed)
            # then make copies of the hazardlib ruptures (which may contain
            # duplicates): the copy is needed to keep the tags distinct
            rupts = map(copy.copy, stochastic.stochastic_event_set_poissonian(
                        [src], hc.investigation_time))
            # set the tag for each copy
            for i, r in enumerate(rupts):
                r.tag = 'rlz=%02d|ses=%04d|src=%s|i=%03d' % (
                    lt_rlz.ordinal, ses.ordinal, src.source_id, i)
            ruptures.extend(rupts)
        if not ruptures:
            return

    with EnginePerformanceMonitor('saving ses', job_id, compute_ses):
        _save_ses_ruptures(ses, ruptures, cmplt_lt_ses)
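The copy.copy call above exists because the sampler may yield the very same rupture object more than once; shallow copies let each occurrence carry its own tag. A tiny self-contained sketch of that step, with a stand-in Rupture class instead of the hazardlib one:

import copy

class Rupture(object):
    def __init__(self, mag):
        self.mag = mag

rup = Rupture(6.5)
sampled = [rup, rup, rup]                  # same object yielded three times
copies = [copy.copy(r) for r in sampled]   # mirrors map(copy.copy, ...) above
for i, r in enumerate(copies):
    r.tag = 'rlz=%02d|ses=%04d|src=%s|i=%03d' % (0, 1, 'src_X', i)
# every occurrence keeps its own tag; without the copies all three entries
# would point at the same object and share the final tag
assert len(set(r.tag for r in copies)) == 3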