Example #1
0
    def test(self):
        class StubRupture(object):
            """Test double: verifies the arguments the filter passes back
            and answers site filtering from a pre-built lookup table."""
            def __init__(self, integration_distance, sites_mapping):
                self.integration_distance = integration_distance
                self.sites_mapping = sites_mapping

            @property
            def source_typology(self):
                # The filter looks the filtering method up on the rupture's
                # source typology, so point it back at this instance.
                return self

            def filter_sites_by_distance_to_rupture(self, rupture,
                                                    integration_distance,
                                                    sites):
                assert rupture is self
                assert integration_distance is self.integration_distance
                return self.sites_mapping[sites]

        coll_a, coll_b, coll_c = object(), object(), object()

        ruptures = [
            StubRupture(13, {coll_a: None}),    # all filtered out
            StubRupture(13, {coll_b: coll_c}),  # partial filtering
            StubRupture(13, {coll_a: coll_a}),  # nothing filtered out
        ]
        site_colls = [coll_a, coll_b, coll_a]

        filter_func = filters.rupture_site_distance_filter(13)
        filtered = filter_func(izip(ruptures, site_colls))
        self.assertIsInstance(filtered, GeneratorType)

        # The first rupture produced no sites and must be dropped; the
        # remaining two come through paired with their filtered sites.
        results = list(filtered)
        self.assertEqual(len(results), 2)
        self.assertIs(results[0][0], ruptures[1])
        self.assertIs(results[0][1], coll_c)
        self.assertIs(results[1][0], ruptures[2])
        self.assertIs(results[1][1], coll_a)
Example #2
0
def ses_and_gmfs(job_id, src_ids, lt_rlz_id, task_seed):
    """
    Celery task for the stochastic event set calculator.

    Samples logic trees and calls the stochastic event set calculator.

    Once stochastic event sets are calculated, results will be saved to the
    database. See :class:`openquake.db.models.SESCollection`.

    Optionally (specified in the job configuration using the
    `ground_motion_fields` parameter), GMFs can be computed from each rupture
    in each stochastic event set. GMFs are also saved to the database.

    Once all of this work is complete, a signal will be sent via AMQP to let
    the control node know that the work is complete. (If there is any work left
    to be dispatched, this signal will indicate to the control node that more
    work can be enqueued.)

    :param int job_id:
        ID of the currently running job.
    :param src_ids:
        List of ids of parsed source models from which we will generate
        stochastic event sets/ruptures.
    :param lt_rlz_id:
        Id of logic tree realization model to calculate for.
    :param int task_seed:
        Value for seeding numpy/scipy in the computation of stochastic event
        sets and ground motion fields.
    """
    logs.LOG.debug(
        ("> starting `stochastic_event_sets` task: job_id=%s, " "lt_realization_id=%s") % (job_id, lt_rlz_id)
    )
    # Seed numpy's RNG so the sampling done by this task is reproducible
    # for a given task_seed.
    numpy.random.seed(task_seed)

    hc = models.HazardCalculation.objects.get(oqjob=job_id)

    # Optional job-wide "complete logic tree" containers: when requested in
    # the job config, every rupture (and GMF) is additionally mirrored into
    # these aggregate records by the _save_* helpers below.
    cmplt_lt_ses = None
    if hc.complete_logic_tree_ses:
        cmplt_lt_ses = models.SES.objects.get(ses_collection__output__oq_job=job_id, complete_logic_tree_ses=True)

    cmplt_lt_gmf = None
    if hc.complete_logic_tree_gmf:
        cmplt_lt_gmf = models.GmfSet.objects.get(gmf_collection__output__oq_job=job_id, complete_logic_tree_gmf=True)

    if hc.ground_motion_fields:
        # For ground motion field calculation, we need the points of interest
        # for the calculation.
        points_to_compute = hc.points_to_compute()

    lt_rlz = models.LtRealization.objects.get(id=lt_rlz_id)
    ltp = logictree.LogicTreeProcessor(hc.id)

    # Resolve this realization's logic-tree paths into concrete source-model
    # uncertainties and a GSIM per tectonic region type.
    apply_uncertainties = ltp.parse_source_model_logictree_path(lt_rlz.sm_lt_path)
    gsims = ltp.parse_gmpe_logictree_path(lt_rlz.gsim_lt_path)

    sources = list(
        haz_general.gen_sources(
            src_ids, apply_uncertainties, hc.rupture_mesh_spacing, hc.width_of_mfd_bin, hc.area_source_discretization
        )
    )

    logs.LOG.debug("> creating site collection")
    site_coll = haz_general.get_site_collection(hc)
    logs.LOG.debug("< done creating site collection")

    if hc.ground_motion_fields:
        imts = [haz_general.imt_to_nhlib(x) for x in hc.intensity_measure_types]

        correl_model = None
        if hc.ground_motion_correlation_model is not None:
            correl_model = _get_correl_model(hc)

    # Compute stochastic event sets
    # For each rupture generated, we can optionally calculate a GMF
    # NOTE: SES ordinals are 1-based, hence the shifted xrange.
    for ses_rlz_n in xrange(1, hc.ses_per_logic_tree_path + 1):
        logs.LOG.debug("> computing stochastic event set %s of %s" % (ses_rlz_n, hc.ses_per_logic_tree_path))

        # This is the container for all ruptures for this stochastic event set
        # (specified by `ordinal` and the logic tree realization).
        # NOTE: Many tasks can contribute ruptures to this SES.
        ses = models.SES.objects.get(ses_collection__lt_realization=lt_rlz, ordinal=ses_rlz_n)

        # Keep only sources within `maximum_distance` of at least one site;
        # the (source, sites) pairing from the filter is discarded because
        # the SES calculator only needs the sources themselves.
        sources_sites = ((src, site_coll) for src in sources)
        ssd_filter = filters.source_site_distance_filter(hc.maximum_distance)
        # Get the filtered sources, ignore the site collection:
        filtered_sources = (src for src, _ in ssd_filter(sources_sites))
        # Calculate stochastic event sets:
        logs.LOG.debug("> computing stochastic event sets")
        if hc.ground_motion_fields:
            logs.LOG.debug("> computing also ground motion fields")
            # This will be the "container" for all computed ground motion field
            # results for this stochastic event set.
            gmf_set = models.GmfSet.objects.get(gmf_collection__lt_realization=lt_rlz, ses_ordinal=ses_rlz_n)

        ses_poissonian = stochastic.stochastic_event_set_poissonian(filtered_sources, hc.investigation_time)

        logs.LOG.debug("> looping over ruptures")
        rupture_ctr = 0
        for rupture in ses_poissonian:
            # Prepare and save SES ruptures to the db:
            logs.LOG.debug("> saving SES rupture to DB")
            _save_ses_rupture(ses, rupture, cmplt_lt_ses)
            logs.LOG.debug("> done saving SES rupture to DB")

            # Compute ground motion fields (if requested)
            logs.LOG.debug("compute ground motion fields?  %s" % hc.ground_motion_fields)
            if hc.ground_motion_fields:
                # Compute and save ground motion fields

                gmf_calc_kwargs = {
                    "rupture": rupture,
                    "sites": site_coll,
                    "imts": imts,
                    # One GSIM per tectonic region type, selected per rupture.
                    "gsim": gsims[rupture.tectonic_region_type],
                    "truncation_level": hc.truncation_level,
                    # NOTE(review): DEFAULT_GMF_REALIZATIONS is a module-level
                    # constant defined outside this chunk.
                    "realizations": DEFAULT_GMF_REALIZATIONS,
                    "correlation_model": correl_model,
                    "rupture_site_filter": filters.rupture_site_distance_filter(hc.maximum_distance),
                }
                logs.LOG.debug("> computing ground motion fields")
                gmf_dict = gmf_calc.ground_motion_fields(**gmf_calc_kwargs)
                logs.LOG.debug("< done computing ground motion fields")

                logs.LOG.debug("> saving GMF results to DB")
                _save_gmf_nodes(gmf_set, gmf_dict, points_to_compute, cmplt_lt_gmf)
                logs.LOG.debug("< done saving GMF results to DB")
            rupture_ctr += 1

        logs.LOG.debug("< Done looping over ruptures")
        logs.LOG.debug(
            "%s ruptures computed for SES realization %s of %s" % (rupture_ctr, ses_rlz_n, hc.ses_per_logic_tree_path)
        )
        logs.LOG.debug("< done computing stochastic event set %s of %s" % (ses_rlz_n, hc.ses_per_logic_tree_path))

    logs.LOG.debug("< task complete, signalling completion")
    haz_general.signal_task_complete(job_id, len(src_ids))
Example #3
0
    def test_point_sources(self):
        """End-to-end filtering check: two point sources and four sites,
        with both filters wrapped in counters so the filtering decisions
        made by ``hazard_curves_poissonian`` can be inspected afterwards."""
        def build_point(source_id, mfd, location):
            # All parameters except id, MFD and location are shared; build
            # fresh distribution objects per source, as the original did.
            return nhlib.source.PointSource(
                source_id=source_id, name=source_id,
                tectonic_region_type=const.TRT.ACTIVE_SHALLOW_CRUST,
                mfd=mfd,
                nodal_plane_distribution=nhlib.pmf.PMF([
                    (1, nhlib.geo.NodalPlane(strike=0.0, dip=90.0, rake=0.0))
                ]),
                hypocenter_distribution=nhlib.pmf.PMF([(1, 10)]),
                upper_seismogenic_depth=0.0,
                lower_seismogenic_depth=10.0,
                magnitude_scaling_relationship=nhlib.scalerel.PeerMSR(),
                rupture_aspect_ratio=2,
                rupture_mesh_spacing=1.0,
                location=location,
            )

        sources = [
            build_point('point1',
                        nhlib.mfd.EvenlyDiscretizedMFD(
                            min_mag=4, bin_width=1, occurrence_rates=[5]),
                        Point(10, 10)),
            build_point('point2',
                        nhlib.mfd.EvenlyDiscretizedMFD(
                            min_mag=4, bin_width=2,
                            occurrence_rates=[5, 6, 7]),
                        Point(10, 11)),
        ]
        # Four sites; the second positional value labels each site (1..4)
        # so the counter assertions below can identify them.
        site_specs = [(Point(11, 10), 1), (Point(10, 16), 2),
                      (Point(10, 10.6), 3), (Point(10, 10.7), 4)]
        sitecol = nhlib.site.SiteCollection(
            [nhlib.site.Site(loc, label, True, 2, 3)
             for loc, label in site_specs])

        from nhlib.gsim.sadigh_1997 import SadighEtAl1997
        gsims = {const.TRT.ACTIVE_SHALLOW_CRUST: SadighEtAl1997()}
        truncation_level = 1
        time_span = 1.0
        imts = {nhlib.imt.PGA(): [0.1, 0.5, 1.3]}

        from nhlib.calc import filters
        source_site_filter = self.SitesCounterSourceFilter(
            filters.source_site_distance_filter(30)
        )
        rupture_site_filter = self.SitesCounterRuptureFilter(
            filters.rupture_site_distance_filter(30)
        )
        hazard_curves_poissonian(
            iter(sources), sitecol, imts, time_span, gsims, truncation_level,
            source_site_filter=source_site_filter,
            rupture_site_filter=rupture_site_filter
        )
        # there are two sources and four sites. first source should
        # be filtered completely since it is too far from all the sites.
        # the second one should take only three sites -- all except (10, 16).
        # it generates three ruptures with magnitudes 4, 6 and 8, from which
        # the first one doesn't affect any of sites and should be ignored,
        # second only affects site (10, 10.7) and the last one affects all
        # three.
        self.assertEqual(source_site_filter.counts,
                         [('point2', [1, 3, 4])])
        self.assertEqual(rupture_site_filter.counts,
                         [(6, [4]), (8, [1, 3, 4])])