Code example #1
def sample_ruptures(sources, cmaker, sitecol=None, monitor=Monitor()):
    """
    :param sources:
        a sequence of sources of the same group
    :param cmaker:
        a ContextMaker instance with ses_per_logic_tree_path, ses_seed
    :param sitecol:
        SiteCollection instance used for filtering (None for no filtering)
    :param monitor:
        monitor instance
    :yields:
        dictionaries with keys rup_array, calc_times
    """
    srcfilter = SourceFilter(sitecol, cmaker.maximum_distance)
    # AccumDict of arrays with 3 elements num_ruptures, num_sites, calc_time
    calc_times = AccumDict(accum=numpy.zeros(3, numpy.float32))
    # Compute and save stochastic event sets
    num_ses = cmaker.ses_per_logic_tree_path
    grp_id = sources[0].grp_id
    # Compute the number of occurrences of the source group. This is used
    # for cluster groups or groups with mutually exclusive sources.
    if (getattr(sources, 'atomic', False) and
            getattr(sources, 'cluster', False)):
        eb_ruptures, calc_times = sample_cluster(
            sources, srcfilter, num_ses, vars(cmaker))

        # Yield ruptures
        er = sum(src.num_ruptures for src, _ in srcfilter.filter(sources))
        yield AccumDict(dict(rup_array=get_rup_array(eb_ruptures, srcfilter),
                             calc_times=calc_times, eff_ruptures={grp_id: er}))
    else:
        eb_ruptures = []
        eff_ruptures = 0
        # AccumDict of arrays with 3 elements: num_ruptures, num_sites, calc_time
        calc_times = AccumDict(accum=numpy.zeros(3, numpy.float32))
        for src, _ in srcfilter.filter(sources):
            nr = src.num_ruptures
            eff_ruptures += nr
            t0 = time.time()
            if len(eb_ruptures) > MAX_RUPTURES:
                # yield partial result to avoid running out of memory
                yield AccumDict(dict(rup_array=get_rup_array(eb_ruptures,
                                                             srcfilter),
                                     calc_times={}, eff_ruptures={}))
                eb_ruptures.clear()
            samples = getattr(src, 'samples', 1)
            for rup, trt_smr, n_occ in src.sample_ruptures(
                    samples * num_ses, cmaker.ses_seed):
                ebr = EBRupture(rup, src.source_id, trt_smr, n_occ)
                eb_ruptures.append(ebr)
            dt = time.time() - t0
            calc_times[src.id] += numpy.array([nr, src.nsites, dt])
        rup_array = get_rup_array(eb_ruptures, srcfilter)
        yield AccumDict(dict(rup_array=rup_array, calc_times=calc_times,
                             eff_ruptures={grp_id: eff_ruptures}))
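The `len(eb_ruptures) > MAX_RUPTURES` branch above is a memory cap: the generator flushes a partial result as soon as the buffer grows too large, then emits the remainder at the end. A minimal, self-contained sketch of that pattern (generic names, not the oq-engine API):

MAX_ITEMS = 3

def sample(items):
    buf = []
    for item in items:
        if len(buf) >= MAX_ITEMS:
            yield list(buf)  # partial result, keeps memory bounded
            buf.clear()
        buf.append(item)
    yield list(buf)  # final, possibly smaller, chunk

for chunk in sample(range(8)):
    print(chunk)  # [0, 1, 2], then [3, 4, 5], then [6, 7]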
Code example #2
File: filters_test.py Project: tieganh/oq-engine
 def test_international_date_line_2(self):
     # from a bug affecting a calculation in New Zealand
     fname = gettemp(characteric_source)
     [[src]] = nrml.to_python(fname)
     os.remove(fname)
     maxdist = IntegrationDistance({'default': 200})
     sitecol = SiteCollection([
         Site(location=Point(176.919, -39.489),
              vs30=760, vs30measured=True, z1pt0=100, z2pt5=5)])
     srcfilter = SourceFilter(sitecol, maxdist)
     sites = srcfilter.get_close_sites(src)
     self.assertIsNotNone(sites)
Code example #3
 def test_international_date_line(self):
     maxdist = IntegrationDistance({'default': [
         (3, 30), (4, 40), (5, 100), (6, 200), (7, 300), (8, 400)]})
     sitecol = SiteCollection([
         Site(location=Point(179, 80),
              vs30=1.2, vs30measured=True,
              z1pt0=3.4, z2pt5=5.6, backarc=True),
         Site(location=Point(-179, 80),
              vs30=55.4, vs30measured=False,
              z1pt0=66.7, z2pt5=88.9, backarc=False)])
     srcfilter = SourceFilter(sitecol, maxdist)
     bb1, bb2 = srcfilter.get_bounding_boxes(mag=4.5)
     # bounding boxes in the form min_lon, min_lat, max_lon, max_lat
     aae(bb1, (173.8210225, 79.10068, 184.1789775, 80.89932))
     aae(bb2, (-184.1789775, 79.10068, -173.8210225, 80.89932))
Code example #4
File: filters_test.py Project: tieganh/oq-engine
 def test_get_bounding_boxes(self):
     maxdist = IntegrationDistance({'default': [
         (3, 30), (4, 40), (5, 100), (6, 200), (7, 300), (8, 400)]})
     sitecol = SiteCollection([
         Site(location=Point(10, 20, 30),
              vs30=1.2, vs30measured=True,
              z1pt0=3.4, z2pt5=5.6, backarc=True),
         Site(location=Point(-1.2, -3.4, -5.6),
              vs30=55.4, vs30measured=False,
              z1pt0=66.7, z2pt5=88.9, backarc=False)])
     srcfilter = SourceFilter(sitecol, maxdist)
     bb1, bb2 = srcfilter.get_bounding_boxes(mag=4)
     # bounding boxes in the form min_lon, min_lat, max_lon, max_lat
     aae(bb1, (9.6171855, 19.640272, 10.3828145, 20.359728))
     aae(bb2, (-1.5603623, -3.759728, -0.8396377, -3.040272))
Code example #5
File: filters_test.py Project: tieganh/oq-engine
 def test_international_date_line(self):
     maxdist = IntegrationDistance({'default': [
         (3, 30), (4, 40), (5, 100), (6, 200), (7, 300), (8, 400)]})
     sitecol = SiteCollection([
         Site(location=Point(179, 80),
              vs30=1.2, vs30measured=True,
              z1pt0=3.4, z2pt5=5.6, backarc=True),
         Site(location=Point(-179, 80),
              vs30=55.4, vs30measured=False,
              z1pt0=66.7, z2pt5=88.9, backarc=False)])
     srcfilter = SourceFilter(sitecol, maxdist)
     bb1, bb2 = srcfilter.get_bounding_boxes(mag=4)
     # bounding boxes in the form min_lon, min_lat, max_lon, max_lat
     aae(bb1, (176.928409, 79.640272, 181.071591, 80.359728))
     aae(bb2, (-181.071591, 79.640272, -176.928409, 80.359728))
Code example #6
 def test_get_bounding_boxes(self):
     maxdist = IntegrationDistance({'default': [
         (3, 30), (4, 40), (5, 100), (6, 200), (7, 300), (8, 400)]})
     sitecol = SiteCollection([
         Site(location=Point(10, 20, 30),
              vs30=1.2, vs30measured=True,
              z1pt0=3.4, z2pt5=5.6, backarc=True),
         Site(location=Point(-1.2, -3.4, -5.6),
              vs30=55.4, vs30measured=False,
              z1pt0=66.7, z2pt5=88.9, backarc=False)])
     srcfilter = SourceFilter(sitecol, maxdist)
     bb1, bb2 = srcfilter.get_bounding_boxes(mag=4.5)
     # bounding boxes in the form min_lon, min_lat, max_lon, max_lat
     aae(bb1, (9.0429636, 19.10068, 10.9570364, 20.89932))
     aae(bb2, (-2.1009057, -4.29932, -0.2990943, -2.50068))
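The boxes in the test above (mag=4.5) correspond to enlarging each site by a 100 km radius. A self-contained sketch of the underlying geometry (an illustration, not the hazardlib implementation): one degree of a great circle is about 111.195 km, and the longitude delta grows with the inverse cosine of the latitude.

import math

KM_PER_DEG = 111.19492664455873  # mean earth radius * pi / 180

def bounding_box(lon, lat, dist_km):
    """(min_lon, min_lat, max_lon, max_lat) of the box dist_km around a site"""
    dlat = dist_km / KM_PER_DEG
    dlon = dist_km / (KM_PER_DEG * math.cos(math.radians(lat)))
    return lon - dlon, lat - dlat, lon + dlon, lat + dlat

print(bounding_box(10, 20, 100))
# ~ (9.042961, 19.100678, 10.957039, 20.899322): bb1 above to within ~1e-5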
Code example #7
    def gen_args(self, csm, monitor):
        """
        Used in the case of large source model logic trees.

        :param monitor: a :class:`openquake.baselib.performance.Monitor`
        :param csm: a reduced CompositeSourceModel
        :yields: (sources, sites, gsims, monitor) tuples
        """
        oq = self.oqparam
        csm = self.csm.filter(SourceFilter(self.sitecol, oq.maximum_distance))
        maxweight = csm.get_maxweight(oq.concurrent_tasks)
        numheavy = len(csm.get_sources('heavy', maxweight))
        logging.info('Using maxweight=%d, numheavy=%d', maxweight, numheavy)
        param = dict(truncation_level=oq.truncation_level,
                     imtls=oq.imtls,
                     seed=oq.ses_seed,
                     maximum_distance=oq.maximum_distance,
                     ses_per_logic_tree_path=oq.ses_per_logic_tree_path)

        num_tasks = 0
        num_sources = 0
        for sm in csm.source_models:
            for sg in sm.src_groups:
                gsims = csm.info.gsim_lt.get_gsims(sg.trt)
                csm.add_infos(sg.sources)
                for block in csm.split_in_blocks(maxweight, sg.sources):
                    block.samples = sm.samples
                    yield block, csm.src_filter, gsims, param, monitor
                    num_tasks += 1
                    num_sources += len(block)
        logging.info('Sent %d sources in %d tasks', num_sources, num_tasks)
Code example #8
File: classical.py Project: jbyronar/oq-engine
def run_preclassical(csm, oqparam, h5):
    """
    :param csm: a CompositeSourceModel with attribute .srcfilter
    :param oqparam: the parameters in job.ini file
    :param h5: a DataStore instance
    """
    logging.info('Sending %s', csm.sitecol)

    # do nothing for atomic sources except counting the ruptures
    for src in csm.get_sources(atomic=True):
        src.num_ruptures = src.count_ruptures()
        src.nsites = len(csm.sitecol)

    # run preclassical for non-atomic sources
    sources_by_grp = groupby(
        csm.get_sources(atomic=False),
        lambda src: (src.grp_id, msr_name(src)))
    param = dict(maximum_distance=oqparam.maximum_distance,
                 pointsource_distance=oqparam.pointsource_distance,
                 ps_grid_spacing=oqparam.ps_grid_spacing,
                 split_sources=oqparam.split_sources)
    srcfilter = SourceFilter(
        csm.sitecol.reduce(10000) if csm.sitecol else None,
        oqparam.maximum_distance)
    res = parallel.Starmap(
        preclassical,
        ((srcs, srcfilter, param) for srcs in sources_by_grp.values()),
        h5=h5, distribute=None if len(sources_by_grp) > 1 else 'no').reduce()

    if res and res['before'] != res['after']:
        logging.info('Reduced the number of sources from {:_d} -> {:_d}'.
                     format(res['before'], res['after']))

    if res and h5:
        csm.update_source_info(res['calc_times'], nsites=True)

    for grp_id, srcs in res.items():
        # srcs can be empty if the minimum_magnitude filter is on
        if srcs and not isinstance(grp_id, str):
            newsg = SourceGroup(srcs[0].tectonic_region_type)
            newsg.sources = srcs
            csm.src_groups[grp_id] = newsg

    # sanity check
    for sg in csm.src_groups:
        for src in sg:
            assert src.num_ruptures
            assert src.nsites

    # store ps_grid data, if any
    for key, sources in res.items():
        if isinstance(key, str) and key.startswith('ps_grid/'):
            arrays = []
            for ps in sources:
                if hasattr(ps, 'location'):
                    lonlats = [ps.location.x, ps.location.y]
                    for src in getattr(ps, 'pointsources', []):
                        lonlats.extend([src.location.x, src.location.y])
                    arrays.append(F32(lonlats))
            h5[key] = arrays
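run_preclassical above buckets the sources with the baselib groupby helper, keyed on (grp_id, msr_name). A small self-contained illustration of that helper on synthetic data (the dict fields here are made up for the example):

from openquake.baselib.general import groupby

srcs = [dict(grp_id=0, name='a'), dict(grp_id=0, name='b'),
        dict(grp_id=1, name='c')]
by_grp = groupby(srcs, lambda src: src['grp_id'])  # key -> list of sources
print({k: [s['name'] for s in v] for k, v in by_grp.items()})
# {0: ['a', 'b'], 1: ['c']}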
Code example #9
    def test_nankai(self):
        # source model for the Nankai region provided by M. Pagani
        source_model = os.path.join(os.path.dirname(__file__), 'nankai.xml')
        # it has a single group containing 15 mutex sources
        [group] = nrml.to_python(source_model)
        aae(group.srcs_weights, [
            0.0125, 0.0125, 0.0125, 0.0125, 0.1625, 0.1625, 0.0125, 0.0125,
            0.025, 0.025, 0.05, 0.05, 0.325, 0.025, 0.1
        ])
        rup_serial = numpy.arange(group.tot_ruptures, dtype=numpy.uint32)
        start = 0
        for i, src in enumerate(group):
            src.id = i
            nr = src.num_ruptures
            src.serial = rup_serial[start:start + nr]
            start += nr
        group.samples = 1
        lonlat = 135.68, 35.68
        site = Site(geo.Point(*lonlat), 800, True, z1pt0=100., z2pt5=1.)
        s_filter = SourceFilter(SiteCollection([site]), {})
        param = dict(ses_per_logic_tree_path=10, seed=42)
        gsims = [SiMidorikawa1999SInter()]
        dic = sample_ruptures(group, s_filter, gsims, param)
        self.assertEqual(dic['num_ruptures'], 19)  # total ruptures
        self.assertEqual(dic['num_events'], 16)
        self.assertEqual(len(dic['eb_ruptures']), 8)
        self.assertEqual(len(dic['calc_times']), 15)  # mutex sources

        # test export
        mesh = numpy.array([lonlat], [('lon', float), ('lat', float)])
        ebr = dic['eb_ruptures'][0]
        ebr.export(mesh)
Code example #10
    def test_nankai(self):
        # source model for the Nankai region provided by M. Pagani
        source_model = os.path.join(os.path.dirname(__file__), 'nankai.xml')
        # it has a single group containing 15 mutex sources
        [group] = nrml.to_python(source_model)
        aae([src.mutex_weight for src in group],
            [0.0125, 0.0125, 0.0125, 0.0125, 0.1625, 0.1625, 0.0125, 0.0125,
             0.025, 0.025, 0.05, 0.05, 0.325, 0.025, 0.1])
        seed = 42
        rup_serial = numpy.arange(seed, seed + group.tot_ruptures,
                                  dtype=numpy.uint32)
        start = 0
        for i, src in enumerate(group):
            src.id = i
            nr = src.num_ruptures
            src.serial = rup_serial[start:start + nr]
            start += nr
        lonlat = 135.68, 35.68
        site = Site(geo.Point(*lonlat), 800, z1pt0=100., z2pt5=1.)
        s_filter = SourceFilter(SiteCollection([site]), {})
        param = dict(ses_per_logic_tree_path=10, filter_distance='rjb',
                     gsims=[SiMidorikawa1999SInter()])
        dic = sum(sample_ruptures(group, param, s_filter), {})
        self.assertEqual(len(dic['rup_array']), 5)
        self.assertEqual(len(dic['calc_times']), 15)  # mutex sources

        # test no filtering 1
        ruptures = list(stochastic_event_set(group))
        self.assertEqual(len(ruptures), 19)

        # test no filtering 2
        ruptures = sum(sample_ruptures(group, param), {})['rup_array']
        self.assertEqual(len(ruptures), 5)
Code example #11
File: calc.py Project: guyomd/vengine
 def __init__(self, oqparam, sites_col, correlation_model):
     self.oqparam = oqparam
     self.ssm_lt = get_source_model_lt(oqparam) # Read the SSC logic tree
     self.hc = mdhc.MultiDimensionalHazardCurve(oqparam.imtls,
                                                sites_col, correlation_model,
                                                oqparam.maximum_distance)
     self.ndims = len(oqparam.imtls.keys())
     self.periods = get_imts(oqparam)
     self.sites = sites_col
     self.cm = correlation_model
     self.srcfilter = SourceFilter(sites_col, oqparam.maximum_distance)
     self.integration_prms = {'truncation_level': oqparam.truncation_level,
                              'abseps': 0.0001,  # Documentation: Optimal value is 1E-6
                              'maxpts': self.ndims*10  # Documentation: Optimal value is len(lnSA)*1000
                             }
     self.integration_prms.update({'trunc_norm': self._truncation_normalization_factor()})
Code example #12
    def execute(self):
        """
        Run in parallel `core_task(sources, sitecol, monitor)`, by
        parallelizing on the sources according to their weight and
        tectonic region type.
        """
        monitor = self.monitor(self.core_task.__name__)
        monitor.oqparam = oq = self.oqparam
        self.src_filter = SourceFilter(self.sitecol, oq.maximum_distance)
        self.nsites = []
        acc = AccumDict({
            grp_id: ProbabilityMap(len(oq.imtls.array), len(gsims))
            for grp_id, gsims in self.gsims_by_grp.items()
        })
        acc.calc_times = {}
        acc.eff_ruptures = AccumDict()  # grp_id -> eff_ruptures
        acc.bb_dict = {}  # just for API compatibility
        param = dict(imtls=oq.imtls, truncation_level=oq.truncation_level)
        for sm in self.csm.source_models:  # one branch at a time
            grp_id = sm.ordinal
            gsims = self.gsims_by_grp[grp_id]
            [[ucerf_source]] = sm.src_groups
            ucerf_source.nsites = len(self.sitecol)
            self.csm.infos[ucerf_source.source_id] = source.SourceInfo(
                ucerf_source)
            logging.info('Getting the background point sources')
            bckgnd_sources = ucerf_source.get_background_sources(
                self.src_filter)

            # since there are two kinds of tasks (background and rupture_set)
            # we divide the concurrent_tasks parameter by 2;
            # notice the "or 1" below, to avoid issues when
            # self.oqparam.concurrent_tasks is 0 or 1
            ct2 = (self.oqparam.concurrent_tasks // 2) or 1

            # parallelize on the background sources, small tasks
            args = (bckgnd_sources, self.src_filter, gsims, param, monitor)
            bg_res = parallel.Starmap.apply(classical,
                                            args,
                                            name='background_sources_%d' %
                                            grp_id,
                                            concurrent_tasks=ct2)

            # parallelize by rupture subsets
            rup_sets = numpy.arange(ucerf_source.num_ruptures)
            taskname = 'ucerf_classical_%d' % grp_id
            acc = parallel.Starmap.apply(
                ucerf_classical,
                (rup_sets, ucerf_source, self.src_filter, gsims, monitor),
                concurrent_tasks=ct2,
                name=taskname).reduce(self.agg_dicts, acc)

            # compose probabilities from background sources
            for pmap in bg_res:
                acc[grp_id] |= pmap[grp_id]

        with self.monitor('store source_info', autoflush=True):
            self.store_source_info(self.csm.infos, acc)
        return acc  # {grp_id: pmap}
Code example #13
File: hazard_curve.py Project: vhmar/oq-engine
def calc_hazard_curves(
        groups, ss_filter, imtls, gsim_by_trt, truncation_level=None,
        apply=Sequential.apply):
    """
    Compute hazard curves on a list of sites, given a set of seismic source
    groups and a dictionary of ground shaking intensity models (one per
    tectonic region type).

    Probability of ground motion exceedance is computed in different ways
    depending on whether the sources are independent or mutually exclusive.

    :param groups:
        A sequence of groups of seismic source objects (instances
        of :class:`~openquake.hazardlib.source.base.BaseSeismicSource`).
    :param ss_filter:
        A source filter over the site collection or the site collection itself
    :param imtls:
        Dictionary mapping intensity measure type strings
        to lists of intensity measure levels.
    :param gsim_by_trt:
        Dictionary mapping tectonic region types (members
        of :class:`openquake.hazardlib.const.TRT`) to
        :class:`~openquake.hazardlib.gsim.base.GMPE` or
        :class:`~openquake.hazardlib.gsim.base.IPE` objects.
    :param truncation_level:
        Float, number of standard deviations for truncation of the intensity
        distribution.
    :param apply:
        The function used to distribute the computation
        (``Sequential.apply`` by default)
    :returns:
        An array of size N, where N is the number of sites, whose elements
        are records with fields given by the intensity measure types; the
        size of each field is given by the number of levels in ``imtls``.
    """
    # This is ensuring backward compatibility i.e. processing a list of
    # sources
    if not isinstance(groups[0], SourceGroup):  # sent a list of sources
        dic = groupby(groups, operator.attrgetter('tectonic_region_type'))
        groups = [SourceGroup(trt, dic[trt], 'src_group', 'indep', 'indep')
                  for trt in dic]
    if hasattr(ss_filter, 'sitecol'):  # a filter, as it should be
        sitecol = ss_filter.sitecol
    else:  # backward compatibility, a site collection was passed
        sitecol = ss_filter
        ss_filter = SourceFilter(sitecol, {})

    imtls = DictArray(imtls)
    pmap = ProbabilityMap(len(imtls.array), 1)
    # Processing groups with homogeneous tectonic region
    for group in groups:
        if group.src_interdep == 'mutex':  # do not split the group
            pmap |= pmap_from_grp(
                group, ss_filter, imtls, gsim_by_trt, truncation_level)
        else:  # split the group and apply `pmap_from_grp` in parallel
            pmap |= apply(
                pmap_from_grp,
                (group, ss_filter, imtls, gsim_by_trt, truncation_level),
                weight=operator.attrgetter('weight')).reduce(operator.or_)
    return pmap.convert(imtls, len(sitecol.complete))
Code example #14
File: readinput.py Project: mascandola/oq-engine
def _check_csm(csm, oqparam, h5):
    # checks
    csm.gsim_lt.check_imts(oqparam.imtls)

    srcs = csm.get_sources()
    if not srcs:
        raise RuntimeError('All sources were discarded!?')

    if os.environ.get('OQ_CHECK_INPUT'):
        source.check_complex_faults(srcs)

    # build a smart SourceFilter
    try:
        sitecol = get_site_collection(oqparam, h5)  # already stored
    except Exception:  # missing sites.csv in test_case_1_ruptures
        sitecol = None
    csm.sitecol = sitecol
    if sitecol is None:
        return
    srcfilter = SourceFilter(sitecol, oqparam.maximum_distance)
    logging.info('Checking the sources bounding box')
    lons = []
    lats = []
    for src in srcs:
        try:
            box = srcfilter.get_enlarged_box(src)
        except BBoxError as exc:
            logging.error(exc)
            continue
        lons.append(box[0])
        lats.append(box[1])
        lons.append(box[2])
        lats.append(box[3])
    if cross_idl(*(list(sitecol.lons) + lons)):
        lons = numpy.array(lons) % 360
    else:
        lons = numpy.array(lons)
    bbox = (lons.min(), min(lats), lons.max(), max(lats))
    if bbox[2] - bbox[0] > 180:
        raise BBoxError(
            'The bounding box of the sources is larger than half '
            'the globe: %d degrees' % (bbox[2] - bbox[0]))
    sids = sitecol.within_bbox(bbox)
    if len(sids) == 0:
        raise RuntimeError('All sources were discarded!?')
Code example #15
 def filter_csm(self):
     """
     :returns: (filtered CompositeSourceModel, SourceFilter)
     """
     oq = self.oqparam
     mon = self.monitor('prefilter')
     self.hdf5cache = self.datastore.hdf5cache()
     src_filter = SourceFilter(self.sitecol.complete, oq.maximum_distance,
                               self.hdf5cache)
     if (oq.prefilter_sources == 'numpy' or rtree is None):
         csm = self.csm.filter(src_filter, mon)
     elif oq.prefilter_sources == 'rtree':
         prefilter = RtreeFilter(self.sitecol.complete, oq.maximum_distance,
                                 self.hdf5cache)
         csm = self.csm.filter(prefilter, mon)
     else:  # prefilter_sources='no'
         csm = self.csm.filter(SourceFilter(None, {}), mon)
     return csm, src_filter
Code example #16
def classical(group, src_filter, gsims, param, monitor=Monitor()):
    """
    Compute the hazard curves for a set of sources belonging to the same
    tectonic region type for all the GSIMs associated to that TRT.
    The arguments are the same as in :func:`calc_hazard_curves`, except
    for ``gsims``, which is a list of GSIM instances.

    :returns:
        a dictionary with keys pmap, calc_times, rup_data, extra
    """
    if not hasattr(src_filter, 'sitecol'):  # do not filter
        src_filter = SourceFilter(src_filter, {})

    # Get the parameters assigned to the group
    src_mutex = getattr(group, 'src_interdep', None) == 'mutex'
    cluster = getattr(group, 'cluster', None)
    trts = set()
    maxradius = 0
    for src in group:
        if not src.num_ruptures:
            # src.num_ruptures may not be set, so it is set here
            src.num_ruptures = src.count_ruptures()
        # set the proper TOM in case of a cluster
        if cluster:
            src.temporal_occurrence_model = FatedTOM(time_span=1)
        trts.add(src.tectonic_region_type)
        if hasattr(src, 'radius'):  # for prefiltered point sources
            maxradius = max(maxradius, src.radius)

    param['maximum_distance'] = src_filter.integration_distance
    [trt] = trts  # there must be a single tectonic region type
    cmaker = ContextMaker(trt, gsims, param, monitor)
    try:
        cmaker.tom = group.temporal_occurrence_model
    except AttributeError:  # got a list of sources, not a group
        time_span = param.get('investigation_time')  # None for nonparametric
        cmaker.tom = PoissonTOM(time_span) if time_span else None
    if cluster:
        cmaker.tom = FatedTOM(time_span=1)
    pmap, rup_data, calc_times = PmapMaker(cmaker, src_filter, group).make()
    extra = {}
    extra['task_no'] = getattr(monitor, 'task_no', 0)
    extra['trt'] = trt
    extra['source_id'] = src.source_id
    extra['grp_id'] = src.grp_id
    extra['maxradius'] = maxradius
    group_probability = getattr(group, 'grp_probability', None)
    if src_mutex and group_probability:
        pmap *= group_probability

    if cluster:
        tom = getattr(group, 'temporal_occurrence_model')
        pmap = _cluster(param['imtls'], tom, gsims, pmap)
    return dict(pmap=pmap,
                calc_times=calc_times,
                rup_data=rup_data,
                extra=extra)
Code example #17
def preclassical(srcs, sites, cmaker, monitor):
    """
    Weight the sources. Also split them if split_sources is true. If
    ps_grid_spacing is set, grid the point sources before weighting them.

    NB: `sites` can be a reduced site collection, for performance reasons
    """
    split_sources = []
    spacing = cmaker.ps_grid_spacing
    grp_id = srcs[0].grp_id
    if sites is None:
        # in csm2rup just split the sources and count the ruptures
        for src in srcs:
            ss = split_source(src)
            if len(ss) > 1:
                for ss_ in ss:
                    ss_.nsites = 1
            split_sources.extend(ss)
            src.num_ruptures = src.count_ruptures()
        dic = {grp_id: split_sources}
        dic['before'] = len(srcs)
        dic['after'] = len(dic[grp_id])
        return dic

    with monitor('splitting sources'):
        sf = SourceFilter(sites, cmaker.maximum_distance)
        for src in srcs:
            # NB: this is approximate, since the sites are sampled
            src.nsites = len(sf.close_sids(src))  # can be 0
            # NB: it is crucial to split only the close sources, for
            # performance reasons (think of Ecuador in SAM)
            splits = split_source(src) if (cmaker.split_sources
                                           and src.nsites) else [src]
            split_sources.extend(splits)
    dic = grid_point_sources(split_sources, spacing, monitor)
    with monitor('weighting sources'):
        # this is also prefiltering again, to have a good representative
        # of what will be done during the classical phase
        cmaker.set_weight(dic[grp_id], sf)
    dic['before'] = len(split_sources)
    dic['after'] = len(dic[grp_id])
    if spacing:
        dic['ps_grid/%02d' % monitor.task_no] = dic[grp_id]
    return dic
Code example #18
File: classical.py Project: mehmousavi61/oq-engine
    def gen_args(self, monitor):
        """
        Used in the case of large source model logic trees.

        :param monitor: a :class:`openquake.baselib.performance.Monitor`
        :yields: (sources, sites, gsims, monitor) tuples
        """
        oq = self.oqparam
        opt = self.oqparam.optimize_same_id_sources
        num_tiles = math.ceil(len(self.sitecol) / oq.sites_per_tile)
        tasks_per_tile = math.ceil(oq.concurrent_tasks / math.sqrt(num_tiles))
        if num_tiles > 1:
            tiles = self.sitecol.split_in_tiles(num_tiles)
        else:
            tiles = [self.sitecol]
        param = dict(truncation_level=oq.truncation_level, imtls=oq.imtls)
        minweight = source.MINWEIGHT * math.sqrt(len(self.sitecol))
        totweight = 0
        for tile_i, tile in enumerate(tiles, 1):
            num_tasks = 0
            num_sources = 0
            if num_tiles > 1:
                logging.info('Prefiltering tile %d of %d', tile_i, len(tiles))
            else:
                logging.info('Prefiltering sources')
            with self.monitor('prefiltering'):
                src_filter = SourceFilter(tile, oq.maximum_distance)
                csm = self.csm.filter(src_filter)
            if tile_i == 1:  # set it only on the first tile
                maxweight = csm.get_maxweight(weight, tasks_per_tile,
                                              minweight)
                if maxweight == minweight:
                    logging.info('Using minweight=%d', minweight)
                else:
                    logging.info('Using maxweight=%d', maxweight)
                totweight += csm.info.tot_weight
            else:
                totweight += csm.get_weight(weight)
            if csm.has_dupl_sources and not opt:
                logging.warning('Found %d duplicated sources',
                                csm.has_dupl_sources)
            for sg in csm.src_groups:
                if sg.src_interdep == 'mutex':
                    gsims = self.csm.info.gsim_lt.get_gsims(sg.trt)
                    yield sg, csm.src_filter, gsims, param, monitor
                    num_tasks += 1
                    num_sources += len(sg.sources)
            # NB: csm.get_sources_by_trt discards the mutex sources
            for trt, sources in csm.get_sources_by_trt().items():
                gsims = self.csm.info.gsim_lt.get_gsims(trt)
                for block in block_splitter(sources, maxweight, weight):
                    yield block, src_filter, gsims, param, monitor
                    num_tasks += 1
                    num_sources += len(block)
            logging.info('Sent %d sources in %d tasks', num_sources, num_tasks)
        self.csm.info.tot_weight = totweight
Code example #19
 def src_filter(self):
     """
     :returns: a SourceFilter/UcerfFilter
     """
     oq = self.oqparam
     self.hdf5cache = self.datastore.hdf5cache()
     sitecol = self.sitecol.complete if self.sitecol else None
     if 'ucerf' in oq.calculation_mode:
         return UcerfFilter(sitecol, oq.maximum_distance, self.hdf5cache)
     return SourceFilter(sitecol, oq.maximum_distance, self.hdf5cache)
Code example #20
File: base.py Project: ARosemary/oq-engine
 def src_filter(self):
     """
     :returns: a SourceFilter
     """
     oq = self.oqparam
     if getattr(self, 'sitecol', None):
         sitecol = self.sitecol.complete
     else:  # can happen to the ruptures-only calculator
         sitecol = None
     return SourceFilter(sitecol, oq.maximum_distance)
Code example #21
File: classical.py Project: oneconcern/oq-engine
    def gen_args(self, csm, monitor):
        """
        Used in the case of large source model logic trees.

        :param csm: a CompositeSourceModel instance
        :param monitor: a :class:`openquake.baselib.performance.Monitor`
        :yields: (sources, sites, gsims, monitor) tuples
        """
        oq = self.oqparam
        if self.is_stochastic:  # disable tiling
            num_tiles = 1
        else:
            num_tiles = math.ceil(len(self.sitecol) / oq.sites_per_tile)
        if num_tiles > 1:
            tiles = self.sitecol.split_in_tiles(num_tiles)
        else:
            tiles = [self.sitecol]
        maxweight = self.csm.get_maxweight(oq.concurrent_tasks)
        if oq.optimize_same_id_sources:
            self.dic = csm.get_sources_by_trt(True)  # redefine csm.weight
        if oq.split_sources is False:
            maxweight = numpy.inf  # do not split the sources
        else:
            numheavy = len(self.csm.get_sources('heavy', maxweight))
            logging.info('Using maxweight=%d, numheavy=%d, numtiles=%d',
                         maxweight, numheavy, len(tiles))
        param = dict(truncation_level=oq.truncation_level,
                     imtls=oq.imtls,
                     seed=oq.ses_seed,
                     maximum_distance=oq.maximum_distance,
                     disagg=oq.poes_disagg or oq.iml_disagg,
                     ses_per_logic_tree_path=oq.ses_per_logic_tree_path)
        for t, tile in enumerate(tiles):
            if num_tiles > 1:
                with self.monitor('prefiltering source model', autoflush=True):
                    logging.info('Instantiating src_filter for tile %d', t + 1)
                    src_filter = SourceFilter(tile, oq.maximum_distance)
                    csm = self.csm.filter(src_filter)
            else:
                src_filter = self.src_filter
            if oq.optimize_same_id_sources:
                iterargs = self._args_by_trt(csm, src_filter, param, num_tiles,
                                             maxweight)
            else:
                iterargs = self._args_by_grp(csm, src_filter, param, num_tiles,
                                             maxweight)
            num_tasks = 0
            num_sources = 0
            for args in iterargs:
                num_tasks += 1
                num_sources += len(args[0])
                yield args + (monitor, )
            logging.info('Sent %d sources in %d tasks', num_sources, num_tasks)
        source.split_map.clear()
Code example #22
def plot_sites(calc_id):
    """
    Plot the sites and the bounding boxes of the sources, enlarged by
    the maximum distance
    """
    import matplotlib.pyplot as p
    from matplotlib.patches import Rectangle
    dstore = datastore.read(calc_id)
    sitecol = dstore['sitecol']
    csm = dstore['composite_source_model']
    oq = dstore['oqparam']
    rfilter = SourceFilter(sitecol, oq.maximum_distance)
    fig = p.figure()
    ax = fig.add_subplot(111)
    ax.grid(True)
    for src in csm.get_sources():
        llcorner, width, height = rfilter.get_rectangle(src)
        ax.add_patch(Rectangle(llcorner, width, height, fill=False))
    p.scatter(sitecol.lons, sitecol.lats, marker='+')
    p.show()
Code example #23
 def test(self):
     source_model = os.path.join(os.path.dirname(__file__), 'nankai.xml')
     groups = nrml.parse(source_model, SourceConverter(
         investigation_time=50., rupture_mesh_spacing=2.))
     site = Site(Point(135.68, 35.68), 800, True, z1pt0=100., z2pt5=1.)
     s_filter = SourceFilter(SiteCollection([site]), None)
     imtls = DictArray({'PGV': [20, 40, 80]})
     gsim_by_trt = {'Subduction Interface': SiMidorikawa1999SInter()}
     hcurves = calc_hazard_curves_ext(groups, s_filter, imtls, gsim_by_trt)
     npt.assert_almost_equal([0.91149953, 0.12548556, 0.00177583],
                             hcurves['PGV'][0])
Code example #24
 def test(self):
     source_model = os.path.join(os.path.dirname(__file__), 'nankai.xml')
     groups = nrml.to_python(source_model, SourceConverter(
         investigation_time=50., rupture_mesh_spacing=2.))
     site = Site(Point(135.68, 35.68), 800, True, z1pt0=100., z2pt5=1.)
     s_filter = SourceFilter(SiteCollection([site]), {})
     imtls = DictArray({'PGV': [20, 40, 80]})
     gsim_by_trt = {'Subduction Interface': SiMidorikawa1999SInter()}
     hcurves = calc_hazard_curves(groups, s_filter, imtls, gsim_by_trt)
     npt.assert_almost_equal(
         [1.1262869e-01, 3.9968668e-03, 3.1005840e-05],
         hcurves['PGV'][0])
Code example #25
def plot_sites(calc_id):
    """
    Plot the sites and the bounding boxes of the sources, enlarged by
    the maximum distance
    """
    # NB: matplotlib is imported inside since it is a costly import
    import matplotlib.pyplot as p
    from matplotlib.patches import Rectangle
    dstore = datastore.read(calc_id)
    sitecol = dstore['sitecol']
    csm = dstore['composite_source_model']
    oq = dstore['oqparam']
    rfilter = SourceFilter(sitecol, oq.maximum_distance)
    fig = p.figure()
    ax = fig.add_subplot(111)
    ax.grid(True)
    for src in csm.get_sources():
        llcorner, width, height = rfilter.get_rectangle(src)
        ax.add_patch(Rectangle(llcorner, width, height, fill=False))
    p.scatter(sitecol.lons, sitecol.lats, marker='+')
    p.show()
Code example #26
 def src_filter(self, filename=None):
     """
     :returns: a SourceFilter/UcerfFilter
     """
     oq = self.oqparam
     if getattr(self, 'sitecol', None):
         sitecol = self.sitecol.complete
     else:  # can happen to the ruptures-only calculator
         sitecol = None
         filename = None
     if 'ucerf' in oq.calculation_mode:
         return UcerfFilter(sitecol, oq.maximum_distance, filename)
     return SourceFilter(sitecol, oq.maximum_distance, filename)
Code example #27
File: plot_sites.py Project: vikasjena/oq-engine
def plot_sites(calc_id=-1):
    """
    Plot the sites and the bounding boxes of the sources, enlarged by
    the maximum distance
    """
    # NB: matplotlib is imported inside since it is a costly import
    import matplotlib.pyplot as p
    from matplotlib.patches import Rectangle
    logging.basicConfig(level=logging.INFO)
    dstore = datastore.read(calc_id)
    oq = dstore['oqparam']
    sitecol = dstore['sitecol']
    lons, lats = sitecol.lons, sitecol.lats
    srcfilter = SourceFilter(sitecol.complete, oq.maximum_distance)
    csm = readinput.get_composite_source_model(oq).pfilter(
        srcfilter, oq.concurrent_tasks)
    sources = csm.get_sources()
    if len(sources) > 100:
        logging.info('Sampling 100 sources of %d', len(sources))
        sources = random.Random(42).sample(sources, 100)
    fig, ax = p.subplots()
    ax.grid(True)
    rects = [srcfilter.get_rectangle(src) for src in sources]
    lonset = set(lons)
    for ((lon, lat), width, height) in rects:
        lonset.add(lon)
        lonset.add(fix_lon(lon + width))
    idl = cross_idl(min(lonset), max(lonset))
    if idl:
        lons = lons % 360
    for src, ((lon, lat), width, height) in zip(sources, rects):
        lonlat = (lon % 360 if idl else lon, lat)
        ax.add_patch(Rectangle(lonlat, width, height, fill=False))
        if hasattr(src.__class__, 'polygon'):
            xs, ys = fix_polygon(src.polygon, idl)
            p.plot(xs, ys, marker='.')

    p.scatter(lons, lats, marker='+')
    p.show()
Code example #28
 def setUp(self):
     self.src1 = _create_non_param_sourceA(15., 6.3,
                                           PMF([(0.6, 0), (0.4, 1)]))
     self.src2 = _create_non_param_sourceA(10., 6.0,
                                           PMF([(0.7, 0), (0.3, 1)]))
     self.src3 = _create_non_param_sourceA(10., 6.0,
                                           PMF([(0.7, 0), (0.3, 1)]),
                                           TRT.GEOTHERMAL)
     site = Site(Point(0.0, 0.0), 800, True, z1pt0=100., z2pt5=1.)
     s_filter = SourceFilter(SiteCollection([site]), {})
     self.sites = s_filter
     self.imtls = DictArray({'PGA': [0.01, 0.1, 0.3]})
     self.gsim_by_trt = {TRT.ACTIVE_SHALLOW_CRUST: SadighEtAl1997()}
Code example #29
def gen_rupture_getters(dstore, slc=slice(None), maxweight=1E5, filename=None):
    """
    :yields: RuptureGetters
    """
    try:
        e0s = dstore['eslices'][:, 0]
    except KeyError:
        e0s = None
    if dstore.parent:
        dstore = dstore.parent
    csm_info = dstore['csm_info']
    trt_by_grp = csm_info.grp_by("trt")
    samples = csm_info.get_samples_by_grp()
    rlzs_by_gsim = csm_info.get_rlzs_by_gsim_grp()
    rup_array = dstore['ruptures'][slc]
    nr, ne = 0, 0
    maxdist = dstore['oqparam'].maximum_distance
    if 'sitecol' in dstore:
        srcfilter = SourceFilter(dstore['sitecol'], maxdist)
        kdt = cKDTree(srcfilter.sitecol.xyz)
    for grp_id, arr in general.group_array(rup_array, 'grp_id').items():
        if not rlzs_by_gsim[grp_id]:
            # this may happen if a source model has no sources, like
            # in event_based_risk/case_3
            continue

        if 'sitecol' in dstore:

            def weight(rec, md=getdefault(maxdist, trt_by_grp[grp_id])):
                xyz = spherical_to_cartesian(*rec['hypo'])
                nsites = len(kdt.query_ball_point(xyz, md, eps=.001))
                return rec['n_occ'] * numpy.ceil((nsites + 1) / 1000)
        else:

            def weight(rec):
                return rec['n_occ']

        for block in general.block_splitter(arr, maxweight, weight):
            if e0s is None:
                e0 = numpy.zeros(len(block), U32)
            else:
                e0 = e0s[nr:nr + len(block)]
            rgetter = RuptureGetter(numpy.array(block), filename
                                    or dstore.filename, grp_id,
                                    trt_by_grp[grp_id], samples[grp_id],
                                    rlzs_by_gsim[grp_id], e0)
            rgetter.weight = block.weight
            yield rgetter
            nr += len(block)
            ne += rgetter.num_events
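The weight closure above prices each rupture by n_occ times a factor that grows with the number of sites found near the hypocenter, using a cKDTree ball query in cartesian space. A self-contained sketch of that query on synthetic coordinates:

import numpy
from scipy.spatial import cKDTree

rng = numpy.random.default_rng(42)
xyz = rng.uniform(-100, 100, (1000, 3))  # fake cartesian site coordinates
kdt = cKDTree(xyz)
hypo = numpy.zeros(3)
# count the sites within 50 units of the hypocenter
nsites = len(kdt.query_ball_point(hypo, 50, eps=.001))
print(nsites)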
Code example #30
 def test_two_sites(self):
     site1 = Site(Point(0, 0), vs30=760., z1pt0=48.0, z2pt5=0.607,
                  vs30measured=True)
     site2 = Site(Point(0, 0.5), vs30=760., z1pt0=48.0, z2pt5=0.607,
                  vs30measured=True)
     sitecol = SiteCollection([site1, site2])
     srcfilter = SourceFilter(sitecol, IntegrationDistance.new('200'))
     imtls = {"PGA": [.123]}
     for period in numpy.arange(.1, .5, .1):
         imtls['SA(%.2f)' % period] = [.123]
     assert len(imtls) == 5  # PGA plus 4 SA periods
     gsim_by_trt = {'Stable Continental Crust': ExampleA2021()}
     hcurves = calc_hazard_curves(
         [asource], srcfilter, DictArray(imtls), gsim_by_trt)
     print(hcurves)
Code example #31
 def test_international_date_line(self):
     maxdist = IntegrationDistance({
         'default': [(3, 30), (4, 40), (5, 100), (6, 200), (7, 300),
                     (8, 400)]
     })
     sitecol = SiteCollection([
         Site(location=Point(179, 80),
              vs30=1.2,
              vs30measured=True,
              z1pt0=3.4,
              z2pt5=5.6,
              backarc=True),
         Site(location=Point(-179, 80),
              vs30=55.4,
              vs30measured=False,
              z1pt0=66.7,
              z2pt5=88.9,
              backarc=False)
     ])
     srcfilter = SourceFilter(sitecol, maxdist)
     bb1, bb2 = srcfilter.get_bounding_boxes(mag=4.5)
     # bounding boxes in the form min_lon, min_lat, max_lon, max_lat
     aae(bb1, (173.8210225, 79.10068, 184.1789775, 80.89932))
     aae(bb2, (175.8210225, 79.10068, 186.1789775, 80.89932))
Code example #32
File: filters_test.py Project: ventycn/oq-engine
 def test_get_bounding_boxes(self):
     maxdist = IntegrationDistance({
         'default': [(3, 30), (4, 40), (5, 100), (6, 200), (7, 300),
                     (8, 400)]
     })
     sitecol = SiteCollection([
         Site(location=Point(10, 20, 30),
              vs30=1.2,
              vs30measured=True,
              z1pt0=3.4,
              z2pt5=5.6,
              backarc=True),
         Site(location=Point(-1.2, -3.4, -5.6),
              vs30=55.4,
              vs30measured=False,
              z1pt0=66.7,
              z2pt5=88.9,
              backarc=False)
     ])
     srcfilter = SourceFilter(sitecol, maxdist)
     bb1, bb2 = srcfilter.get_bounding_boxes(mag=4.5)
     # bounding boxes in the form min_lon, min_lat, max_lon, max_lat
     aae(bb1, (9.0429636, 19.10068, 10.9570364, 20.89932))
     aae(bb2, (-2.1009057, -4.29932, -0.2990943, -2.50068))
Code example #33
File: contexts.py Project: g-weatherill/oq-engine
 def set_weight(self, sources, srcfilter, mon=Monitor()):
     """
     Set the weight attribute on each prefiltered source
     """
     if hasattr(srcfilter, 'array'):  # a SiteCollection was passed
         srcfilter = SourceFilter(srcfilter, self.maximum_distance)
     for src in sources:
         src.num_ruptures = src.count_ruptures()
         if src.nsites == 0:  # was discarded by the prefiltering
             src.weight = .001
         else:
             with mon:
                 src.weight = 1. + self.estimate_weight(src, srcfilter)
             if src.code == b'F':  # hack for China model
                 src.weight *= 10
Code example #34
    def full_disaggregation(self, curves):
        """
        Run the disaggregation phase.

        :param curves: a list of hazard curves, one per site

        The curves can be all None if iml_disagg is set in the job.ini
        """
        oq = self.oqparam
        tl = oq.truncation_level
        src_filter = SourceFilter(self.sitecol, oq.maximum_distance)
        csm = self.csm
        for sg in csm.src_groups:
            if sg.atomic:
                raise NotImplementedError('Atomic groups are not supported yet')
        if not csm.get_sources():
            raise RuntimeError('All sources were filtered away!')

        R = len(self.rlzs_assoc.realizations)
        M = len(oq.imtls)
        P = len(oq.poes_disagg) or 1
        if R * M * P > 10:
            logging.warning(
                'You have %d realizations, %d IMTs and %d poes_disagg: the '
                'disaggregation will be heavy and memory consuming', R, M, P)
        iml4 = disagg.make_iml4(
            R, oq.iml_disagg, oq.imtls, oq.poes_disagg or (None,), curves)
        if oq.disagg_by_src:
            if R == 1:
                self.build_disagg_by_src(iml4)
            else:
                logging.warning('disagg_by_src works only with 1 realization, '
                                'you have %d', R)

        eps_edges = numpy.linspace(-tl, tl, oq.num_epsilon_bins + 1)
        self.bin_edges = {}

        # build trt_edges
        trts = tuple(sorted(set(sg.trt for smodel in csm.source_models
                                for sg in smodel.src_groups)))
        trt_num = {trt: i for i, trt in enumerate(trts)}
        self.trts = trts

        # build mag_edges
        mmm = numpy.array([src.get_min_max_mag() for src in csm.get_sources()])
        min_mag = mmm[:, 0].min()
        max_mag = mmm[:, 1].max()
        mag_edges = oq.mag_bin_width * numpy.arange(
            int(numpy.floor(min_mag / oq.mag_bin_width)),
            int(numpy.ceil(max_mag / oq.mag_bin_width) + 1))

        # build dist_edges
        maxdist = max(oq.maximum_distance(trt, max_mag) for trt in trts)
        dist_edges = oq.distance_bin_width * numpy.arange(
            0, int(numpy.ceil(maxdist / oq.distance_bin_width) + 1))

        # build eps_edges
        eps_edges = numpy.linspace(-tl, tl, oq.num_epsilon_bins + 1)

        # build lon_edges, lat_edges per sid
        bbs = src_filter.get_bounding_boxes(mag=max_mag)
        lon_edges, lat_edges = {}, {}  # by sid
        for sid, bb in zip(self.sitecol.sids, bbs):
            lon_edges[sid], lat_edges[sid] = disagg.lon_lat_bins(
                bb, oq.coordinate_bin_width)
        self.bin_edges = mag_edges, dist_edges, lon_edges, lat_edges, eps_edges
        self.save_bin_edges()

        # build all_args
        all_args = []
        maxweight = csm.get_maxweight(weight, oq.concurrent_tasks)
        R = iml4.shape[1]
        self.imldict = {}  # sid, rlzi, poe, imt -> iml
        for s in self.sitecol.sids:
            for r in range(R):
                for p, poe in enumerate(oq.poes_disagg or [None]):
                    for m, imt in enumerate(oq.imtls):
                        self.imldict[s, r, poe, imt] = iml4[s, r, m, p]

        for smodel in csm.source_models:
            sm_id = smodel.ordinal
            for trt, groups in groupby(
                    smodel.src_groups, operator.attrgetter('trt')).items():
                trti = trt_num[trt]
                sources = sum([grp.sources for grp in groups], [])
                rlzs_by_gsim = self.rlzs_assoc.get_rlzs_by_gsim(trt, sm_id)
                cmaker = ContextMaker(
                    trt, rlzs_by_gsim, src_filter.integration_distance,
                    {'filter_distance': oq.filter_distance})
                for block in block_splitter(sources, maxweight, weight):
                    all_args.append(
                        (src_filter.sitecol, block, cmaker, iml4, trti,
                         self.bin_edges, oq))

        self.num_ruptures = [0] * len(self.trts)
        self.cache_info = numpy.zeros(3)  # operations, cache_hits, num_zeros
        results = parallel.Starmap(
            compute_disagg, all_args, self.monitor()
        ).reduce(self.agg_result, AccumDict(accum={}))

        # set eff_ruptures
        trti = csm.info.trt2i()
        for smodel in csm.info.source_models:
            for sg in smodel.src_groups:
                sg.eff_ruptures = self.num_ruptures[trti[sg.trt]]
        self.datastore['csm_info'] = csm.info

        ops, hits, num_zeros = self.cache_info
        logging.info('Cache speedup %s', ops / (ops - hits))
        logging.info('Discarded zero matrices: %d', num_zeros)
        return results
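One detail worth isolating from full_disaggregation is the bin-edge construction: the magnitude edges are integer multiples of the bin width covering [min_mag, max_mag] (a similar recipe is used for the distance bins). A self-contained sketch:

import numpy

def bin_edges(min_val, max_val, width):
    """Multiples of `width` spanning [min_val, max_val], as in the code above"""
    return width * numpy.arange(int(numpy.floor(min_val / width)),
                                int(numpy.ceil(max_val / width)) + 1)

print(bin_edges(5.3, 7.8, 0.5))  # [5.  5.5 6.  6.5 7.  7.5 8. ]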
Code example #35
    def test_point_sources(self):
        sources = [
            openquake.hazardlib.source.PointSource(
                source_id='point1', name='point1',
                tectonic_region_type=const.TRT.ACTIVE_SHALLOW_CRUST,
                mfd=openquake.hazardlib.mfd.EvenlyDiscretizedMFD(
                    min_mag=4, bin_width=1, occurrence_rates=[5]
                ),
                nodal_plane_distribution=openquake.hazardlib.pmf.PMF([
                    (1, openquake.hazardlib.geo.NodalPlane(strike=0.0,
                                                           dip=90.0,
                                                           rake=0.0))
                ]),
                hypocenter_distribution=openquake.hazardlib.pmf.PMF([(1, 10)]),
                upper_seismogenic_depth=0.0,
                lower_seismogenic_depth=10.0,
                magnitude_scaling_relationship=
                openquake.hazardlib.scalerel.PeerMSR(),
                rupture_aspect_ratio=2,
                temporal_occurrence_model=PoissonTOM(1.),
                rupture_mesh_spacing=1.0,
                location=Point(10, 10)
            ),
            openquake.hazardlib.source.PointSource(
                source_id='point2', name='point2',
                tectonic_region_type=const.TRT.ACTIVE_SHALLOW_CRUST,
                mfd=openquake.hazardlib.mfd.EvenlyDiscretizedMFD(
                    min_mag=4, bin_width=2, occurrence_rates=[5, 6, 7]
                ),
                nodal_plane_distribution=openquake.hazardlib.pmf.PMF([
                    (1, openquake.hazardlib.geo.NodalPlane(strike=0,
                                                           dip=90,
                                                           rake=0.0)),
                ]),
                hypocenter_distribution=openquake.hazardlib.pmf.PMF([(1, 10)]),
                upper_seismogenic_depth=0.0,
                lower_seismogenic_depth=10.0,
                magnitude_scaling_relationship=
                openquake.hazardlib.scalerel.PeerMSR(),
                rupture_aspect_ratio=2,
                temporal_occurrence_model=PoissonTOM(1.),
                rupture_mesh_spacing=1.0,
                location=Point(10, 11)
            ),
        ]
        sites = [openquake.hazardlib.site.Site(Point(11, 10), 1, 2, 3),
                 openquake.hazardlib.site.Site(Point(10, 16), 2, 2, 3),
                 openquake.hazardlib.site.Site(Point(10, 10.6, 1), 3, 2, 3),
                 openquake.hazardlib.site.Site(Point(10, 10.7, -1), 4, 2, 3)]
        sitecol = openquake.hazardlib.site.SiteCollection(sites)
        gsims = {const.TRT.ACTIVE_SHALLOW_CRUST: SadighEtAl1997()}
        truncation_level = 1
        imts = {'PGA': [0.1, 0.5, 1.3]}
        s_filter = SourceFilter(sitecol, {const.TRT.ACTIVE_SHALLOW_CRUST: 30})
        result = calc_hazard_curves(
            sources, s_filter, imts, gsims, truncation_level)['PGA']
        # there are two sources and four sites. The first source contains only
        # one rupture, the second source contains three ruptures.
        #
        # the first source has 'maximum projection radius' of 0.707 km
        # the second source has 'maximum projection radius' of 500.0 km
        #
        # the epicentral distances for source 1 are: [ 109.50558394,
        # 667.16955987,   66.71695599,   77.83644865]
        # the epicentral distances for source 2 are: [ 155.9412148 ,
        # 555.97463322,   44.47797066,   33.35847799]
        #
        # Considering that the source site filtering distance is set to 30 km,
        # for source 1, all sites have epicentral distance larger than
        # 0.707 + 30 km. This means that source 1 ('point 1') is not considered
        # in the calculation because it is too far away.
        # for source 2, the 1st, 3rd and 4th sites have epicentral distances
        # smaller than 500.0 + 30 km. This means that source 2 ('point 2') is
        # considered in the calculation for sites 1, 3 and 4.
        #
        # JB distances for rupture 1 in source 2 are: [ 155.43860273,
        #  555.26752644,   43.77086388,   32.65137121]
        # JB distances for rupture 2 in source 2 are: [ 150.98882575,
        #  548.90356541,   37.40690285,   26.28741018]
        # JB distances for rupture 3 in source 2 are: [ 109.50545819,
        # 55.97463322,    0.        ,    0.        ]
        #
        # Considering that the rupture site filtering distance is set to 30 km,
        # rupture 1 (magnitude 4) is not considered because it is too far away,
        # rupture 2 (magnitude 6) affects only the 4th site, and rupture 3
        # (magnitude 8) affects the 3rd and 4th sites.

        self.assertEqual(result.shape, (4, 3))  # 4 sites, 3 levels
        numpy.testing.assert_allclose(result[0], 0)  # no contrib to site 1
        numpy.testing.assert_allclose(result[1], 0)  # no contrib to site 2

        # test that depths are kept after filtering (sites 3 and 4 remain)
        s_filter = SourceFilter(sitecol, {'default': 100})
        numpy.testing.assert_array_equal(
            s_filter.get_close_sites(sources[0]).depths, ([1, -1]))
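The long comment block above reduces to one rule: a source is kept when at least one site lies within the source's maximum projection radius plus the integration distance. A minimal sketch with the numbers from the comments (a hypothetical helper, not the hazardlib API):

def is_close(epi_dists, radius, integration_distance):
    return any(d <= radius + integration_distance for d in epi_dists)

# source 1: radius 0.707 km, every site beyond 0.707 + 30 km -> discarded
print(is_close([109.5, 667.2, 66.7, 77.8], 0.707, 30))   # False
# source 2: radius 500 km, three sites within 500 + 30 km -> kept
print(is_close([155.9, 556.0, 44.5, 33.4], 500.0, 30))   # True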