Example 1
    def split_in_blocks(self, maxweight, sources):
        """
        Split a set of sources into blocks of weight up to maxweight; heavy
        sources (i.e. with weight > maxweight) are split further.

        :param maxweight: maximum weight of a block
        :param sources: sources of the same source group
        :yields: blocks of sources of weight around maxweight
        """
        sources.sort(key=weight)

        # yield light sources in blocks
        light = [src for src in sources if src.weight <= maxweight]
        for block in block_splitter(light, maxweight, weight):
            yield block

        # yield heavy sources in blocks
        heavy = [src for src in sources if src.weight > maxweight]
        for src in heavy:
            srcs = [
                s for s in source.split_source(src)
                if self.src_filter.get_close_sites(s) is not None
            ]
            for block in block_splitter(srcs, maxweight, weight):
                yield block
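
The grouping above relies on a block_splitter helper that packs items into blocks whose cumulative weight stays within a budget. Below is a minimal, self-contained sketch of that idea; it is not the engine's block_splitter (whose signature and rounding behaviour may differ) and the name is illustrative only.

def toy_block_splitter(items, maxweight, weight=lambda item: 1):
    """Yield lists of items whose cumulative weight does not exceed maxweight
    (a single item heavier than maxweight gets a block of its own)."""
    block, total = [], 0
    for item in items:
        w = weight(item)
        if block and total + w > maxweight:
            yield block
            block, total = [], 0
        block.append(item)
        total += w
    if block:
        yield block

# example: the item value itself is used as the weight
print(list(toy_block_splitter([1, 2, 3, 4, 5], maxweight=6,
                              weight=lambda x: x)))
# -> [[1, 2, 3], [4], [5]]
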
Example 2
    def test(self):
        npd = PMF([(0.5, NodalPlane(1, 20, 3)), (0.5, NodalPlane(2, 2, 4))])
        hd = PMF([(1, 14)])
        mesh = Mesh(numpy.array([0, 1]), numpy.array([0.5, 1]))
        tom = PoissonTOM(50.)
        mmfd = MultiMFD('incrementalMFD',
                        size=2,
                        min_mag=[4.5],
                        bin_width=[2.0],
                        occurRates=[[.3, .1], [.4, .2, .1]])
        mps = MultiPointSource('mp1', 'multi point source',
                               'Active Shallow Crust', mmfd, 2.0, PeerMSR(),
                               1.0, tom, 10, 20, npd, hd, mesh)
        mps.src_group_id = 1

        # test the splitting
        splits = list(split_source(mps))
        self.assertEqual(len(splits), 2)
        for split in splits:
            self.assertEqual(split.src_group_id, mps.src_group_id)

        got = obj_to_node(mps).to_str()
        print(got)
        self.assertEqual(
            got, '''\
multiPointSource{id='mp1', name='multi point source', tectonicRegion='Active Shallow Crust'}
  multiPointGeometry
    gml:posList [0, 0.5, 1, 1.0]
    upperSeismoDepth 10
    lowerSeismoDepth 20
  magScaleRel 'PeerMSR'
  ruptAspectRatio 1.0
  multiMFD{kind='incrementalMFD', size=2}
    bin_width [2.0]
    min_mag [4.5]
    occurRates [0.29999999999999999, 0.10000000000000001, 0.40000000000000002, 0.20000000000000001, 0.10000000000000001]
    lengths [2, 3]
  nodalPlaneDist
    nodalPlane{dip=20, probability=0.5, rake=3, strike=1}
    nodalPlane{dip=2, probability=0.5, rake=4, strike=2}
  hypoDepthDist
    hypoDepth{depth=14, probability=1.0}
''')
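
The splitting behaviour the test checks (one point source per mesh point, each inheriting the parent's src_group_id) can be sketched with toy classes. Everything below is illustrative and not the hazardlib API.

class ToyPointSource:
    def __init__(self, source_id, lon, lat, src_group_id):
        self.source_id = source_id
        self.lon, self.lat = lon, lat
        self.src_group_id = src_group_id


class ToyMultiPointSource:
    """A container source: splitting it means iterating over its points."""
    def __init__(self, source_id, lons, lats):
        self.source_id = source_id
        self.points = list(zip(lons, lats))
        self.src_group_id = 0

    def __iter__(self):
        for i, (lon, lat) in enumerate(self.points):
            # each split gets an indexed id and the parent's group id
            yield ToyPointSource('%s:%d' % (self.source_id, i),
                                 lon, lat, self.src_group_id)


mps = ToyMultiPointSource('mp1', [0, 1], [0.5, 1])
mps.src_group_id = 1
splits = list(mps)
assert len(splits) == 2
assert all(s.src_group_id == mps.src_group_id for s in splits)
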
Example 3
def pmap_from_trt(sources, src_filter, gsims, param, monitor=Monitor()):
    """
    Compute the hazard curves for a set of sources belonging to the same
    tectonic region type for all the GSIMs associated with that TRT.

    :returns:
        a dictionary {grp_id: pmap} with attributes .calc_times and
        .eff_ruptures
    """
    grp_ids = set()
    for src in sources:
        grp_ids.update(src.src_group_ids)
    maxdist = src_filter.integration_distance
    srcs = sum([split_source(src) for src in sources], [])  # split first
    with GroundShakingIntensityModel.forbid_instantiation():
        imtls = param['imtls']
        trunclevel = param.get('truncation_level')
        cmaker = ContextMaker(gsims, maxdist)
        ctx_mon = monitor('make_contexts', measuremem=False)
        poe_mon = monitor('get_poes', measuremem=False)
        pmap = AccumDict({
            grp_id: ProbabilityMap(len(imtls.array), len(gsims))
            for grp_id in grp_ids
        })
        pmap.calc_times = []  # quadruples (src_id, weight, nsites, dt)
        pmap.eff_ruptures = AccumDict()  # grp_id -> num_ruptures
        for src, s_sites in src_filter(srcs):  # filter now
            t0 = time.time()
            poemap = cmaker.poe_map(src, s_sites, imtls, trunclevel, ctx_mon,
                                    poe_mon)
            if poemap:
                for grp_id in src.src_group_ids:
                    pmap[grp_id] |= poemap
            pmap.calc_times.append(
                (src.source_id, src.weight, len(s_sites), time.time() - t0))
            # storing the number of contributing ruptures too
            pmap.eff_ruptures += {
                grp_id: getattr(poemap, 'eff_ruptures', 0)
                for grp_id in src.src_group_ids
            }
        return pmap
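
The line pmap[grp_id] |= poemap composes the contributions of independent sources: probabilities of exceedance combine as 1 - prod(1 - poe_i). Here is a small numpy sketch of that composition rule; it is an illustration, not the engine's ProbabilityMap class.

import numpy

def combine_indep(poes_per_source):
    """Combine PoE arrays (same sites/IMLs) from independent sources."""
    no_exceedance = numpy.ones_like(poes_per_source[0])
    for poes in poes_per_source:
        no_exceedance *= 1.0 - poes
    return 1.0 - no_exceedance

poe_a = numpy.array([0.10, 0.01])
poe_b = numpy.array([0.20, 0.02])
print(combine_indep([poe_a, poe_b]))  # -> [0.28   0.0298]
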
Example 4
    def filter(self, src_filter):  # called once per tile
        """
        Generate a new CompositeSourceModel by filtering the sources on
        the site collection carried by the given filter.

        :param src_filter: a SourceFilter instance
        """
        ngsims = {trt: len(gs) for trt, gs in self.gsim_lt.values.items()}
        source_models = []
        weight = 0
        for sm in self.source_models:
            src_groups = []
            for src_group in sm.src_groups:
                mutex = getattr(src_group, 'src_interdep', None) == 'mutex'
                self.add_infos(src_group.sources)  # unsplit sources
                sources = []
                for src in src_group.sources:
                    if hasattr(src, '__iter__') and not mutex:
                        # MultiPoint, AreaSource, NonParametric
                        # NB: source.split_source is cached
                        sources.extend(source.split_source(src))
                    else:
                        # mutex sources cannot be split
                        sources.append(src)
                sg = copy.copy(src_group)
                sg.sources = []
                for src, _sites in src_filter(sources):
                    sg.sources.append(src)
                    src.ngsims = ngsims[src.tectonic_region_type]
                    weight += src.weight
                src_groups.append(sg)
            newsm = logictree.SourceModel(sm.names, sm.weight, sm.path,
                                          src_groups, sm.num_gsim_paths,
                                          sm.ordinal, sm.samples)
            source_models.append(newsm)
        new = self.__class__(self.gsim_lt, self.source_model_lt, source_models)
        new.weight = weight
        new.src_filter = src_filter
        return new
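
The core of the method is a split-then-filter pass: container sources are replaced by their subsources, then only sources with at least one site within the integration distance are kept. A toy sketch of that pattern follows, with made-up names and a fake 1D distance function.

def split_and_filter(sources, sites, maxdist, distance):
    split = []
    for src in sources:
        if hasattr(src, '__iter__'):
            split.extend(src)   # splittable container source
        else:
            split.append(src)   # atomic (or mutex) source
    for src in split:
        close = [site for site in sites if distance(src, site) <= maxdist]
        if close:               # drop sources with no site close enough
            yield src, close

sites = [(0.0, 0.0), (3.0, 0.0)]
dist = lambda src, site: abs(src - site[0])  # fake 1D "distance"
print(list(split_and_filter([0.5, [2.9, 10.0]], sites,
                            maxdist=1.0, distance=dist)))
# -> [(0.5, [(0.0, 0.0)]), (2.9, [(3.0, 0.0)])]
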
Example 5
def pmap_from_grp(group, src_filter, gsims, param, monitor=Monitor()):
    """
    Compute the hazard curves for a set of sources belonging to the same
    tectonic region type for all the GSIMs associated with that TRT.
    The arguments are the same as in :func:`calc_hazard_curves`, except
    for ``gsims``, which is a list of GSIM instances.

    :returns: an AccumDict {grp_id: ProbabilityMap} with attributes
        .eff_ruptures and .calc_times
    """
    mutex_weight = {
        src.source_id: weight
        for src, weight in zip(group.sources, group.srcs_weights)
    }
    maxdist = src_filter.integration_distance
    srcs = sum([split_source(src) for src in group.sources], [])
    with GroundShakingIntensityModel.forbid_instantiation():
        imtls = param['imtls']
        trunclevel = param.get('truncation_level')
        cmaker = ContextMaker(gsims, maxdist)
        ctx_mon = monitor('make_contexts', measuremem=False)
        poe_mon = monitor('get_poes', measuremem=False)
        pmap = ProbabilityMap(len(imtls.array), len(gsims))
        calc_times = []  # quadruples (src_id, weight, nsites, dt)
        for src, s_sites in src_filter(srcs):
            t0 = time.time()
            poemap = cmaker.poe_map(src, s_sites, imtls, trunclevel, ctx_mon,
                                    poe_mon, group.rup_interdep == 'indep')
            weight = mutex_weight[src.source_id]
            for sid in poemap:
                pcurve = pmap.setdefault(sid, 0)
                pcurve += poemap[sid] * weight
            calc_times.append(
                (src.source_id, src.weight, len(s_sites), time.time() - t0))
        if group.grp_probability is not None:
            pmap *= group.grp_probability
        acc = AccumDict({group.id: pmap})
        # adding the number of contributing ruptures too
        acc.eff_ruptures = {group.id: ctx_mon.counts}
        acc.calc_times = calc_times
        return acc
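
For mutually exclusive sources the curves are not composed with the independence rule; each source contributes its PoEs scaled by its weight (the weights sum to 1), and the group probability rescales the result. A numpy illustration of that combination (not the engine's ProbabilityMap arithmetic):

import numpy

poes = {'src_a': numpy.array([0.30, 0.05]),
        'src_b': numpy.array([0.10, 0.02])}
mutex_weight = {'src_a': 0.7, 'src_b': 0.3}
grp_probability = 0.9

pmap = numpy.zeros(2)
for src_id, poe in poes.items():
    pmap += poe * mutex_weight[src_id]   # weighted sum, not 1 - prod(1 - poe)
pmap *= grp_probability
print(pmap)  # -> [0.216  0.0369]
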
Example 6
def classical(group, src_filter, gsims, param, monitor=Monitor()):
    """
    Compute the hazard curves for a set of sources belonging to the same
    tectonic region type for all the GSIMs associated with that TRT.
    The arguments are the same as in :func:`calc_hazard_curves`, except
    for ``gsims``, which is a list of GSIM instances.

    :returns:
        a dictionary {grp_id: pmap} with attributes .calc_times and
        .eff_ruptures
    """
    if getattr(group, 'src_interdep', None) == 'mutex':
        mutex_weight = {
            src.source_id: weight
            for src, weight in zip(group.sources, group.srcs_weights)
        }
        srcs = group.sources
    else:
        mutex_weight = None
        srcs = sum([split_source(src) for src in group], [])
    grp_ids = set()
    for src in group:
        grp_ids.update(src.src_group_ids)
    maxdist = src_filter.integration_distance
    with GroundShakingIntensityModel.forbid_instantiation():
        imtls = param['imtls']
        trunclevel = param.get('truncation_level')
        cmaker = ContextMaker(gsims, maxdist)
        ctx_mon = monitor('make_contexts', measuremem=False)
        poe_mon = monitor('get_poes', measuremem=False)
        pmap = AccumDict({
            grp_id: ProbabilityMap(len(imtls.array), len(gsims))
            for grp_id in grp_ids
        })
        # AccumDict of arrays with 4 elements: weight, nsites, calc_time, split
        pmap.calc_times = AccumDict(accum=numpy.zeros(4))
        pmap.eff_ruptures = AccumDict()  # grp_id -> num_ruptures
        for src, s_sites in src_filter(srcs):  # filter now
            t0 = time.time()
            indep = group.rup_interdep == 'indep' if mutex_weight else True
            poemap = cmaker.poe_map(src, s_sites, imtls, trunclevel, ctx_mon,
                                    poe_mon, indep)
            if mutex_weight:  # mutex sources
                weight = mutex_weight[src.source_id]
                for sid in poemap:
                    pcurve = pmap[group.id].setdefault(sid, 0)
                    pcurve += poemap[sid] * weight
            elif poemap:
                for grp_id in src.src_group_ids:
                    pmap[grp_id] |= poemap
            src_id = src.source_id.split(':', 1)[0]
            pmap.calc_times[src_id] += numpy.array(
                [src.weight, len(s_sites),
                 time.time() - t0, 1])
            # storing the number of contributing ruptures too
            pmap.eff_ruptures += {
                grp_id: getattr(poemap, 'eff_ruptures', 0)
                for grp_id in src.src_group_ids
            }
        if mutex_weight and group.grp_probability is not None:
            pmap[group.id] *= group.grp_probability
        return pmap
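
The calc_times bookkeeping aggregates the statistics of split sources under their parent id (split ids look like 'src1:0', 'src1:1'). A self-contained sketch of that accumulation, using collections.defaultdict in place of the engine's AccumDict:

import collections
import numpy

calc_times = collections.defaultdict(lambda: numpy.zeros(4))
# per-source records: (source_id, weight, nsites, calc_time)
records = [('src1:0', 1.0, 10, 0.02),
           ('src1:1', 1.5, 12, 0.03),
           ('src2',   2.0,  7, 0.05)]
for source_id, weight, nsites, dt in records:
    base_id = source_id.split(':', 1)[0]
    calc_times[base_id] += numpy.array([weight, nsites, dt, 1])
print(dict(calc_times))
# -> {'src1': array([2.5, 22., 0.05, 2.]), 'src2': array([2., 7., 0.05, 1.])}
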