Example #1
def preclassical(srcs, sites, cmaker, monitor):
    """
    Weight the sources. Also split them if split_sources is true. If
    ps_grid_spacing is set, grid the point sources before weighting them.

    NB: the SourceFilter can be built on a reduced site collection for
    performance reasons
    """
    split_sources = []
    spacing = cmaker.ps_grid_spacing
    grp_id = srcs[0].grp_id
    if sites is None:
        # in csm2rup just split the sources and count the ruptures
        for src in srcs:
            ss = split_source(src)
            if len(ss) > 1:
                for ss_ in ss:
                    ss_.nsites = 1
            split_sources.extend(ss)
            src.num_ruptures = src.count_ruptures()
        dic = {grp_id: split_sources}
        dic['before'] = len(srcs)
        dic['after'] = len(dic[grp_id])
        return dic

    with monitor('splitting sources'):
        sf = SourceFilter(sites, cmaker.maximum_distance)
        for src in srcs:
            # NB: this is approximate, since the sites are sampled
            src.nsites = len(sf.close_sids(src))  # can be 0
            # NB: it is crucial to split only the close sources, for
            # performance reasons (think of Ecuador in SAM)
            splits = split_source(src) if (cmaker.split_sources
                                           and src.nsites) else [src]
            split_sources.extend(splits)
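    # if ps_grid_spacing is set, grid the point sources before weighting them
    # (see the docstring above); otherwise the split sources pass through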
    dic = grid_point_sources(split_sources, spacing, monitor)
    with monitor('weighting sources'):
        # this is also prefiltering again, to have a good representative
        # of what will be done during the classical phase
        cmaker.set_weight(dic[grp_id], sf)
    dic['before'] = len(split_sources)
    dic['after'] = len(dic[grp_id])
    if spacing:
        dic['ps_grid/%02d' % monitor.task_no] = dic[grp_id]
    return dic
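
A minimal sketch (not part of the engine) of how the dictionary returned by preclassical could be inspected; summarize_preclassical and the printed labels are illustrative assumptions based only on the code above.

def summarize_preclassical(dic, grp_id):
    # dic is assumed to have the shape built above:
    # {grp_id: split/gridded sources, 'before': int, 'after': int}
    sources = dic[grp_id]
    total_weight = sum(getattr(src, 'weight', 0) for src in sources)
    print('sources before splitting/gridding: %d' % dic['before'])
    print('sources after splitting/gridding:  %d' % dic['after'])
    print('total source weight: %.1f' % total_weight)
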
Example #2
def event_based(proxies, full_lt, oqparam, dstore, monitor):
    """
    Compute GMFs and optionally hazard curves
    """
    alldata = AccumDict(accum=[])
    sig_eps = []
    times = []  # rup_id, nsites, dt
    hcurves = {}  # key -> poes
    trt_smr = proxies[0]['trt_smr']
    fmon = monitor('filtering ruptures', measuremem=False)
    cmon = monitor('computing gmfs', measuremem=False)
    with dstore:
        trt = full_lt.trts[trt_smr // len(full_lt.sm_rlzs)]
        srcfilter = SourceFilter(dstore['sitecol'],
                                 oqparam.maximum_distance(trt))
        rupgeoms = dstore['rupgeoms']
        rlzs_by_gsim = full_lt._rlzs_by_gsim(trt_smr)
        param = vars(oqparam).copy()
        param['imtls'] = oqparam.imtls
        param['min_iml'] = oqparam.min_iml
        param['maximum_distance'] = oqparam.maximum_distance(trt)
        cmaker = ContextMaker(trt, rlzs_by_gsim, param)
        min_mag = getdefault(oqparam.minimum_magnitude, trt)
        for proxy in proxies:
            t0 = time.time()
            with fmon:
                if proxy['mag'] < min_mag:
                    continue
                sids = srcfilter.close_sids(proxy, trt)
                if len(sids) == 0:  # filtered away
                    continue
                proxy.geom = rupgeoms[proxy['geom_id']]
                ebr = proxy.to_ebr(cmaker.trt)  # after the geometry is set
                try:
                    computer = GmfComputer(ebr,
                                           srcfilter.sitecol.filtered(sids),
                                           cmaker, oqparam.correl_model,
                                           oqparam.cross_correl,
                                           oqparam._amplifier,
                                           oqparam._sec_perils)
                except FarAwayRupture:
                    continue
            with cmon:
                data = computer.compute_all(sig_eps)
            dt = time.time() - t0
            times.append((computer.ebrupture.id, len(computer.ctx.sids), dt))
            for key in data:
                alldata[key].extend(data[key])
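    # cast the accumulated columns: the ID-like columns (eid, sid, rlz)
    # with U32, all remaining value columns with F32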
    for key, val in sorted(alldata.items()):
        if key in ('eid', 'sid', 'rlz'):
            alldata[key] = U32(alldata[key])
        else:
            alldata[key] = F32(alldata[key])
    gmfdata = strip_zeros(pandas.DataFrame(alldata))
    if len(gmfdata) and oqparam.hazard_curves_from_gmfs:
        hc_mon = monitor('building hazard curves', measuremem=False)
        for (sid, rlz), df in gmfdata.groupby(['sid', 'rlz']):
            with hc_mon:
                poes = calc.gmvs_to_poes(df, oqparam.imtls,
                                         oqparam.ses_per_logic_tree_path)
                for m, imt in enumerate(oqparam.imtls):
                    hcurves[rsi2str(rlz, sid, imt)] = poes[m]
    times = numpy.array([tup + (monitor.task_no, ) for tup in times], time_dt)
    times.sort(order='rup_id')
    if not oqparam.ground_motion_fields:
        gmfdata = ()
    return dict(gmfdata=gmfdata,
                hcurves=hcurves,
                times=times,
                sig_eps=numpy.array(sig_eps, sig_eps_dt(oqparam.imtls)))
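
The `times` array above relies on a structured dtype (time_dt) so it can be sorted by rupture id. The sketch below illustrates the idea with a stand-in dtype; the field types are an assumption for illustration, not necessarily the engine's exact definition.

import numpy

# stand-in for time_dt (assumed layout): one record per rupture with the
# fields used in event_based above
time_dt = numpy.dtype([('rup_id', numpy.int64), ('nsites', numpy.uint32),
                       ('dt', numpy.float32), ('task_no', numpy.uint16)])

# (rup_id, nsites, dt) tuples as collected in the `times` list, plus task_no
records = [(42, 120, 0.8), (7, 15, 0.1), (13, 60, 0.3)]
task_no = 5
times = numpy.array([tup + (task_no,) for tup in records], time_dt)
times.sort(order='rup_id')   # sort in place by rupture id
print(times['rup_id'])       # -> [ 7 13 42]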