def test_ses_generation_from_parametric_source_with_filtering(self):
        # Build a stochastic event set from two area sources (area1, area2)
        # while filtering out sources farther than 100 km from the single
        # site. The site is co-located with area1's center and 100 km is
        # exactly area1's radius, so area2 (centered at 5., 5., roughly
        # 500 km away) is excluded; the resulting SES rates should therefore
        # approximately match the MFD of area1 alone.
        numpy.random.seed(123)
        site = Site(location=Point(0., 0.),
                    vs30=760,
                    vs30measured=True,
                    z1pt0=40.,
                    z2pt5=2.)
        sitecol = SiteCollection([site])
        src_filter = filters.SourceFilter(
            sitecol, filters.MagDepDistance.new('100'))
        ses = stochastic_event_set([self.area1, self.area2], src_filter)

        mag_bins = numpy.arange(5., 6.6, 0.1)
        rates = self._extract_rates(
            ses, time_span=self.time_span, bins=mag_bins)

        expect_rates = numpy.array(
            [rate for mag, rate in self.mfd.get_annual_occurrence_rates()])

        numpy.testing.assert_allclose(rates, expect_rates, rtol=0, atol=1e-4)
# Example #2
def export_ruptures_csv(ekey, dstore):
    """
    Export the ruptures of an event based calculation as a CSV file.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: a list with the path of the exported file
        (empty for scenario calculations)
    """
    oq = dstore['oqparam']
    if 'scenario' in oq.calculation_mode:
        # scenario ruptures are not exported in this format
        return []
    dest = dstore.export_path('ruptures.csv')
    header = ('rupid multiplicity mag centroid_lon centroid_lat '
              'centroid_depth trt strike dip rake boundary').split()
    rows = []
    sf = filters.SourceFilter(dstore['sitecol'], oq.maximum_distance)
    for rgetter in gen_rupture_getters(dstore):
        rups = rgetter.get_ruptures(sf)
        rup_data = calc.RuptureData(rgetter.trt, rgetter.rlzs_by_gsim)
        # only the record array matters here; the original code zipped
        # with `rups` but never used the rupture objects
        for r in rup_data.to_array(rups):
            rows.append((r['rup_id'], r['multiplicity'], r['mag'], r['lon'],
                         r['lat'], r['depth'], rgetter.trt, r['strike'],
                         r['dip'], r['rake'], r['boundary']))
    rows.sort()  # by rupture rup_id
    comment = dstore.metadata
    comment.update(investigation_time=oq.investigation_time,
                   ses_per_logic_tree_path=oq.ses_per_logic_tree_path)
    writers.write_csv(dest, rows, header=header, comment=comment)
    return [dest]
# Example #3
def extract_rupture_info(dstore, what):
    """
    Extract some information about the ruptures, including the boundary
    as a WKT POLYGON (gzipped in the `boundaries` attribute).
    Example:
    http://127.0.0.1:8800/v1/calc/30/extract/rupture_info?min_mag=6

    :param dstore: datastore object
    :param what: query string, possibly containing a `min_mag` filter
    :returns: an ArrayWrapper with one record per rupture
    """
    qdict = parse(what)
    if 'min_mag' in qdict:
        [min_mag] = qdict['min_mag']
    else:
        min_mag = 0
    oq = dstore['oqparam']
    dtlist = [('rupid', U32), ('multiplicity', U16), ('mag', F32),
              ('centroid_lon', F32), ('centroid_lat', F32),
              ('centroid_depth', F32), ('trt', '<S50'), ('strike', F32),
              ('dip', F32), ('rake', F32)]
    rows = []
    boundaries = []
    sf = filters.SourceFilter(dstore['sitecol'], oq.maximum_distance)
    for rgetter in getters.gen_rupture_getters(dstore):
        rups = rgetter.get_ruptures(sf, min_mag)
        rup_data = RuptureData(rgetter.trt, rgetter.rlzs_by_gsim)
        # only the record array is needed; the original code zipped with
        # `rups` but never used the rupture objects
        for r in rup_data.to_array(rups):
            # build a WKT POLYGON string from the (lon, lat) boundary points
            coords = ['%.5f %.5f' % xyz[:2] for xyz in zip(*r['boundaries'])]
            boundaries.append('POLYGON((%s))' % ', '.join(coords))
            rows.append(
                (r['rup_id'], r['multiplicity'], r['mag'], r['lon'], r['lat'],
                 r['depth'], rgetter.trt, r['strike'], r['dip'], r['rake']))
    arr = numpy.array(rows, dtlist)
    # the boundaries can be large, so they are compressed
    geoms = gzip.compress('\n'.join(boundaries).encode('utf-8'))
    return ArrayWrapper(
        arr, dict(investigation_time=oq.investigation_time, boundaries=geoms))
# Example #4
def get_gmfgetter(dstore, rup_id):
    """
    :param dstore: datastore object
    :param rup_id: ordinal of a rupture in the datastore
    :returns: GmfGetter associated to the given rupture
    :raises ValueError: if there is no rupture with the given ordinal
    """
    oq = dstore['oqparam']
    srcfilter = filters.SourceFilter(dstore['sitecol'], oq.maximum_distance)
    # the slice contains at most one rupture, so return on the first getter
    for rgetter in get_rupture_getters(dstore, slc=slice(rup_id, rup_id + 1)):
        return GmfGetter(rgetter, srcfilter, oq)
    # the original code crashed here with an unhelpful UnboundLocalError
    raise ValueError('There is no rupture #%d in the datastore' % rup_id)
# Example #5
def export_gmf_scenario_csv(ekey, dstore):
    """
    Export the GMFs of a single rupture, selected via an export key of the
    form ``<key>/rup-<ordinal>``, as one CSV file per realization and IMT.

    :param ekey: export key, i.e. a pair (datastore key, fmt); the first
        element must contain a ``/rup-\\d+`` suffix
    :param dstore: datastore object
    :returns: the list of the exported file names
    :raises ValueError: if the export key does not match ``rup-\\d+``
    """
    what = ekey[0].split('/')
    if len(what) == 1:
        raise ValueError(r'Missing "/rup-\d+"')
    oq = dstore['oqparam']
    csm_info = dstore['csm_info']
    rlzs_assoc = csm_info.get_rlzs_assoc()
    num_ruptures = len(dstore['ruptures'])
    imts = list(oq.imtls)
    # the second element of the export key must be of the form 'rup-<ordinal>'
    mo = re.match(r'rup-(\d+)$', what[1])
    if mo is None:
        raise ValueError(r"Invalid format: %r does not match 'rup-(\d+)$'" %
                         what[1])
    ridx = int(mo.group(1))
    assert 0 <= ridx < num_ruptures, ridx
    # for scenario there is an unique grp_id=0
    [rgetter] = gen_rupture_getters(dstore, slice(ridx, ridx + 1))
    [ebr] = rgetter.get_ruptures()
    sitecol = dstore['sitecol'].complete
    srcfilter = filters.SourceFilter(sitecol, oq.maximum_distance)
    getter = GmfGetter(rgetter, srcfilter, oq)
    getter.init()
    eids = rgetter.get_eid_rlz()['eid']
    sids = getter.computers[0].sids
    rlzs = rlzs_assoc.realizations
    # group the hazard records first by realization index, then by site id
    hazardr = [collections.defaultdict(list) for rlz in rlzs]
    for sid, haz in getter.get_hazard().items():
        for rec in haz:
            hazardr[rec['rlzi']][sid].append(rec)
    # one structured-array column per event, one row per site
    fields = ['eid-%03d' % eid for eid in eids]
    dt = numpy.dtype([(f, F32) for f in fields])
    mesh = numpy.zeros(len(sids), [('lon', F64), ('lat', F64)])
    mesh['lon'] = sitecol.lons[sids]
    mesh['lat'] = sitecol.lats[sids]
    writer = writers.CsvWriter(fmt='%.5f')
    for rlzi in range(len(rlzs)):
        hazard = hazardr[rlzi]
        for imti, imt in enumerate(imts):
            # fill the per-site GMF values for this realization and IMT
            gmfs = numpy.zeros(len(sids), dt)
            for s, sid in enumerate(sids):
                for rec in hazard[sid]:
                    event = 'eid-%03d' % rec['eid']
                    gmfs[s][event] = rec['gmv'][imti]
            dest = dstore.build_fname(
                'gmf', 'rup-%s-rlz-%s-%s' % (ebr.serial, rlzi, imt), 'csv')
            # prepend the lon/lat columns to the GMF columns
            data = util.compose_arrays(mesh, gmfs)
            writer.save(data, dest)
    return writer.getsaved()
# Example #6
def export_ruptures_xml(ekey, dstore):
    """
    Export the ruptures of the calculation as a SES XML file.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    fmt = ekey[-1]
    oq = dstore['oqparam']
    srcfilter = filters.SourceFilter(dstore['sitecol'], oq.maximum_distance)
    num_ses = oq.ses_per_logic_tree_path
    ruptures_by_grp = {}
    for rgetter in gen_rupture_getters(dstore):
        exported = []
        for ebr in rgetter.get_ruptures(srcfilter):
            exported.append(ebr.export(rgetter.rlzs_by_gsim, num_ses))
        # skip groups without ruptures
        if exported:
            ruptures_by_grp[rgetter.grp_id] = exported
    dest = dstore.export_path('ses.' + fmt)
    writer = hazard_writers.SESXMLWriter(dest)
    writer.serialize(ruptures_by_grp, oq.investigation_time)
    return [dest]