Example #1
def extract_ruptures(dstore, what):
    """
    Extract some information about the ruptures, including the boundary.
    Example:
    http://127.0.0.1:8800/v1/calc/30/extract/ruptures?min_mag=6
    """
    qdict = parse(what)
    if 'min_mag' in qdict:
        [min_mag] = qdict['min_mag']
    else:
        min_mag = 0
    bio = io.StringIO()
    first = True
    trts = list(dstore.getitem('full_lt').attrs['trts'])
    for rgetter in getters.gen_rgetters(dstore):
        rups = [
            rupture._get_rupture(proxy.rec, proxy.geom, rgetter.trt)
            for proxy in rgetter.get_proxies(min_mag)
        ]
        arr = rupture.to_csv_array(rups)
        if first:
            header = None
            comment = dict(trts=trts)
            first = False
        else:
            header = 'no-header'
            comment = None
        writers.write_csv(bio, arr, header=header, comment=comment)
    return bio.getvalue()
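The pattern worth noting here is the header handling: the header and the trts comment are written only for the first rupture getter, so the concatenated chunks form one valid CSV. Below is a minimal, self-contained sketch of that pattern using the standard csv module in place of writers.write_csv (that substitution is an assumption of the sketch, not the engine's API):

import io
import csv

def chunks_to_csv(chunks, header):
    """Write several groups of rows to one CSV, emitting the header once."""
    bio = io.StringIO()
    writer = csv.writer(bio)
    first = True
    for rows in chunks:
        if first:
            writer.writerow(header)  # only the first chunk carries the header
            first = False
        writer.writerows(rows)
    return bio.getvalue()

print(chunks_to_csv([[(1, 6.5)], [(2, 7.0)]], ['rup_id', 'mag']))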
Example #2
    def save_events(self, rup_array):
        """
        :param rup_array: an array of ruptures with fields grp_id
        :returns: a list of RuptureGetters
        """
        # this is very fast compared to saving the ruptures
        eids = rupture.get_eids(rup_array, self.samples_by_grp,
                                self.num_rlzs_by_grp)
        self.check_overflow()  # check the number of events
        events = numpy.zeros(len(eids), rupture.events_dt)
        # when computing the events all ruptures must be considered,
        # including the ones far away that will be discarded later on
        rgetters = gen_rgetters(self.datastore)
        # build the associations eid -> rlz sequentially or in parallel
        # this is very fast: I saw 30 million events associated in 1 minute!
        logging.info('Building assocs event_id -> rlz_id for {:_d} events'
                     ' and {:_d} ruptures'.format(len(events), len(rup_array)))
        if len(events) < 1E5:
            it = map(RuptureGetter.get_eid_rlz, rgetters)
        else:
            # build a composite array with the eid->rlz associations in parallel
            it = parallel.Starmap(RuptureGetter.get_eid_rlz,
                                  ((rgetter, ) for rgetter in rgetters),
                                  progress=logging.debug,
                                  h5=self.datastore.hdf5)
        i = 0
        for eid_rlz in it:
            for er in eid_rlz:
                events[i] = er
                i += 1
                if i >= TWO32:
                    raise ValueError('There are more than %d events!' % i)
        events.sort(order='rup_id')  # fast too
        # sanity check
        n_unique_events = len(numpy.unique(events[['id', 'rup_id']]))
        assert n_unique_events == len(events), (n_unique_events, len(events))
        events['id'] = numpy.arange(len(events))
        # set event year and event ses starting from 1
        itime = int(self.oqparam.investigation_time)
        nses = self.oqparam.ses_per_logic_tree_path
        extra = numpy.zeros(len(events), [('year', U16), ('ses_id', U16)])
        # seed for year and ses_id
        numpy.random.seed(self.oqparam.ses_seed)
        extra['year'] = numpy.random.choice(itime, len(events)) + 1
        extra['ses_id'] = numpy.random.choice(nses, len(events)) + 1
        self.datastore['events'] = util.compose_arrays(events, extra)
        eindices = get_indices(events['rup_id'])
        arr = numpy.array(list(eindices.values()))[:, 0, :]
        self.datastore['ruptures']['e0'] = arr[:, 0]
        self.datastore['ruptures']['e1'] = arr[:, 1]
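The final block draws a random year and SES ordinal for every event, seeding numpy first so the draw is reproducible. A hedged sketch of just that step, with stand-in values for the sizes and the seed (in the method they come from the datastore and oqparam):

import numpy

U16 = numpy.uint16
n_events, itime, nses, ses_seed = 10, 50, 2, 42  # stand-in values

extra = numpy.zeros(n_events, [('year', U16), ('ses_id', U16)])
numpy.random.seed(ses_seed)  # reproducible across runs
extra['year'] = numpy.random.choice(itime, n_events) + 1   # uniform in 1..itime
extra['ses_id'] = numpy.random.choice(nses, n_events) + 1  # uniform in 1..nses
print(extra)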
Example #3
def extract_rupture(dstore, rup_id):
    """
    Extract information about the given event index.
    Example:
    http://127.0.0.1:8800/v1/calc/30/extract/rupture/1066
    """
    ridx = list(dstore['ruptures']['id']).index(int(rup_id))
    [getter] = getters.gen_rgetters(dstore, slice(ridx, ridx + 1))
    [ebr] = getter.get_ruptures()
    return ArrayWrapper((), ebr.rupture.todict())
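The lookup converts the rupture id into a positional index and passes a one-element slice to gen_rgetters. A tiny sketch of that step with a made-up id column (rup_ids is hypothetical):

import numpy

rup_ids = numpy.array([10, 42, 1066, 2048])  # stand-in for dstore['ruptures']['id']
rup_id = 1066
ridx = list(rup_ids).index(rup_id)  # positional index of the rupture
print(ridx, rup_ids[slice(ridx, ridx + 1)])

Note that list(...).index is a linear scan over a Python copy of the column; numpy.where(rup_ids == int(rup_id))[0][0] would do the same lookup without leaving numpy.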
Example #4
    def save_events(self, rup_array):
        """
        :param rup_array: an array of ruptures with fields grp_id
        :returns: a list of RuptureGetters
        """
        # this is very fast compared to saving the ruptures
        eids = rupture.get_eids_for_erf_based(rup_array, self.num_rlzs_by_grp)
        # eids = rupture.get_eids(rup_array, self.samples_by_grp,
        #                         self.num_rlzs_by_grp)
        self.check_overflow()  # check the number of events
        events = numpy.zeros(len(eids), rupture.events_dt)
        # when computing the events all ruptures must be considered,
        # including the ones far away that will be discarded later on
        rgetters = gen_rgetters(self.datastore)
        # build the associations eid -> rlz sequentially or in parallel
        # this is very fast: I saw 30 million events associated in 1 minute!
        logging.info('Building assocs event_id -> rlz_id for {:_d} events'
                     ' and {:_d} ruptures'.format(len(events), len(rup_array)))
        if len(events) < 1E5:
            it = map(RuptureGetter.get_eid_rlz, rgetters)
        else:
            # build a composite array with the eid->rlz associations in parallel
            it = parallel.Starmap(RuptureGetter.get_eid_rlz,
                                  ((rgetter, ) for rgetter in rgetters),
                                  progress=logging.debug,
                                  h5=self.datastore.hdf5)
        i = 0
        for eid_rlz in it:
            for er in eid_rlz:
                events[i] = er
                i += 1
                if i >= TWO32:
                    raise ValueError('There are more than %d events!' % i)
        events.sort(order='rup_id')  # fast too
        # one event per rupture -> the event ids match the rupture ordering
        events['id'] = numpy.arange(len(events))
        self.datastore['events'] = events
        eindices = get_indices(events['rup_id'])
        arr = numpy.array(list(eindices.values()))[:, 0, :]
        self.datastore['ruptures']['e0'] = arr[:, 0]
        self.datastore['ruptures']['e1'] = arr[:, 1]
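The e0/e1 columns are start/stop offsets into the sorted events array, one pair per rupture. Below is a simplified stand-in for get_indices, assuming each rup_id occupies a single contiguous run (the real helper returns lists of (start, stop) pairs, which is why the code above indexes with [:, 0, :]):

import numpy

def start_stops(values):
    """Map each value of a sorted array to its (start, stop) slice bounds."""
    out = {}
    start = 0
    for i in range(1, len(values) + 1):
        if i == len(values) or values[i] != values[start]:
            out[int(values[start])] = (start, i)
            start = i
    return out

rup_ids = numpy.array([3, 3, 3, 7, 7, 9])  # events['rup_id'] after the sort
print(start_stops(rup_ids))  # {3: (0, 3), 7: (3, 5), 9: (5, 6)}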
Example #5
def extract_event_info(dstore, eidx):
    """
    Extract information about the given event index.
    Example:
    http://127.0.0.1:8800/v1/calc/30/extract/event_info/0
    """
    event = dstore['events'][int(eidx)]
    ridx = event['rup_id']
    [getter] = getters.gen_rgetters(dstore, slice(ridx, ridx + 1))
    rupdict = getter.get_rupdict()
    rlzi = event['rlz_id']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    gsim = rlzs_assoc.gsim_by_trt[rlzi][rupdict['trt']]
    for key, val in rupdict.items():
        yield key, val
    yield 'rlzi', rlzi
    yield 'gsim', repr(gsim)
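Since the extractor is a generator of (key, value) pairs, the caller can materialize it however it likes; dict() consumes it directly. A toy consumer, with made-up pairs standing in for the real output:

def event_info():  # hypothetical stand-in yielding pairs like extract_event_info
    yield 'mag', 6.5
    yield 'trt', 'Active Shallow Crust'
    yield 'rlzi', 0

info = dict(event_info())  # a generator of pairs converts directly to a dict
print(info['mag'], info['rlzi'])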
Example #6
def export_ruptures_xml(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    fmt = ekey[-1]
    oq = dstore['oqparam']
    num_ses = oq.ses_per_logic_tree_path
    ruptures_by_grp = AccumDict(accum=[])
    for rgetter in gen_rgetters(dstore):
        ebrs = [ebr.export(rgetter.rlzs_by_gsim, num_ses)
                for ebr in rgetter.get_ruptures()]
        ruptures_by_grp[rgetter.grp_id].extend(ebrs)
    dest = dstore.export_path('ses.' + fmt)
    writer = hazard_writers.SESXMLWriter(dest)
    writer.serialize(ruptures_by_grp, oq.investigation_time)
    return [dest]
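AccumDict(accum=[]) is used here as a dictionary whose missing values start out as empty lists; for this grouping pattern, collections.defaultdict(list) from the standard library behaves the same way:

from collections import defaultdict

ruptures_by_grp = defaultdict(list)
# made-up (grp_id, rupture) pairs standing in for the rgetter loop
for grp_id, ebr in [(0, 'rup_a'), (0, 'rup_b'), (1, 'rup_c')]:
    ruptures_by_grp[grp_id].append(ebr)
print(dict(ruptures_by_grp))  # {0: ['rup_a', 'rup_b'], 1: ['rup_c']}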
Example #7
def export_ruptures_xml(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    fmt = ekey[-1]
    oq = dstore['oqparam']
    events = group_array(dstore['events'][()], 'rup_id')
    ruptures_by_grp = AccumDict(accum=[])
    for rgetter in gen_rgetters(dstore):
        ebrs = []
        for proxy in rgetter.get_proxies():
            events_by_ses = group_array(events[proxy['id']], 'ses_id')
            ebr = proxy.to_ebr(rgetter.trt, rgetter.samples)
            ebrs.append(ebr.export(events_by_ses))
        ruptures_by_grp[rgetter.grp_id].extend(ebrs)
    dest = dstore.export_path('ses.' + fmt)
    writer = hazard_writers.SESXMLWriter(dest)
    writer.serialize(ruptures_by_grp, oq.investigation_time)
    return [dest]
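This variant groups the events twice with group_array: once by rup_id up front, then by ses_id per rupture. A simplified reimplementation of that helper over a structured array (a stand-in for the engine's own group_array, not its actual code):

import numpy

def group_array(array, field):
    """Group the records of a structured array by the given field."""
    groups = {}
    for rec in array:
        groups.setdefault(int(rec[field]), []).append(rec)
    return {key: numpy.array(rows) for key, rows in groups.items()}

events = numpy.array([(0, 1), (1, 1), (2, 2)],
                     [('id', numpy.uint32), ('rup_id', numpy.uint32)])
print(group_array(events, 'rup_id'))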
Example #8
def extract_rupture_info(dstore, what):
    """
    Extract some information about the ruptures, including the boundary.
    Example:
    http://127.0.0.1:8800/v1/calc/30/extract/rupture_info?min_mag=6
    """
    qdict = parse(what)
    if 'min_mag' in qdict:
        [min_mag] = qdict['min_mag']
    else:
        min_mag = 0
    oq = dstore['oqparam']

    dtlist = [('rup_id', U32), ('occurrence_rate', F32), ('multiplicity', U16),
              ('mag', F32), ('centroid_lon', F32), ('centroid_lat', F32),
              ('centroid_depth', F32), ('trt', '<S50'), ('strike', F32),
              ('dip', F32), ('rake', F32)]

    rows = []
    boundaries = []
    for rgetter in getters.gen_rgetters(dstore):
        proxies = rgetter.get_proxies(min_mag)
        rup_data = RuptureData(rgetter.trt, rgetter.samples,
                               rgetter.rlzs_by_gsim)
        for r in rup_data.to_array(proxies):
            coords = ['%.5f %.5f' % xyz[:2] for xyz in zip(*r['boundaries'])]
            coordset = sorted(set(coords))
            if len(coordset) < 4:  # degenerate to line
                boundaries.append('LINESTRING(%s)' % ', '.join(coordset))
            else:  # good polygon
                boundaries.append('POLYGON((%s))' % ', '.join(coords))

            rows.append((r['rup_id'], r['occurrence_rate'], r['multiplicity'],
                         r['mag'], r['lon'], r['lat'], r['depth'], rgetter.trt,
                         r['strike'], r['dip'], r['rake']))

    arr = numpy.array(rows, dtlist)
    geoms = gzip.compress('\n'.join(boundaries).encode('utf-8'))
    return ArrayWrapper(
        arr, dict(investigation_time=oq.investigation_time, boundaries=geoms))
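The boundary handling is worth isolating: each boundary is serialized as WKT, degenerating to a LINESTRING when there are fewer than four distinct vertices, and the joined strings are gzipped into the boundaries attribute. A round-trip sketch with a made-up closed ring:

import gzip

coords = ['10.00000 45.00000', '10.10000 45.00000',
          '10.10000 45.10000', '10.00000 45.10000', '10.00000 45.00000']
coordset = sorted(set(coords))
if len(coordset) < 4:  # degenerate boundary: fall back to a line
    wkt = 'LINESTRING(%s)' % ', '.join(coordset)
else:  # proper closed ring
    wkt = 'POLYGON((%s))' % ', '.join(coords)

geoms = gzip.compress(wkt.encode('utf-8'))     # as stored in the ArrayWrapper
print(gzip.decompress(geoms).decode('utf-8'))  # POLYGON((10.00000 45.00000, ...))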