Example #1
def export_ruptures_csv(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    if 'scenario' in oq.calculation_mode:
        return []
    dest = dstore.export_path('ruptures.csv')
    header = ('rupid multiplicity mag centroid_lon centroid_lat '
              'centroid_depth trt strike dip rake boundary').split()
    csm_info = dstore['csm_info']
    grp_trt = csm_info.grp_by("trt")
    rows = []
    ruptures_by_grp = get_ruptures_by_grp(dstore)
    for grp_id, trt in sorted(grp_trt.items()):
        rups = ruptures_by_grp.get(grp_id, [])
        rup_data = calc.RuptureData(trt, csm_info.get_gsims(grp_id))
        for r in rup_data.to_array(rups):
            rows.append((r['rup_id'], r['multiplicity'], r['mag'], r['lon'],
                         r['lat'], r['depth'], trt, r['strike'], r['dip'],
                         r['rake'], r['boundary']))
    rows.sort()  # by rupture serial
    comment = 'investigation_time=%s, ses_per_logic_tree_path=%s' % (
        oq.investigation_time, oq.ses_per_logic_tree_path)
    writers.write_csv(dest, rows, header=header, sep='\t', comment=comment)
    return [dest]
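
A minimal calling sketch for the exporter above. The `datastore.read` helper, the `calc_id` value and the export key pair are assumptions about the surrounding OpenQuake API, not part of the snippet itself:

from openquake.commonlib import datastore   # assumed location of the datastore API

# a minimal sketch, assuming calc_id refers to a finished event-based calculation
dstore = datastore.read(calc_id)
paths = export_ruptures_csv(('ruptures', 'csv'), dstore)   # illustrative export key
print(paths)   # e.g. ['<export_dir>/ruptures.csv']; an empty list for scenario calculations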
Example #2
def export_agg_losses_ebr(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    if 'ruptures' not in dstore:
        logging.warning('There are no ruptures in the datastore')
        return []
    name, ext = export.keyfunc(ekey)
    agg_losses = dstore[name]
    has_rup_data = 'ruptures' in dstore
    extra_list = [('magnitude', F32),
                  ('centroid_lon', F32),
                  ('centroid_lat', F32),
                  ('centroid_depth', F32)] if has_rup_data else []
    oq = dstore['oqparam']
    lti = oq.lti
    dtlist = ([('event_id', U64), ('rup_id', U32), ('year', U32),
               ('rlzi', U16)] + extra_list + oq.loss_dt_list())
    elt_dt = numpy.dtype(dtlist)
    elt = numpy.zeros(len(agg_losses), elt_dt)
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    events_by_rupid = group_array(dstore['events'].value, 'rup_id')
    rup_data = {}
    event_by_eid = {}  # eid -> event
    # populate rup_data and event_by_eid
    ruptures_by_grp = getters.get_ruptures_by_grp(dstore)
    # TODO: avoid reading the events twice
    for grp_id, ruptures in ruptures_by_grp.items():
        for ebr in ruptures:
            for event in events_by_rupid[ebr.serial]:
                event_by_eid[event['eid']] = event
        if has_rup_data:
            rup_data.update(get_rup_data(ruptures))
    for r, row in enumerate(agg_losses):
        rec = elt[r]
        event = event_by_eid[row['eid']]
        rec['event_id'] = event['eid']
        rec['year'] = event['year']
        rec['rlzi'] = row['rlzi']
        if rup_data:
            rec['rup_id'] = rup_id = event['rup_id']
            (rec['magnitude'], rec['centroid_lon'], rec['centroid_lat'],
             rec['centroid_depth']) = rup_data[rup_id]
        for lt, i in lti.items():
            rec[lt] = row['loss'][i]
    elt.sort(order=['year', 'event_id', 'rlzi'])
    dest = dstore.build_fname('agg_losses', 'all', 'csv')
    writer.save(elt, dest)
    return writer.getsaved()
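
A usage sketch along the same lines. The datastore key `'agg_loss_table'` is only illustrative here (the real key is whatever `export.keyfunc` expects for this exporter), and `datastore.read` is again an assumption about the surrounding API:

from openquake.commonlib import datastore   # assumed location of the datastore API

# a minimal sketch, assuming calc_id refers to an event-based risk calculation
dstore = datastore.read(calc_id)
paths = export_agg_losses_ebr(('agg_loss_table', 'csv'), dstore)   # illustrative key
print(paths)   # the CSV files recorded by writer.getsaved(), e.g. ['<export_dir>/agg_losses_all.csv']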
Example #3
def export_ruptures_xml(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    fmt = ekey[-1]
    oq = dstore['oqparam']
    mesh = get_mesh(dstore['sitecol'])
    ruptures_by_grp = {}
    for grp_id, ruptures in get_ruptures_by_grp(dstore).items():
        ruptures_by_grp[grp_id] = [ebr.export(mesh) for ebr in ruptures]
    dest = dstore.export_path('ses.' + fmt)
    writer = hazard_writers.SESXMLWriter(dest)
    writer.serialize(ruptures_by_grp, oq.investigation_time)
    return [dest]
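
And a final usage sketch for the XML exporter. Since `fmt = ekey[-1]`, the output file name follows the format in the export key; `datastore.read` and the key pair are assumptions, as before:

from openquake.commonlib import datastore   # assumed location of the datastore API

# a minimal sketch, assuming calc_id refers to a finished event-based calculation
dstore = datastore.read(calc_id)
paths = export_ruptures_xml(('ruptures', 'xml'), dstore)   # illustrative export key
print(paths)   # e.g. ['<export_dir>/ses.xml']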