Example #1
def export_ruptures_csv(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    if 'scenario' in oq.calculation_mode:
        return []
    dest = dstore.export_path('ruptures.csv')
    header = ('rupid multiplicity mag centroid_lon centroid_lat '
              'centroid_depth trt strike dip rake boundary').split()
    rows = []
    for rgetter in get_rupture_getters(dstore):
        rups = list(rgetter)
        rup_data = calc.RuptureData(rgetter.trt, rgetter.rlzs_by_gsim)
        for r in rup_data.to_array(rups):
            rows.append(
                (r['rup_id'], r['multiplicity'], r['mag'],
                 r['lon'], r['lat'], r['depth'],
                 rgetter.trt, r['strike'], r['dip'], r['rake'],
                 r['boundary']))
    rows.sort()  # by rupture serial
    comment = 'investigation_time=%s, ses_per_logic_tree_path=%s' % (
        oq.investigation_time, oq.ses_per_logic_tree_path)
    writers.write_csv(dest, rows, header=header, sep='\t', comment=comment)
    return [dest]
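
A rough usage sketch of this exporter follows. It assumes `dstore` is an already-open datastore for a finished event-based calculation (opening one is engine-specific and not shown here); the export key pair matches the format described in the docstring.

# hypothetical driver: `dstore` is assumed to be an already-open
# datastore of a finished event-based calculation
ekey = ('ruptures', 'csv')        # (datastore key, fmt), as in the docstring
paths = export_ruptures_csv(ekey, dstore)
if paths:                         # empty list for scenario calculations
    print('ruptures written to', paths[0])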
Example #2
def export_ruptures_csv(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    if 'scenario' in oq.calculation_mode:
        return []
    dest = dstore.export_path('ruptures.csv')
    header = ('rupid multiplicity mag centroid_lon centroid_lat centroid_depth'
              ' trt strike dip rake boundary').split()
    csm_info = dstore['csm_info']
    grp_trt = csm_info.grp_trt()
    rows = []
    ruptures_by_grp = calc.get_ruptures_by_grp(dstore)
    for grp_id, trt in sorted(grp_trt.items()):
        rup_data = calc.RuptureData(trt, csm_info.get_gsims(grp_id))
        for r in rup_data.to_array(ruptures_by_grp.get(grp_id, [])):
            rows.append((r['rup_id'], r['multiplicity'], r['mag'], r['lon'],
                         r['lat'], r['depth'], trt, r['strike'], r['dip'],
                         r['rake'], r['boundary']))
    rows.sort()  # by rupture serial
    writers.write_csv(dest,
                      rows,
                      header=header,
                      sep='\t',
                      comment='investigation_time=%s' % oq.investigation_time)
    return [dest]
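
In both variants above the rows are plain tuples whose first element is the rupture id, so the final rows.sort() orders the CSV by rupture serial simply because Python sorts tuples lexicographically. A minimal, self-contained illustration of that behaviour:

# tuples sort lexicographically, so sorting the row tuples orders them
# by their first field (the rupture id)
rows = [(42, 1, 6.5), (7, 3, 5.0), (19, 2, 7.1)]
rows.sort()
print(rows)  # [(7, 3, 5.0), (19, 2, 7.1), (42, 1, 6.5)]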
Example #3
def export_ruptures_csv(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    if 'scenario' in oq.calculation_mode:
        return []
    dest = dstore.export_path('ruptures.csv')
    header = ('rupid multiplicity mag centroid_lon centroid_lat '
              'centroid_depth trt strike dip rake boundary').split()
    rows = []
    sf = filters.SourceFilter(dstore['sitecol'], oq.maximum_distance)
    for rgetter in gen_rupture_getters(dstore):
        rups = rgetter.get_ruptures(sf)
        rup_data = calc.RuptureData(rgetter.trt, rgetter.rlzs_by_gsim)
        for r in rup_data.to_array(rups):  # one record per rupture
            rows.append((r['rup_id'], r['multiplicity'], r['mag'], r['lon'],
                         r['lat'], r['depth'], rgetter.trt, r['strike'],
                         r['dip'], r['rake'], r['boundary']))
    rows.sort()  # by rupture rup_id
    comment = dstore.metadata
    comment.update(investigation_time=oq.investigation_time,
                   ses_per_logic_tree_path=oq.ses_per_logic_tree_path)
    writers.write_csv(dest, rows, header=header, comment=comment)
    return [dest]
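
These exporters are normally not called by hand: the engine dispatches on the export key. The snippet below is a minimal, self-contained stand-in for that dispatch pattern; the registry and decorator names here are hypothetical, not the engine's actual API.

# hypothetical stand-in for the engine's exporter registry, keyed by
# (datastore key, fmt); illustrates the dispatch pattern only
EXPORTERS = {}

def register(ekey):
    def decorator(func):
        EXPORTERS[ekey] = func
        return func
    return decorator

@register(('ruptures', 'csv'))
def export_ruptures_csv_demo(ekey, dstore):
    # stand-in body: real exporters return the list of written paths
    print('would export', ekey, 'from', dstore)
    return []

# dispatch on the export key, as the engine's export machinery does
EXPORTERS[('ruptures', 'csv')](('ruptures', 'csv'), dstore=None)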