Example no. 1
0
def export_ses_csv(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    if 'scenario' in oq.calculation_mode:
        return []  # nothing to export for scenario calculations
    dest = dstore.export_path('ruptures.csv')
    header = ('rupid multiplicity mag centroid_lon centroid_lat centroid_depth'
              ' trt strike dip rake boundary').split()
    csm_info = dstore['csm_info']
    gsims_by_grp = csm_info.get_rlzs_assoc().gsims_by_grp_id
    rows = []
    for grp_id, trt in sorted(csm_info.grp_trt().items()):
        rup_array = calc.RuptureData(trt, gsims_by_grp[grp_id]).to_array(
            calc.get_ruptures(dstore, grp_id))
        rows.extend(
            (rec['rup_id'], rec['multiplicity'], rec['mag'], rec['lon'],
             rec['lat'], rec['depth'], trt, rec['strike'], rec['dip'],
             rec['rake'], rec['boundary'])
            for rec in rup_array)
    rows.sort()  # by rupture serial (the first column)
    writers.write_csv(dest, rows, header=header, sep='\t')
    return [dest]
Example no. 2
0
def export_ruptures_xml(ekey, dstore):
    """
    Export the ruptures in SES XML format; additionally, pickle each
    rupture into <export_dir>/ruptures/rupture_<serial>_<calc_id>.p.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: a list with the path of the exported XML file
    """
    fmt = ekey[-1]
    oq = dstore['oqparam']
    events = dstore['events']
    sm_by_grp = dstore['csm_info'].get_sm_by_grp()
    mesh = get_mesh(dstore['sitecol'])
    # create the output directory for the pickled ruptures once,
    # instead of re-checking its existence for every single rupture
    ruptures_dir = os.path.join(oq.base_path, oq.export_dir, 'ruptures')
    if not os.path.exists(ruptures_dir):
        os.makedirs(ruptures_dir)
    ruptures = {}
    for grp in dstore['ruptures']:
        grp_id = int(grp[4:])  # strip grp-
        ruptures[grp_id] = []
        for ebr in calc.get_ruptures(dstore, events, grp_id):
            ruptures[grp_id].append(ebr.export(mesh, sm_by_grp))
            rup_file_loc = os.path.join(
                ruptures_dir, 'rupture_' + str(ebr.serial) + '_' +
                str(dstore.calc_id) + '.p')
            # use a context manager so the file handle is always closed
            # (the original open() leaked the handle)
            with open(rup_file_loc, 'wb') as f:
                pickle.dump(ebr.rupture, f)
    dest = dstore.export_path('ses.' + fmt)
    writer = hazard_writers.SESXMLWriter(dest)
    writer.serialize(ruptures, oq.investigation_time)
    return [dest]
Example no. 3
0
def export_agg_losses_ebr(ekey, dstore):
    """
    Export the aggregate event loss table, one CSV file per realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of file names written by the CsvWriter
    """
    loss_types = dstore.get_attr('composite_risk_model', 'loss_types')
    L = len(loss_types)  # number of loss types
    name, ext = export.keyfunc(ekey)
    agg_losses = dstore[name]
    has_rup_data = 'ruptures' in dstore
    # rupture columns are exported only when rupture data is available
    extra_list = [('magnitude', F32), ('centroid_lon', F32),
                  ('centroid_lat', F32),
                  ('centroid_depth', F32)] if has_rup_data else []
    oq = dstore['oqparam']
    dtlist = ([('event_id', U64), ('rup_id', U32),
               ('year', U32)] + extra_list + oq.loss_dt_list())
    elt_dt = numpy.dtype(dtlist)
    csm_info = dstore['csm_info']
    rlzs_assoc = csm_info.get_rlzs_assoc()
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    for sm_id, rlzs in rlzs_assoc.rlzs_by_smodel.items():
        # populate rup_data and event_by_eid
        rup_data = {}
        event_by_grp = {}  # grp_id -> eid -> event
        for grp_id in csm_info.get_grp_ids(sm_id):
            event_by_grp[grp_id] = event_by_eid = {}
            try:
                events = dstore['events/grp-%02d' % grp_id]
            except KeyError:  # no events for this group, skip it
                continue
            for event in events:
                event_by_eid[event['eid']] = event
            if has_rup_data:
                rup_data.update(get_rup_data(calc.get_ruptures(dstore,
                                                               grp_id)))

        for rlz in rlzs:
            rlzname = 'rlz-%03d' % rlz.ordinal
            if rlzname not in agg_losses:  # no losses for this realization
                continue
            data = agg_losses[rlzname].value
            eids = data['eid']
            losses = data['loss']
            eids_, years, serials = get_eids_years_serials(event_by_grp, eids)
            elt = numpy.zeros(len(eids), elt_dt)
            elt['event_id'] = eids_
            elt['year'] = years
            if rup_data:
                # fill the magnitude/centroid columns from the serials
                copy_to(elt, rup_data, serials)
            # the indexing l + L * i suggests `losses` has L columns, or
            # 2 * L columns with the insured losses after the ground
            # losses when oq.insured_losses is set
            for i, ins in enumerate(
                ['', '_ins'] if oq.insured_losses else ['']):
                for l, loss_type in enumerate(loss_types):
                    elt[loss_type + ins][:] = losses[:, l + L * i]
            elt.sort(order=['year', 'event_id'])
            dest = dstore.build_fname('agg_losses', rlz, 'csv')
            writer.save(elt, dest)
    return writer.getsaved()
Example no. 4
0
def get_ruptures_by_grp(dstore):
    """
    Extracts the dictionary `ruptures_by_grp` from the given calculator

    :param dstore: datastore object
    :returns: an AccumDict grp_id -> list of ruptures
    """
    n = sum(len(dstore['ruptures/' + grp]) for grp in dstore['ruptures'])
    logging.info('Reading %d ruptures from the datastore', n)
    # temporarily disable the check on PlanarSurface to support UCERF
    # ruptures; the original tolerance is restored afterwards, so that
    # the global state of hazardlib is not permanently changed
    tolerance = PlanarSurface.IMPERFECT_RECTANGLE_TOLERANCE
    PlanarSurface.IMPERFECT_RECTANGLE_TOLERANCE = numpy.inf
    try:
        ruptures_by_grp = AccumDict(accum=[])
        for grp in dstore['ruptures']:
            grp_id = int(grp[4:])  # strip 'grp-'
            ruptures_by_grp[grp_id] = list(calc.get_ruptures(dstore, grp_id))
    finally:
        PlanarSurface.IMPERFECT_RECTANGLE_TOLERANCE = tolerance
    return ruptures_by_grp
Example no. 5
0
def export_agg_losses_ebr(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    name, ext = export.keyfunc(ekey)
    agg_losses = dstore[name]
    has_rup_data = 'ruptures' in dstore
    oq = dstore['oqparam']
    lti = oq.lti
    # rupture columns are included only when rupture data is available
    extra_list = []
    if has_rup_data:
        extra_list = [('magnitude', F32), ('centroid_lon', F32),
                      ('centroid_lat', F32),
                      ('centroid_depth', F32)]
    elt_dt = numpy.dtype(
        [('event_id', U64), ('rup_id', U32), ('year', U32),
         ('rlzi', U16)] + extra_list + oq.loss_dt_list())
    elt = numpy.zeros(len(agg_losses), elt_dt)
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    the_events = dstore['events'].value
    # build the eid -> event map and, when possible, the rupture data
    rup_data = {}
    event_by_eid = {}  # eid -> event
    for grp_id, events in group_array(the_events, 'grp_id').items():
        for event in events:
            event_by_eid[event['eid']] = event
        if has_rup_data:
            rup_data.update(
                get_rup_data(calc.get_ruptures(dstore, the_events, grp_id)))
    # fill one output record per aggregate loss row
    for rec, row in zip(elt, agg_losses):
        event = event_by_eid[row['eid']]
        rec['event_id'] = event['eid']
        rec['year'] = event['year']
        rec['rlzi'] = row['rlzi']
        if rup_data:
            rec['rup_id'] = rup_id = event['rup_id']
            (rec['magnitude'], rec['centroid_lon'], rec['centroid_lat'],
             rec['centroid_depth']) = rup_data[rup_id]
        for lt, i in lti.items():
            rec[lt] = row['loss'][i]
    elt.sort(order=['year', 'event_id', 'rlzi'])
    dest = dstore.build_fname('agg_losses', 'all', 'csv')
    writer.save(elt, dest)
    return writer.getsaved()
Example no. 6
0
def get_ruptures_by_grp(dstore):
    """
    Extracts the dictionary `ruptures_by_grp` from the given calculator
    """
    events = dstore['events']
    total = sum(len(dstore['ruptures/' + key]) for key in dstore['ruptures'])
    logging.info('Reading %d ruptures from the datastore', total)
    acc = AccumDict(accum=[])
    # disable check on PlaceSurface to support UCERF ruptures
    with mock.patch(
            'openquake.hazardlib.geo.surface.PlanarSurface.'
            'IMPERFECT_RECTANGLE_TOLERANCE', numpy.inf):
        for key in dstore['ruptures']:
            grp_id = int(key[4:])  # drop the 'grp-' prefix
            acc[grp_id] = list(calc.get_ruptures(dstore, events, grp_id))
    return acc
Example no. 7
0
def export_ruptures_xml(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    fmt = ekey[-1]
    oq = dstore['oqparam']
    sm_by_grp = dstore['csm_info'].get_sm_by_grp()
    mesh = get_mesh(dstore['sitecol'])
    ruptures = {}
    for key in dstore['ruptures']:
        grp_id = int(key[4:])  # drop the 'grp-' prefix
        ruptures[grp_id] = [ebr.export(mesh, sm_by_grp)
                            for ebr in calc.get_ruptures(dstore, grp_id)]
    dest = dstore.export_path('ses.' + fmt)
    writer = hazard_writers.SESXMLWriter(dest)
    writer.serialize(ruptures, oq.investigation_time)
    return [dest]
Example no. 8
0
def export_gmf_scenario_npz(ekey, dstore):
    """
    Export the ground motion fields in .npz format, one array per GSIM
    (scenario) or per realization (event based).

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: a list with the path of the exported file, or [] if there
        is nothing to export
    """
    oq = dstore['oqparam']
    dic = {}  # array name -> composed array, passed to savez
    fname = dstore.export_path('%s.%s' % ekey)
    if 'scenario' in oq.calculation_mode:
        # compute the GMFs on the fly from the stored rupture
        # NB: for visualization purposes we want to export the full mesh
        # of points, including the ones outside the maximum distance
        # NB2: in the future, I want to add a sitecol output, then the
        # visualization of the mesh will be possibile even without the GMFs;
        # in the future, here we will change
        # sitemesh = get_mesh(dstore['sitecol'], complete=False)
        sitecol = dstore['sitecol'].complete
        sitemesh = get_mesh(sitecol)
        rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
        gsims = rlzs_assoc.gsims_by_grp_id[0]  # there is a single grp_id
        E = oq.number_of_ground_motion_fields
        correl_model = oq.get_correl_model()
        # a scenario calculation stores exactly one rupture in group 0
        [ebrupture] = calc.get_ruptures(dstore, 0)
        computer = gmf.GmfComputer(
            ebrupture, sitecol, oq.imtls,
            gsims, oq.truncation_level, correl_model)
        # one field of E values per IMT
        gmf_dt = numpy.dtype([(imt, (F32, (E,))) for imt in oq.imtls])
        imts = list(oq.imtls)
        for gsim in gsims:
            arr = computer.compute(gsim, E, oq.random_seed)
            # NOTE(review): this rebinds E inside the loop; presumably the
            # event dimension always equals the original E, otherwise
            # gmf_dt (built above) would be inconsistent — confirm
            I, S, E = arr.shape  # #IMTs, #sites, #events
            gmfa = numpy.zeros(S, gmf_dt)
            for imti, imt in enumerate(imts):
                gmfa[imt] = arr[imti]
            dic[str(gsim)] = util.compose_arrays(sitemesh, gmfa)
    elif 'event_based' in oq.calculation_mode:
        dic['sitemesh'] = get_mesh(dstore['sitecol'])
        # one array per realization, extracted from the stored GMF data
        for grp in sorted(dstore['gmf_data']):
            data_by_rlzi = group_array(dstore['gmf_data/' + grp].value, 'rlzi')
            for rlzi in data_by_rlzi:
                dic['rlz-%03d' % rlzi] = data_by_rlzi[rlzi]
    else:  # nothing to export
        return []
    savez(fname, **dic)
    return [fname]