def export_hcurves_xml(ekey, dstore):
    """
    Export the hazard curves in XML format, one file per kind and IMT.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: sorted list of the exported file names
    """
    key, kind, fmt = get_kkf(ekey)
    len_ext = len(fmt) + 1  # length of '.<fmt>' stripped from the base name
    oq = dstore['oqparam']
    sitemesh = get_mesh(dstore['sitecol'])
    rlzs = dstore['full_lt'].get_realizations()
    R = len(rlzs)
    fnames = []
    writercls = hazard_writers.HazardCurveXMLWriter
    for kind in oq.get_kinds(kind, R):
        if kind.startswith('rlz-'):
            # individual realization: record its logic tree paths
            rlz = rlzs[int(kind[4:])]
            smlt_path = '_'.join(rlz.sm_lt_path)
            gsimlt_path = rlz.gsim_rlz.pid
        else:
            # non-realization kind: no logic tree paths to record
            smlt_path = ''
            gsimlt_path = ''
        name = hazard_curve_name(dstore, ekey, kind)
        for im in oq.imtls:
            key = 'hcurves?kind=%s&imt=%s' % (kind, im)
            hcurves = extract(dstore, key)[kind]  # shape (N, 1, L1)
            imt = from_string(im)
            fname = name[:-len_ext] + '-' + im + '.' + fmt
            data = [HazardCurve(Location(site), poes[0])
                    for site, poes in zip(sitemesh, hcurves)]
            writer = writercls(fname,
                               investigation_time=oq.investigation_time,
                               imls=oq.imtls[im], imt=imt.name,
                               sa_period=getattr(imt, 'period', None) or None,
                               sa_damping=getattr(imt, 'damping', None),
                               smlt_path=smlt_path, gsimlt_path=gsimlt_path)
            writer.serialize(data)
            fnames.append(fname)
    return sorted(fnames)
def export_uhs_xml(ekey, dstore):
    """
    Export the uniform hazard spectra in XML format, one file per kind
    and probability of exceedance.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: sorted list of the exported file names
    """
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    R = len(rlzs_assoc.realizations)
    sitemesh = get_mesh(dstore['sitecol'].complete)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    # spectral periods, one per SA intensity measure type
    periods = [imt.period for imt in oq.imt_periods()]
    for kind in oq.get_kinds(kind, R):
        metadata = get_metadata(rlzs_assoc.realizations, kind)
        uhs = extract(dstore, 'uhs?kind=' + kind)[kind]
        for pi, poe in enumerate(oq.poes):
            fname = hazard_curve_name(dstore, (key, fmt), kind + '-%s' % poe)
            writer = hazard_writers.UHSXMLWriter(
                fname, periods=periods, poe=poe,
                investigation_time=oq.investigation_time, **metadata)
            # one UHS per site, taking the column for this poe
            spectra = [UHS(curve[:, pi], Location(site))
                       for site, curve in zip(sitemesh, uhs)]
            writer.serialize(spectra)
            fnames.append(fname)
    return sorted(fnames)
def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    if oq.poes:
        # same poes for every IMT; only needed by the 'hmaps' branch below,
        # which is also guarded by oq.poes
        pdic = DictArray({imt: oq.poes for imt in oq.imtls})
    for kind, hcurves in PmapGetter(dstore, rlzs_assoc).items(kind):
        fname = hazard_curve_name(dstore, (key, fmt), kind, rlzs_assoc)
        comment = _comment(rlzs_assoc, kind, oq.investigation_time)
        if key == 'uhs' and oq.poes and oq.uniform_hazard_spectra:
            uhs_curves = calc.make_uhs(hcurves, oq.imtls, oq.poes,
                                       len(sitemesh))
            writers.write_csv(fname,
                              util.compose_arrays(sitemesh, uhs_curves),
                              comment=comment)
            fnames.append(fname)
        elif key == 'hmaps' and oq.poes and oq.hazard_maps:
            hmap = dstore['hmaps/' + kind].value
            fnames.extend(
                export_hmaps_csv(ekey, fname, sitemesh, hmap, pdic, comment))
        elif key == 'hcurves':
            fnames.extend(
                export_hcurves_by_imt_csv(ekey, kind, rlzs_assoc, fname,
                                          sitecol, hcurves, oq))
    return sorted(fnames)
def export_hcurves_xml(ekey, dstore):
    """
    Export the hazard curves in XML format, one file per kind and IMT.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: sorted list of the exported file names
    """
    key, kind, fmt = get_kkf(ekey)
    len_ext = len(fmt) + 1  # length of '.<fmt>' stripped from the base name
    oq = dstore['oqparam']
    sitemesh = get_mesh(dstore['sitecol'])
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    fnames = []
    writercls = hazard_writers.HazardCurveXMLWriter
    for kind, hcurves in PmapGetter(dstore, rlzs_assoc).items(kind):
        if hasattr(hcurves, 'array'):
            hcurves = hcurves.array[:, 0]
        if kind.startswith('rlz-'):
            # individual realization: record its logic tree paths
            rlz = rlzs_assoc.realizations[int(kind[4:])]
            smlt_path = '_'.join(rlz.sm_lt_path)
            gsimlt_path = rlz.gsim_rlz.uid
        else:
            # non-realization kind: no logic tree paths to record
            smlt_path = ''
            gsimlt_path = ''
        name = hazard_curve_name(dstore, ekey, kind, rlzs_assoc)
        for im in oq.imtls:
            slc = oq.imtls(im)  # slice of the levels belonging to this IMT
            imt = from_string(im)
            # use the full IMT string `im` (e.g. 'SA(0.1)'), not imt.name
            # ('SA'): otherwise every SA period would be written to the
            # same file, each overwriting the previous one
            fname = name[:-len_ext] + '-' + im + '.' + fmt
            data = [HazardCurve(Location(site), poes[slc])
                    for site, poes in zip(sitemesh, hcurves)]
            writer = writercls(fname,
                               investigation_time=oq.investigation_time,
                               imls=oq.imtls[im], imt=imt.name,
                               sa_period=getattr(imt, 'period', None) or None,
                               sa_damping=getattr(imt, 'damping', None),
                               smlt_path=smlt_path, gsimlt_path=gsimlt_path)
            writer.serialize(data)
            fnames.append(fname)
    return sorted(fnames)
def export_uhs_xml(ekey, dstore):
    """
    Export the uniform hazard spectra in XML format, one file per kind
    and probability of exceedance.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: sorted list of the exported file names
    """
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    pgetter = PmapGetter(dstore, rlzs_assoc)
    sitemesh = get_mesh(dstore['sitecol'].complete)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    # the periods depend only on oq.imtls, so compute them once before the
    # loop; the previous initialization from the raw imtls keys was dead
    # code (imtls keys are strings, which have no .period attribute) and
    # was overwritten on every iteration anyway
    _, periods = calc.get_imts_periods(oq.imtls)
    for kind, hcurves in pgetter.items(kind):
        metadata = get_metadata(rlzs_assoc.realizations, kind)
        uhs = calc.make_uhs(hcurves, oq.imtls, oq.poes, len(sitemesh))
        for poe in oq.poes:
            fname = hazard_curve_name(dstore, (key, fmt),
                                      kind + '-%s' % poe, rlzs_assoc)
            writer = hazard_writers.UHSXMLWriter(
                fname, periods=periods, poe=poe,
                investigation_time=oq.investigation_time, **metadata)
            data = []
            for site, curve in zip(sitemesh, uhs[str(poe)]):
                data.append(UHS(curve, Location(site)))
            writer.serialize(data)
            fnames.append(fname)
    return sorted(fnames)
def export_hcurves_xml_json(ekey, dstore):
    """
    Export the hazard curves in XML (or json) format, one file per kind
    and IMT.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: sorted list of the exported file names
    """
    key, kind, fmt = get_kkf(ekey)
    len_ext = len(fmt) + 1  # length of '.<fmt>' stripped from the base name
    oq = dstore['oqparam']
    sitemesh = get_mesh(dstore['sitecol'])
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    fnames = []
    writercls = hazard_writers.HazardCurveXMLWriter
    for kind, hcurves in PmapGetter(dstore).items(kind):
        if kind.startswith('rlz-'):
            # individual realization: record its logic tree paths
            rlz = rlzs_assoc.realizations[int(kind[4:])]
            smlt_path = '_'.join(rlz.sm_lt_path)
            gsimlt_path = rlz.gsim_rlz.uid
        else:
            smlt_path = ''
            gsimlt_path = ''
        # convert the probability map into per-site records indexable by IMT
        curves = hcurves.convert(oq.imtls, len(sitemesh))
        name = hazard_curve_name(dstore, ekey, kind, rlzs_assoc)
        for imt in oq.imtls:
            # NOTE(review): assumes from_string(imt) unpacks as a 3-tuple
            # (imt type, SA period, SA damping) — confirm against the
            # hazardlib version in use
            imtype, sa_period, sa_damping = from_string(imt)
            fname = name[:-len_ext] + '-' + imt + '.' + fmt
            data = [HazardCurve(Location(site), poes[imt])
                    for site, poes in zip(sitemesh, curves)]
            writer = writercls(fname,
                               investigation_time=oq.investigation_time,
                               imls=oq.imtls[imt], imt=imtype,
                               sa_period=sa_period, sa_damping=sa_damping,
                               smlt_path=smlt_path,
                               gsimlt_path=gsimlt_path)
            writer.serialize(data)
            fnames.append(fname)
    return sorted(fnames)
def export_gmf_data_csv(ekey, dstore):
    """
    Export the ground motion fields in CSV format.

    :param ekey: export key, i.e. a pair (datastore key, fmt); a key of
        the form 'gmf_data/<eid>' selects a single event
    :param dstore: datastore object
    :returns: list of the exported file names
    """
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    imts = list(oq.imtls)
    sitemesh = get_mesh(dstore['sitecol'])
    # extract the event ID from a key like 'gmf_data/12', if present
    eid = int(ekey[0].split('/')[1]) if '/' in ekey[0] else None
    gmfa = dstore['gmf_data']['data'].value
    if eid is None:  # we cannot use extract here
        # export all events in a single file, plus the site mesh
        f = dstore.build_fname('sitemesh', '', 'csv')
        sids = numpy.arange(len(sitemesh), dtype=U32)
        sites = util.compose_arrays(sids, sitemesh, 'site_id')
        writers.write_csv(f, sites)
        fname = dstore.build_fname('gmf', 'data', 'csv')
        gmfa.sort(order=['rlzi', 'sid', 'eid'])
        writers.write_csv(fname, _expand_gmv(gmfa, imts))
        return [fname, f]
    # old format for single eid: one file per realization
    gmfa = gmfa[gmfa['eid'] == eid]
    fnames = []
    for rlzi, array in group_array(gmfa, 'rlzi').items():
        rlz = rlzs_assoc.realizations[rlzi]
        data, comment = _build_csv_data(
            array, rlz, dstore['sitecol'], imts, oq.investigation_time)
        fname = dstore.build_fname(
            'gmf', '%d-rlz-%03d' % (eid, rlzi), 'csv')
        writers.write_csv(fname, data, comment=comment)
        fnames.append(fname)
    return fnames
def export_hmaps_xml(ekey, dstore):
    """
    Export the hazard maps in XML format, one file per kind, IMT and poe.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: sorted list of the exported file names
    """
    key, kind, fmt = get_kkf(ekey)
    oq = dstore['oqparam']
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    R = len(rlzs_assoc.realizations)
    fnames = []
    writercls = hazard_writers.HazardMapXMLWriter
    for kind in oq.get_kinds(kind, R):
        # shape (N, M, P)
        hmaps = extract(dstore, 'hmaps?kind=' + kind)[kind]
        if kind.startswith('rlz-'):
            # individual realization: record its logic tree paths
            rlz = rlzs_assoc.realizations[int(kind[4:])]
            smlt_path = '_'.join(rlz.sm_lt_path)
            gsimlt_path = rlz.gsim_rlz.uid
        else:
            smlt_path = ''
            gsimlt_path = ''
        for m, imt in enumerate(oq.imtls):
            for p, poe in enumerate(oq.poes):
                suffix = '-%s-%s' % (poe, imt)
                fname = hazard_curve_name(dstore, ekey, kind + suffix)
                data = [HazardMap(site[0], site[1], hmap[m, p])
                        for site, hmap in zip(sitemesh, hmaps)]
                writer = writercls(
                    fname, investigation_time=oq.investigation_time,
                    imt=imt, poe=poe,
                    smlt_path=smlt_path, gsimlt_path=gsimlt_path)
                writer.serialize(data)
                fnames.append(fname)
    return sorted(fnames)
def export_hcurves_xml(ekey, dstore):
    """
    Export the hazard curves in XML format, one file per kind and IMT.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: sorted list of the exported file names
    """
    key, kind, fmt = get_kkf(ekey)
    ext_len = len(fmt) + 1  # length of '.<fmt>' stripped from the base name
    oq = dstore['oqparam']
    sitemesh = get_mesh(dstore['sitecol'])
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    R = len(rlzs_assoc.realizations)
    fnames = []
    writercls = hazard_writers.HazardCurveXMLWriter
    for kind in oq.get_kinds(kind, R):
        smlt_path = gsimlt_path = ''
        if kind.startswith('rlz-'):
            # individual realization: record its logic tree paths
            rlz = rlzs_assoc.realizations[int(kind[4:])]
            smlt_path = '_'.join(rlz.sm_lt_path)
            gsimlt_path = rlz.gsim_rlz.uid
        name = hazard_curve_name(dstore, ekey, kind)
        hcurves = extract(dstore, 'hcurves?kind=' + kind)[kind]
        for im in oq.imtls:
            slc = oq.imtls(im)  # slice of the levels belonging to this IMT
            imt = from_string(im)
            fname = name[:-ext_len] + '-' + im + '.' + fmt
            data = []
            for site, poes in zip(sitemesh, hcurves):
                data.append(HazardCurve(Location(site), poes[slc]))
            writer = writercls(
                fname, investigation_time=oq.investigation_time,
                imls=oq.imtls[im], imt=imt.name,
                sa_period=getattr(imt, 'period', None) or None,
                sa_damping=getattr(imt, 'damping', None),
                smlt_path=smlt_path, gsimlt_path=gsimlt_path)
            writer.serialize(data)
            fnames.append(fname)
    return sorted(fnames)
def export_hmaps_xml_json(ekey, dstore):
    """
    Export the hazard maps in XML (or json) format, one file per kind,
    IMT and poe.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: sorted list of the exported file names
    """
    key, kind, fmt = get_kkf(ekey)
    oq = dstore['oqparam']
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    fnames = []
    writercls = hazard_writers.HazardMapXMLWriter
    nsites = len(sitemesh)
    # pass the requested kind through, as the sibling exporters do:
    # previously the kind extracted from the export key was ignored and
    # every available kind was exported
    for kind, hcurves in PmapGetter(dstore, rlzs_assoc).items(kind):
        hmaps = calc.make_hmap_array(hcurves, oq.imtls, oq.poes, nsites)
        if kind.startswith('rlz-'):
            # individual realization: record its logic tree paths
            rlz = rlzs_assoc.realizations[int(kind[4:])]
            smlt_path = '_'.join(rlz.sm_lt_path)
            gsimlt_path = rlz.gsim_rlz.uid
        else:
            smlt_path = ''
            gsimlt_path = ''
        for imt in oq.imtls:
            for poe in oq.poes:
                suffix = '-%s-%s' % (poe, imt)
                fname = hazard_curve_name(
                    dstore, ekey, kind + suffix, rlzs_assoc)
                data = [HazardMap(site[0], site[1],
                                  hmap['%s-%s' % (imt, poe)])
                        for site, hmap in zip(sitemesh, hmaps)]
                writer = writercls(
                    fname, investigation_time=oq.investigation_time,
                    imt=imt, poe=poe,
                    smlt_path=smlt_path, gsimlt_path=gsimlt_path)
                writer.serialize(data)
                fnames.append(fname)
    return sorted(fnames)
def export_hmaps_xml(ekey, dstore):
    """
    Export the hazard maps in XML format, one file per kind, IMT and poe.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: sorted list of the exported file names
    """
    key, kind, fmt = get_kkf(ekey)
    oq = dstore['oqparam']
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    rlzs = dstore['full_lt'].get_realizations()
    R = len(rlzs)
    fnames = []
    writercls = hazard_writers.HazardMapXMLWriter
    for kind in oq.get_kinds(kind, R):
        hmaps = extract(dstore, 'hmaps?kind=' + kind)[kind]  # (N, M, P)
        smlt_path = gsimlt_path = ''
        if kind.startswith('rlz-'):
            # individual realization: record its logic tree paths
            rlz = rlzs[int(kind[4:])]
            smlt_path = '_'.join(rlz.sm_lt_path)
            gsimlt_path = rlz.gsim_rlz.pid
        for mi, imt in enumerate(oq.imtls):
            for pi, poe in enumerate(oq.poes):
                suffix = '-%s-%s' % (poe, imt)
                fname = hazard_curve_name(dstore, ekey, kind + suffix)
                data = []
                for site, hmap in zip(sitemesh, hmaps):
                    data.append(HazardMap(site[0], site[1], hmap[mi, pi]))
                writer = writercls(
                    fname, investigation_time=oq.investigation_time,
                    imt=imt, poe=poe,
                    smlt_path=smlt_path, gsimlt_path=gsimlt_path)
                writer.serialize(data)
                fnames.append(fname)
    return sorted(fnames)
def export_ruptures_xml(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    fmt = ekey[-1]
    oq = dstore['oqparam']
    mesh = get_mesh(dstore['sitecol'])
    # export every rupture of every group against the site mesh
    ruptures_by_grp = {
        grp_id: [ebr.export(mesh) for ebr in ruptures]
        for grp_id, ruptures in get_ruptures_by_grp(dstore).items()}
    dest = dstore.export_path('ses.' + fmt)
    writer = hazard_writers.SESXMLWriter(dest)
    writer.serialize(ruptures_by_grp, oq.investigation_time)
    return [dest]
def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    info = get_info(dstore)
    R = dstore['full_lt'].get_num_rlzs()
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    comment = dstore.metadata
    hmap_dt = oq.hmap_dt()
    for kind in oq.get_kinds(kind, R):
        fname = hazard_curve_name(dstore, (key, fmt), kind)
        comment.update(kind=kind, investigation_time=oq.investigation_time)
        # the hazard maps are needed by both the 'uhs' and 'hmaps' branches
        if (key in ('hmaps', 'uhs') and oq.uniform_hazard_spectra or
                oq.hazard_maps):
            hmap = extract(dstore, 'hmaps?kind=' + kind)[kind]
        if key == 'uhs' and oq.poes and oq.uniform_hazard_spectra:
            uhs_curves = calc.make_uhs(hmap, info)
            writers.write_csv(
                fname, util.compose_arrays(sitemesh, uhs_curves),
                comment=comment)
            fnames.append(fname)
        elif key == 'hmaps' and oq.poes and oq.hazard_maps:
            fnames.extend(
                export_hmaps_csv(ekey, fname, sitemesh,
                                 hmap.flatten().view(hmap_dt), comment))
        elif key == 'hcurves':  # shape (N, R|S, M, L1)
            # with convolution amplification the curves are defined on the
            # soil intensities rather than on the original levels
            if ('amplification' in oq.inputs and
                    oq.amplification_method == 'convolution'):
                imtls = DictArray(
                    {imt: oq.soil_intensities for imt in oq.imtls})
            else:
                imtls = oq.imtls
            for imt, imls in imtls.items():
                hcurves = extract(
                    dstore, 'hcurves?kind=%s&imt=%s' % (kind, imt))[kind]
                fnames.append(
                    export_hcurves_by_imt_csv(
                        ekey, kind, fname, sitecol, hcurves, imt, imls,
                        comment))
    return sorted(fnames)
def export_gmf_scenario_npz(ekey, dstore):
    """
    Export the ground motion fields in .npz format, one array per
    realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: list with the exported file name, or [] if there is nothing
        to export
    """
    oq = dstore['oqparam']
    mesh = get_mesh(dstore['sitecol'])
    n = len(mesh)
    fname = dstore.export_path('%s.%s' % ekey)
    if 'gmf_data' not in dstore:  # nothing to export
        return []
    arrays = {}
    data_by_rlzi = group_array(dstore['gmf_data/data'].value, 'rlzi')
    for rlzi, records in data_by_rlzi.items():
        gmfa, e = _gmf_scenario(records, n, oq.imtls)
        logging.info('Exporting array of shape %s for rlz %d',
                     (n, e), rlzi)
        arrays['rlz-%03d' % rlzi] = util.compose_arrays(mesh, gmfa)
    savez(fname, **arrays)
    return [fname]
def export_ruptures_xml(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    fmt = ekey[-1]
    oq = dstore['oqparam']
    num_ses = oq.ses_per_logic_tree_path
    mesh = get_mesh(dstore['sitecol'])
    ruptures_by_grp = {}
    for rgetter in gen_rupture_getters(dstore):
        ebrs = [ebr.export(mesh, rgetter.rlzs_by_gsim, num_ses)
                for ebr in rgetter.get_ruptures()]
        if ebrs:  # skip groups with no ruptures
            ruptures_by_grp[rgetter.grp_id] = ebrs
    dest = dstore.export_path('ses.' + fmt)
    writer = hazard_writers.SESXMLWriter(dest)
    writer.serialize(ruptures_by_grp, oq.investigation_time)
    return [dest]
def export_ruptures_xml(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    fmt = ekey[-1]
    oq = dstore['oqparam']
    num_ses = oq.ses_per_logic_tree_path
    mesh = get_mesh(dstore['sitecol'])
    ruptures_by_grp = {}
    for rgetter in get_rupture_getters(dstore):
        exported = []
        for ebr in rgetter:
            exported.append(
                ebr.export(mesh, rgetter.rlzs_by_gsim, num_ses))
        if exported:  # skip groups with no ruptures
            ruptures_by_grp[rgetter.grp_id] = exported
    dest = dstore.export_path('ses.' + fmt)
    writer = hazard_writers.SESXMLWriter(dest)
    writer.serialize(ruptures_by_grp, oq.investigation_time)
    return [dest]
def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    info = get_info(dstore)
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    R = len(rlzs_assoc.realizations)
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    # checksum of the datastore, appended to the CSV comments
    checksum = dstore.get_attr('/', 'checksum32')
    hmap_dt = oq.hmap_dt()
    for kind in oq.get_kinds(kind, R):
        fname = hazard_curve_name(dstore, (key, fmt), kind, rlzs_assoc)
        comment = _comment(rlzs_assoc, kind, oq.investigation_time)
        # the hazard maps are needed by both the 'uhs' and 'hmaps' branches
        if (key in ('hmaps', 'uhs') and oq.uniform_hazard_spectra or
                oq.hazard_maps):
            hmap = extract(dstore, 'hmaps?kind=' + kind)[kind]
        if key == 'uhs' and oq.poes and oq.uniform_hazard_spectra:
            uhs_curves = calc.make_uhs(hmap, info)
            writers.write_csv(
                fname, util.compose_arrays(sitemesh, uhs_curves),
                comment=comment + ', checksum=%d' % checksum)
            fnames.append(fname)
        elif key == 'hmaps' and oq.poes and oq.hazard_maps:
            fnames.extend(
                export_hmaps_csv(ekey, fname, sitemesh,
                                 hmap.flatten().view(hmap_dt),
                                 comment + ', checksum=%d' % checksum))
        elif key == 'hcurves':
            hcurves = extract(dstore, 'hcurves?kind=' + kind)[kind]
            fnames.extend(
                export_hcurves_by_imt_csv(
                    ekey, kind, rlzs_assoc, fname, sitecol, hcurves, oq,
                    checksum))
    return sorted(fnames)
def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    R = len(rlzs_assoc.realizations)
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    # checksum of the datastore, appended to the CSV comments
    checksum = dstore.get_attr('/', 'checksum32')
    hmap_dt = oq.hmap_dt()
    for kind in oq.get_kinds(kind, R):
        fname = hazard_curve_name(dstore, (key, fmt), kind, rlzs_assoc)
        comment = _comment(rlzs_assoc, kind, oq.investigation_time)
        # the hazard maps are needed by both the 'uhs' and 'hmaps' branches
        if (key in ('hmaps', 'uhs') and oq.uniform_hazard_spectra or
                oq.hazard_maps):
            hmap = dict(extract(dstore, 'hmaps?kind=' + kind))[kind]
        if key == 'uhs' and oq.poes and oq.uniform_hazard_spectra:
            uhs_curves = calc.make_uhs(hmap, oq)
            writers.write_csv(
                fname, util.compose_arrays(sitemesh, uhs_curves),
                comment=comment + ', checksum=%d' % checksum)
            fnames.append(fname)
        elif key == 'hmaps' and oq.poes and oq.hazard_maps:
            fnames.extend(
                export_hmaps_csv(ekey, fname, sitemesh,
                                 hmap.flatten().view(hmap_dt),
                                 comment + ', checksum=%d' % checksum))
        elif key == 'hcurves':
            hcurves = dict(extract(dstore, 'hcurves?kind=' + kind))[kind]
            fnames.extend(
                export_hcurves_by_imt_csv(
                    ekey, kind, rlzs_assoc, fname, sitecol, hcurves, oq,
                    checksum))
    return sorted(fnames)
def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    info = get_info(dstore)
    R = dstore['csm_info'].get_num_rlzs()
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    comment = dstore.metadata
    hmap_dt = oq.hmap_dt()
    for kind in oq.get_kinds(kind, R):
        fname = hazard_curve_name(dstore, (key, fmt), kind)
        comment.update(kind=kind, investigation_time=oq.investigation_time)
        # the hazard maps are needed by both the 'uhs' and 'hmaps' branches
        if (key in ('hmaps', 'uhs') and oq.uniform_hazard_spectra or
                oq.hazard_maps):
            hmap = extract(dstore, 'hmaps?kind=' + kind)[kind]
        if key == 'uhs' and oq.poes and oq.uniform_hazard_spectra:
            uhs_curves = calc.make_uhs(hmap, info)
            writers.write_csv(fname,
                              util.compose_arrays(sitemesh, uhs_curves),
                              comment=comment)
            fnames.append(fname)
        elif key == 'hmaps' and oq.poes and oq.hazard_maps:
            fnames.extend(
                export_hmaps_csv(ekey, fname, sitemesh,
                                 hmap.flatten().view(hmap_dt), comment))
        elif key == 'hcurves':
            hcurves = extract(dstore, 'hcurves?kind=' + kind)[kind]
            fnames.extend(
                export_hcurves_by_imt_csv(ekey, kind, fname, sitecol,
                                          hcurves, oq.imtls, comment))
    return sorted(fnames)