def make_figure(indices, n_sites, imtls, poes, pmaps):
    """
    :param indices: the indices of the sites under analysis
    :param n_sites: total number of sites
    :param imtls: DictArray with the IMTs and levels
    :param poes: PoEs used to compute the hazard maps
    :param pmaps: a list of probability maps per realization
    """
    # NB: matplotlib is imported inside since it is a costly import
    import matplotlib.pyplot as plt
    fig = plt.figure()
    n_poes = len(poes)
    uhs_by_rlz = [calc.make_uhs(pmap, imtls, poes, n_sites) for pmap in pmaps]
    _, periods = calc.get_imts_periods(imtls)
    for i, site in enumerate(indices):
        for j, poe in enumerate(poes):
            ax = fig.add_subplot(len(indices), n_poes, i * n_poes + j + 1)
            ax.grid(True)
            ax.set_xlim([periods[0], periods[-1]])
            ax.set_xlabel(
                'UHS on site %d, poe=%s, period in seconds' % (site, poe))
            if j == 0:  # set Y label only on the leftmost graph
                ax.set_ylabel('SA')
            for r, all_uhs in enumerate(uhs_by_rlz):
                uhs = list(all_uhs[str(poe)][site])
                ax.plot(periods, uhs, label=r)
    plt.legend()
    return plt

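# Hypothetical usage sketch (not part of the original module): render the UHS
# plots for the first two sites of a classical calculation and save them to
# disk. The names `dstore` and `pmaps` (one probability map per realization)
# are assumptions about the caller; make_figure returns the pyplot module.
def plot_uhs_example(dstore, pmaps):
    oq = dstore['oqparam']  # calculation parameters holding imtls and poes
    plt = make_figure(indices=[0, 1], n_sites=len(dstore['sitecol']),
                      imtls=oq.imtls, poes=oq.poes, pmaps=pmaps)
    plt.savefig('uhs.png')  # or plt.show()
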
def export_uhs_xml(ekey, dstore):
    """
    Exports the uniform hazard spectra in XML format.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    sitemesh = dstore['sitemesh'].value
    key, fmt = ekey
    fnames = []
    periods = [imt for imt in oq.imtls
               if imt.startswith('SA') or imt == 'PGA']
    for kind, hmaps in dstore['hmaps'].items():
        metadata = get_metadata(rlzs_assoc.realizations, kind)
        _, periods = calc.get_imts_periods(oq.imtls)
        uhs = calc.make_uhs(hmaps, oq.imtls, oq.poes)
        for poe in oq.poes:
            poe_str = 'poe~%s' % poe
            fname = hazard_curve_name(
                dstore, ekey, kind + '-%s' % poe, rlzs_assoc,
                oq.number_of_logic_tree_samples)
            writer = hazard_writers.UHSXMLWriter(
                fname, periods=periods, poe=poe,
                investigation_time=oq.investigation_time, **metadata)
            data = []
            for site, curve in zip(sitemesh, uhs[poe_str]):
                data.append(UHS(curve, Location(site)))
            writer.serialize(data)
            fnames.append(fname)
    return sorted(fnames)

def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    sitecol = dstore['sitecol']
    sitemesh = dstore['sitemesh']
    key, fmt = ekey
    fnames = []
    items = dstore['hmaps' if key == 'uhs' else key].items()
    for kind, hcurves in sorted(items):
        fname = hazard_curve_name(dstore, ekey, kind, rlzs_assoc)
        if key == 'uhs':
            uhs_curves = calc.make_uhs(hcurves, oq.imtls, oq.poes)
            write_csv(fname, util.compose_arrays(sitemesh, uhs_curves))
        elif key == 'hmaps':
            write_csv(fname, util.compose_arrays(sitemesh, hcurves))
        else:
            export_hazard_curves_csv(ekey, fname, sitecol, hcurves, oq.imtls)
        fnames.append(fname)
    return sorted(fnames)

def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    sitecol = dstore['sitecol']
    sitemesh = dstore['sitemesh']
    key, fmt = ekey
    fnames = []
    items = dstore['hmaps' if key == 'uhs' else key].items()
    for kind, hcurves in sorted(items):
        fname = hazard_curve_name(
            dstore, ekey, kind, rlzs_assoc, oq.number_of_logic_tree_samples)
        if key == 'uhs':
            uhs_curves = calc.make_uhs(hcurves, oq.imtls, oq.poes)
            write_csv(fname, util.compose_arrays(sitemesh, uhs_curves))
        elif key == 'hmaps':
            write_csv(fname, util.compose_arrays(sitemesh, hcurves))
        else:
            export_hazard_curves_csv(ekey, fname, sitecol, hcurves, oq.imtls)
        fnames.append(fname)
    return sorted(fnames)

def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    if oq.poes:
        pdic = DictArray({imt: oq.poes for imt in oq.imtls})
    for kind, hcurves in calc.PmapGetter(dstore).items(kind):
        fname = hazard_curve_name(dstore, (key, fmt), kind, rlzs_assoc)
        comment = _comment(rlzs_assoc, kind, oq.investigation_time)
        if key == 'uhs' and oq.poes and oq.uniform_hazard_spectra:
            uhs_curves = calc.make_uhs(hcurves, oq.imtls, oq.poes,
                                       len(sitemesh))
            writers.write_csv(
                fname, util.compose_arrays(sitemesh, uhs_curves),
                comment=comment)
            fnames.append(fname)
        elif key == 'hmaps' and oq.poes and oq.hazard_maps:
            hmap = calc.make_hmap(hcurves, oq.imtls, oq.poes)
            fnames.extend(
                export_hazard_csv(ekey, fname, sitemesh, hmap, pdic, comment))
        elif key == 'hcurves':
            fnames.extend(
                export_hcurves_by_imt_csv(ekey, kind, rlzs_assoc, fname,
                                          sitecol, hcurves, oq))
    return sorted(fnames)

def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    key, fmt = ekey
    fnames = []
    items = dstore['hmaps' if key == 'uhs' else key].items()
    for kind, hcurves in sorted(items):
        fname = hazard_curve_name(dstore, ekey, kind, rlzs_assoc)
        if key == 'uhs':
            uhs_curves = calc.make_uhs(hcurves, oq.imtls, oq.poes)
            write_csv(fname, util.compose_arrays(sitemesh, uhs_curves))
            fnames.append(fname)
        elif key == 'hmaps':
            write_csv(fname, util.compose_arrays(sitemesh, hcurves))
            fnames.append(fname)
        else:
            if export.from_db:  # called by export_from_db
                fnames.extend(
                    export_hcurves_by_imt_csv(
                        ekey, fname, sitecol, hcurves, oq.imtls))
            else:  # when exporting directly from the datastore
                fnames.extend(
                    export_hazard_curves_csv(
                        ekey, fname, sitecol, hcurves, oq.imtls))
    return sorted(fnames)

def export_uhs_xml(ekey, dstore):
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    pgetter = calc.PmapGetter(dstore)
    sitemesh = get_mesh(dstore['sitecol'].complete)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    periods = [imt for imt in oq.imtls
               if imt.startswith('SA') or imt == 'PGA']
    for kind, hcurves in pgetter.items(kind):
        metadata = get_metadata(rlzs_assoc.realizations, kind)
        _, periods = calc.get_imts_periods(oq.imtls)
        uhs = calc.make_uhs(hcurves, oq.imtls, oq.poes, len(sitemesh))
        for poe in oq.poes:
            fname = hazard_curve_name(dstore, (key, fmt), kind + '-%s' % poe,
                                      rlzs_assoc)
            writer = hazard_writers.UHSXMLWriter(
                fname, periods=periods, poe=poe,
                investigation_time=oq.investigation_time, **metadata)
            data = []
            for site, curve in zip(sitemesh, uhs[str(poe)]):
                data.append(UHS(curve, Location(site)))
            writer.serialize(data)
            fnames.append(fname)
    return sorted(fnames)

def export_uhs_np(ekey, dstore):
    oq = dstore['oqparam']
    mesh = get_mesh(dstore['sitecol'])
    fname = dstore.export_path('%s.%s' % ekey)
    dic = {}
    for kind, hcurves in calc.PmapGetter(dstore).items():
        dic[kind] = calc.make_uhs(hcurves, oq.imtls, oq.poes, len(mesh))
    save_np(fname, dic, mesh, investigation_time=oq.investigation_time)
    return [fname]

def export_uhs_npz(ekey, dstore):
    oq = dstore['oqparam']
    mesh = get_mesh(dstore['sitecol'])
    fname = dstore.export_path('%s.%s' % ekey)
    dic = {}
    for kind, hcurves in calc.PmapGetter(dstore).items():
        uhs_curves = calc.make_uhs(hcurves, oq.imtls, oq.poes, len(mesh))
        dic[kind] = util.compose_arrays(mesh, uhs_curves)
    savez(fname, **dic)
    return [fname]

def export_uhs_hdf5(ekey, dstore):
    oq = dstore['oqparam']
    mesh = get_mesh(dstore['sitecol'])
    fname = dstore.export_path('%s.%s' % ekey)
    with hdf5.File(fname, 'w') as f:
        for dskey in dstore['hcurves']:
            hcurves = dstore['hcurves/%s' % dskey]
            uhs_curves = calc.make_uhs(hcurves, oq.imtls, oq.poes, len(mesh))
            f['uhs/%s' % dskey] = util.compose_arrays(mesh, uhs_curves)
    return [fname]

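# Hypothetical read-back sketch (not part of the original module): the
# datasets written by export_uhs_hdf5 live under 'uhs/<dskey>'; 'mean' is an
# assumed dataset key. Plain h5py is enough to read the exported file back.
import h5py


def read_uhs_hdf5(fname, dskey='mean'):
    with h5py.File(fname, 'r') as f:
        # structured array combining the site mesh with the UHS values
        return f['uhs/%s' % dskey][()]
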
def extract_uhs(dstore, what):
    """
    Extracts uniform hazard spectra. Use it as /extract/uhs/mean or
    /extract/uhs/rlz-0, etc
    """
    oq = dstore['oqparam']
    mesh = get_mesh(dstore['sitecol'])
    dic = {}
    for kind, hcurves in getters.PmapGetter(dstore).items(what):
        dic[kind] = calc.make_uhs(hcurves, oq.imtls, oq.poes, len(mesh))
    return hazard_items(dic, mesh, investigation_time=oq.investigation_time)

def export_uhs_npz(ekey, dstore):
    oq = dstore['oqparam']
    mesh = get_mesh(dstore['sitecol'])
    fname = dstore.export_path('%s.%s' % ekey)
    dic = {}
    for dskey in dstore['hcurves']:
        hcurves = dstore['hcurves/%s' % dskey]
        uhs_curves = calc.make_uhs(hcurves, oq.imtls, oq.poes, len(mesh))
        dic[dskey] = util.compose_arrays(mesh, uhs_curves)
    savez(fname, **dic)
    return [fname]

def extract_uhs(dstore, what):
    """
    Extracts uniform hazard spectra. Use it as /extract/uhs?kind=mean or
    /extract/uhs?kind=rlz-0, etc
    """
    info = get_info(dstore)
    if what == '':  # npz exports for QGIS
        sitecol = dstore['sitecol']
        mesh = get_mesh(sitecol, complete=False)
        dic = {}
        for stat, s in info['stats'].items():
            hmap = dstore['hmaps-stats'][:, s]  # shape (N, M, P)
            dic[stat] = calc.make_uhs(hmap, info)
        yield from hazard_items(
            dic, mesh, investigation_time=info['investigation_time'])
        return
    for k, v in _items(dstore, 'hmaps', what, info):  # shape (N, M, P)
        if hasattr(v, 'shape') and len(v.shape) == 3:
            yield k, calc.make_uhs(v, info)
        else:
            yield k, v

def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    info = get_info(dstore)
    R = dstore['full_lt'].get_num_rlzs()
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    comment = dstore.metadata
    hmap_dt = oq.hmap_dt()
    for kind in oq.get_kinds(kind, R):
        fname = hazard_curve_name(dstore, (key, fmt), kind)
        comment.update(kind=kind, investigation_time=oq.investigation_time)
        if (key in ('hmaps', 'uhs') and oq.uniform_hazard_spectra or
                oq.hazard_maps):
            hmap = extract(dstore, 'hmaps?kind=' + kind)[kind]
        if key == 'uhs' and oq.poes and oq.uniform_hazard_spectra:
            uhs_curves = calc.make_uhs(hmap, info)
            writers.write_csv(
                fname, util.compose_arrays(sitemesh, uhs_curves),
                comment=comment)
            fnames.append(fname)
        elif key == 'hmaps' and oq.poes and oq.hazard_maps:
            fnames.extend(
                export_hmaps_csv(ekey, fname, sitemesh,
                                 hmap.flatten().view(hmap_dt), comment))
        elif key == 'hcurves':  # shape (N, R|S, M, L1)
            if ('amplification' in oq.inputs and
                    oq.amplification_method == 'convolution'):
                imtls = DictArray(
                    {imt: oq.soil_intensities for imt in oq.imtls})
            else:
                imtls = oq.imtls
            for imt, imls in imtls.items():
                hcurves = extract(
                    dstore, 'hcurves?kind=%s&imt=%s' % (kind, imt))[kind]
                fnames.append(
                    export_hcurves_by_imt_csv(
                        ekey, kind, fname, sitecol, hcurves, imt, imls,
                        comment))
    return sorted(fnames)

def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    info = get_info(dstore)
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    R = len(rlzs_assoc.realizations)
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    checksum = dstore.get_attr('/', 'checksum32')
    hmap_dt = oq.hmap_dt()
    for kind in oq.get_kinds(kind, R):
        fname = hazard_curve_name(dstore, (key, fmt), kind, rlzs_assoc)
        comment = _comment(rlzs_assoc, kind, oq.investigation_time)
        if (key in ('hmaps', 'uhs') and oq.uniform_hazard_spectra or
                oq.hazard_maps):
            hmap = extract(dstore, 'hmaps?kind=' + kind)[kind]
        if key == 'uhs' and oq.poes and oq.uniform_hazard_spectra:
            uhs_curves = calc.make_uhs(hmap, info)
            writers.write_csv(
                fname, util.compose_arrays(sitemesh, uhs_curves),
                comment=comment + ', checksum=%d' % checksum)
            fnames.append(fname)
        elif key == 'hmaps' and oq.poes and oq.hazard_maps:
            fnames.extend(
                export_hmaps_csv(ekey, fname, sitemesh,
                                 hmap.flatten().view(hmap_dt),
                                 comment + ', checksum=%d' % checksum))
        elif key == 'hcurves':
            hcurves = extract(dstore, 'hcurves?kind=' + kind)[kind]
            fnames.extend(
                export_hcurves_by_imt_csv(
                    ekey, kind, rlzs_assoc, fname, sitecol, hcurves, oq,
                    checksum))
    return sorted(fnames)

def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    key, fmt = ekey
    fnames = []
    if oq.poes:
        pdic = DictArray({imt: oq.poes for imt in oq.imtls})
    for kind in sorted(dstore['hcurves']):
        hcurves = dstore['hcurves/' + kind]
        fname = hazard_curve_name(dstore, ekey, kind, rlzs_assoc)
        comment = _comment(rlzs_assoc, kind, oq.investigation_time)
        if key == 'uhs':
            uhs_curves = calc.make_uhs(
                hcurves, oq.imtls, oq.poes, len(sitemesh))
            writers.write_csv(
                fname, util.compose_arrays(sitemesh, uhs_curves),
                comment=comment)
            fnames.append(fname)
        elif key == 'hmaps':
            hmap = calc.make_hmap(hcurves, oq.imtls, oq.poes)
            fnames.extend(
                export_hazard_csv(ekey, fname, sitemesh, hmap, pdic, comment))
        else:
            if export.from_db:  # called by export_from_db
                fnames.extend(
                    export_hcurves_by_imt_csv(
                        ekey, kind, rlzs_assoc, fname, sitecol, hcurves, oq))
            else:  # when exporting directly from the datastore
                fnames.extend(
                    export_hazard_csv(
                        ekey, fname, sitemesh, hcurves, oq.imtls, comment))
    return sorted(fnames)

def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    R = len(rlzs_assoc.realizations)
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    checksum = dstore.get_attr('/', 'checksum32')
    hmap_dt = oq.hmap_dt()
    for kind in oq.get_kinds(kind, R):
        fname = hazard_curve_name(dstore, (key, fmt), kind, rlzs_assoc)
        comment = _comment(rlzs_assoc, kind, oq.investigation_time)
        if (key in ('hmaps', 'uhs') and oq.uniform_hazard_spectra or
                oq.hazard_maps):
            hmap = dict(extract(dstore, 'hmaps?kind=' + kind))[kind]
        if key == 'uhs' and oq.poes and oq.uniform_hazard_spectra:
            uhs_curves = calc.make_uhs(hmap, oq)
            writers.write_csv(
                fname, util.compose_arrays(sitemesh, uhs_curves),
                comment=comment + ', checksum=%d' % checksum)
            fnames.append(fname)
        elif key == 'hmaps' and oq.poes and oq.hazard_maps:
            fnames.extend(
                export_hmaps_csv(ekey, fname, sitemesh,
                                 hmap.flatten().view(hmap_dt),
                                 comment + ', checksum=%d' % checksum))
        elif key == 'hcurves':
            hcurves = dict(extract(dstore, 'hcurves?kind=' + kind))[kind]
            fnames.extend(
                export_hcurves_by_imt_csv(
                    ekey, kind, rlzs_assoc, fname, sitecol, hcurves, oq,
                    checksum))
    return sorted(fnames)

def extract_hazard_for_qgis(dstore, what):
    """
    Extracts hazard curves and possibly hazard maps and/or uniform hazard
    spectra. Use it as /extract/qgis-hazard/rlz-0, etc
    """
    oq = dstore['oqparam']
    sitecol = dstore['sitecol']
    yield 'sitecol', sitecol
    yield 'oqparam', oq
    yield 'realizations', dstore['csm_info'].rlzs
    yield 'checksum32', dstore['/'].attrs['checksum32']
    N = len(sitecol)
    if oq.poes:
        pdic = {imt: oq.poes for imt in oq.imtls}
    for kind, hcurves in getters.PmapGetter(dstore).items(what):
        logging.info('extracting hazard/%s', kind)
        yield 'hcurves-' + kind, calc.convert_to_array(hcurves, N, oq.imtls)
        if oq.poes and oq.uniform_hazard_spectra:
            yield 'uhs-' + kind, calc.make_uhs(hcurves, oq.imtls, oq.poes, N)
        if oq.poes and oq.hazard_maps:
            hmaps = calc.make_hmap(hcurves, oq.imtls, oq.poes)
            yield 'hmaps-' + kind, calc.convert_to_array(hmaps, N, pdic)

def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    info = get_info(dstore)
    R = dstore['csm_info'].get_num_rlzs()
    sitecol = dstore['sitecol']
    sitemesh = get_mesh(sitecol)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    comment = dstore.metadata
    hmap_dt = oq.hmap_dt()
    for kind in oq.get_kinds(kind, R):
        fname = hazard_curve_name(dstore, (key, fmt), kind)
        comment.update(kind=kind, investigation_time=oq.investigation_time)
        if (key in ('hmaps', 'uhs') and oq.uniform_hazard_spectra or
                oq.hazard_maps):
            hmap = extract(dstore, 'hmaps?kind=' + kind)[kind]
        if key == 'uhs' and oq.poes and oq.uniform_hazard_spectra:
            uhs_curves = calc.make_uhs(hmap, info)
            writers.write_csv(fname,
                              util.compose_arrays(sitemesh, uhs_curves),
                              comment=comment)
            fnames.append(fname)
        elif key == 'hmaps' and oq.poes and oq.hazard_maps:
            fnames.extend(
                export_hmaps_csv(ekey, fname, sitemesh,
                                 hmap.flatten().view(hmap_dt), comment))
        elif key == 'hcurves':
            hcurves = extract(dstore, 'hcurves?kind=' + kind)[kind]
            fnames.extend(
                export_hcurves_by_imt_csv(ekey, kind, fname, sitecol,
                                          hcurves, oq.imtls, comment))
    return sorted(fnames)

def extract_uhs(dstore, what):
    """
    Extracts uniform hazard spectra. Use it as /extract/uhs?kind=mean or
    /extract/uhs?kind=rlz-0, etc
    """
    info = get_info(dstore)
    if what == '':  # npz exports for QGIS
        sitecol = dstore['sitecol']
        mesh = get_mesh(sitecol, complete=False)
        dic = {}
        for stat, s in info['stats'].items():
            hmap = dstore['hmaps-stats'][:, s]
            dic[stat] = calc.make_uhs(hmap, info)
        yield from hazard_items(
            dic, mesh, investigation_time=info['investigation_time'])
        return
    params = parse(what, info)
    periods = []
    for m, imt in enumerate(info['imtls']):
        if imt == 'PGA' or imt.startswith('SA'):
            periods.append(m)
    if 'site_id' in params:
        sids = params['site_id']
    else:
        sids = ALL
    if params['rlzs']:
        dset = dstore['hmaps-rlzs']
        for k in params['k']:
            yield ('rlz-%03d' % k,
                   hdf5.extract(dset, sids, k, periods, ALL)[:, 0])
    else:
        dset = dstore['hmaps-stats']
        stats = list(info['stats'])
        for k in params['k']:
            yield stats[k], hdf5.extract(dset, sids, k, periods, ALL)[:, 0]
    yield from params.items()

def extract_uhs(dstore, what):
    """
    Extracts uniform hazard spectra. Use it as /extract/uhs?kind=mean or
    /extract/uhs?kind=rlz-0, etc
    """
    oq = dstore['oqparam']
    num_rlzs = len(dstore['weights'])
    stats = oq.hazard_stats()
    if what == '':  # npz exports for QGIS
        sitecol = dstore['sitecol']
        mesh = get_mesh(sitecol, complete=False)
        dic = {}
        for s, stat in enumerate(stats):
            hmap = dstore['hmaps-stats'][:, s]
            dic[stat] = calc.make_uhs(hmap, oq)
        yield from hazard_items(
            dic, mesh, investigation_time=oq.investigation_time)
        return
    params = parse(what, stats, num_rlzs)
    periods = []
    for m, imt in enumerate(oq.imtls):
        if imt == 'PGA' or imt.startswith('SA'):
            periods.append(m)
    if 'site_id' in params:
        sids = params['site_id']
    else:
        sids = ALL
    for k, i in params['kind'].items():
        if k.startswith('rlz-'):
            yield k, hdf5.extract(dstore['hmaps-rlzs'], sids, i,
                                  periods, ALL)[:, 0]
        else:
            yield k, hdf5.extract(dstore['hmaps-stats'], sids, i,
                                  periods, ALL)[:, 0]
    yield from params.items()

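# Hypothetical usage sketch (not part of the original module): collect the
# mean uniform hazard spectra from an open datastore of a completed classical
# calculation. The query string mirrors the /extract/uhs?kind=mean form
# mentioned in the docstring above; 'kind=mean' is an assumed value of `what`.
def get_mean_uhs(dstore):
    # keep only the array-valued items; the generator also re-yields the
    # parsed query parameters at the end
    return {k: v for k, v in extract_uhs(dstore, 'kind=mean')
            if hasattr(v, 'shape')}
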
def export_uhs_xml(ekey, dstore):
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    sitemesh = get_mesh(dstore['sitecol'])
    key, fmt = ekey
    fnames = []
    periods = [imt for imt in oq.imtls
               if imt.startswith('SA') or imt == 'PGA']
    for kind in dstore['hcurves']:
        hcurves = dstore['hcurves/' + kind]
        metadata = get_metadata(rlzs_assoc.realizations, kind)
        _, periods = calc.get_imts_periods(oq.imtls)
        uhs = calc.make_uhs(hcurves, oq.imtls, oq.poes, len(sitemesh))
        for poe in oq.poes:
            fname = hazard_curve_name(
                dstore, ekey, kind + '-%s' % poe, rlzs_assoc)
            writer = hazard_writers.UHSXMLWriter(
                fname, periods=periods, poe=poe,
                investigation_time=oq.investigation_time, **metadata)
            data = []
            for site, curve in zip(sitemesh, uhs[str(poe)]):
                data.append(UHS(curve, Location(site)))
            writer.serialize(data)
            fnames.append(fname)
    return sorted(fnames)