def export_ruptures_xml(ekey, dstore):
    """
    Export the ruptures as a SES XML file and pickle each rupture into
    a separate file under ``<base_path>/<export_dir>/ruptures``.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: a list with the path of the exported XML file
    """
    fmt = ekey[-1]
    oq = dstore['oqparam']
    events = dstore['events']
    sm_by_grp = dstore['csm_info'].get_sm_by_grp()
    mesh = get_mesh(dstore['sitecol'])
    # directory for the per-rupture pickle files; create it once, up front,
    # instead of checking after the loops as the original code did
    ruptures_dir = os.path.join(oq.base_path, oq.export_dir, 'ruptures')
    if not os.path.exists(ruptures_dir):
        os.makedirs(ruptures_dir)
    ruptures = {}
    for grp in dstore['ruptures']:
        grp_id = int(grp[4:])  # strip grp-
        ruptures[grp_id] = []
        for ebr in calc.get_ruptures(dstore, events, grp_id):
            ruptures[grp_id].append(ebr.export(mesh, sm_by_grp))
            # pickle EVERY rupture: the original dumped `ebr` after the
            # loops, so only the last rupture was written (and a NameError
            # occurred when there were no ruptures at all); it also leaked
            # the file handle by never closing the open() result
            rup_file_loc = os.path.join(
                ruptures_dir,
                'rupture_' + str(ebr.serial) + '_' +
                str(dstore.calc_id) + '.p')
            with open(rup_file_loc, 'wb') as f:
                pickle.dump(ebr.rupture, f)
    dest = dstore.export_path('ses.' + fmt)
    writer = hazard_writers.SESXMLWriter(dest)
    writer.serialize(ruptures, oq.investigation_time)
    return [dest]
def export_ruptures_xml(ekey, dstore):
    """
    Export the ruptures, grouped by source group, as a SES XML file.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: a list with the path of the exported file
    """
    fmt = ekey[-1]
    oq = dstore['oqparam']
    num_ses = oq.ses_per_logic_tree_path
    ruptures_by_grp = AccumDict(accum=[])
    for rgetter in gen_rgetters(dstore):
        # AccumDict stores and returns the per-group list on first access,
        # so we can append to it directly
        grp_ruptures = ruptures_by_grp[rgetter.grp_id]
        for ebr in rgetter.get_ruptures():
            grp_ruptures.append(ebr.export(rgetter.rlzs_by_gsim, num_ses))
    dest = dstore.export_path('ses.' + fmt)
    hazard_writers.SESXMLWriter(dest).serialize(
        ruptures_by_grp, oq.investigation_time)
    return [dest]
def _export_ses_xml(dest, ses_coll):
    """
    Serialize *ses_coll* to *dest* as a SES XML file.
    """
    hazard_writers.SESXMLWriter(dest).serialize(ses_coll)