def run(self):
    if not self.query():
        sys.stderr.write("No database connection!\n")
        return False

    xarc = IO.XMLArchive()
    if not xarc.create(self._outfile, True, True):
        sys.stderr.write("Could not create XML output file %s!\n" % self._outfile)
        return False

    xarc.setFormattedOutput(self.commandline().hasOption("formatted"))

    (net, sta, loc, cha) = self._streamID.split(".")
    it = self.query().getWaveformQuality(
        DataModel.WaveformStreamID(net, sta, loc, cha, ""),
        self._parameter,
        Core.Time.FromString(self._start, "%Y-%m-%d %H:%M:%S"),
        Core.Time.FromString(self._end, "%Y-%m-%d %H:%M:%S"))

    while it.get():
        wfq = DataModel.WaveformQuality.Cast(it.get())
        xarc.writeObject(wfq)
        it.step()

    xarc.close()
    return True
def main():
    if len(sys.argv) < 1 or len(sys.argv) > 3:
        sys.stderr.write("Usage: inv2dlsv [in_xml [out_dataless]]\n")
        return 1

    if len(sys.argv) > 1:
        inFile = sys.argv[1]
    else:
        inFile = "-"

    if len(sys.argv) > 2:
        out = sys.argv[2]
    else:
        out = ""

    sc3wrap.dbQuery = None

    ar = IO.XMLArchive()
    if not ar.open(inFile):
        raise IOError(inFile + ": unable to open")

    obj = ar.readObject()
    if obj is None:
        raise TypeError(inFile + ": invalid format")

    sc3inv = DataModel.Inventory.Cast(obj)
    if sc3inv is None:
        raise TypeError(inFile + ": invalid format")

    inv = Inventory(sc3inv)
    inv.load_stations("*", "*", "*", "*")
    inv.load_instruments()

    vol = SEEDVolume(inv, ORGANIZATION, "", resp_dict=False)

    for net in iterinv(inv.network):
        for sta in iterinv(net.station):
            for loc in iterinv(sta.sensorLocation):
                for strm in iterinv(loc.stream):
                    try:
                        vol.add_chan(net.code, sta.code, loc.code, strm.code,
                                     strm.start, strm.end)
                    except SEEDError as e:
                        sys.stderr.write("Error (%s,%s,%s,%s): %s\n" % (
                            net.code, sta.code, loc.code, strm.code, str(e)))

    if not out or out == "-":
        output = io.BytesIO()
        vol.output(output)
        stdout = sys.stdout.buffer if hasattr(sys.stdout, "buffer") else sys.stdout
        stdout.write(output.getvalue())
        stdout.flush()
        output.close()
    else:
        with open(out, "wb") as fd:
            vol.output(fd)

    return 0
def run(self):
    try:
        if self.dcid is None:
            print("Please specify datacenter/archive ID", file=sys.stderr)
            return False

        nettab = Nettab(self.dcid)
        instdb = Instruments(self.dcid)

        try:
            self.__load_file(instdb.load_db, self.inst_db_file)
            self.__load_file(nettab.load_statmap, self.stat_map_file)
            self.__load_file(nettab.load_access_net, self.access_net_file)
            self.__load_file(nettab.load_access_stat, self.access_stat_file)
            self.__load_file(instdb.load_sensor_attr, self.sensor_attr_file)
            self.__load_file(instdb.load_datalogger_attr, self.datalogger_attr_file)
            self.__load_file(nettab.load_network_attr, self.network_attr_file)
            self.__load_file(nettab.load_station_attr, self.station_attr_file)

            inv = SC3Inventory(DataModel.Inventory())

            idx = 1
            for tab in sorted(self.tab_files):
                print("Loading %s (%d/%d)" % (tab, idx, len(self.tab_files)),
                      file=sys.stderr)
                self.__load_file(nettab.load_tab, tab)

                print("Generating data structures", file=sys.stderr)
                nettab.update_inventory(instdb, inv)
                idx = idx + 1

                if self.isExitRequested():
                    print("Exit requested, abort", file=sys.stderr)
                    return False

            print("Generating output", file=sys.stderr)
            ar = IO.XMLArchive()
            ar.setFormattedOutput(self.commandline().hasOption("formatted"))
            ar.create(self.out_file)
            ar.writeObject(inv.obj)
            ar.close()
            print("Finished", file=sys.stderr)

        except (IOError, NettabError) as e:
            logs.error("fatal error: " + str(e))
            return False

    except Exception:
        logs.print_exc()
        return False

    return True
def __init__(self, xmlOut):
    self.ar = scio.XMLArchive()        # seiscomp xml creator
    self.ar.setFormattedOutput(True)   # output formatted xml file
    self.xmlOut = xmlOut
    self.eparams = scdatamodel.EventParameters()
    self.eparams.SetIdGeneration(True)
    self.eparams.SetIdPattern('@classname@#@time/%Y%m%d%H%M%S.%f@.@id@')
    self._stage = 2
    self.Eid = '0'
def done(self):
    if self.outputFile:
        ar = IO.XMLArchive()
        ar.create(self.outputFile)
        ar.setFormattedOutput(True)
        ar.writeObject(Client.Inventory.Instance().inventory())
        ar.close()
    else:
        self.send_notifiers("INVENTORY")

    Client.Application.done(self)
def _readEventParametersFromXML(self):
    ar = IO.XMLArchive()
    if not ar.open(self._xmlFile):
        raise IOError(self._xmlFile + ": unable to open")

    obj = ar.readObject()
    if obj is None:
        raise TypeError(self._xmlFile + ": invalid format")

    ep = DataModel.EventParameters.Cast(obj)
    if ep is None:
        raise TypeError(self._xmlFile + ": no eventparameters found")

    return ep
def _loadXml(self, folder):
    print >> sys.stderr, " Loading inventory from XML file ... ",

    for f in glob.glob(os.path.join(folder, "*.xml")):
        ar = IO.XMLArchive()
        ar.open(f)
        inventory = DataModel.Inventory_Cast(ar.readObject())
        ar.close()

        if inventory:
            self.stationResolver.collectStations(inventory)

    print >> sys.stderr, "Done."
def datafromxml(filename):
    data = None

    ar = IO.XMLArchive()
    ar.open(filename)
    obj = ar.readObject()
    ar.close()

    ep = DataModel.EventParameters.Cast(obj)
    if ep is None:
        print >> sys.stderr, "File (%s) is no event, skipping." % filename
        return data

    if ep.eventCount() == 0:
        print >> sys.stderr, "File (%s) has no events, skipping." % filename
        return data

    evt = ep.event(0)
    evt = DataModel.Event.Cast(evt)
    if evt is None:
        print >> sys.stderr, "Cannot get event from file (%s), skipping." % filename
        return data

    if evt.preferredOriginID() == "":
        print >> sys.stderr, "No origin (%s), skipping." % filename
        return data

    if evt.preferredMagnitudeID() == "":
        print >> sys.stderr, "No magnitude (%s)" % filename

    ori = ep.findOrigin(evt.preferredOriginID())
    mag = ori.findMagnitude(evt.preferredMagnitudeID())

    data = {}
    data['time'] = ori.time().value().toString("%Y-%m-%dT%H:%M:%SZ")
    data['lat'] = ori.latitude().value()
    data['lon'] = ori.longitude().value()
    data['dep'] = ori.depth().value()
    data['arc'] = ori.arrivalCount()

    data['mag'] = None
    data['magt'] = None
    if mag is not None:
        data['mag'] = mag.magnitude().value()
        data['magt'] = mag.type()

    data['desc'] = None
    if evt.eventDescriptionCount() != 0:
        data['desc'] = evt.eventDescription(0).text()

    return data
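# A minimal, hypothetical usage sketch for datafromxml() above (not part of the
# original script): the "events/" directory and the print format are assumptions.
# It relies on the same module context (glob available, seiscomp3 bindings loaded).
import glob

for xmlfile in sorted(glob.glob("events/*.xml")):
    data = datafromxml(xmlfile)
    if data is None:
        continue  # unreadable file or no usable event in it
    print("%s M%s lat=%.2f lon=%.2f depth=%.1f km (%d arrivals)" % (
        data['time'], data['mag'], data['lat'], data['lon'],
        data['dep'], data['arc']))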
def run(self):
    publicID = self.commandline().optionString("public-id")

    obj = self.loadEventParametersObject(publicID)
    if obj is None:
        obj = self.loadInventoryObject(publicID)
    if obj is None:
        raise ValueError("unknown object '" + publicID + "'")

    # dump formatted XML archive to stdout
    ar = IO.XMLArchive()
    ar.setFormattedOutput(True)
    ar.create("-")
    ar.writeObject(obj)
    ar.close()
    return True
def main():
    if len(sys.argv) < 1 or len(sys.argv) > 3:
        print "Usage: inv2dlsv [in_xml [out_dataless]]"
        return 1

    if len(sys.argv) > 1:
        inFile = sys.argv[1]
    else:
        inFile = "-"

    if len(sys.argv) > 2:
        out = sys.argv[2]
    else:
        out = ""

    sc3wrap.dbQuery = None

    ar = IO.XMLArchive()
    if not ar.open(inFile):
        raise IOError(inFile + ": unable to open")

    obj = ar.readObject()
    if obj is None:
        raise TypeError(inFile + ": invalid format")

    sc3inv = DataModel.Inventory.Cast(obj)
    if sc3inv is None:
        raise TypeError(inFile + ": invalid format")

    inv = Inventory(sc3inv)
    inv.load_stations("*", "*", "*", "*")
    inv.load_instruments()

    vol = SEEDVolume(inv, ORGANIZATION, "", resp_dict=False)

    for net in sum([i.values() for i in inv.network.itervalues()], []):
        for sta in sum([i.values() for i in net.station.itervalues()], []):
            for loc in sum([i.values() for i in sta.sensorLocation.itervalues()], []):
                for strm in sum([i.values() for i in loc.stream.itervalues()], []):
                    try:
                        vol.add_chan(net.code, sta.code, loc.code, strm.code,
                                     strm.start, strm.end)
                    except SEEDError as e:
                        print >> sys.stderr, "Error (%s,%s,%s,%s):" % (
                            net.code, sta.code, loc.code, strm.code), str(e)
def __init__(self, filename):
    self.inventory = None
    self.selection = {}

    ar = IO.XMLArchive()
    if not ar.open(filename):
        print >> sys.stderr, "Filename '%s' is not accessible." % filename
        return

    obj = ar.readObject()
    ar.close()

    self.inventory = DataModel.Inventory.Cast(obj)
    self.selection = {}
def writeSCML(filename, objects):
    """Given seiscomp3.DataModel objects (as produced by createObjects),
    write them to an XML file.

    :param str filename: path to output file
    :param dict objects: same as return type of createObjects"""
    # create SeisComP3 XML archive used to serialize objects
    ar = IO.XMLArchive()
    # enable formatted output
    ar.setFormattedOutput(True)
    # try to create the output file
    ar.create(filename)

    # serialize the objects
    for x in objects.values():
        if isinstance(x, DM.PublicObject):
            ar.writeObject(x)
    ar.close()
    return True
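# Hypothetical call site for writeSCML() (illustration only): createObjects() is the
# companion helper referenced in the docstring above and is assumed, not shown here.
objects = createObjects()                  # e.g. {"Origin/xyz": <DM.Origin>, ...}
writeSCML("eventparameters.xml", objects)  # only DM.PublicObject instances are written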
def run(self):
    self.loadStreams()

    try:
        if self.inputFile == '-':
            f = sys.stdin
        else:
            f = open(self.inputFile)
    except IOError as e:
        Logging.error(str(e))
        return False

    ep = self.sh2proc(f)
    if ep is None:
        return False

    ar = IO.XMLArchive()
    ar.create('-')
    ar.setFormattedOutput(True)
    ar.writeObject(ep)
    ar.close()

    return True
def main():
    # Creating the parser
    parser = OptionParser(usage="Tab to Inventory (sc3) converter",
                          version="1.0",
                          add_help_option=True)

    parser.add_option("-i", "--ip", type="string",
                      help="Prefix to be added to each instrument generated.",
                      dest="instrumentPrefix", default=None)
    parser.add_option("-f", "--filterf", type="string",
                      help="Indicates a folder containing the filter coefficient files",
                      dest="ffolder", default=None)
    parser.add_option("-x", "--xmlf", type="string",
                      help="Indicates a folder containing the XML inventory files "
                           "(needed for station group support)",
                      dest="xfolder", default=None)
    parser.add_option("-D", "--database", type="string",
                      help="Database URL for inventory (needed for station group support)",
                      dest="database", default=None)
    parser.add_option("", "--force", action="store_true",
                      help="Don't stop on error of individual files",
                      dest="force", default=False)
    parser.add_option("-g", "--generate", action="store_true",
                      help="Generate XML file at the end",
                      dest="generate", default=False)
    parser.add_option("-c", "--check", action="store_true",
                      help="Check the loaded files",
                      dest="check", default=False)
    parser.add_option("-d", "--default", type="string",
                      help="Indicates the default file",
                      dest="defaultFile", default=None)
    parser.add_option("-o", "--output", type="string",
                      help="Indicates the output file",
                      dest="outFile", default="-")

    # Parsing & error check
    (options, args) = parser.parse_args()

    error = False
    if len(args) < 1:
        print >> sys.stderr, "No input file(s) to digest"
        error = True

    if error:
        print >> sys.stderr, "Use -h for help on usage"
        return 1

    # Execution
    try:
        inv = None
        t = Tab(options.instrumentPrefix, options.defaultFile, options.ffolder,
                options.xfolder, options.database)

        for f in args:
            try:
                t.digest(f)
            except Exception as e:
                print >> sys.stderr, "Error digesting %s:\n %s" % (f, e)
                if not options.force:
                    raise e

        if options.check:
            t.check()
            return

        if options.generate:
            inv = t.sc3Obj()
            if inv:
                ar = IO.XMLArchive()
                print >> sys.stderr, "Generating file: %s" % options.outFile
                ar.create(options.outFile)
                ar.setFormattedOutput(True)
                ar.setCompression(False)
                ar.writeObject(inv)
                ar.close()
def run(self): """ Things to do: * load event * load preferred origin without arrivals * load at least the preferred magnitude if available, all magnitudes if requested * load focal mechanism incl. moment tensor depending on availability, incl. Mw from derived origin """ evid = self.commandline().optionString("event") # Load event and preferred origin. This is the minimum # required info and if it can't be loaded, give up. event = self._loadEvent(evid) if event is None: raise ValueError, "unknown event '" + evid + "'" # preferredOrigin = self._loadOrigin(event.preferredOriginID()) preferredOrigin = self.query().getObject(DataModel.Origin.TypeInfo(), event.preferredOriginID()) preferredOrigin = DataModel.Origin.Cast(preferredOrigin) if preferredOrigin is None: raise ValueError, "unknown origin '" + event.preferredOriginID( ) + "'" # take care of origin references and leave just one for the preferred origin while (event.originReferenceCount() > 0): event.removeOriginReference(0) if preferredOrigin: event.add(DataModel.OriginReference(preferredOrigin.publicID())) if self.commandline().hasOption("comments"): self.query().loadComments(preferredOrigin) # load all magnitudes for preferredOrigin if self.commandline().hasOption("all-magnitudes"): self.query().loadMagnitudes(preferredOrigin) magnitudes = [ preferredOrigin.magnitude(i) for i in range(preferredOrigin.magnitudeCount()) ] else: magnitudes = [] if event.preferredMagnitudeID(): # try to load from memory for mag in magnitudes: if mag.publicID() == event.preferredMagnitudeID(): preferredMagnitude = mag break # preferredMagnitude = DataModel.Magnitude.Find(event.preferredMagnitudeID()) else: # try to load it from database preferredMagnitude = self._loadMagnitude( event.preferredMagnitudeID()) else: preferredMagnitude = None # try to load focal mechanism, moment tensor, moment magnitude and related origins momentTensor = momentMagnitude = derivedOrigin = triggeringOrigin = None # default focalMechanism = self._loadFocalMechanism( event.preferredFocalMechanismID()) if focalMechanism: if focalMechanism.triggeringOriginID(): if event.preferredOriginID( ) == focalMechanism.triggeringOriginID(): triggeringOrigin = preferredOrigin else: triggeringOrigin = self.query().getObject( DataModel.Origin.TypeInfo(), focalMechanism.triggeringOriginID()) triggeringOrigin = DataModel.Origin.Cast(triggeringOrigin) if focalMechanism.momentTensorCount() > 0: momentTensor = focalMechanism.momentTensor( 0) # FIXME What if there is more than one MT? 
if momentTensor.derivedOriginID(): derivedOrigin = self.query().getObject( DataModel.Origin.TypeInfo(), momentTensor.derivedOriginID()) derivedOrigin = DataModel.Origin.Cast(derivedOrigin) if momentTensor.momentMagnitudeID(): if momentTensor.momentMagnitudeID( ) == event.preferredMagnitudeID(): momentMagnitude = preferredMagnitude else: momentMagnitude = self._loadMagnitude( momentTensor.momentMagnitudeID()) # take care of FocalMechanism and related references if derivedOrigin: event.add(DataModel.OriginReference(derivedOrigin.publicID())) if triggeringOrigin: if event.preferredOriginID() != triggeringOrigin.publicID(): event.add( DataModel.OriginReference(triggeringOrigin.publicID())) while (event.focalMechanismReferenceCount() > 0): event.removeFocalMechanismReference(0) if focalMechanism: event.add( DataModel.FocalMechanismReference( focalMechanism.publicID())) self._removeCommentsIfRequested(focalMechanism) # strip creation info if not self.commandline().hasOption("include-full-creation-info"): self._stripCreationInfo(event) if focalMechanism: self._stripCreationInfo(focalMechanism) for i in xrange(focalMechanism.momentTensorCount()): self._stripCreationInfo(focalMechanism.momentTensor(i)) for org in [preferredOrigin, triggeringOrigin, derivedOrigin]: if org is not None: self._stripCreationInfo(org) for i in xrange(org.magnitudeCount()): self._stripCreationInfo(org.magnitude(i)) # populate EventParameters instance ep = DataModel.EventParameters() ep.add(event) if preferredMagnitude and preferredMagnitude is not momentMagnitude: preferredOrigin.add(preferredMagnitude) ep.add(preferredOrigin) if focalMechanism: if triggeringOrigin: if triggeringOrigin is not preferredOrigin: ep.add(triggeringOrigin) if derivedOrigin: if momentMagnitude: derivedOrigin.add(momentMagnitude) ep.add(derivedOrigin) ep.add(focalMechanism) # finally dump event parameters as formatted XML archive to stdout ar = IO.XMLArchive() ar.setFormattedOutput(True) ar.create("-") ar.writeObject(ep) ar.close() del ep
def datafromxml(filename):
    ev = None

    ar = IO.XMLArchive()
    if not ar.open(filename):
        print >> sys.stderr, "Filename '%s' is not accessible." % filename
        return None

    obj = ar.readObject()
    ar.close()

    ep = DataModel.EventParameters.Cast(obj)
    if ep is None:
        print >> sys.stderr, "File (%s) is no event, skipping." % filename
        return None

    if ep.eventCount() == 0:
        print >> sys.stderr, "File (%s) has no events, skipping." % filename
        return None

    evt = ep.event(0)
    evt = DataModel.Event.Cast(evt)
    if evt is None:
        print >> sys.stderr, "Cannot get event from file (%s), skipping." % filename
        return None

    if evt.preferredOriginID() == "":
        print >> sys.stderr, "No origin (%s), skipping." % filename
        return None

    print >> sys.stderr, "\nProcessing event %s (%s)" % (evt.publicID(), filename)

    if evt.preferredMagnitudeID() == "":
        print >> sys.stderr, " No magnitude (%s)" % filename

    ori = ep.findOrigin(evt.preferredOriginID())
    mag = ori.findMagnitude(evt.preferredMagnitudeID())
    mag = mag.magnitude().value() if mag is not None else None

    # Assemble errors from the SC3 solution
    try:
        eh = math.sqrt(math.pow(ori.latitude().uncertainty(), 2) +
                       math.pow(ori.longitude().uncertainty(), 2))
    except Core.ValueException:
        eh = 0.0

    try:
        ez = ori.depth().uncertainty()
    except Core.ValueException:
        ez = 0.0

    try:
        rms = ori.quality().standardError()
    except Core.ValueException:
        rms = 0.0

    try:
        ev = Event(time=sc3timeparse(ori.time()),
                   longitude=ori.longitude().value(),
                   latitude=ori.latitude().value(),
                   depth=ori.depth().value(),
                   magnitude=mag,
                   eh=eh, ez=ez, rms=rms)
    except Exception as e:
        print >> sys.stderr, " %s" % str(e)
        return None
def run(self):
    if not self._processCommandLineOptions():
        return False

    dbq = self.query()
    ep = DataModel.EventParameters()

    # If we got an event ID as command-line argument...
    if self._evid:
        # Retrieve event from DB
        evt = dbq.loadObject(DataModel.Event.TypeInfo(), self._evid)
        evt = DataModel.Event.Cast(evt)
        if evt is None:
            raise TypeError("unknown event '" + self._evid + "'")

        # If start time was not specified, compute it from origin time.
        if self._startTime is None:
            orid = evt.preferredOriginID()
            org = dbq.loadObject(DataModel.Origin.TypeInfo(), orid)
            org = DataModel.Origin.Cast(org)
            t0 = org.time().value()
            self._startTime = t0 + Core.TimeSpan(-before)
            self._endTime = t0 + Core.TimeSpan(after)
            print >> sys.stderr, "time window: %s ... %s" % (
                self._startTime, self._endTime)

        if not self.commandline().hasOption("no-origins"):
            # Loop over all origins of the event
            for org in dbq.getOrigins(self._evid):
                org = DataModel.Origin.Cast(org)
                # We only look for manual origins.
                if org.evaluationMode() != DataModel.MANUAL:
                    continue
                self._orids.append(org.publicID())

    # FIRST the pick query loop, THEN the amplitude query loop!
    # NESTED QUERY LOOPS ARE NOT ALLOWED!!!
    picks = []
    for obj in dbq.getPicks(self._startTime, self._endTime):
        pick = DataModel.Pick.Cast(obj)
        if pick:
            if pick.evaluationMode() == DataModel.MANUAL and \
                    self.commandline().hasOption("no-manual-picks"):
                continue
            if pick.waveformID().networkCode() in self._networkBlacklist:
                continue
            picks.append(pick)
            ep.add(pick)
    print >> sys.stderr, "loaded %d picks " % ep.pickCount()

    for i, pick in enumerate(picks):
        # amplitude query loop for each pick, see above comments.
        for obj in dbq.getAmplitudesForPick(pick.publicID()):
            ampl = DataModel.Amplitude.Cast(obj)
            if ampl:
                ep.add(ampl)
        sys.stderr.write("loaded amplitudes for %d of %d picks\r" % (i, len(picks)))
    print >> sys.stderr, "loaded %d amplitudes " % ep.amplitudeCount()

    if not self.commandline().hasOption("no-origins"):
        for i, orid in enumerate(self._orids):
            # XXX There was occasionally a problem with:
            #   org = dbq.loadObject(DataModel.Origin.TypeInfo(), orid)
            #   org = DataModel.Origin.Cast(org)
            # NOTE when org was directly overwritten, resulting in a segfault.
            # The reason is not clear, but is most probably in the Python
            # wrapper. The segfault can be avoided by creating an
            # intermediate object 'obj'.
            obj = dbq.loadObject(DataModel.Origin.TypeInfo(), orid)
            org = DataModel.Origin.Cast(obj)
            ep.add(org)
            sys.stderr.write("loaded %d of %d manual origins\r" % (i, len(self._orids)))
        print >> sys.stderr, "loaded %d manual origins " % ep.originCount()

    # finally dump event parameters as formatted XML archive to stdout
    ar = IO.XMLArchive()
    ar.setFormattedOutput(True)
    ar.create("-")
    ar.writeObject(ep)
    ar.close()

    del ep
    return True
def run(self):
    self.loadStreams()

    try:
        if self.inputFile == '-':
            f = sys.stdin
        else:
            f = open(self.inputFile)
    except IOError as e:
        Logging.error(str(e))
        return False

    ep = self.sh2proc(f)
    if ep is None:
        return False

    ar = IO.XMLArchive()
    ar.create('-')
    ar.setFormattedOutput(True)
    ar.writeObject(ep)
    ar.close()

    return True


###############################################################################
def main():
    try:
        app = SH2Proc()
        return app()
    except:
        sys.stderr.write(str(traceback.format_exc()))