def iterate_events(self, subset=None, starttime=None, endtime=None,
                   parse_arrivals=True, parse_magnitudes=True):
    """
    Iterate over events in the database, yielding the parsed preferred
    origin of each event.

    :param subset: optional Datascope subset expression applied to the
        joined event/origin view
    :param starttime: keep only origins at or after this time
    :param endtime: keep only origins at or before this time
    :param parse_arrivals: attach arrival data to each yielded origin
    :param parse_magnitudes: attach magnitude data to each yielded origin
    """
    tbl_event = self.tables["event"]
    view = tbl_event.join("origin")
    if starttime is not None:
        starttime = validate_time(starttime)
        _view = view.subset("time >= _%f_" % starttime.timestamp)
        view.free()
        view = _view
    if endtime is not None:
        endtime = validate_time(endtime)
        # BUG FIX: filter on the origin "time" field, mirroring the
        # starttime branch; "endtime" is a wfdisc field and does not
        # exist on the joined event/origin view, so the original
        # "endtime < ..." expression was a copy/paste error.
        _view = view.subset("time <= _%f_" % endtime.timestamp)
        view.free()
        view = _view
    if subset:
        _view = view.subset(subset)
        view.free()
        view = _view
    event_view = view.separate("event")
    view.free()
    for event in event_view.iter_record():
        prefor = event.getv("prefor")[0]
        yield self.parse_origin(prefor,
                                parse_arrivals=parse_arrivals,
                                parse_magnitudes=parse_magnitudes)
    # NOTE: only released if the generator is exhausted by the caller.
    event_view.free()
def __init__(self, database, station, channel, starttime, endtime):
    """
    Load waveform data for one station/channel over a time window from
    a Datascope wfdisc-backed database.

    :param database: database handle exposing grouped/sorted wfdisc views
    :param station: station name string or sp.station.Station
    :param channel: channel code string or sp.station.Channel
    :param starttime, endtime: window to load (validated via validate_time)
    :raise IOError: if the station/channel pair is not in the database
    """
    starttime = validate_time(starttime)
    endtime = validate_time(endtime)
    # BUG FIX: accept either Station/Channel objects or plain strings.
    # Previously sta/chan were bound only when objects were passed, so
    # string arguments raised NameError below.
    if isinstance(station, sp.station.Station):
        sta = station.name
    else:
        sta = station
    if isinstance(channel, sp.station.Channel):
        chan = channel.code
    else:
        chan = channel
    try:
        groupd = database.wfdisc["grouped"]
        groupd.record = groupd.find("sta =~ /%s/ && chan =~ /%s/"
                                    % (sta, chan))
    except Exception:
        raise IOError("data not found")
    rnge = groupd.get_range()
    sortd = database.wfdisc["sorted"]
    view = sortd.list2subset(range(rnge[0], rnge[1]))
    # keep only wfdisc rows overlapping the requested window
    _tmp = view.subset("endtime > _%f_ && time < _%f_"
                       % (starttime.timestamp, endtime.timestamp))
    view.free()
    view = _tmp
    st = obspy.core.Stream()
    for record in view.iter_record():
        st += obspy.core.read(record.filename()[1],
                              starttime=starttime,
                              endtime=endtime)[0]
    view.free()
    st.merge()
    tr = st[0]
    self.stats = tr.stats
    # BUG FIX: store the plain codes, not possible Station/Channel objects
    self.stats.station = sta
    self.stats.channel = chan
    self.data = tr.data
def __init__(self, code, ondate, offdate):
    """Initialize a channel-code record with its on/off dates."""
    self.code = code
    self.ondate = validate_time(ondate)
    self.offdate = validate_time(offdate)
    self.inactive_periods = ()
    # lookup tables mapping code letters to their ordering index
    self.sample_rates = dict(zip("EHBL", (0, 1, 2, 3)))
    self.instruments = dict(zip("HN", (0, 1)))
    self.components = dict(zip("ZNE12", (0, 1, 2, 3, 4)))
def __init__(self, name, lon, lat, elev, network, ondate=-1, offdate=-1):
    """Create a station record; longitude is wrapped into [0, 360)."""
    self.name, self.lat, self.elev, self.network = name, lat, elev, network
    self.lon = lon % 360.  # normalize longitude
    self.ondate = validate_time(ondate)
    self.offdate = validate_time(offdate)
    self.channels = {}
def time_range(self, table, subset=None):
    """
    Return (starttime, endtime) spanning the "time" field of *table*,
    optionally restricted by a Datascope *subset* expression.
    """
    tbl = self.tables[table]
    if subset:
        subsetted = tbl.subset(subset)
        view = subsetted.sort("time")
        subsetted.free()
    else:
        view = tbl.sort("time")
    # earliest record
    view.record = 0
    starttime = validate_time(view.getv("time")[0])
    # latest record
    view.record = view.record_count - 1
    endtime = validate_time(view.getv("time")[0])
    return starttime, endtime
def __init__(self, lat, lon, depth, time, arrivals=None, magnitudes=None,
             orid=-1, evid=-1, sdobs=-1, nass=-1, ndef=-1, author=None):
    """Create an origin; longitude is wrapped into [0, 360) and any
    supplied arrivals/magnitudes are attached via the add_* helpers."""
    self.lat = lat
    self.lon = lon % 360.  # normalize longitude
    self.depth = depth
    self.time = validate_time(time)
    # containers must exist before add_arrivals/add_magnitudes run
    self.arrivals = ()
    self.magnitudes = ()
    self.stations = {}
    if arrivals:
        self.add_arrivals(arrivals)
    if magnitudes:
        self.add_magnitudes(magnitudes)
    self.orid, self.evid = orid, evid
    self.sdobs, self.nass, self.ndef = sdobs, nass, ndef
    self.author = author
def fetch(self, station, channel, starttime, endtime):
    """
    Fetch waveform data from ANF rsync server at UCSD.

    :param station: station name string or seispy.station.Station
    :param channel: channel code string or seispy.station.Channel
    :param starttime, endtime: window to fetch (must be in one year)
    :raise NotImplementedError: if the window spans two calendar years
    :raise IOError: if no data were found
    """
    starttime = validate_time(starttime)
    endtime = validate_time(endtime)
    if isinstance(station, seispy.station.Station):
        station = station.name
    # BUG FIX: the original tested *station* here instead of *channel*,
    # so Channel objects were never reduced to their code string.
    if isinstance(channel, seispy.station.Channel):
        channel = channel.code
    if not starttime.year == endtime.year:
        raise NotImplementedError
    tbl_wfdisc = self.dbs[starttime.year].lookup(table="wfdisc")
    view = tbl_wfdisc.subset(
        "sta =~ /%s/ && chan =~ /%s/ && endtime > _%f_"
        "&& time < _%f_" % (station, channel,
                            starttime.timestamp,
                            endtime.timestamp))
    view_unique = view.sort(("sta", "chan"), unique=True)
    st = Stream()
    for control_record in view_unique.iter_record():
        sta, chan = control_record.getv("sta", "chan")
        view_data = view.subset(
            "sta =~ /%s/ && chan =~ /%s/ && endtime"
            "> _%f_ && time < _%f_" % (sta, chan,
                                       starttime.timestamp,
                                       endtime.timestamp))
        for data_record in view_data.iter_record():
            ddir, dfile = data_record.getv("dir", "dfile")
            # throttle: never run more than 6 concurrent rsync transfers
            while n_rsync_processes() >= 6:
                print("SLEEPING, %s" % n_rsync_processes())
                time.sleep(1)
            with open(os.devnull, 'w') as FNULL:
                # copy one waveform file into temp_dir, silencing rsync
                subprocess.call([
                    "rsync", "-a", "-v", "-P", "--whole-file",
                    "%s/%s/%s" % (self.server_directory, ddir, dfile),
                    self.temp_dir
                ], stdout=FNULL)
            os.listdir(self.temp_dir)
            st += read(os.path.join(self.temp_dir, dfile))
            os.remove(os.path.join(self.temp_dir, dfile))
    if len(st) == 0:
        raise IOError("data not found")
    st.trim(starttime, endtime)
    return st
def group_detections(self, subset=None, starttime=None, endtime=None):
    """
    Return a list of seispy.event.Detection objects built from the
    detection table, optionally windowed in time and/or restricted by a
    Datascope subset expression.
    """
    detections = []
    if starttime is not None:
        starttime = validate_time(starttime)
        if endtime is not None:
            endtime = validate_time(endtime)
            # NOTE: this branch does not re-sort the view (preserved)
            view = self.tables["detection"].subset(
                "time >= _%f_ && "
                "time <= _%f_" % (starttime.timestamp, endtime.timestamp))
        else:
            unsorted_view = self.tables["detection"].subset(
                "time >= _%f_" % starttime.timestamp)
            view = unsorted_view.sort("time")
            unsorted_view.free()
    elif endtime is not None:
        endtime = validate_time(endtime)
        unsorted_view = self.tables["detection"].subset(
            "time <= _%f_" % endtime.timestamp)
        view = unsorted_view.sort("time")
        unsorted_view.free()
    else:
        view = self.tables["detection"].sort("time")
    if subset is not None:
        filtered = view.subset(subset)
        view.free()
        view = filtered
    for record in view.iter_record():
        station, channel, time, label = record.getv(
            "sta", "chan", "time", "state")
        station = self.virtual_network.stations[station]
        # channel = station.channels[channel]
        detections.append(
            seispy.event.Detection(station, channel, time, label))
    return detections
def __init__(self, station, channel, time, phase,
             arid=-1, snr=-1, timeres=-999.000):
    """Create an arrival: a phase pick on a station/channel at a time."""
    self.station, self.channel = station, channel
    self.time = validate_time(time)
    self.phase = phase
    self.arid, self.snr, self.timeres = arid, snr, timeres
def __init__(self, starttime, endtime):
    """Store a validated [starttime, endtime] interval."""
    self.starttime, self.endtime = (validate_time(starttime),
                                    validate_time(endtime))
with open(os.devnull, 'w') as FNULL: subprocess.call([ "rsync", "-a", "-v", "-P", "--whole-file", "%s/%s/%s" % (self.server_directory, ddir, dfile), self.temp_dir ], stdout=FNULL) os.listdir(self.temp_dir) st += read(os.path.join(self.temp_dir, dfile)) os.remove(os.path.join(self.temp_dir, dfile)) if len(st) == 0: raise IOError("data not found") st.trim(starttime, endtime) return st def n_rsync_processes(): try: n = len([p.parent() for p in ps.process_iter() if p.name() == "rsync"]) / 2 except ps.NoSuchProcess: return n_rsync_processes() return n if __name__ == "__main__": willy = Groundhog("rsync://eqinfo.ucsd.edu/ANZA_waveforms") st = willy.fetch("PFO", "HHZ", validate_time("2015-120T00:00:00"), validate_time("2015-120T00:01:00")) st.plot()
def __init__(self, *args, **kwargs):
    """
    Build a trace from either a single positional argument or keywords.

    Positional form: args[0] is a waveform file path, or a Datascope
    Dbptr positioned on a wfdisc record.

    Keyword form: 'station', 'channel', 'starttime' and 'endtime' are
    mandatory, plus one of 'database_path' or 'database_pointer'.

    :raise ValueError: on bad/missing arguments or an invalid Dbptr
    :raise TypeError: on an unsupported positional argument type
    :raise IOError: if the database file or requested data are missing
    """
    if len(args) == 1:
        if isinstance(args[0], str) and os.path.isfile(args[0]):
            tr = obspy.core.read(args[0])[0]
            self.stats = tr.stats
            self.data = tr.data
        elif isinstance(args[0], gazelle.datascope.Dbptr):
            dbptr = args[0]
            if dbptr.query(dbTABLE_NAME) == 'wfdisc':
                if 0 <= dbptr.record < dbptr.record_count:
                    tr = obspy.core.read(dbptr.filename()[1])[0]
                    self.stats = tr.stats
                    self.data = tr.data
                else:
                    raise ValueError("invalid record value: %d"
                                     % dbptr.record)
            else:
                raise ValueError("invalid table value: %d" % dbptr.table)
        else:
            raise TypeError("invalid type: %s" % type(args[0]))
    else:
        mandatory_kwargs = ('station', 'channel', 'starttime', 'endtime')
        for kw in mandatory_kwargs:
            if kw not in kwargs:
                raise ValueError("invalid keyword arguments")
        if not ("database_pointer" in kwargs or "database_path" in kwargs):
            raise ValueError(
                "invalid keyword arguments - specify database")
        starttime = validate_time(kwargs['starttime'])
        endtime = validate_time(kwargs['endtime'])
        if 'database_path' in kwargs:
            if not isfile("%s.wfdisc" % kwargs['database_path']):
                raise IOError("file not found: %s"
                              % kwargs['database_path'])
            dbptr = dbopen(kwargs['database_path'], 'r')
        elif 'database_pointer' in kwargs and\
                isinstance(kwargs['database_pointer'],
                           gazelle.datascope.Dbptr):
            dbptr = kwargs['database_pointer']
        else:
            raise ValueError("invalid keyword arguments")
        if isinstance(kwargs['station'], sp.station.Station):
            sta = kwargs["station"].name
        else:
            sta = kwargs["station"]
        if isinstance(kwargs['channel'], sp.station.Channel):
            chan = kwargs["channel"].code
        else:
            chan = kwargs["channel"]
        dbptr = dbptr.lookup(table='wfdisc')
        # BUG FIX: format the validated UTC timestamps into the subset
        # expression; the raw kwargs values may be strings or datetime
        # objects that "%f" cannot format (the validated starttime /
        # endtime were previously computed but never used here).
        dbptr = dbptr.subset("sta =~ /%s/ && chan =~ /%s/ && "
                             "endtime > _%f_ && time < _%f_"
                             % (sta, chan,
                                starttime.timestamp,
                                endtime.timestamp))
        if dbptr.record_count == 0:
            raise IOError("no data found")
        st = obspy.core.Stream()
        for record in dbptr.iter_record():
            st += obspy.core.read(record.filename()[1],
                                  starttime=starttime,
                                  endtime=endtime)[0]
        st.merge()
        tr = st[0]
        self.stats = tr.stats
        # BUG FIX: store the code strings, not possible Station/Channel
        # objects (consistent with the rest of the module)
        self.stats.station = sta
        self.stats.channel = chan
        self.data = tr.data
def __init__(self, station, channel, time, label, snr=-1):
    """Create a detection: a labeled trigger on a station/channel."""
    self.station, self.channel = station, channel
    self.time = validate_time(time)
    self.label, self.snr = label, snr