def get_streams_gema(networks, stations, starttime, endtime, only_vertical_channel=False, local_dir_name=None):
    """Read GEMA miniSEED waveforms from a local day-file archive.

    Walks ``<local_dir_name>/<year>/<network>/<station>/<channel>*`` for each
    day in [starttime, endtime], reads every matching miniSEED file, drops
    LONQ traces whose sampling rate is not 50 Hz, trims to the requested
    window and merges the stream.

    @rtype: tuple -- (merged Stream, gap list as returned by get_gaps())
    """
    if not local_dir_name:
        local_dir_name = "%s/archive" % (os.getenv("HOME"))
    channels = "*Z" if only_vertical_channel else "*"

    # Walk the archive one calendar day at a time.
    st = Stream()
    day = UTCDateTime(starttime.strftime("%Y-%m-%d"))
    final_day = UTCDateTime(endtime.strftime("%Y-%m-%d"))
    while day <= final_day:
        for net, sta in zip(networks, stations):
            chan_pattern = '%s/%s/%s/%s/%s*' % (local_dir_name, day.strftime("%Y"), net, sta, channels)
            for chan_dir in sorted(glob.glob(chan_pattern)):
                day_pattern = "%s/*%s" % (chan_dir, day.strftime("%Y.%03j"))
                for msfile in glob.glob(day_pattern):
                    st += read(msfile, starttime=starttime, endtime=endtime)
        day += 86400  # advance one day (seconds)

    # PATCH PROBLEM DIFFERENT SAMPLING RATES IN LONQ STATION FROM SCREAM:
    # discard LONQ traces not sampled at 50 Hz.
    for tr in st.select(station="LONQ"):
        if tr.stats.sampling_rate != 50:
            st.remove(tr)

    # Export gaps, then trim and merge the stream.
    gaps = st.get_gaps()
    if len(st) > 0:
        st.trim(starttime, endtime)
        st.merge(method=1, interpolation_samples=-1, fill_value='interpolate')
    return st, gaps
def get_fill_trace(tr):
    """Subroutine to get data fill for single trace!

    Returns the fraction (0.0-1.0) of the trace's time span that is covered
    by data, i.e. 1.0 minus the summed gap durations divided by the total
    span between the trace's start and end times.

    NOTE(review): Stream.get_gaps() reports gaps *between* traces; a Stream
    built from one contiguous trace has none, so this presumably expects a
    masked/gappy trace (or should call st.split() first) -- confirm intent.

    @rtype: float
    """
    st = Stream(traces=[tr])
    trstart = tr.stats.starttime
    trend = tr.stats.endtime
    dttot = trend - trstart
    # BUGFIX: a zero-length trace previously raised ZeroDivisionError below;
    # with no usable span there is nothing missing, so report full coverage.
    if dttot <= 0:
        return 1.0
    fill = 1.0
    for g in st.get_gaps():
        gapstart = g[4]  # gap start time (UTCDateTime)
        gapend = g[5]    # gap end time (UTCDateTime)
        fill -= (gapend - gapstart) / dttot
    return fill
def get_fill_trace(tr):
    """Subroutine to get data fill for single trace!

    Computes the fraction of the trace's time span that contains data by
    subtracting each gap's share of the total duration from 1.0.

    NOTE(review): this is an exact duplicate of the get_fill_trace defined
    earlier in this file -- consider removing one copy.

    @rtype: float
    """
    single = Stream(traces=[tr])
    span = tr.stats.endtime - tr.stats.starttime
    fill = 1.0
    for gap in single.get_gaps():
        # gap[4] / gap[5] are the gap's start / end times.
        fill -= (gap[5] - gap[4]) / span
    return fill
def get_streams_gema_old(networks, stations, starttime, endtime, only_vertical_channel=False, local_dir_name=None):
    """Legacy reader: fetch GEMA waveforms from a seiscomp3 day-file archive,
    plus the seedlink real-time buffer when the request ends within the last
    hour.

    Uses the server-local seiscomp3 paths when running on host 'maniedba'
    behind the expected public IP; otherwise reads from mounts under
    ``local_dir_name``.

    @rtype: tuple -- (merged Stream, gap list computed from the archive only)
    """
    if not local_dir_name:
        local_dir_name = "%s/mount" % (os.getenv("HOME"))

    # Detect whether we are running directly on the acquisition server.
    r = requests.get(r'http://jsonip.com')
    public_ip = r.json()['ip']
    if (socket.gethostname() == 'maniedba') and (public_ip == "152.74.135.51"):
        local_buffer = "/home/gema/seiscomp3/var/lib/seedlink/buffer"
        # BUGFIX: this path previously ended with a stray trailing space
        # ("...archive "), which made every glob() below match nothing
        # when running on the server.
        local_archive = "/home/gema/seiscomp3/var/lib/archive"
    else:
        local_buffer = "%s/seiscomp_data_buffer" % (local_dir_name)
        local_archive = "%s/seiscomp_data_archive" % (local_dir_name)

    st = Stream()
    channels = "*Z" if only_vertical_channel else "*"

    # Read the archive directory, one day per station.
    for network, station in zip(networks, stations):
        this_day = starttime
        while this_day <= endtime:
            pattern = '%s/%s/%s/%s/%s.D' % (local_archive, this_day.strftime("%Y"), network, station, channels)
            for path in sorted(glob.glob(pattern)):
                day_pattern = "%s/*%s" % (path, this_day.strftime("%Y.%03j"))
                for msfile in glob.glob(day_pattern):
                    st += read(msfile, starttime=starttime, endtime=endtime)
            this_day += 86400  # advance one day (seconds)

    # Gaps are computed from the archive only, before buffer data is appended.
    gaps = st.get_gaps()

    # Read the buffer directory for near-real-time requests (< 1 h old).
    if UTCDateTime().now() - endtime <= 3600:
        for network, station in zip(networks, stations):
            path = '%s/%s/segments' % (local_buffer, station)
            name_list = os.listdir(path)
            full_list = [os.path.join(path, i) for i in name_list]
            # Only the three most recently modified segment files are needed.
            time_sorted_list = sorted(full_list, key=os.path.getmtime)
            for msfile in time_sorted_list[-3:]:
                st += read(msfile, starttime=starttime, endtime=endtime).select(channel=channels)

    if len(st) > 1:
        st.merge(method=1, interpolation_samples=-1, fill_value='interpolate')
    return st, gaps