def timeseries(self, network, station, location, channel, starttime, endtime, filter=[], filename=None, output='miniseed', **kwargs): """ Low-level interface for `timeseries` Web service of IRIS (http://service.iris.edu/irisws/timeseries/)- release 1.3.5 (2012-06-07). This method fetches segments of seismic data and returns data formatted in either MiniSEED, ASCII or SAC. It can optionally filter the data. **Channel and temporal constraints (required)** The four SCNL parameters (Station - Channel - Network - Location) are used to determine the channel of interest, and are all required. Wildcards are not accepted. :type network: str :param network: Network code, e.g. ``'IU'``. :type station: str :param station: Station code, e.g. ``'ANMO'``. :type location: str :param location: Location code, e.g. ``'00'`` :type channel: str :param channel: Channel code, e.g. ``'BHZ'``. :type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime` :param starttime: Start date and time. :type endtime: :class:`~obspy.core.utcdatetime.UTCDateTime` :param endtime: End date and time. **Filter Options** The following parameters act as filters upon the time series. :type filter: list of str, optional :param filter: Filter list. List order matters because each filter operation is performed in the order given. For example ``filter=["demean", "lp=2.0"]`` will demean and then apply a low-pass filter, while ``filter=["lp=2.0", "demean"]`` will apply the low-pass filter first, and then demean. ``"taper=WIDTH,TYPE"`` Apply a time domain symmetric tapering function to the time series data. The width is specified as a fraction of the trace length from 0 to 0.5. The window types HANNING (default), HAMMING, or COSINE may be optionally followed, e.g. ``"taper=0.25"`` or ``"taper=0.5,COSINE"``. ``"envelope=true"`` Calculate the envelope of the time series. This calculation uses a Hilbert transform approximated by a time domain filter. ``"lp=FREQ"`` Low-pass filter the time-series using an IIR 4th order filter, using this value (in Hertz) as the cutoff, e.g. ``"lp=1.0"``. ``"hp=FREQ"`` High-pass filter the time-series using an IIR 4th order filter, using this value (in Hertz) as the cutoff, e.g. ``"hp=3.0"``. ``"bp=FREQ1,FREQ2"`` Band pass frequencies, in Hz, e.g. ``"bp=0.1,1.0"``. ``"demean"`` Remove mean value from data. ``"scale"`` Scale data samples by specified factor, e.g. ``"scale=2.0"`` If ``"scale=AUTO"``, the data will be scaled by the stage-zero gain. Cannot specify both ``scale`` and ``divscale``. Cannot specify both ``correct`` and ``scale=AUTO``. ``"divscale"`` Scale data samples by the inverse of the specified factor, e.g ``"divscale=2.0"``. You cannot specify both ``scale`` and ``divscale``. ``"correct"`` Apply instrument correction to convert to earth units. Uses either deconvolution or polynomial response correction. Cannot specify both ``correct`` and ``scale=AUTO``. Correction on > 10^7 samples will result in an error. At a sample rate of 20 Hz, 10^7 samples is approximately 5.8 days. ``"freqlimits=FREQ1,FREQ2,FREQ3,FREQ4"`` Specify an envelope for a spectrum taper for deconvolution, e.g. ``"freqlimits=0.0033,0.004,0.05,0.06"``. Frequencies are specified in Hertz. This cosine taper scales the spectrum from 0 to 1 between FREQ1 and FREQ2 and from 1 to 0 between FREQ3 and FREQ4. Can only be used with the correct option. Cannot be used in combination with the ``autolimits`` option. ``"autolimits=X,Y"`` Automatically determine frequency limits for deconvolution, e.g. ``"autolimits=3.0,3.0"``. 
A pass band is determined for all frequencies with the lower and upper corner cutoffs defined in terms of dB down from the maximum amplitude. This algorithm is designed to work with flat responses, i.e. a response in velocity for an instrument which is flat to velocity. Other combinations will likely result in unsatisfactory results. Cannot be used in combination with the ``freqlimits`` option. ``"units=UNIT"`` Specify output units. Can be DIS, VEL, ACC or DEF, where DEF results in no unit conversion, e.g. ``"units=VEL"``. Option ``units`` can only be used with ``correct``. ``"diff"`` Differentiate using 2 point (uncentered) method ``"int"`` Integrate using trapezoidal (midpoint) method ``"decimate=SAMPLERATE"`` Specify the sample-rate to decimate to, e.g. ``"decimate=2.0"``. The sample-rate of the source divided by the given sample-rate must be factorable by 2,3,4,7. **Miscelleneous options** :type filename: str, optional :param filename: Name of a output file. If this parameter is given nothing will be returned. Default is ``None``. :type output: str, optional :param output: Output format if parameter ``filename`` is used. ``'ascii'`` Data format, 1 column (values) ``'ascii2'`` ASCII data format, 2 columns (time, value) ``'ascii'`` Same as ascii2 ``'audio'`` audio WAV file ``'miniseed'`` IRIS MiniSEED format ``'plot'`` A simple plot of the time series ``'saca'`` SAC, ASCII format ``'sacbb'`` SAC, binary big-endian format ``'sacbl'`` SAC, binary little-endian format :rtype: :class:`~obspy.core.stream.Stream` or ``None`` :return: ObsPy Stream object if no ``filename`` is given. .. rubric:: Example >>> from obspy.clients.iris import Client >>> from obspy import UTCDateTime >>> dt = UTCDateTime("2005-01-01T00:00:00") >>> client = Client() >>> st = client.timeseries("IU", "ANMO", "00", "BHZ", dt, dt+10) >>> print(st[0].data) # doctest: +ELLIPSIS [ 24 20 19 19 19 15 10 4 -4 -11 ... >>> st = client.timeseries("IU", "ANMO", "00", "BHZ", dt, dt+10, ... filter=["correct", "demean", "lp=2.0"]) >>> print(st[0].data) # doctest: +ELLIPSIS [ -1.57488682e-06 -1.26318002e-06 -7.84807128e-07 ... """ kwargs['network'] = str(network) kwargs['station'] = str(station) if location: kwargs['location'] = str(location)[0:2] else: kwargs['location'] = '--' kwargs['channel'] = str(channel) # convert UTCDateTime to string for query kwargs['starttime'] = UTCDateTime(starttime).format_IRIS_web_service() kwargs['endtime'] = UTCDateTime(endtime).format_IRIS_web_service() # output if filename: kwargs['output'] = output else: kwargs['output'] = 'miniseed' # build up query try: data = self._fetch("timeseries", param_list=filter, **kwargs) except urllib.request.HTTPError as e: msg = "No waveform data available (%s: %s)" msg = msg % (e.__class__.__name__, e) raise Exception(msg) # write directly if file name is given if filename: return self._to_file_or_data(filename, data, True) # create temporary file for writing data with NamedTemporaryFile() as tf: tf.write(data) # read stream using obspy.io.mseed tf.seek(0) try: stream = read(tf.name, 'MSEED') except: stream = Stream() return stream
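# Supplementary usage sketch for the filter chain documented above (not part of
# the original module): it assumes the IRIS timeseries service is reachable and
# that response metadata exists for the requested channel. The freqlimits
# values are illustrative only.
from obspy import UTCDateTime
from obspy.clients.iris import Client

client = Client()
dt = UTCDateTime("2005-01-01T00:00:00")

# Order matters: correct to ground units first, then demean and low-pass.
client.timeseries(
    "IU", "ANMO", "00", "BHZ", dt, dt + 60,
    filter=["correct", "freqlimits=0.0033,0.004,0.05,0.06",
            "demean", "lp=2.0"],
    filename="ANMO_corrected.sac", output="sacbl")
# Because filename is given, nothing is returned; omit it to get a Stream back.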
from obspy import Stream, UTCDateTime
from obspy.clients.earthworm import Client  # assumed source of Client(host, port); adjust if a different wave-server client is used
from numpy import argmax

# STATION, CHANNEL (DDF --> 400 Hz), NETWORK AND LOCATION CODES
sta = 'FG8'   # station
cha = 'BHZ'   # channel
net = 'GI'    # network
loc = '00'    # location; it depends mostly on which network you are in.

client = Client('138.253.112.23', 16022)  # ip, port - ip's 138.253.113.19 or 138.253.112.23
t1 = UTCDateTime(2018, 5, 20, 2, 58, 20)  # arguments are year, month, day, hour, minute, second
t2 = t1 + 90

st_ref2 = Stream()
st_ref2 = client.get_waveforms(net, sta, loc, cha, t1, t2)
st_ref2.detrend(type='linear')
st_ref2.detrend(type='demean')
st_ref2.filter(type='bandpass', freqmin=0.5, freqmax=5)
st_ref2.plot(color='r', starttime=t1, endtime=t2)
ref2_st = st_ref2[0].data

#%%
# STATION, CHANNEL (DDF --> 400 Hz), NETWORK AND LOCATION CODES
sta = 'FG8'   # station
cha = 'BDF'   # channel
net = 'GI'    # network
loc = '02'    # location; it depends mostly on which network you are in.
def _prepare_data(detect_data, detections, template, delays, shift_len, plot): """ Prepare data for lag_calc - reduce memory here. :type detect_data: obspy.core.stream.Stream :param detect_data: Stream to extract detection streams from. :type detections: list :param detections: List of :class:`eqcorrscan.core.match_filter.Detection` to get data for. :type template: tuple :param template: tuple of (template_name, template) :type delays: list :param delays: Dictionary of delay times in seconds keyed by sta.channel. :type shift_len: float :param shift_len: Shift length in seconds allowed for picking. :type plot: bool :param plot: Whether to plot the data extracted or not, used for debugging. :returns: List of detect_streams to be worked on :rtype: list """ detect_streams = [] for detection in detections: if detection.template_name != template[0]: continue # Stream to be saved for new detection detect_stream = [] max_delay = 0 for tr in detect_data: template_tr = template[1].select( station=tr.stats.station, channel=tr.stats.channel) if len(template_tr) >= 1: # Save template trace length in seconds template_len = ( len(template_tr[0]) / template_tr[0].stats.sampling_rate) else: continue # If there is no template-data match then skip the rest # of the trace loop. # Grab the delays for the desired template: [(sta, chan, delay)] # Now grab the delay for the desired trace for this template delay = delays[tr.stats.station + '.' + tr.stats.channel] if delay > max_delay: max_delay = delay detect_stream.append(tr.slice( starttime=detection.detect_time - shift_len + delay, endtime=detection.detect_time + delay + shift_len + template_len).copy()) for tr in detect_stream: if len(tr.data) == 0: msg = ('No data in %s.%s for detection at time %s' % (tr.stats.station, tr.stats.channel, detection.detect_time)) warnings.warn(msg) detect_stream.remove(tr) elif tr.stats.endtime - tr.stats.starttime < ( 2 * shift_len) + template_len: msg = ("Insufficient data for %s.%s will not use." % (tr.stats.station, tr.stats.channel)) warnings.warn(msg) detect_stream.remove(tr) elif np.ma.is_masked(tr.data): msg = ("Masked data found for %s.%s, will not use." % (tr.stats.station, tr.stats.channel)) warnings.warn(msg) detect_stream.remove(tr) # Check for duplicate traces stachans = [(tr.stats.station, tr.stats.channel) for tr in detect_stream] c_stachans = Counter(stachans) for key in c_stachans.keys(): if c_stachans[key] > 1: msg = ('Multiple channels for %s.%s, likely a data issue' % (key[0], key[1])) raise LagCalcError(msg) if plot: background = detect_data.slice( starttime=detection.detect_time - (shift_len + 5), endtime=detection.detect_time + shift_len + max_delay + 7).copy() for tr in background: if len(tr.data) == 0: background.remove(tr) detection_multiplot( stream=background, template=Stream(detect_stream), times=[detection.detect_time - shift_len], title='Detection Extracted') if not len(detect_stream) == 0: detect_stream = Stream(detect_stream).split() # Make sure there are no masks left over. # Create tuple of (template name, data stream) detect_streams.append((detection.template_name, Stream(detect_stream))) return detect_streams
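# Worked illustration (not from the original module) of the slice window used
# above for each channel of a detection; the numbers are made up.
from obspy import UTCDateTime

detect_time = UTCDateTime("2020-01-01T00:10:00")  # hypothetical detection time
delay = 1.5          # moveout of this channel within the template (s)
shift_len = 0.2      # allowed pick shift either side (s)
template_len = 4.0   # template trace length (s)

window_start = detect_time - shift_len + delay
window_end = detect_time + delay + shift_len + template_len
print(window_start, window_end)   # window spans template_len + 2 * shift_len seconds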
# Get saved event info, also used to name files
# date_label = '2018-04-02'  # date for filename
file = open('EvLocs/' + eq_file1, 'r')
lines = file.readlines()
split_line = lines[0].split()
t1 = UTCDateTime(split_line[1])
date_label1 = split_line[1][0:10]

file = open('EvLocs/' + eq_file2, 'r')
lines = file.readlines()
split_line = lines[0].split()
t2 = UTCDateTime(split_line[1])
date_label2 = split_line[1][0:10]

fname1 = 'HD' + date_label1 + '_' + date_label2 + '_tshift.mseed'
tshift = Stream()
tshift = read(fname1)

fname1 = 'HD' + date_label1 + '_' + date_label2 + '_amp_ratio.mseed'
amp_ratio = Stream()
amp_ratio = read(fname1)

fname2 = 'HD' + date_label1 + '_' + date_label2 + '_amp_ave.mseed'
amp_ave = Stream()
amp_ave = read(fname2)

tshift_full = tshift.copy()
tshift.decimate(decimate_fac, no_filter=True)
amp_ratio.decimate(decimate_fac, no_filter=True)
amp_ave.decimate(decimate_fac, no_filter=True)

# print(f'len(tshift): {len(tshift):4d} len(tshift[0].data): {len(tshift[0].data):4d}')
print(f'len(tshift): {len(tshift):4d}')
def get_waveforms_nscl(self, seedname, starttime, duration): """ Gets a regular expression of channels from a start time for a duration in seconds. The regular expression must represent all characters of the 12-character NNSSSSSCCCLL pattern e.g. "US.....[BSHE]HZ.." is valid, but "US.....[BSHE]H" is not. Complex regular expressions are permitted "US.....BHZ..|CU.....[BH]HZ.." .. rubric:: Notes For detailed information regarding the usage of regular expressions in the query, see also the documentation for CWBQuery ("CWBQuery.doc") available at ftp://hazards.cr.usgs.gov/CWBQuery/. Using ".*" regular expression might or might not work. If the 12 character seed name regular expression is less than 12 characters it might get padded with spaces on the server side. :type seedname: str :param seedname: The 12 character seedname or 12 character regexp matching channels :type start: :class:`~obspy.core.utcdatetime.UTCDateTime` :param start: The starting date/time to get :type duration: float :param duration: The duration in seconds to get :rtype: :class:`~obspy.core.stream.Stream` :returns: Stream object with requested data .. rubric:: Example >>> from obspy.clients.neic import Client >>> from obspy import UTCDateTime >>> client = Client() >>> t = UTCDateTime() - 5 * 3600 # 5 hours before now >>> st = client.get_waveforms_nscl("IUANMO BH.00", t, 10) >>> print(st) # doctest: +ELLIPSIS 3 Trace(s) in Stream: IU.ANMO.00.BH... | 40.0 Hz, 401 samples IU.ANMO.00.BH... | 40.0 Hz, 401 samples IU.ANMO.00.BH... | 40.0 Hz, 401 samples """ start = str(UTCDateTime(starttime)).replace("T", " ").replace("Z", "") line = "'-dbg' '-s' '%s' '-b' '%s' '-d' '%s'\t" % \ (seedname, start, duration) if self.debug: print(ascdate() + " " + asctime() + " line=" + line) # prepare for routing through http_proxy_connect address = (self.host, self.port) if self.proxy: proxy = (self.proxy.hostname, self.proxy.port) auth = ((self.proxy.username, self.proxy.password) if self.proxy.username else None) success = False while not success: try: if self.proxy: s, _, _ = http_proxy_connect(address, proxy, auth, timeout=self.timeout) # This socket is already connected to the proxy else: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if self.timeout is not None: s.settimeout(self.timeout) s.connect((self.host, self.port)) with io.BytesIO() as tf: s.send(line.encode('ascii', 'strict')) if self.debug: print(ascdate(), asctime(), "Connected - start reads") slept = 0 maxslept = self.timeout / 0.05 totlen = 0 while True: try: # Recommended bufsize is a small power of 2. data = s.recv(4096) if self.debug: print(ascdate(), asctime(), "read len", str(len(data)), " total", str(totlen)) _pos = data.find(b"<EOR>") # <EOR> can be after every 512 bytes which seems to # be the record length cwb query uses. 
if _pos >= 0 and (_pos + totlen) % 512 == 0: if self.debug: print(ascdate(), asctime(), b"<EOR> seen") tf.write(data[0:_pos]) totlen += len(data[0:_pos]) tf.seek(0) try: st = read(tf, 'MSEED') except Exception: st = Stream() st.trim(starttime, starttime + duration) s.close() success = True break else: totlen += len(data) tf.write(data) slept = 0 except socket.error: if slept > maxslept: print(ascdate(), asctime(), "Timeout on connection", "- try to reconnect") slept = 0 s.close() sleep(0.05) slept += 1 except socket.error: print(traceback.format_exc()) print("CWB QueryServer at " + self.host + "/" + str(self.port)) raise except Exception as e: print(traceback.format_exc()) print("**** exception found=" + str(e)) raise if self.debug: print(ascdate() + " " + asctime() + " success? len=" + str(totlen)) st.merge(-1) return st
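# Aside (not part of the client): the buffering trick above works because
# ObsPy's read() accepts file-like objects, so the MiniSEED records collected
# from the socket can be parsed without touching disk. Demonstrated here with
# ObsPy's bundled example stream instead of a live CWB connection.
import io
from obspy import read

buf = io.BytesIO()
read().write(buf, format="MSEED")   # stand-in for the bytes received up to <EOR>

buf.seek(0)
st = read(buf, format="MSEED")
print(st)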
def test_filter(self): """ Tests the filter method of the Stream object. Basically three scenarios are tested (with differing filter options): - filtering with in_place=False: - is original stream unchanged? - is data of filtered stream's traces the same as if done by hand - is processing information present in filtered stream's traces - filtering with in_place=True: - is data of filtered stream's traces the same as if done by hand - is processing information present in filtered stream's traces - filtering with bad arguments passed to stream.filter(): - is a TypeError properly raised? - after all bad filter calls, is the stream still unchanged? """ # set specific seed value such that random numbers are reproducible np.random.seed(815) header = { 'network': 'BW', 'station': 'BGLD', 'starttime': UTCDateTime(2007, 12, 31, 23, 59, 59, 915000), 'npts': 412, 'sampling_rate': 200.0, 'channel': 'EHE' } trace1 = Trace(data=np.random.randint(0, 1000, 412), header=deepcopy(header)) header['starttime'] = UTCDateTime(2008, 1, 1, 0, 0, 4, 35000) header['npts'] = 824 trace2 = Trace(data=np.random.randint(0, 1000, 824), header=deepcopy(header)) header['starttime'] = UTCDateTime(2008, 1, 1, 0, 0, 10, 215000) trace3 = Trace(data=np.random.randint(0, 1000, 824), header=deepcopy(header)) header['starttime'] = UTCDateTime(2008, 1, 1, 0, 0, 18, 455000) header['npts'] = 50668 trace4 = Trace(data=np.random.randint(0, 1000, 50668), header=deepcopy(header)) mseed_stream = Stream(traces=[trace1, trace2, trace3, trace4]) header = { 'network': '', 'station': 'RNON ', 'location': '', 'starttime': UTCDateTime(2004, 6, 9, 20, 5, 59, 849998), 'sampling_rate': 200.0, 'npts': 12000, 'channel': ' Z' } trace = Trace(data=np.random.randint(0, 1000, 12000), header=header) gse2_stream = Stream(traces=[trace]) # streams to run tests on: streams = [mseed_stream, gse2_stream] # drop the longest trace of the first stream to save a second streams[0].pop() streams_bkp = deepcopy(streams) # different sets of filters to run test on: filters = [['bandpass', { 'freqmin': 1., 'freqmax': 20. }], ['bandstop', { 'freqmin': 5, 'freqmax': 15., 'corners': 6 }], ['lowpass', { 'freq': 30.5, 'zerophase': True }], ['highpass', { 'freq': 2, 'corners': 2 }]] filter_map = { 'bandpass': bandpass, 'bandstop': bandstop, 'lowpass': lowpass, 'highpass': highpass } # tests for in_place=True for j, st in enumerate(streams): st_bkp = streams_bkp[j] for filt_type, filt_ops in filters: st = deepcopy(streams_bkp[j]) st.filter(filt_type, **filt_ops) # test if all traces were filtered as expected for i, tr in enumerate(st): data_filt = filter_map[filt_type]( st_bkp[i].data, df=st_bkp[i].stats.sampling_rate, **filt_ops) np.testing.assert_array_equal(tr.data, data_filt) self.assertTrue('processing' in tr.stats) self.assertEqual(len(tr.stats.processing), 1) self.assertEqual(tr.stats.processing[0], "filter:%s:%s" % (filt_type, filt_ops)) st.filter(filt_type, **filt_ops) for i, tr in enumerate(st): self.assertTrue('processing' in tr.stats) self.assertEqual(len(tr.stats.processing), 2) for proc_info in tr.stats.processing: self.assertEqual( proc_info, "filter:%s:%s" % (filt_type, filt_ops)) # some tests that should raise an Exception st = streams[0] st_bkp = streams_bkp[0] bad_filters = [['bandpass', { 'freqmin': 1., 'XXX': 20. 
}], ['bandstop', [1, 2, 3, 4, 5]], ['bandstop', None], ['bandstop', 3], ['bandstop', 'XXX']] for filt_type, filt_ops in bad_filters: self.assertRaises(TypeError, st.filter, filt_type, filt_ops) bad_filters = [['bandpass', { 'freqmin': 1., 'XXX': 20. }], ['bandstop', { 'freqmin': 5, 'freqmax': "XXX", 'corners': 6 }], ['bandstop', {}], ['bandpass', { 'freqmin': 5, 'corners': 6 }], ['bandpass', { 'freqmin': 5, 'freqmax': 20., 'df': 100. }]] for filt_type, filt_ops in bad_filters: self.assertRaises(TypeError, st.filter, filt_type, **filt_ops) bad_filters = [['XXX', {'freqmin': 5, 'freqmax': 20., 'corners': 6}]] for filt_type, filt_ops in bad_filters: self.assertRaises(ValueError, st.filter, filt_type, **filt_ops) # test if stream is unchanged after all these bad tests for i, tr in enumerate(st): np.testing.assert_array_equal(tr.data, st_bkp[i].data) self.assertEqual(tr.stats, st_bkp[i].stats)
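# Minimal sketch (not part of the test module) of the equivalence the test
# relies on: Trace.filter is a thin wrapper around the functions in
# obspy.signal.filter, so filtering "by hand" gives the same samples.
import numpy as np
from obspy import read
from obspy.signal.filter import bandpass

tr = read()[0].copy()                    # ObsPy's bundled example trace
df = tr.stats.sampling_rate

by_hand = bandpass(tr.data, freqmin=1.0, freqmax=20.0, df=df)
tr.filter("bandpass", freqmin=1.0, freqmax=20.0)

np.testing.assert_allclose(tr.data, by_hand)
print(tr.stats.processing)               # the filter call is logged in the stats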
def get_value(self): station_id, coordinates = self.items[self.current_index] data = Stream() # Now get the actual waveform files. Also find the # corresponding station file and check the coordinates. this_waveforms = { _i["channel_id"]: _i for _i in waveforms if _i["channel_id"].startswith(station_id + ".") } marked_for_deletion = [] for key, value in this_waveforms.iteritems(): value["trace"] = read(value["filename"])[0] data += value["trace"] value["station_file"] = \ station_cache.get_station_filename( value["channel_id"], UTCDateTime(value["starttime_timestamp"])) if value["station_file"] is None: marked_for_deletion.append(key) msg = ("Warning: Data and station information for '%s'" " is available, but the station information " "only for the wrong timestamp. You should try " "and retrieve the correct station file.") warnings.warn(msg % value["channel_id"]) continue data[-1].stats.station_file = value["station_file"] for key in marked_for_deletion: del this_waveforms[key] if not this_waveforms: msg = "Could not retrieve data for station '%s'." % \ station_id warnings.warn(msg) return None # Now attempt to get the synthetics. synthetics_filenames = [] for name, path in synthetic_files.iteritems(): if (station_id + ".") in name: synthetics_filenames.append(path) if len(synthetics_filenames) != 3: msg = "Found %i not 3 synthetics for station '%s'." % ( len(synthetics_filenames), station_id) warnings.warn(msg) return None synthetics = Stream() # Read all synthetics. for filename in synthetics_filenames: synthetics += read(filename) for synth in synthetics: if synth.stats.channel in ["X", "Z"]: synth.data *= -1.0 synth.stats.channel = SYNTH_MAPPING[synth.stats.channel] synth.stats.starttime = event_info["origin_time"] # Process the data. len_synth = synthetics[0].stats.endtime - \ synthetics[0].stats.starttime data.trim(synthetics[0].stats.starttime - len_synth * 0.05, synthetics[0].stats.endtime + len_synth * 0.05) if data: max_length = max([tr.stats.npts for tr in data]) else: max_length = 0 if max_length == 0: msg = ( "Warning: After trimming the waveform data to " "the time window of the synthetics, no more data is " "left. The reference time is the one given in the " "QuakeML file. Make sure it is correct and that " "the waveform data actually contains data in that " "time span.") warnings.warn(msg) data.detrend("linear") data.taper() new_time_array = np.linspace( synthetics[0].stats.starttime.timestamp, synthetics[0].stats.endtime.timestamp, synthetics[0].stats.npts) # Simulate the traces. for trace in data: # Decimate in case there is a large difference between # synthetic sampling rate and sampling_rate of the data. # XXX: Ugly filter, change! if trace.stats.sampling_rate > (6 * synth.stats.sampling_rate): new_nyquist = trace.stats.sampling_rate / 2.0 / 5.0 trace.filter("lowpass", freq=new_nyquist, corners=4, zerophase=True) trace.decimate(factor=5, no_filter=None) station_file = trace.stats.station_file if "/SEED/" in station_file: paz = Parser(station_file).getPAZ( trace.id, trace.stats.starttime) trace.simulate(paz_remove=paz) elif "/RESP/" in station_file: trace.simulate( seedresp={ "filename": station_file, "units": "VEL", "date": trace.stats.starttime }) else: raise NotImplementedError # Make sure that the data array is at least as long as the # synthetics array. Also add some buffer sample for the # spline interpolation to work in any case. 
buf = synth.stats.delta * 5 if synth.stats.starttime < (trace.stats.starttime + buf): trace.trim(starttime=synth.stats.starttime - buf, pad=True, fill_value=0.0) if synth.stats.endtime > (trace.stats.endtime - buf): trace.trim(endtime=synth.stats.endtime + buf, pad=True, fill_value=0.0) old_time_array = np.linspace( trace.stats.starttime.timestamp, trace.stats.endtime.timestamp, trace.stats.npts) # Interpolation. trace.data = interp1d(old_time_array, trace.data, kind=1)(new_time_array) trace.stats.starttime = synthetics[0].stats.starttime trace.stats.sampling_rate = \ synthetics[0].stats.sampling_rate data.filter("bandpass", freqmin=lowpass, freqmax=highpass) synthetics.filter("bandpass", freqmin=lowpass, freqmax=highpass) # Rotate the synthetics if nessesary. if self.rot_angle: # First rotate the station back to see, where it was # recorded. lat, lng = rotations.rotate_lat_lon( coordinates["latitude"], coordinates["longitude"], self.rot_axis, -self.rot_angle) # Rotate the data. n_trace = synthetics.select(component="N")[0] e_trace = synthetics.select(component="E")[0] z_trace = synthetics.select(component="Z")[0] n, e, z = rotations.rotate_data(n_trace.data, e_trace.data, z_trace.data, lat, lng, self.rot_axis, self.rot_angle) n_trace.data = n e_trace.data = e z_trace.data = z return { "data": data, "synthetics": synthetics, "coordinates": coordinates }
def readQ(filename, headonly=False, data_directory=None, byteorder='=', **kwargs): # @UnusedVariable """ Reads a Seismic Handler Q file and returns an ObsPy Stream object. .. warning:: This function should NOT be called directly, it registers via the ObsPy :func:`~obspy.core.stream.read` function, call this instead. :type filename: str :param filename: Q header file to be read. Must have a `QHD` file extension. :type headonly: bool, optional :param headonly: If set to True, read only the head. This is most useful for scanning available data in huge (temporary) data sets. :type data_directory: str, optional :param data_directory: Data directory where the corresponding QBN file can be found. :type byteorder: str, optional :param byteorder: Enforce byte order for data file. This is important for Q files written in older versions of Seismic Handler, which don't explicit state the `BYTEORDER` flag within the header file. Can be little endian (``'<'``), big endian (``'>'``), or native byte order (``'='``). Defaults to ``'='``. :rtype: :class:`~obspy.core.stream.Stream` :return: A ObsPy Stream object. Q files consists of two files per data set: * a ASCII header file with file extension `QHD` and the * binary data file with file extension `QBN`. The read method only accepts header files for the ``filename`` parameter. ObsPy assumes that the corresponding data file is within the same directory if the ``data_directory`` parameter is not set. Otherwise it will search in the given ``data_directory`` for a file with the `QBN` file extension. This function should NOT be called directly, it registers via the ObsPy :func:`~obspy.core.stream.read` function, call this instead. .. rubric:: Example >>> from obspy import read >>> st = read("/path/to/QFILE-TEST.QHD") >>> st #doctest: +ELLIPSIS <obspy.core.stream.Stream object at 0x...> >>> print(st) # doctest: +ELLIPSIS 3 Trace(s) in Stream: .TEST..BHN | 2009-10-01T12:46:01.000000Z - ... | 20.0 Hz, 801 samples .TEST..BHE | 2009-10-01T12:46:01.000000Z - ... | 20.0 Hz, 801 samples .WET..HHZ | 2010-01-01T01:01:05.999000Z - ... | 100.0 Hz, 4001 samples """ if not headonly: if not data_directory: data_file = os.path.splitext(filename)[0] + '.QBN' else: data_file = os.path.basename(os.path.splitext(filename)[0]) data_file = os.path.join(data_directory, data_file + '.QBN') if not os.path.isfile(data_file): msg = "Can't find corresponding QBN file at %s." 
raise IOError(msg % data_file) fh_data = open(data_file, 'rb') # loop through read header file fh = open(filename, 'rt') line = fh.readline() cmtlines = int(line[5:7]) - 1 # comment lines comments = [] for _i in range(0, cmtlines): comments += [fh.readline()] # trace lines traces = {} i = -1 id = '' for line in fh: cid = int(line[0:2]) if cid != id: id = cid i += 1 traces.setdefault(i, '') traces[i] += line[3:].strip() # create stream object stream = Stream() for id in sorted(traces.keys()): # fetch headers header = {} header['sh'] = { "FROMQ": True, "FILE": os.path.splitext(os.path.split(filename)[1])[0], } channel = ['', '', ''] npts = 0 for item in traces[id].split('~'): key = item.strip()[0:4] value = item.strip()[5:].strip() if key == 'L001': npts = header['npts'] = int(value) elif key == 'L000': continue elif key == 'R000': header['delta'] = float(value) elif key == 'R026': header['calib'] = float(value) elif key == 'S001': header['station'] = value elif key == 'C000' and value: channel[2] = value[0] elif key == 'C001' and value: channel[0] = value[0] elif key == 'C002' and value: channel[1] = value[0] elif key == 'C003': if value == '<' or value == '>': byteorder = header['sh']['BYTEORDER'] = value elif key == 'S021': # 01-JAN-2009_01:01:01.0 # 1-OCT-2009_12:46:01.000 header['starttime'] = toUTCDateTime(value) elif key == 'S022': header['sh']['P-ONSET'] = toUTCDateTime(value) elif key == 'S023': header['sh']['S-ONSET'] = toUTCDateTime(value) elif key == 'S024': header['sh']['ORIGIN'] = toUTCDateTime(value) elif key: key = INVERTED_SH_IDX.get(key, key) if key in SH_KEYS_INT: header['sh'][key] = int(value) elif key in SH_KEYS_FLOAT: header['sh'][key] = float(value) else: header['sh'][key] = value # set channel code header['channel'] = ''.join(channel) # remember record number header['sh']['RECNO'] = len(stream) + 1 if headonly: # skip data stream.append(Trace(header=header)) else: if not npts: stream.append(Trace(header=header)) continue # read data data = fh_data.read(npts * 4) dtype = native_str(byteorder + 'f4') data = np.fromstring(data, dtype=dtype) # convert to system byte order data = np.require(data, native_str('=f4')) stream.append(Trace(data=data, header=header)) if not headonly: fh_data.close() fh.close() return stream
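# Hedged usage sketch for the reader above (paths are placeholders): header-only
# scanning and reading with the binary QBN file kept in a separate directory.
from obspy import read

st = read("/path/to/QFILE-TEST.QHD", headonly=True)
for tr in st:
    print(tr.id, tr.stats.npts)

st = read("/path/to/QFILE-TEST.QHD", data_directory="/path/to/qbn_files")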
def readASC(filename, headonly=False, skip=0, delta=None, length=None, **kwargs): # @UnusedVariable """ Reads a Seismic Handler ASCII file and returns an ObsPy Stream object. .. warning:: This function should NOT be called directly, it registers via the ObsPy :func:`~obspy.core.stream.read` function, call this instead. :type filename: str :param filename: ASCII file to be read. :type headonly: bool, optional :param headonly: If set to True, read only the head. This is most useful for scanning available data in huge (temporary) data sets. :type skip: int, optional :param skip: Number of lines to be skipped from top of file. If defined only one trace is read from file. :type delta: float, optional :param delta: If ``skip`` is used, ``delta`` defines sample offset in seconds. :type length: int, optional :param length: If ``skip`` is used, ``length`` defines the number of values to be read. :rtype: :class:`~obspy.core.stream.Stream` :return: A ObsPy Stream object. .. rubric:: Example >>> from obspy import read >>> st = read("/path/to/QFILE-TEST-ASC.ASC") >>> st # doctest: +ELLIPSIS <obspy.core.stream.Stream object at 0x...> >>> print(st) # doctest: +ELLIPSIS 3 Trace(s) in Stream: .TEST..BHN | 2009-10-01T12:46:01.000000Z - ... | 20.0 Hz, 801 samples .TEST..BHE | 2009-10-01T12:46:01.000000Z - ... | 20.0 Hz, 801 samples .WET..HHZ | 2010-01-01T01:01:05.999000Z - ... | 100.0 Hz, 4001 samples """ fh = open(filename, 'rt') # read file and split text into channels channels = [] headers = {} data = io.StringIO() for line in fh.readlines()[skip:]: if line.isspace(): # blank line # check if any data fetched yet if len(headers) == 0 and data.tell() == 0: continue # append current channel data.seek(0) channels.append((headers, data)) # create new channel headers = {} data = io.StringIO() if skip: # if skip is set only one trace is read, everything else makes # no sense. break continue elif line[0].isalpha(): # header entry key, value = line.split(':', 1) key = key.strip() value = value.strip() headers[key] = value elif not headonly: # data entry - may be written in multiple columns data.write(line.strip() + ' ') fh.close() # create ObsPy stream object stream = Stream() # custom header custom_header = {} if delta: custom_header["delta"] = delta if length: custom_header["npts"] = length for headers, data in channels: # create Stats header = Stats(custom_header) header['sh'] = {} channel = [' ', ' ', ' '] # generate headers for key, value in headers.items(): if key == 'DELTA': header['delta'] = float(value) elif key == 'LENGTH': header['npts'] = int(value) elif key == 'CALIB': header['calib'] = float(value) elif key == 'STATION': header['station'] = value elif key == 'COMP': channel[2] = value[0] elif key == 'CHAN1': channel[0] = value[0] elif key == 'CHAN2': channel[1] = value[0] elif key == 'START': # 01-JAN-2009_01:01:01.0 # 1-OCT-2009_12:46:01.000 header['starttime'] = toUTCDateTime(value) else: # everything else gets stored into sh entry if key in SH_KEYS_INT: header['sh'][key] = int(value) elif key in SH_KEYS_FLOAT: header['sh'][key] = float(value) else: header['sh'][key] = value # set channel code header['channel'] = ''.join(channel) if headonly: # skip data stream.append(Trace(header=header)) else: # read data data = loadtxt(data, dtype=np.float32, ndmin=1) # cut data if requested if skip and length: data = data[:length] # use correct value in any case header["npts"] = len(data) stream.append(Trace(data=data, header=header)) return stream
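# Hedged usage sketch for the skip/delta/length options documented above
# (path and values are placeholders): read a single trace that starts some
# lines into an SH ASCII file, supplying the timing information by hand.
from obspy import read

st = read("/path/to/shfile.ASC", skip=12, delta=0.05, length=801)
print(st)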
def multi_event_singlechan(streams, catalog, clip=10.0, pre_pick=2.0, freqmin=False, freqmax=False, realign=False, cut=(-3.0, 5.0), PWS=False, title=False): r"""Function to plot data from a single channel at a single station for \ multiple events - data will be alligned by their pick-time given in the \ picks. :type streams: list of :class:obspy.stream :param streams: List of the streams to use, can contain more traces than \ you plan on plotting :type catalog: obspy.core.event.Catalog :param catalog: Catalog of events, one for each trace, with a single pick :type clip: float :param clip: Length in seconds to plot, defaults to 10.0 :type pre_pick: float :param pre_pick: Length in seconds to extract and plot before the pick, \ defaults to 2.0 :type freqmin: float :param freqmin: Low cut for bandpass in Hz :type freqmax: float :param freqmax: High cut for bandpass in Hz :type realign: bool :param realign: To compute best alignement based on correlation or not. :type cut: tuple :param cut: tuple of start and end times for cut in seconds from the pick :type PWS: bool :param PWS: compute Phase Weighted Stack, if False, will compute linear \ stack. :type title: str :param title: Plot title. :returns: Alligned and cut traces, and new picks """ from eqcorrscan.utils import stacking import copy from eqcorrscan.core.match_filter import normxcorr2 from obspy import Stream import warnings fig, axes = plt.subplots(len(catalog) + 1, 1, sharex=True, figsize=(7, 12)) axes = axes.ravel() traces = [] al_traces = [] # Keep input safe clist = copy.deepcopy(catalog) st_list = copy.deepcopy(streams) for i, event in enumerate(clist): if st_list[i].select(station=event.picks[0].waveform_id.station_code, channel='*' + event.picks[0].waveform_id.channel_code[-1]): tr = st_list[i].select( station=event.picks[0].waveforms_id.station_code, channel='*' + event.picks[0].waveform_id.channel_code[-1])[0] else: print('No data for ' + event.pick[0].waveform_id) continue tr.detrend('linear') if freqmin: tr.filter('bandpass', freqmin=freqmin, freqmax=freqmax) if realign: tr_cut = tr.copy() tr_cut.trim(event.picks[0].time + cut[0], event.picks[0].time + cut[1], nearest_sample=False) if len(tr_cut.data) <= (0.5 * (cut[1] - cut[0]) * tr_cut.stats.sampling_rate): msg = ''.join([ 'Not enough in the trace for ', tr.stats.station, '.', tr.stats.channel, '\n', 'Suggest removing pick from sfile at time ', str(event.picks[0].time) ]) warnings.warn(msg) else: al_traces.append(tr_cut) else: tr.trim(event.picks[0].time - pre_pick, event.picks[0].time + clip - pre_pick, nearest_sample=False) if len(tr.data) == 0: msg = ''.join([ 'No data in the trace for ', tr.stats.station, '.', tr.stats.channel, '\n', 'Suggest removing pick from sfile at time ', str(event.picks[0].time) ]) warnings.warn(msg) continue traces.append(tr) if realign: shift_len = int(0.25 * (cut[1] - cut[0]) * al_traces[0].stats.sampling_rate) shifts = stacking.align_traces(al_traces, shift_len) for i in xrange(len(shifts)): print('Shifting by ' + str(shifts[i]) + ' seconds') event.picks[0].time -= shifts[i] traces[i].trim(event.picks[0].time - pre_pick, event.picks[0].time + clip - pre_pick, nearest_sample=False) # We now have a list of traces traces = [(trace, trace.stats.starttime.datetime) for trace in traces] traces.sort(key=lambda tup: tup[1]) traces = [trace[0] for trace in traces] # Plot the traces for i, tr in enumerate(traces): y = tr.data x = np.arange(len(y)) x = x / tr.stats.sampling_rate # convert to seconds axes[i + 1].plot(x, y, 'k', linewidth=1.1) axes[i + 
1].yaxis.set_ticks([]) traces = [Stream(trace) for trace in traces] if PWS: linstack = stacking.PWS_stack(traces) else: linstack = stacking.linstack(traces) tr = linstack.select(station=event[0].picks[0].waveform_id.station_code, channel='*' + event[0].picks[0].waveform_id.channel_code[-1])[0] y = tr.data x = np.arange(len(y)) x = x / tr.stats.sampling_rate axes[0].plot(x, y, 'r', linewidth=2.0) axes[0].set_ylabel('Stack', rotation=0) axes[0].yaxis.set_ticks([]) for i, slave in enumerate(traces): cc = normxcorr2(tr.data, slave[0].data) axes[i + 1].set_ylabel('cc=' + str(round(np.max(cc), 2)), rotation=0) axes[i + 1].text(0.9, 0.15, str(round(np.max(slave[0].data))), bbox=dict(facecolor='white', alpha=0.95), transform=axes[i + 1].transAxes) axes[i + 1].text( 0.7, 0.85, slave[0].stats.starttime.datetime.strftime('%Y/%m/%d %H:%M:%S'), bbox=dict(facecolor='white', alpha=0.95), transform=axes[i + 1].transAxes) axes[-1].set_xlabel('Time (s)') if title: axes[0].set_title(title) plt.subplots_adjust(hspace=0) plt.show() return traces, clist
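# Hypothetical call of the plotting helper above; stream/catalog file names and
# filter corners are placeholders.
from obspy import read, read_events

streams = [read("/path/to/event1.ms"), read("/path/to/event2.ms")]
catalog = read_events("/path/to/picked_events.xml")   # one single-pick event per stream

traces, new_catalog = multi_event_singlechan(
    streams, catalog, clip=10.0, pre_pick=2.0,
    freqmin=2.0, freqmax=10.0, realign=True, PWS=False,
    title="Aligned detections")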
def get_N(select_event=0): from obspy.clients.fdsn import Client import matplotlib.pyplot as plt from obspy import read_events, read from obspy import read_inventory from obspy import Stream, Trace from obspy import UTCDateTime import os client = Client('SCEDC') os.environ['PATH'] += os.pathsep + '/usr/local/bin' os.chdir('/Users/vidale/Documents/PyCode/LAB/Spare') # select_event = 12 chan_type = 'EHN,HHN,HNN,HLN,BHN' # e.g., BHN network_sel = 'CI,CE,NP' # CE has four traces, but won't deconvolve #network_sel = 'CI,NP' min_lat = 33.75 max_lat = 34.2 min_lon = -118.5 max_lon = -117.75 start_buff = 50 end_buff = 300 st = Stream() if select_event > 15: fname_inv = 'LAB.QUAKEML2' LAB = read_events(fname_inv, format='QUAKEML') if select_event == 16: t = LAB[1].origins[0].time elif select_event == 17: t = LAB[5].origins[0].time elif select_event == 18: t = LAB[14].origins[0].time elif select_event == 19: t = LAB[13].origins[0].time else: fname_inv = 'LAB.QUAKEML' LAB = read_events(fname_inv, format='QUAKEML') t = LAB[select_event].origins[0].time #print('event:',LAB) #plt.style.use('ggplot') #plt.rcParams['figure.figsize'] = 12, 8 #LAB.plot(projection = 'local', resolution = 'h') #%% Make inventory of all stations in box recording this channel t = LAB[select_event].origins[0].time print(str(t)) s_t = t - start_buff e_t = t + end_buff inventory = client.get_stations(starttime = s_t, endtime = e_t, channel=chan_type, level='response', network=network_sel, minlatitude = min_lat, maxlatitude = max_lat, minlongitude = min_lon, maxlongitude = max_lon) #print(inventory) print('inventory has ' + str(len(inventory)) + ' networks recording data') #for network in inventory: # sta_cnt = 0 # for station in network: # sta_cnt += sta_cnt # print('Network ' + str(network) + ' has ' + str(sta_cnt) + ' stations to try') #inventory.plot(projection = 'local', resolution = 'h') # not working #%% Check inventory of stations for traces at time of event cnt_try = 0 cnt_got = 0 for network in inventory: for station in network: if cnt_try % 20 == 0: print('Try ' + str(cnt_try) + ' got ' + str(cnt_got) + ' sgrams ' + str(len(st))) cnt_try += +1 try: st += client.get_waveforms(network.code, station.code, location='*',channel=chan_type, starttime=s_t, endtime = e_t, attach_response=True) cnt_got += 1 except: pass print(str(cnt_try) + ' stations examined, ' + str(cnt_got) + ' have data, ' + str(len(st)) + ' traces extracted ') fname = 'event' + str(select_event) + '/event' + str(select_event) + 'N_all.mseed' st.write(fname,format = 'MSEED') #st=read(fname) for tr in st: print('Station ' + tr.stats.station + ' channel ' + tr.stats.channel) tr = Trace() hnn_chosen = 0 ehn_chosen = 0 hhn_chosen = 0 hln_chosen = 0 bhn_chosen = 0 for tr in st: if tr.stats.channel == 'HNN': hnn_chosen += 1 if tr.stats.channel == 'EHN': ehn_chosen += 1 if tr.stats.channel == 'HHN': hhn_chosen += 1 if tr.stats.channel == 'HLN': hln_chosen += 1 if tr.stats.channel == 'BHN': bhn_chosen += 1 print('Total channels ' + str(len(st)) + ' - HNN, EHN, HHN, HLN, BHN have ' + str(hnn_chosen) + ' ' + str(ehn_chosen) + ' ' + str(hhn_chosen) + ' ' + str(hln_chosen) + ' ' + str(bhn_chosen)) for tr in st: if (tr.stats.network != 'CE') and (tr.stats.station != 'BVH') and (tr.stats.station != 'LAX'): # if tr.stats.network != 'CE': tr.remove_response(water_level=40, inventory=inventory, output='ACC') fname = 'event' + str(select_event) + '/event' + str(select_event) + 'N_decon.mseed' st.write(fname,format = 'MSEED') ''' st=read('event14N_decon.mseed') ''' tr2 = 
Trace() st_chosen = Stream() hhn_chosen = 0 ehn_chosen = 0 hnn_chosen = 0 hln_chosen = 0 bhn_chosen = 0 for tr in st: if tr.stats.channel == 'HNN': st_chosen += tr hnn_chosen += 1 elif tr.stats.channel == 'EHN': # write EHN if present and BHN is not present skip = 0 for tr2 in st: if tr2.stats.channel == 'HNN' and tr2.stats.station == tr.stats.station: skip = 1 if skip == 0: st_chosen += tr ehn_chosen += 1 elif tr.stats.channel == 'HHN': skip = 0 for tr2 in st: if tr2.stats.channel == 'HNN' and tr2.stats.station == tr.stats.station: skip = 1 if tr2.stats.channel == 'EHN' and tr2.stats.station == tr.stats.station: skip = 1 if skip == 0: st_chosen += tr hhn_chosen += 1 elif tr.stats.channel == 'HLN': skip = 0 for tr2 in st: if tr2.stats.channel == 'HNN' and tr2.stats.station == tr.stats.station: skip = 1 if tr2.stats.channel == 'EHN' and tr2.stats.station == tr.stats.station: skip = 1 if tr2.stats.channel == 'HHN' and tr2.stats.station == tr.stats.station: skip = 1 if skip == 0: st_chosen += tr hln_chosen += 1 elif tr.stats.channel == 'BHN': skip = 0 for tr2 in st: if tr2.stats.channel == 'HNN' and tr2.stats.station == tr.stats.station: skip = 1 if tr2.stats.channel == 'EHN' and tr2.stats.station == tr.stats.station: skip = 1 if tr2.stats.channel == 'HHN' and tr2.stats.station == tr.stats.station: skip = 1 if tr2.stats.channel == 'HLN' and tr2.stats.station == tr.stats.station: skip = 1 if skip == 0: st_chosen += tr bhn_chosen += 1 for tr in st_chosen: print(tr.stats.station + ' ' + tr.stats.channel) print('Chosen - HNN, EHN, HHN, HLN, BHN have ' + str(hnn_chosen) + ' ' + str(ehn_chosen) + ' ' + str(hhn_chosen) + ' ' + str(hln_chosen) + ' ' + str(bhn_chosen)) print(str(len(st_chosen)) + ' traces in dataset') fname = 'event' + str(select_event) + '/event' + str(select_event) + 'N_chosen.mseed' st_chosen.write(fname,format = 'MSEED')
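# A more compact sketch of the same channel-priority selection as above: for
# each station keep only the best available N-component channel. Assumes the
# stream `st` from the preceding block.
from obspy import Stream

priority = ['HNN', 'EHN', 'HHN', 'HLN', 'BHN']
best = {}
for tr in st:
    chan = tr.stats.channel
    if chan not in priority:
        continue
    sta_code = tr.stats.station
    if sta_code not in best or \
            priority.index(chan) < priority.index(best[sta_code].stats.channel):
        best[sta_code] = tr

st_alt = Stream(traces=list(best.values()))
print(len(st_alt), 'traces selected by the compact version')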
def initial_data_gather(code,event_id,bounds,output="VEL", rotate=False,plotmap=False): """gather event information, observation and synthetic traces, preprocess all traces accordingly and return one stream object with 6 traces """ # station information net,sta,loc,cha = code.split('.') # filter bounds tmin,tmax = bounds # grab synthetic data locally, decide syntheticdata_path = join(pathnames()['syns'],event_id,'') syntheticdata = Stream() for c in ["N","E","Z"]: syntheticdata_filename = "{n}.{s}.BX{co}.semv.mseed".format(n=net, s=sta, co=c) syntheticdata += read(join(syntheticdata_path,syntheticdata_filename)) # grab observation data observationdata,inv,cat = getdata.event_stream(code=code, event_id=event_id, startpad=0, endpad=350) if not observationdata: return None # plot event and station on a map if plotmap: plotmod.plot_event_station(inv,cat) # preprocessing, instrument response, STF convolution (synthetics) observationdata_proc = procmod.preprocess(observationdata, inv=inv, output=output) # synthetic timing information time_shift, half_duration = synmod.tshift_halfdur(event_id) # if GCMT solution doesn't exist, timeshift isn't possible if time_shift: syntheticdata_preproc = synmod.stf_convolve(st=syntheticdata, half_duration=half_duration, time_shift=time_shift) syntheticdata_proc = procmod.preprocess(syntheticdata_preproc) else: print('') syntheticdata_proc = procmod.preprocess(syntheticdata) # velocity to displacement if necessary if output == "DISP": syntheticdata_proc.differentiate() # rotate to theoretical backazimuth if rotate: BAz = find_BAz(inv,cat) observationdata_proc.rotate(method='NE->RT',back_azimuth=BAz) syntheticdata_proc.rotate(method='NE->RT',back_azimuth=BAz) # combine, common sampling rate, filter, trim common time st_IDG = observationdata_proc + syntheticdata_proc procmod.trimstreams(st_IDG) st_IDG.filter('bandpass',freqmin=1/tmax, freqmax=1/tmin, corners=2, zerophase=True) return st_IDG
def _read_datamark(filename, century="20", **kwargs): # @UnusedVariable """ Reads a DATAMARK file and returns a Stream object. .. warning:: This function should NOT be called directly, it registers via the ObsPy :func:`~obspy.core.stream.read` function, call this instead. :type filename: str :param filename: DATAMARK file to be read. :param century: DATAMARK stores year as 2 numbers, need century to construct proper datetime. :rtype: :class:`~obspy.core.stream.Stream` :returns: Stream object containing header and data. """ output = {} srates = {} # read datamark file with open(filename, "rb") as fpin: fpin.seek(0, 2) sz = fpin.tell() fpin.seek(0) leng = 0 status0 = 0 start = 0 while leng < sz: pklen = fpin.read(4) if len(pklen) < 4: break leng = 4 truelen = np.fromstring(pklen, native_str('>i'))[0] if truelen == 0: break buff = fpin.read(6) leng += 6 yy = "%s%02x" % (century, ord(buff[0:1])) mm = "%x" % ord(buff[1:2]) dd = "%x" % ord(buff[2:3]) hh = "%x" % ord(buff[3:4]) mi = "%x" % ord(buff[4:5]) sec = "%x" % ord(buff[5:6]) date = UTCDateTime(int(yy), int(mm), int(dd), int(hh), int(mi), int(sec)) if start == 0: start = date if status0 == 0: sdata = None while leng < truelen: buff = fpin.read(4) leng += 4 flag = '%02x' % ord(buff[0:1]) chanum = '%02x' % ord(buff[1:2]) chanum = "%02s%02s" % (flag, chanum) datawide = int('%x' % (ord(buff[2:3]) >> 4)) srate = ord(buff[3:4]) xlen = (srate - 1) * datawide if datawide == 0: xlen = srate // 2 datawide = 0.5 idata00 = fpin.read(4) leng += 4 idata22 = np.fromstring(idata00, native_str('>i'))[0] if chanum in output: output[chanum].append(idata22) else: output[chanum] = [ idata22, ] srates[chanum] = srate sdata = fpin.read(xlen) leng += xlen if len(sdata) < xlen: fpin.seek(-(xlen - len(sdata)), 1) sdata += fpin.read(xlen - len(sdata)) msg = "This shouldn't happen, it's weird..." warnings.warn(msg) if datawide == 0.5: for i in range(xlen): idata2 = output[chanum][-1] + \ np.fromstring(sdata[i:i + 1], np.int8)[0] >> 4 output[chanum].append(idata2) idata2 = idata2 +\ (np.fromstring(sdata[i:i + 1], np.int8)[0] << 4) >> 4 output[chanum].append(idata2) elif datawide == 1: for i in range((xlen // datawide)): idata2 = output[chanum][-1] +\ np.fromstring(sdata[i:i + 1], np.int8)[0] output[chanum].append(idata2) elif datawide == 2: for i in range((xlen // datawide)): idata2 = output[chanum][-1] +\ np.fromstring(sdata[2 * i:2 * (i + 1)], native_str('>h'))[0] output[chanum].append(idata2) elif datawide == 3: for i in range((xlen // datawide)): idata2 = output[chanum][-1] +\ np.fromstring(sdata[3 * i:3 * (i + 1)] + b' ', native_str('>i'))[0] >> 8 output[chanum].append(idata2) elif datawide == 4: for i in range((xlen // datawide)): idata2 = output[chanum][-1] +\ np.fromstring(sdata[4 * i:4 * (i + 1)], native_str('>i'))[0] output[chanum].append(idata2) else: msg = "DATAWIDE is %s " % datawide + \ "but only values of 0.5, 1, 2, 3 or 4 are supported." raise NotImplementedError(msg) traces = [] for i in output.keys(): t = Trace(data=np.array(output[i])) t.stats.channel = str(i) t.stats.sampling_rate = float(srates[i]) t.stats.starttime = start traces.append(t) return Stream(traces=traces)
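# Simplified sketch (not from the original reader) of the half-byte branch
# above: when datawide == 0.5 each byte carries two signed 4-bit difference
# values, high nibble first, which must be sign-extended before use.
def unpack_nibbles(byte_value):
    """Return (high, low) signed 4-bit deltas from one byte given as 0..255."""
    high = byte_value >> 4
    low = byte_value & 0x0F
    if high > 7:
        high -= 16
    if low > 7:
        low -= 16
    return high, low

print(unpack_nibbles(0x93))   # -> (-7, 3)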
import matplotlib.pyplot as plt
import numpy as np
import os
import sys
import time
import re

import pyasdf
from obspy import Stream, UTCDateTime
from dug_seis.processing.dug_trigger import dug_trigger
# from dug_seis.processing.event_processing import event_processing

asdf_folder = '/Users/rinaldia/Documents/DUG-Seis_Output/raw'  # 'raw' # Location of .h5 files
files = sorted([f for f in os.listdir(asdf_folder) if f.endswith('.h5')])  # generates a sorted list of the .h5 files in the folder
stations = [i - 1 for i in [19, 20, 21, 22, 23, 24, 25, 26]]

sta = Stream()
for f in files:
    ds = pyasdf.ASDFDataSet(asdf_folder + '/' + f, mode='r')
    wf_list = ds.waveforms.list()
    for k in stations:
        sta += ds.waveforms[wf_list[k]].raw_recording

for i in range(len(sta.traces)):
    sta.traces[i].stats.delta = 5.e-6
sta.merge()

dt = UTCDateTime("2019-06-12T14:13:00.00")
dt2 = UTCDateTime("2019-06-12T14:13:10.00")
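# The two times defined above are presumably meant to window the merged data;
# a hedged continuation could slice and plot that 10 s interval.
sta_win = sta.slice(starttime=dt, endtime=dt2)
sta_win.plot()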
def readSEISAN(filename, headonly=False, **kwargs): # @UnusedVariable """ Reads a SEISAN file and returns an ObsPy Stream object. .. warning:: This function should NOT be called directly, it registers via the ObsPy :func:`~obspy.core.stream.read` function, call this instead. :type filename: str :param filename: SEISAN file to be read. :rtype: :class:`~obspy.core.stream.Stream` :return: A ObsPy Stream object. .. rubric:: Example >>> from obspy import read >>> st = read("/path/to/2001-01-13-1742-24S.KONO__004") >>> st # doctest: +ELLIPSIS <obspy.core.stream.Stream object at 0x...> >>> print(st) # doctest: +ELLIPSIS 4 Trace(s) in Stream: .KONO.0.B0Z | 2001-01-13T17:45:01.999000Z - ... | 20.0 Hz, 6000 samples .KONO.0.L0Z | 2001-01-13T17:42:24.924000Z - ... | 1.0 Hz, 3542 samples .KONO.0.L0N | 2001-01-13T17:42:24.924000Z - ... | 1.0 Hz, 3542 samples .KONO.0.L0E | 2001-01-13T17:42:24.924000Z - ... | 1.0 Hz, 3542 samples """ def _readline(fh, length=80): data = fh.read(length + 8) end = length + 4 start = 4 return data[start:end] # read data chunk from given file fh = open(filename, 'rb') data = fh.read(80 * 12) # get version info from file (byteorder, arch, _version) = _getVersion(data) # fetch lines fh.seek(0) # start with event file header # line 1 data = _readline(fh) number_of_channels = int(data[30:33]) # calculate number of lines with channels number_of_lines = number_of_channels // 3 + (number_of_channels % 3 and 1) if number_of_lines < 10: number_of_lines = 10 # line 2 data = _readline(fh) # line 3 for _i in xrange(0, number_of_lines): data = _readline(fh) # now parse each event file channel header + data stream = Stream() dlen = arch / 8 dtype = byteorder + 'i' + str(dlen) stype = '=i' + str(dlen) for _i in xrange(number_of_channels): # get channel header temp = _readline(fh, 1040) # create Stats header = Stats() header['network'] = (temp[16] + temp[19]).strip() header['station'] = temp[0:5].strip() header['location'] = (temp[7] + temp[12]).strip() header['channel'] = (temp[5:7] + temp[8]).strip() header['sampling_rate'] = float(temp[36:43]) header['npts'] = int(temp[43:50]) # create start and end times year = int(temp[9:12]) + 1900 month = int(temp[17:19]) day = int(temp[20:22]) hour = int(temp[23:25]) mins = int(temp[26:28]) secs = float(temp[29:35]) header['starttime'] = UTCDateTime(year, month, day, hour, mins) + secs if headonly: # skip data fh.seek(dlen * (header['npts'] + 2), 1) stream.append(Trace(header=header)) else: # fetch data data = np.fromfile(fh, dtype=dtype, count=header['npts'] + 2) # convert to system byte order data = np.require(data, stype) stream.append(Trace(data=data[2:], header=header)) return stream
def download_data(self, client, stdata=[], ndval=np.nan, new_sr=5., dts=120., returned=False, verbose=False): """ Downloads seismograms based on event origin time and P phase arrival. Parameters ---------- client : :class:`~obspy.client.fdsn.Client` Client object ndval : float Fill in value for missing data new_sr : float New sampling rate (Hz) dts : float Time duration (sec) stdata : List Station list returned : bool Whether or not to return the ``accept`` attribute Returns ------- accept : bool Whether or not the object is accepted for further analysis Attributes ---------- data : :class:`~obspy.core.Stream` Stream containing :class:`~obspy.core.Trace` objects """ if self.meta is None: raise (Exception("Requires event data as attribute - aborting")) if not self.meta.accept: return # Define start time for request tstart = self.meta.time + self.meta.ttime - dts tend = self.meta.time + self.meta.ttime + dts # Get waveforms print("* Requesting Waveforms: ") print("* Startime: " + tstart.strftime("%Y-%m-%d %H:%M:%S")) print("* Endtime: " + tend.strftime("%Y-%m-%d %H:%M:%S")) # Download data err, stream = utils.download_data(client=client, sta=self.sta, start=tstart, end=tend, stdata=stdata, ndval=ndval, new_sr=new_sr, verbose=verbose) # Store as attributes with traces in dictionary try: trE = stream.select(component='E')[0] trN = stream.select(component='N')[0] trZ = stream.select(component='Z')[0] self.data = Stream(traces=[trZ, trN, trE]) # Filter Traces and resample self.data.filter('lowpass', freq=0.5 * new_sr, corners=2, zerophase=True) self.data.resample(new_sr, no_filter=False) # If there is no ZNE, perhaps there is Z12? except: try: tr1 = stream.select(component='1')[0] tr2 = stream.select(component='2')[0] trZ = stream.select(component='Z')[0] self.data = Stream(traces=[trZ, tr1, tr2]) self.dataZ12 = Stream(traces=[trZ, tr1, tr2]) # Rotate from Z12 to ZNE using StDb azcorr attribute self.rotate(align='ZNE') # Filter Traces and resample self.data.filter('lowpass', freq=0.5 * new_sr, corners=2, zerophase=True) self.data.resample(new_sr, no_filter=False) except: self.meta.accept = False if returned: return self.meta.accept
from obspy import UTCDateTime, Stream
from obspy.clients.fdsn import Client

# CLIENT is not defined in the original excerpt; the Raspberry Shake FDSN
# service is the usual choice for AM-network stations.
CLIENT = Client("RASPISHAKE")

EQNAME = "M3.1 Puerto Rico"
START_TIME = UTCDateTime("2020-04-24 20:31:02")
DURATION = 50   # duration of record to download in seconds

# Filtering parameters
F1 = 1.0   # High-pass filter corner
F2 = 6.0   # Low-pass filter corner

# Seismometers to load (see http://www.fdsn.org/networks/detail/AM/ for a list of all stations)
seismometers = [['R4DB9', 18.018, -66.8386],
                ['RD17E', 18.0811, -67.0314],
                ['RCCD1', 17.991, -66.6108],
                ['REA26', 18.4414, -67.1532],
                ['R2974', 18.4595, -66.3415],
                ['S4051', 18.3063, -66.0759],
                ['RA906', 18.4324, -66.0588]]

# Load the data
waveform = Stream()   # set up a blank stream variable
for station in seismometers:
    # Download and filter data
    st = CLIENT.get_waveforms("AM", station[0], "00", "EHZ",
                              starttime=START_TIME, endtime=START_TIME + DURATION)
    st.merge(method=0, fill_value='latest')
    st.detrend(type='demean')
    st.filter('bandpass', freqmin=F1, freqmax=F2)
    # Add the coordinates of the stations and distance in metres from the
    # earthquake, needed for the section plot
    st[0].stats["coordinates"] = {}
    st[0].stats["coordinates"]["latitude"] = station[1]
    st[0].stats["coordinates"]["longitude"] = station[2]
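# Hedged continuation of the script above: the comment mentions epicentral
# distances for a section plot, which the excerpt stops short of computing.
# Assumes each per-station stream `st` is accumulated into `waveform`
# (e.g. `waveform += st`) and that the epicentre coordinates below, which are
# placeholders, are replaced with the real ones.
from obspy.geodetics import gps2dist_azimuth

EQ_LAT, EQ_LON = 17.99, -66.78   # placeholder epicentre

for tr in waveform:
    dist_m, _, _ = gps2dist_azimuth(EQ_LAT, EQ_LON,
                                    tr.stats.coordinates["latitude"],
                                    tr.stats.coordinates["longitude"])
    tr.stats.distance = dist_m   # the section plot reads tr.stats.distance (metres)

waveform.plot(type='section', orientation='horizontal', title=EQNAME)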
def rotate(self, vp=None, vs=None, align=None): """ Rotates 3-component seismograms from vertical (Z), east (E) and north (N) to longitudinal (L), radial (Q) and tangential (T) components of motion. Note that the method 'rotate' from ``obspy.core.stream.Stream`` is used for the rotation ``'ZNE->ZRT'`` and ``'ZNE->LQT'``. Rotation ``'ZNE->PVH'`` is implemented separately here due to different conventions. Parameters ---------- vp : float P-wave velocity at surface (km/s) vs : float S-wave velocity at surface (km/s) align : str Alignment of coordinate system for rotation ('ZRT', 'LQT', or 'PVH') Returns ------- rotated : bool Whether or not the object has been rotated """ if not self.meta.accept: return if self.meta.rotated: print("Data have been rotated already - continuing") return # Use default values from meta data if arguments are not specified if not align: align = self.meta.align if align == 'ZNE': # Rotating from 1,2 to N,E is the negative of # rotation from RT to NE, with # baz corresponding to azim of component 1 from obspy.signal.rotate import rotate_rt_ne # Copy traces trZ = self.data.select(component='Z')[0].copy() trN = self.data.select(component='1')[0].copy() trE = self.data.select(component='2')[0].copy() azim = self.sta.azcorr N, E = rotate_rt_ne(trN.data, trE.data, azim) trN.data = -1. * N trE.data = -1. * E # Update stats of streams trN.stats.channel = trN.stats.channel[:-1] + 'N' trE.stats.channel = trE.stats.channel[:-1] + 'E' self.data = Stream(traces=[trZ, trN, trE]) elif align == 'ZRT': self.data.rotate('NE->RT', back_azimuth=self.meta.baz) self.meta.align = align self.meta.rotated = True elif align == 'LQT': self.data.rotate('ZNE->LQT', back_azimuth=self.meta.baz, inclination=self.meta.inc) for tr in self.data: if tr.stats.channel.endswith('Q'): tr.data = -tr.data self.meta.align = align self.meta.rotated = True elif align == 'PVH': # First rotate to ZRT self.data.rotate('NE->RT', back_azimuth=self.meta.baz) # Copy traces trP = self.data.select(component='Z')[0].copy() trV = self.data.select(component='R')[0].copy() trH = self.data.select(component='T')[0].copy() slow = self.meta.slow if not vp: vp = self.meta.vp if not vs: vs = self.meta.vs # Vertical slownesses # P vertical slowness qp = np.sqrt(1. / vp / vp - slow * slow) # S vertical slowness qs = np.sqrt(1. / vs / vs - slow * slow) # Elements of rotation matrix m11 = slow * vs * vs / vp m12 = -(1. - 2. * vs * vs * slow * slow) / (2. * vp * qp) m21 = (1. - 2. * vs * vs * slow * slow) / (2. * vs * qs) m22 = slow * vs # Rotation matrix rot = np.array([[-m11, m12], [-m21, m22]]) # Vector of Radial and Vertical r_z = np.array([trV.data, trP.data]) # Rotation vec = np.dot(rot, r_z) # Extract P and SV, SH components trP.data = vec[0, :] trV.data = vec[1, :] trH.data = -trH.data / 2. # Update stats of streams trP.stats.channel = trP.stats.channel[:-1] + 'P' trV.stats.channel = trV.stats.channel[:-1] + 'V' trH.stats.channel = trH.stats.channel[:-1] + 'H' # Over-write data attribute self.data = Stream(traces=[trP, trV, trH]) self.meta.align = align self.meta.rotated = True else: raise (Exception("incorrect 'align' argument"))
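# Numerical sanity check (not part of the class) of the P-SV rotation matrix
# built in the 'PVH' branch above; vp, vs and slowness are illustrative only.
import numpy as np

vp, vs, slow = 6.0, 3.6, 0.06    # km/s, km/s, s/km

qp = np.sqrt(1. / vp / vp - slow * slow)   # P vertical slowness
qs = np.sqrt(1. / vs / vs - slow * slow)   # S vertical slowness

m11 = slow * vs * vs / vp
m12 = -(1. - 2. * vs * vs * slow * slow) / (2. * vp * qp)
m21 = (1. - 2. * vs * vs * slow * slow) / (2. * vs * qs)
m22 = slow * vs

rot = np.array([[-m11, m12], [-m21, m22]])
print(rot)   # applied to the (radial, vertical) pair to give (P, SV)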
def find_common_segments(str1, str2, verbose=False):
    """
    Trim two streams to their common (overlapping) time segments.

    Returns the two trimmed streams and the percentage of samples of
    ``str1`` that survive the trimming.
    """
    if len(str1) == 0 or len(str2) == 0:
        msg = 'One or both streams are empty.'
        raise ValueError(msg)
    str1new = Stream()
    str2new = Stream()
    n1 = 0
    n2 = 0
    numsamp1 = 0
    numsampnew = 0
    for i in range(len(str1)):
        numsamp1 += len(str1[i].data)
    while n1 < len(str1) and n2 < len(str2):
        start1 = str1[n1].stats.starttime
        start2 = str2[n2].stats.starttime
        end1 = str1[n1].stats.endtime
        end2 = str2[n2].stats.endtime
        # Test if segments overlap at all
        if start1 > end2:
            n2 += 1
            continue
        if start2 > end1:
            n1 += 1
            continue
        # Find start and end time of the overlap
        start = max(start1, start2)
        end = min(end1, end2)
        str1new += str1[n1].slice(starttime=start, endtime=end)
        str2new += str2[n2].slice(starttime=start, endtime=end)
        # Increase the index of whichever segment ends first
        if end1 > end2:
            n2 += 1
        elif end1 == end2:
            n1 += 1
            n2 += 1
        else:
            n1 += 1
    for i in range(len(str1new)):
        numsampnew += len(str1new[i].data)
    # Percentage of the original samples of str1 that were kept
    if numsamp1 > 0:
        percentkept = numsampnew / numsamp1 * 100
    else:
        percentkept = 0.
    return (str1new, str2new, percentkept)
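A small usage sketch with synthetic one-trace streams (names and times are illustrative) to show what the function returns:

import numpy as np
from obspy import Trace, Stream, UTCDateTime

t0 = UTCDateTime(2020, 1, 1)
tr_a = Trace(np.ones(600), header={'sampling_rate': 1.0, 'starttime': t0})
tr_b = Trace(np.ones(600), header={'sampling_rate': 1.0, 'starttime': t0 + 300})

common_a, common_b, kept = find_common_segments(Stream(tr_a), Stream(tr_b))
# only the overlapping ~300 s remain in both output streams
print(common_a[0].stats.starttime, common_b[0].stats.endtime, kept)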
def deconvolve(self, phase='P', vp=None, vs=None, align=None, method='wiener', pre_filt=None, gfilt=None, wlevel=0.01): """ Deconvolves three-compoent data using one component as the source wavelet. The source component is always taken as the dominant compressional component, which can be either 'Z', 'L', or 'P'. Parameters ---------- vp : float P-wave velocity at surface (km/s) vs : float S-wave velocity at surface (km/s) align : str Alignment of coordinate system for rotation ('ZRT', 'LQT', or 'PVH') method : str Method for deconvolution. Options are 'wiener' or 'multitaper' gfilt : float Center frequency of Gaussian filter (Hz). wlevel : float Water level used in ``method='water'``. Attributes ---------- rf : :class:`~obspy.core.Stream` Stream containing the receiver function traces """ if not self.meta.accept: return def _npow2(x): return 1 if x == 0 else 2**(x - 1).bit_length() def _pad(array, n): tmp = np.zeros(n) tmp[:array.shape[0]] = array return tmp def _gauss_filt(dt, nft, f0): df = 1. / (nft * dt) nft21 = int(0.5 * nft + 1) f = df * np.arange(nft21) w = 2. * np.pi * f gauss = np.zeros(nft) gauss[:nft21] = np.exp(-0.25 * (w / f0)**2.) / dt gauss[nft21:] = np.flip(gauss[1:nft21 - 1]) return gauss def _decon(parent, daughter1, daughter2, noise, nn, method): # Get length, zero padding parameters and frequencies dt = parent.stats.delta # Wiener or Water level deconvolution if method == 'wiener' or method == 'water': npad = _npow2(nn * 2) freqs = np.fft.fftfreq(npad, d=dt) # Fourier transform Fp = np.fft.fft(parent.data, n=npad) Fd1 = np.fft.fft(daughter1.data, n=npad) Fd2 = np.fft.fft(daughter2.data, n=npad) Fn = np.fft.fft(noise.data, n=npad) # Auto and cross spectra Spp = np.real(Fp * np.conjugate(Fp)) Sd1p = Fd1 * np.conjugate(Fp) Sd2p = Fd2 * np.conjugate(Fp) Snn = np.real(Fn * np.conjugate(Fn)) # Final processing depends on method if method == 'wiener': Sdenom = Spp + Snn elif method == 'water': phi = np.amax(Spp) * wlevel Sdenom = Spp Sdenom[Sdenom < phi] = phi # Multitaper deconvolution elif method == 'multitaper': from spectrum import dpss npad = nn # Re-check length and pad with zeros if necessary if not np.allclose([ tr.stats.npts for tr in [parent, daughter1, daughter2, noise] ], npad): parent.data = _pad(parent.data, npad) daughter1.data = _pad(daughter1.data, npad) daughter2.data = _pad(daughter2.data, npad) noise.data = _pad(noise.data, npad) freqs = np.fft.fftfreq(npad, d=dt) NW = 2.5 Kmax = int(NW * 2 - 2) [tapers, eigenvalues] = dpss(npad, NW, Kmax) # Get multitaper spectrum of data Fp = np.fft.fft(np.multiply(tapers.transpose(), parent.data)) Fd1 = np.fft.fft( np.multiply(tapers.transpose(), daughter1.data)) Fd2 = np.fft.fft( np.multiply(tapers.transpose(), daughter2.data)) Fn = np.fft.fft(np.multiply(tapers.transpose(), noise.data)) # Auto and cross spectra Spp = np.sum(np.real(Fp * np.conjugate(Fp)), axis=0) Sd1p = np.sum(Fd1 * np.conjugate(Fp), axis=0) Sd2p = np.sum(Fd2 * np.conjugate(Fp), axis=0) Snn = np.sum(np.real(Fn * np.conjugate(Fn)), axis=0) # Denominator Sdenom = Spp + Snn else: print("Method not implemented") pass # Apply Gaussian filter? if gfilt: gauss = _gauss_filt(dt, npad, gfilt) gnorm = np.sum(gauss) * (freqs[1] - freqs[0]) * dt else: gauss = np.ones(npad) gnorm = 1. 
        # Copy traces
        rfp = parent.copy()
        rfd1 = daughter1.copy()
        rfd2 = daughter2.copy()

        # Spectral division and inverse transform
        rfp.data = np.fft.fftshift(
            np.real(np.fft.ifft(gauss * Spp / Sdenom)) / gnorm)
        rfd1.data = np.fft.fftshift(
            np.real(np.fft.ifft(gauss * Sd1p / Sdenom)) /
            np.amax(rfp.data) / gnorm)
        rfd2.data = np.fft.fftshift(
            np.real(np.fft.ifft(gauss * Sd2p / Sdenom)) /
            np.amax(rfp.data) / gnorm)

        return rfp, rfd1, rfd2

    if not self.meta.rotated:
        print("Warning: Data have not been rotated yet - rotating now")
        self.rotate(vp=vp, vs=vs, align=align)

    if not self.meta.snr:
        print("Warning: SNR has not been calculated - "
              "calculating now using default")
        self.calc_snr()

    if hasattr(self, 'rf'):
        print("Warning: Data have been deconvolved already - passing")
        return

    # Get the name of components (order is critical here)
    cL = self.meta.align[0]
    cQ = self.meta.align[1]
    cT = self.meta.align[2]

    # Define signal and noise
    trL = self.data.select(component=cL)[0].copy()
    trQ = self.data.select(component=cQ)[0].copy()
    trT = self.data.select(component=cT)[0].copy()
    trNl = self.data.select(component=cL)[0].copy()
    trNq = self.data.select(component=cQ)[0].copy()

    if phase in ['P', 'PP']:
        # Get signal length (i.e., seismogram to deconvolve)
        # from trace length
        dts = len(trL.data) * trL.stats.delta / 2.
        nn = int(round((dts - 5.) * trL.stats.sampling_rate)) + 1
        # Crop traces for signal (-5. to dts-10 sec)
        trL.trim(self.meta.time + self.meta.ttime - 5.,
                 self.meta.time + self.meta.ttime + dts - 10.,
                 nearest_sample=False, pad=nn, fill_value=0.)
        trQ.trim(self.meta.time + self.meta.ttime - 5.,
                 self.meta.time + self.meta.ttime + dts - 10.,
                 nearest_sample=False, pad=nn, fill_value=0.)
        trT.trim(self.meta.time + self.meta.ttime - 5.,
                 self.meta.time + self.meta.ttime + dts - 10.,
                 nearest_sample=False, pad=nn, fill_value=0.)
        # Crop traces for noise (-dts to -5 sec)
        trNl.trim(self.meta.time + self.meta.ttime - dts,
                  self.meta.time + self.meta.ttime - 5.,
                  nearest_sample=False, pad=nn, fill_value=0.)
        trNq.trim(self.meta.time + self.meta.ttime - dts,
                  self.meta.time + self.meta.ttime - 5.,
                  nearest_sample=False, pad=nn, fill_value=0.)

    elif phase in ['S', 'SKS']:
        # Get signal length (i.e., seismogram to deconvolve)
        # from trace length
        dts = len(trL.data) * trL.stats.delta / 2.
        # Number of samples in the signal window (assumed analogue of the
        # P case; this line is not in the original excerpt)
        nn = int(round(dts / 2. * trL.stats.sampling_rate)) + 1
        # Crop traces for signal
        trL.trim(self.meta.time + self.meta.ttime + 25. - dts / 2.,
                 self.meta.time + self.meta.ttime + 25.)
        trQ.trim(self.meta.time + self.meta.ttime + 25. - dts / 2.,
                 self.meta.time + self.meta.ttime + 25.)
        trT.trim(self.meta.time + self.meta.ttime + 25. - dts / 2.,
                 self.meta.time + self.meta.ttime + 25.)
        # Crop traces for noise
        trNl.trim(self.meta.time + self.meta.ttime - dts,
                  self.meta.time + self.meta.ttime - dts / 2.)
        trNq.trim(self.meta.time + self.meta.ttime - dts,
                  self.meta.time + self.meta.ttime - dts / 2.)

    # Detrend and taper all windows
    trL.detrend().taper(max_percentage=0.05, max_length=2.)
    trQ.detrend().taper(max_percentage=0.05, max_length=2.)
    trT.detrend().taper(max_percentage=0.05, max_length=2.)
    trNl.detrend().taper(max_percentage=0.05, max_length=2.)
    trNq.detrend().taper(max_percentage=0.05, max_length=2.)
    # This follows the pre-processing in Lim et al., GJI, 2017
    if pre_filt:
        trL.filter('bandpass', freqmin=pre_filt[0], freqmax=pre_filt[1],
                   corners=2, zerophase=True)
        trQ.filter('bandpass', freqmin=pre_filt[0], freqmax=pre_filt[1],
                   corners=2, zerophase=True)
        trT.filter('bandpass', freqmin=pre_filt[0], freqmax=pre_filt[1],
                   corners=2, zerophase=True)

    # Deconvolve
    if phase in ['P', 'PP']:
        rfL, rfQ, rfT = _decon(trL, trQ, trT, trNl, nn, method)
    elif phase in ['S', 'SKS']:
        rfQ, rfL, rfT = _decon(trQ, trL, trT, trNq, nn, method)

    # Update stats of streams
    rfL.stats.channel = 'RF' + self.meta.align[0]
    rfQ.stats.channel = 'RF' + self.meta.align[1]
    rfT.stats.channel = 'RF' + self.meta.align[2]

    self.rf = Stream(traces=[rfL, rfQ, rfT])
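For reference, a self-contained sketch of the water-level spectral division that ``_decon`` performs when ``method='water'``; the array arguments and the 10 % water level are illustrative, and the Gaussian filter and amplitude normalisation of the full routine are omitted here.

import numpy as np

def water_level_decon(parent, daughter, wlevel=0.1):
    """Deconvolve `parent` from `daughter` using a simple water level."""
    n = len(parent)
    Fp = np.fft.fft(parent, n=n)
    Fd = np.fft.fft(daughter, n=n)
    Spp = np.real(Fp * np.conjugate(Fp))    # parent auto-spectrum
    Sdp = Fd * np.conjugate(Fp)             # daughter-parent cross-spectrum
    phi = wlevel * np.amax(Spp)             # water level
    denom = np.where(Spp < phi, phi, Spp)   # clip small spectral amplitudes
    return np.fft.fftshift(np.real(np.fft.ifft(Sdp / denom)))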
ksta = 0 knode = 0 realstr = True starttime = UTCDateTime('20140926') for station in stations: if not 'stream' in locals(): spiked = copy.deepcopy(flat) spiked[offset+(samp_rate*lags[ksta][knode]):\ offset+length+(samp_rate*lags[ksta][knode])]=1 tr = Trace(spiked) tr.stats.station = station tr.stats.channel = 'S1' tr.stats.network = 'SYN' tr.stats.sampling_rate = samp_rate tr.stats.starttime = starttime stream = Stream(tr) else: spiked = copy.deepcopy(flat) spiked[offset+(samp_rate*lags[ksta][knode]):\ offset+length+(samp_rate*lags[ksta][knode])]=1 tr = Trace(spiked) tr.stats.station = station tr.stats.channel = 'S1' tr.stats.network = 'SYN' tr.stats.sampling_rate = samp_rate tr.stats.starttime = starttime stream += tr ksta += 1 if realstr: # stream=obsread('scripts/brightness_test.ms') # stream.detrend('demean')
def pro3pair(eq_file1, eq_file2, stat_corr=1, simple_taper=0, skip_SNR=0, dphase='PKIKP', dphase2='PKiKP', dphase3='PKIKP', dphase4='PKiKP', rel_time=1, start_buff=-200, end_buff=500, plot_scale_fac=0.05, qual_threshold=0, corr_threshold=0.5, freq_min=1, freq_max=3, min_dist=0, max_dist=180, alt_statics=0, statics_file='nothing', ARRAY=0, ref_loc=0): # Parameters # ARRAY 0 is Hinet, 1 is LASA, 2 is NORSAR # start_buff = -50 # plots start Xs after PKIKP # end_buff = 200 # plots end Xs after PKIKP # plot_scale_fac = 0.5 # Bigger numbers make each trace amplitude bigger on plot # stat_corr = 1 # apply station static corrections # qual_threshold = 0.2 # minimum SNR # corr_threshold = 0.7 # minimum correlation in measuring shift to use station in static construction # dphase = 'PKIKP' # phase to be aligned # dphase2 = 'PKiKP' # another phase to have traveltime plotted # dphase3 = 'pPKiKP' # another phase to have traveltime plotted # dphase4 = 'pPKIKP' # another phase to have traveltime plotted #%% Set some parameters verbose = 0 # more output # rel_time = 1 # timing is relative to a chosen phase, otherwise relative to OT taper_frac = .05 #Fraction of window tapered on both ends signal_dur = 5. # signal length used in SNR calculation plot_tt = 1 # plot the traveltimes? do_decimate = 0 # 0 if no decimation desired #ref_loc = 0 # 1 if selecting stations within ref_rad of ref_lat and ref_lon # 0 if selecting stations by distance from earthquake if ref_loc == 1: if ARRAY == 0: ref_lat = 36.3 # °N, around middle of Japan ref_lon = 138.5 # °E ref_rad = 1.5 # ° radius (°) elif ARRAY == 1: ref_lat = 46.7 # °N keep only inner rings A-D ref_lon = -106.22 # °E ref_rad = 0.4 # ° radius (°) if rel_time == 0: # SNR requirement not implemented for unaligned traces qual_threshold = 0 # Plot with reduced velocity? 
red_plot = 0 red_dist = 55 red_time = 300 red_slow = 7.2 # seconds per degree #%% Import functions from obspy import UTCDateTime from obspy import Stream from obspy import read from obspy.geodetics import gps2dist_azimuth import numpy as np import os from obspy.taup import TauPyModel import matplotlib.pyplot as plt import time model = TauPyModel(model='iasp91') import sys # don't show any warnings import warnings if not sys.warnoptions: warnings.simplefilter("ignore") print('Running pro3a_sort_plot_pair') start_time_wc = time.time() #%% Get saved event info, also used to name files # event 2016-05-28T09:47:00.000 -56.241 -26.935 78 print('Opening ' + eq_file1) if ARRAY == 0: file = open(eq_file1, 'r') elif ARRAY == 1: file = open('EvLocs/' + eq_file1, 'r') lines = file.readlines() split_line = lines[0].split() # ids.append(split_line[0]) ignore label for now t1 = UTCDateTime(split_line[1]) date_label1 = split_line[1][0:10] year1 = split_line[1][0:4] ev_lat1 = float(split_line[2]) ev_lon1 = float(split_line[3]) ev_depth1 = float(split_line[4]) print('1st event: date_label ' + date_label1 + ' time ' + str(t1) + ' lat ' + str(ev_lat1) + ' lon ' + str(ev_lon1) + ' depth ' + str(ev_depth1)) print('Opening ' + eq_file2) if ARRAY == 0: file = open(eq_file2, 'r') elif ARRAY == 1: file = open('EvLocs/' + eq_file2, 'r') lines = file.readlines() split_line = lines[0].split() # ids.append(split_line[0]) ignore label for now t2 = UTCDateTime(split_line[1]) date_label2 = split_line[1][0:10] year2 = split_line[1][0:4] ev_lat2 = float(split_line[2]) ev_lon2 = float(split_line[3]) ev_depth2 = float(split_line[4]) print('2nd event: date_label ' + date_label2 + ' time ' + str(t2) + ' lat ' + str(ev_lat2) + ' lon ' + str(ev_lon2) + ' depth ' + str(ev_depth2)) #%% Get station location file if stat_corr == 1: # load static terms, only applies to Hinet and LASA if ARRAY == 0: if alt_statics == 0: # standard set sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/hinet_sta_statics.txt' else: # custom set made by this event for this event sta_file = ( '/Users/vidale/Documents/GitHub/Array_codes/Files/' + 'HA' + date_label1[:10] + 'pro4_' + dphase + '.statics') elif ARRAY == 1: sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/L_sta_statics.txt' with open(sta_file, 'r') as file: lines = file.readlines() print(str(len(lines)) + ' stations read from ' + sta_file) # Load station coords into arrays station_index = range(len(lines)) st_names = [] st_dist = [] st_lats = [] st_lons = [] st_shift = [] st_corr = [] for ii in station_index: line = lines[ii] split_line = line.split() st_names.append(split_line[0]) st_dist.append(split_line[1]) st_lats.append(split_line[2]) st_lons.append(split_line[3]) st_shift.append(split_line[4]) st_corr.append(split_line[5]) else: # no static terms, always true for LASA or NORSAR if ARRAY == 0: # Hinet set sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/hinet_sta.txt' elif ARRAY == 1: # LASA set sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/LASA_sta.txt' else: # NORSAR set sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/NORSAR_sta.txt' with open(sta_file, 'r') as file: lines = file.readlines() print(str(len(lines)) + ' stations read from ' + sta_file) # Load station coords into arrays station_index = range(len(lines)) st_names = [] st_lats = [] st_lons = [] for ii in station_index: line = lines[ii] split_line = line.split() st_names.append(split_line[0]) st_lats.append(split_line[1]) st_lons.append(split_line[2]) #%% Is taper 
too long compared to noise estimation window? totalt = end_buff - start_buff noise_time_skipped = taper_frac * totalt if simple_taper == 0: if noise_time_skipped >= 0.5 * (-start_buff): print( 'Specified taper of ' + str(taper_frac * totalt) + ' is not big enough compared to available noise estimation window ' + str(-start_buff - noise_time_skipped) + '. May not work well.') old_taper_frac = taper_frac taper_frac = -0.5 * start_buff / totalt print('Taper reset from ' + str(old_taper_frac * totalt) + ' to ' + str(taper_frac * totalt) + ' seconds.') #%% Load waveforms and decimate to 10 sps st1 = Stream() st2 = Stream() if ARRAY == 0: fname1 = 'HD' + date_label1 + '.mseed' fname2 = 'HD' + date_label2 + '.mseed' elif ARRAY == 1: fname1 = 'Mseed/HD' + date_label1 + '.mseed' fname2 = 'Mseed/HD' + date_label2 + '.mseed' st1 = read(fname1) st2 = read(fname2) if do_decimate != 0: st1.decimate(do_decimate) st2.decimate(do_decimate) print('1st trace has : ' + str(len(st1[0].data)) + ' time pts ') print('st1 has ' + str(len(st1)) + ' traces') print('st2 has ' + str(len(st2)) + ' traces') print('1st trace starts at ' + str(st1[0].stats.starttime) + ', event at ' + str(t1)) print('2nd trace starts at ' + str(st2[0].stats.starttime) + ', event at ' + str(t2)) #%% Select by distance, window and adjust start time to align picked times st_pickalign1 = Stream() st_pickalign2 = Stream() for tr in st1: # traces one by one, find lat-lon by searching entire inventory. Inefficient if float( year1 ) < 1970: # fix the damn 1969 -> 2069 bug in Gibbon's LASA data temp_t = str(tr.stats.starttime) temp_tt = '19' + temp_t[2:] tr.stats.starttime = UTCDateTime(temp_tt) for ii in station_index: if ARRAY == 0: # have to chop off last letter, always 'h' this_name = st_names[ii] this_name_truc = this_name[0:5] name_truc_cap = this_name_truc.upper() elif ARRAY == 1: name_truc_cap = st_names[ii] if (tr.stats.station == name_truc_cap ): # find station in inventory # if (tr.stats.station == st_names[ii]): # find station in inventory if stat_corr != 1 or float( st_corr[ii] ) > corr_threshold: # if using statics, reject low correlations stalat = float(st_lats[ii]) stalon = float( st_lons[ii] ) # look up lat & lon again to find distance if ref_loc == 1: ref_distance = gps2dist_azimuth( stalat, stalon, ref_lat, ref_lon) ref_dist = ref_distance[0] / (1000 * 111) distance = gps2dist_azimuth( stalat, stalon, ev_lat1, ev_lon1) # Get traveltimes again, hard to store tr.stats.distance = distance[0] # distance in km dist = distance[0] / (1000 * 111) if ref_loc != 1 and min_dist < dist and dist < max_dist: # select distance range from earthquake try: # print('Phase ' + dphase + ', depth ' + str(ev_depth1) + ' distance ' + str(dist)) arrivals = model.get_travel_times( source_depth_in_km=ev_depth1, distance_in_degree=dist, phase_list=[dphase]) atime = arrivals[0].time # print(dphase + ' arrival time is ' + str(atime)) if stat_corr == 1: # apply static station corrections tr.stats.starttime -= float(st_shift[ii]) if rel_time == 1: s_t = t1 + atime + start_buff e_t = t1 + atime + end_buff else: s_t = t1 + start_buff e_t = t1 + end_buff tr.trim(starttime=s_t, endtime=e_t) # deduct theoretical traveltime and start_buf from starttime if rel_time == 1: tr.stats.starttime -= atime st_pickalign1 += tr except: pass elif ref_loc == 1: if ref_dist < ref_rad: # alternatively, select based on distance from ref location try: arrivals = model.get_travel_times( source_depth_in_km=ev_depth1, distance_in_degree=dist, phase_list=[dphase]) atime = 
arrivals[0].time if stat_corr == 1: # apply static station corrections tr.stats.starttime -= float(st_shift[ii]) if rel_time == 1: s_t = t1 + atime + start_buff e_t = t1 + atime + end_buff else: s_t = t1 + start_buff e_t = t1 + end_buff tr.trim(starttime=s_t, endtime=e_t) # deduct theoretical traveltime and start_buf from starttime if rel_time == 1: tr.stats.starttime -= atime st_pickalign1 += tr except: pass # if len(tr.data) == 0: # print('Event 1 - empty window. Trace starts at ' + str(tr.stats.starttime) + ', event at ' + str(t1)) for tr in st2: # traces one by one if float( year2 ) < 1970: # fix the damn 1969 -> 2069 bug in Gibbon's LASA data temp_t = str(tr.stats.starttime) temp_tt = '19' + temp_t[2:] tr.stats.starttime = UTCDateTime(temp_tt) for ii in station_index: if ARRAY == 0: # have to chop off last letter, always 'h' this_name = st_names[ii] this_name_truc = this_name[0:5] name_truc_cap = this_name_truc.upper() elif ARRAY == 1: name_truc_cap = st_names[ii] if (tr.stats.station == name_truc_cap ): # find station in inventory # if (tr.stats.station == st_names[ii]): # find station in inventory if stat_corr != 1 or float( st_corr[ii] ) > corr_threshold: # if using statics, reject low correlations stalat = float(st_lats[ii]) stalon = float(st_lons[ii]) if ref_loc == 1: ref_distance = gps2dist_azimuth( stalat, stalon, ref_lat, ref_lon) ref_dist = ref_distance[0] / (1000 * 111) distance = gps2dist_azimuth( stalat, stalon, ev_lat2, ev_lon2) # Get traveltimes again, hard to store tr.stats.distance = distance[0] # distance in km dist = distance[0] / (1000 * 111) if ref_loc != 1 and min_dist < dist and dist < max_dist: # select distance range from earthquake try: arrivals = model.get_travel_times( source_depth_in_km=ev_depth2, distance_in_degree=dist, phase_list=[dphase]) atime = arrivals[0].time if stat_corr == 1: # apply static station corrections tr.stats.starttime -= float(st_shift[ii]) if rel_time == 1: s_t = t2 + atime + start_buff e_t = t2 + atime + end_buff else: s_t = t2 + start_buff e_t = t2 + end_buff tr.trim(starttime=s_t, endtime=e_t) # deduct theoretical traveltime and start_buf from starttime if rel_time == 1: tr.stats.starttime -= atime st_pickalign2 += tr except: pass elif ref_loc == 1: if ref_dist < ref_rad: # alternatively, select based on distance from ref location try: arrivals = model.get_travel_times( source_depth_in_km=ev_depth2, distance_in_degree=dist, phase_list=[dphase]) atime = arrivals[0].time if stat_corr == 1: # apply static station corrections tr.stats.starttime -= float(st_shift[ii]) if rel_time == 1: s_t = t2 + atime + start_buff e_t = t2 + atime + end_buff else: s_t = t2 + start_buff e_t = t2 + end_buff tr.trim(starttime=s_t, endtime=e_t) # deduct theoretical traveltime and start_buf from starttime if rel_time == 1: tr.stats.starttime -= atime st_pickalign2 += tr except: pass # if len(tr.data) == 0: # print('Event 2 - empty window. 
Trace starts at ' + str(tr.stats.starttime) + ', event at ' + str(t2)) print('After alignment and range selection: ' + str(len(st_pickalign1)) + ' traces') #%% #print(st) # at length if verbose: print(st1.__str__(extended=True)) print(st2.__str__(extended=True)) if rel_time == 1: print(st_pickalign1.__str__(extended=True)) print(st_pickalign2.__str__(extended=True)) #%% Detrend, taper, filter st_pickalign1.detrend(type='simple') st_pickalign2.detrend(type='simple') st_pickalign1.taper(taper_frac) st_pickalign2.taper(taper_frac) st_pickalign1.filter('bandpass', freqmin=freq_min, freqmax=freq_max, corners=4, zerophase=True) st_pickalign2.filter('bandpass', freqmin=freq_min, freqmax=freq_max, corners=4, zerophase=True) st_pickalign1.taper(taper_frac) st_pickalign2.taper(taper_frac) #%% Cull further by imposing SNR threshold on both traces st1good = Stream() st2good = Stream() for tr1 in st_pickalign1: for tr2 in st_pickalign2: if ((tr1.stats.network == tr2.stats.network) & (tr1.stats.station == tr2.stats.station)): if skip_SNR == 1: st1good += tr1 st2good += tr2 else: # estimate median noise t_noise1_start = int(len(tr1.data) * taper_frac) t_noise2_start = int(len(tr2.data) * taper_frac) t_noise1_end = int( len(tr1.data) * (-start_buff) / (end_buff - start_buff)) t_noise2_end = int( len(tr2.data) * (-start_buff) / (end_buff - start_buff)) noise1 = np.median( abs(tr1.data[t_noise1_start:t_noise1_end])) noise2 = np.median( abs(tr2.data[t_noise2_start:t_noise2_end])) # estimate median signal t_signal1_start = int( len(tr1.data) * (-start_buff) / (end_buff - start_buff)) t_signal2_start = int( len(tr2.data) * (-start_buff) / (end_buff - start_buff)) t_signal1_end = t_signal1_start + int( len(tr1.data) * signal_dur / (end_buff - start_buff)) t_signal2_end = t_signal2_start + int( len(tr2.data) * signal_dur / (end_buff - start_buff)) signal1 = np.median( abs(tr1.data[t_signal1_start:t_signal1_end])) signal2 = np.median( abs(tr2.data[t_signal2_start:t_signal2_end])) # test SNR SNR1 = signal1 / noise1 SNR2 = signal2 / noise2 if (SNR1 > qual_threshold and SNR2 > qual_threshold): st1good += tr1 st2good += tr2 if skip_SNR == 1: print('Matches (no SNR test): ' + str(len(st1good)) + ' traces') else: print('Match and above SNR threshold: ' + str(len(st1good)) + ' traces') #%% get station lat-lon, compute distance for plot for tr in st1good: for ii in station_index: if (tr.stats.station == st_names[ii]): # find station in inventory stalon = float( st_lons[ii]) # look up lat & lon again to find distance stalat = float(st_lats[ii]) distance = gps2dist_azimuth(stalat, stalon, ev_lat1, ev_lon1) tr.stats.distance = distance[0] # distance in km for tr in st2good: for ii in station_index: if (tr.stats.station == st_names[ii]): # find station in inventory stalon = float( st_lons[ii]) # look up lat & lon again to find distance stalat = float(st_lats[ii]) distance = gps2dist_azimuth(stalat, stalon, ev_lat2, ev_lon2) tr.stats.distance = distance[0] # distance in km print('Made it to here.') #%% # plot traces fig_index = 3 plt.close(fig_index) plt.figure(fig_index, figsize=(8, 8)) plt.xlim(start_buff, end_buff) plt.ylim(min_dist, max_dist) for tr in st1good: dist_offset = tr.stats.distance / (1000 * 111 ) # trying for approx degrees ttt = np.arange(len(tr.data)) * tr.stats.delta + (tr.stats.starttime - t1) if red_plot == 1: shift = red_time + (dist_offset - red_dist) * red_slow ttt = ttt - shift # These lines used to cause a crash in Spyder plt.plot(ttt, (tr.data - np.median(tr.data)) * plot_scale_fac / (tr.data.max() 
- tr.data.min()) + dist_offset, color='green') #plt.title(fname1) print('And made it to here?') for tr in st2good: dist_offset = tr.stats.distance / (1000 * 111 ) # trying for approx degrees ttt = np.arange(len(tr.data)) * tr.stats.delta + (tr.stats.starttime - t2) if red_plot == 1: shift = red_time + (dist_offset - red_dist) * red_slow ttt = ttt - shift ttt = ttt # These lines used to cause a crash in Spyder plt.plot(ttt, (tr.data - np.median(tr.data)) * plot_scale_fac / (tr.data.max() - tr.data.min()) + dist_offset, color='red') print('And made it to here.') #%% Plot traveltime curves if plot_tt: # first traveltime curve line_pts = 50 dist_vec = np.arange(min_dist, max_dist, (max_dist - min_dist) / line_pts) # distance grid time_vec1 = np.arange( min_dist, max_dist, (max_dist - min_dist) / line_pts) # empty time grid of same length (filled with -1000) for i in range(0, line_pts): arrivals = model.get_travel_times(source_depth_in_km=ev_depth1, distance_in_degree=dist_vec[i], phase_list=[dphase]) num_arrivals = len(arrivals) found_it = 0 for j in range(0, num_arrivals): if arrivals[j].name == dphase: time_vec1[i] = arrivals[j].time found_it = 1 if found_it == 0: time_vec1[i] = np.nan # second traveltime curve if dphase2 != 'no': time_vec2 = np.arange( min_dist, max_dist, (max_dist - min_dist) / line_pts) # empty time grid of same length (filled with -1000) for i in range(0, line_pts): arrivals = model.get_travel_times( source_depth_in_km=ev_depth1, distance_in_degree=dist_vec[i], phase_list=[dphase2]) num_arrivals = len(arrivals) found_it = 0 for j in range(0, num_arrivals): if arrivals[j].name == dphase2: time_vec2[i] = arrivals[j].time found_it = 1 if found_it == 0: time_vec2[i] = np.nan if rel_time == 1: time_vec2 = time_vec2 - time_vec1 plt.plot(time_vec2, dist_vec, color='orange') # third traveltime curve if dphase3 != 'no': time_vec3 = np.arange( min_dist, max_dist, (max_dist - min_dist) / line_pts) # empty time grid of same length (filled with -1000) for i in range(0, line_pts): arrivals = model.get_travel_times( source_depth_in_km=ev_depth1, distance_in_degree=dist_vec[i], phase_list=[dphase3]) num_arrivals = len(arrivals) found_it = 0 for j in range(0, num_arrivals): if arrivals[j].name == dphase3: time_vec3[i] = arrivals[j].time found_it = 1 if found_it == 0: time_vec3[i] = np.nan if rel_time == 1: time_vec3 = time_vec3 - time_vec1 plt.plot(time_vec3, dist_vec, color='yellow') # fourth traveltime curve if dphase4 != 'no': time_vec4 = np.arange( min_dist, max_dist, (max_dist - min_dist) / line_pts) # empty time grid of same length (filled with -1000) for i in range(0, line_pts): arrivals = model.get_travel_times( source_depth_in_km=ev_depth1, distance_in_degree=dist_vec[i], phase_list=[dphase4]) num_arrivals = len(arrivals) found_it = 0 for j in range(0, num_arrivals): if arrivals[j].name == dphase4: time_vec4[i] = arrivals[j].time found_it = 1 if found_it == 0: time_vec4[i] = np.nan if rel_time == 1: time_vec4 = time_vec4 - time_vec1 plt.plot(time_vec4, dist_vec, color='purple') if rel_time == 1: time_vec1 = time_vec1 - time_vec1 plt.plot(time_vec1, dist_vec, color='blue') plt.xlabel('Time (s)') plt.ylabel('Epicentral distance from event (°)') if ARRAY == 0: plt.title(dphase + ' for ' + fname1 + ' vs ' + fname2) elif ARRAY == 1: plt.title(dphase + ' for ' + fname1[8:18] + ' vs ' + fname2[8:18]) plt.show() #%% Save processed files if ARRAY == 0: fname1 = 'HD' + date_label1 + 'sel.mseed' fname2 = 'HD' + date_label2 + 'sel.mseed' elif ARRAY == 1: fname1 = 'Pro_Files/HD' + 
date_label1 + 'sel.mseed' fname2 = 'Pro_Files/HD' + date_label2 + 'sel.mseed' st1good.write(fname1, format='MSEED') st2good.write(fname2, format='MSEED') elapsed_time_wc = time.time() - start_time_wc print('This job took ' + str(elapsed_time_wc) + ' seconds') os.system('say "Done"')
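A hedged example of a call to this routine; the event-file names below are placeholders, and each file's first line is expected to hold a label, origin time, latitude, longitude and depth, as parsed above.

pro3pair(eq_file1='event1.txt', eq_file2='event2.txt',
         ARRAY=1,                  # LASA: event files in EvLocs/, waveforms in Mseed/
         dphase='PKIKP', dphase2='PKiKP',
         start_buff=-20, end_buff=100,
         freq_min=1, freq_max=3,
         min_dist=130, max_dist=180)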
# Imports needed by this snippet
from numpy import arange, genfromtxt
from obspy import Stream

# Absolute time vector
# tout = arange(0, 86300, 60.)
tout = arange(0, 8 * 3600, 60.)

# Minimum AMR level
min_amr = 1

# Shoal depth?
H0 = 5.0

# Coordinates of gauges
xyz = genfromtxt(
    '/Users/dmelgar/DEMs/SRTM15/tehuant_coast_points_filtered.xyz')

# Read all data, interpolate, put in Stream
# (`files` is assumed to be defined elsewhere as the list of gauge output files)
st = Stream()
for k in range(len(files)):
    print('%d of %d' % (k, len(files)))
    shoal = (abs(xyz[k, 2]) / H0)**0.25
    g = genfromtxt(files[k])
    # AMR levels
    levels = g[:, 0]
    # Time and amplitudes
    t = g[:, 1]
    eta = g[:, 5] * shoal
def xcor_process(st, inv): xcor_st = Stream() # tr1 = Stream([st[0]]) # tr2 = Stream([st[1]]) # # y, x, comp = IntervalStackXCorr(tr1, tr2) # # for day_stack_xcor in y: # print(type(day_stack_xcor)) # print(day_stack_xcor[0]) # # # fill in headers # stats = {'network': tr1[0].stats.network, 'station': tr1[0].stats.station, 'location': '', # 'channel': tr1[0].stats.channel, 'npts': len(day_stack_xcor[0]), 'sampling_rate': 100, # 'mseed': {'dataquality': 'D'}} # # stats["starttime"] = tr1[0].stats.starttime # # xcor_st += Trace(data=day_stack_xcor[0], header=stats) for tr in st: temp_st = Stream(traces=[tr]) print('') print(tr.stats.asdf.labels) # get the uid label uid_label = tr.stats.asdf.labels[1] print(uid_label) perm_st = id_st_dict[uid_label] temp_tr = temp_st[0] ref_tr = perm_st[0] stationPair = ref_tr.stats.station + '.' + temp_tr.stats.station print(temp_st) print(perm_st) print("DO XCOR......") xcl, winsPerInterval = xcorr2(ref_tr, temp_tr) if (xcl is None): print("\t\tWarning: no cross-correlation results returned for station-pair %s, " % (stationPair) + " due to gaps in data.") continue print(xcl) print(winsPerInterval) # saveXCorrPlot(y, x, '/g/data/ha3/', 'test_plot', comp) # fill in headers for new xcor trace stats = {'network': tr.stats.network, 'station': tr.stats.station, 'location': "", 'channel': uid_label[2:], 'npts': len(xcl[0]), 'sampling_rate': tr.stats.sampling_rate, 'mseed': {'dataquality': 'D'}, "asdf": {}} stats["starttime"] = tr.stats.starttime temp_tr = Trace(data=xcl[0], header=stats) temp_tr.stats.asdf.labels = tr.stats.asdf.labels xcor_st += temp_tr # break return xcor_st
def readSU(filename, headonly=False, byteorder=None, unpack_trace_headers=False, **kwargs): # @UnusedVariable """ Reads a Seismic Unix (SU) file and returns an ObsPy Stream object. .. warning:: This function should NOT be called directly, it registers via the ObsPy :func:`~obspy.core.stream.read` function, call this instead. :type filename: str :param filename: SU file to be read. :type headonly: boolean, optional :param headonly: If set to True, read only the header and omit the waveform data. :type byteorder: ``'<'``, ``'>'``, or ``None`` :param byteorder: Determines the endianness of the file. Either ``'>'`` for big endian or ``'<'`` for little endian. If it is ``None``, it will try to autodetect the endianness. The endianness is always valid for the whole file. Defaults to ``None``. :type unpack_trace_headers: bool, optional :param unpack_trace_headers: Determines whether or not all trace header values will be unpacked during reading. If ``False`` it will greatly enhance performance and especially memory usage with large files. The header values can still be accessed and will be calculated on the fly but tab completion will no longer work. Look in the headers.py for a list of all possible trace header values. Defaults to ``False``. :returns: A ObsPy :class:`~obspy.core.stream.Stream` object. .. rubric:: Example >>> from obspy import read >>> st = read("/path/to/1.su_first_trace") >>> st #doctest: +ELLIPSIS <obspy.core.stream.Stream object at 0x...> >>> print(st) #doctest: +ELLIPSIS 1 Trace(s) in Stream: ... | 2005-12-19T15:07:54.000000Z - ... | 4000.0 Hz, 8000 samples """ # Read file to the internal segy representation. su_object = readSUFile(filename, endian=byteorder, unpack_headers=unpack_trace_headers) # Create the stream object. stream = Stream() # Get the endianness from the first trace. endian = su_object.traces[0].endian # Loop over all traces. for tr in su_object.traces: # Create new Trace object for every segy trace and append to the Stream # object. trace = Trace() stream.append(trace) # skip data if headonly is set if headonly: trace.stats.npts = tr.npts else: trace.data = tr.data trace.stats.su = AttribDict() # If all values will be unpacked create a normal dictionary. if unpack_trace_headers: # Add the trace header as a new attrib dictionary. header = AttribDict() for key, value in tr.header.__dict__.iteritems(): setattr(header, key, value) # Otherwise use the LazyTraceHeaderAttribDict. else: # Add the trace header as a new lazy attrib dictionary. header = LazyTraceHeaderAttribDict(tr.header.unpacked_header, tr.header.endian) trace.stats.su.trace_header = header # Also set the endianness. trace.stats.su.endian = endian # The sampling rate should be set for every trace. It is a sample # interval in microseconds. The only sanity check is that is should be # larger than 0. tr_header = trace.stats.su.trace_header if tr_header.sample_interval_in_ms_for_this_trace > 0: trace.stats.delta = \ float(tr.header.sample_interval_in_ms_for_this_trace) / \ 1E6 # If the year is not zero, calculate the start time. The end time is # then calculated from the start time and the sampling rate. # 99 is often used as a placeholder. if tr_header.year_data_recorded > 0: year = tr_header.year_data_recorded # The SEG Y rev 0 standard specifies the year to be a 4 digit # number. Before that it was unclear if it should be a 2 or 4 # digit number. Old or wrong software might still write 2 digit # years. 
            # Every number < 30 will be mapped to 2000-2029 and every
            # number between 30 and 99 will be mapped to 1930-1999.
            if year < 100:
                if year < 30:
                    year += 2000
                else:
                    year += 1900
            julday = tr_header.day_of_year
            hour = tr_header.hour_of_day
            minute = tr_header.minute_of_hour
            second = tr_header.second_of_minute
            trace.stats.starttime = UTCDateTime(
                year=year, julday=julday, hour=hour, minute=minute,
                second=second)
    return stream
def get_waveforms(self, network, station, location, channel, starttime, endtime, merge=-1, sds_type=None, **kwargs): """ Read data from a local SeisComP Data Structure (SDS) directory tree. >>> from obspy import UTCDateTime >>> t = UTCDateTime("2015-10-12T12") >>> st = client.get_waveforms("IU", "ANMO", "*", "HH?", t, t+30) ... # doctest: +SKIP :type network: str :param network: Network code of requested data (e.g. "IU"). Wildcards '*' and '?' are supported. :type station: str :param station: Station code of requested data (e.g. "ANMO"). Wildcards '*' and '?' are supported. :type location: str :param location: Location code of requested data (e.g. ""). Wildcards '*' and '?' are supported. :type channel: str :param channel: Channel code of requested data (e.g. "HHZ"). Wildcards '*' and '?' are supported. :type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime` :param starttime: Start of requested time window. :type endtime: :class:`~obspy.core.utcdatetime.UTCDateTime` :param endtime: End of requested time window. :type merge: int or None :param merge: Specifies, which merge operation should be performed on the stream before returning the data. Default (``-1``) means only a conservative cleanup merge is performed to merge seamless traces (e.g. when reading across day boundaries). See :meth:`Stream.merge(...) <obspy.core.stream.Stream.merge>` for details. If set to ``None`` (or ``False``) no merge operation at all will be performed. :type sds_type: str :param sds_type: Override SDS data type identifier that was specified during client initialization. :param kwargs: Additional kwargs that get passed on to :func:`~obspy.core.stream.read` internally, mostly for internal low-level purposes used by other methods. :rtype: :class:`~obspy.core.stream.Stream` """ if starttime >= endtime: msg = ("'endtime' must be after 'starttime'.") raise ValueError(msg) sds_type = sds_type or self.sds_type st = Stream() full_paths = self._get_filenames(network=network, station=station, location=location, channel=channel, starttime=starttime, endtime=endtime, sds_type=sds_type) for full_path in full_paths: st += read(full_path, format=self.format, starttime=starttime, endtime=endtime, **kwargs) # make sure we only have the desired data, just in case the file # contents do not match the expected SEED id st = st.select(network=network, station=station, location=location, channel=channel) # avoid trim/merge operations when we do a headonly read for # `_get_availability_percentage()` if kwargs.get("_no_trim_or_merge", False): return st st.trim(starttime, endtime) if merge is None or merge is False: pass else: st.merge(merge) return st
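A short usage sketch, assuming this is ObsPy's SDS filesystem client and that it is instantiated with the archive root as its first argument (the path below is a placeholder):

from obspy import UTCDateTime
from obspy.clients.filesystem.sds import Client

client = Client("/data/SDS")  # root directory of the SeisComP Data Structure archive
t = UTCDateTime("2015-10-12T12:00:00")
st = client.get_waveforms("IU", "ANMO", "*", "HH?", t, t + 30)
# pass merge=None to keep gappy traces unmerged
st_raw = client.get_waveforms("IU", "ANMO", "*", "HH?", t, t + 30, merge=None)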
def get_all_stations(day): from obspy.clients.earthworm import Client from obspy import UTCDateTime from obspy import Stream year1 = 2014 month1 = 11 day1 = 24 hour1 = 0 minute1 = 0 second1 = 0 num = 12 lb_num = 6 #%% LB01 try: sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 + day * 24 * 60 * 60 t2 = t1 + 23 * 60 * 60 + 59 * 60 + 59.999 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. st1 = Stream() st1 = client.get_waveforms(net, sta, '', cha, t1, t2) # st is a stream, we can operate normally as in obspy st1.detrend(type='linear') st1.detrend(type='demean') break_test = st1 break_test = break_test[0].filter("bandpass", freqmin=1, freqmax=10) sorted_data = st1.copy() sorted_data = abs(sorted_data[0].data) sorted_data.sort() mid_dat = sorted_data[int(len(sorted_data) / 2)] if sum(abs(st1[0].data) ) < 10 or st1[0].stats.npts < 7920000 or mid_dat < 0.1: sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime( year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. st1 = Stream() st1 = client.get_waveforms(net, sta, '', cha, t1, t2) st1.detrend(type='linear') st1.detrend(type='demean') st1[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 lb_num = lb_num - 1 except: # give 2 seconds of data instead sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. st1 = Stream() st1 = client.get_waveforms(net, sta, '', cha, t1, t2) st1.detrend(type='linear') st1.detrend(type='demean') st1[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 lb_num = lb_num - 1 #%% LB02 try: sta = 'LB02' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 + day * 24 * 60 * 60 t2 = t1 + 23 * 60 * 60 + 59 * 60 + 59.999 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. 
st2 = Stream() st2 = client.get_waveforms(net, sta, '', cha, t1, t2) # st is a stream, we can operate normally as in obspy st2.detrend(type='linear') st2.detrend(type='demean') break_test = st2 break_test = break_test[0].filter("bandpass", freqmin=1, freqmax=10) sorted_data = st2.copy() sorted_data = abs(sorted_data[0].data) sorted_data.sort() mid_dat = sorted_data[int(len(sorted_data) / 2)] if sum(abs(st2[0].data) ) < 10 or st2[0].stats.npts < 7920000 or mid_dat < 0.1: sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime( year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. st2 = Stream() st2 = client.get_waveforms(net, sta, '', cha, t1, t2) st2.detrend(type='linear') st2.detrend(type='demean') st2[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 lb_num = lb_num - 1 except: # give 2 seconds of data instead sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. st2 = Stream() st2 = client.get_waveforms(net, sta, '', cha, t1, t2) st2.detrend(type='linear') st2.detrend(type='demean') st2[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 lb_num = lb_num - 1 #%% LB03 try: sta = 'LB03' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 + day * 24 * 60 * 60 t2 = t1 + 23 * 60 * 60 + 59 * 60 + 59.999 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. st3 = Stream() st3 = client.get_waveforms(net, sta, '', cha, t1, t2) # st is a stream, we can operate normally as in obspy st3.detrend(type='linear') st3.detrend(type='demean') break_test = st3 break_test = break_test[0].filter("bandpass", freqmin=1, freqmax=10) sorted_data = st3.copy() sorted_data = abs(sorted_data[0].data) sorted_data.sort() mid_dat = sorted_data[int(len(sorted_data) / 2)] if sum(abs(st3[0].data) ) < 10 or st3[0].stats.npts < 7920000 or mid_dat < 0.1: sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime( year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. 
st3 = Stream() st3 = client.get_waveforms(net, sta, '', cha, t1, t2) st3.detrend(type='linear') st3.detrend(type='demean') st3[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 lb_num = lb_num - 1 except: # give 2 seconds of data instead sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. st3 = Stream() st3 = client.get_waveforms(net, sta, '', cha, t1, t2) st3.detrend(type='linear') st3.detrend(type='demean') st3[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 lb_num = lb_num - 1 #%% LB04 try: sta = 'LB04' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 + day * 24 * 60 * 60 t2 = t1 + 23 * 60 * 60 + 59 * 60 + 59.999 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. st4 = Stream() st4 = client.get_waveforms(net, sta, '', cha, t1, t2) # st is a stream, we can operate normally as in obspy st4.detrend(type='linear') st4.detrend(type='demean') break_test = st4 break_test = break_test[0].filter("bandpass", freqmin=1, freqmax=10) sorted_data = st4.copy() sorted_data = abs(sorted_data[0].data) sorted_data.sort() mid_dat = sorted_data[int(len(sorted_data) / 2)] if sum(abs(st4[0].data) ) < 10 or st4[0].stats.npts < 7920000 or mid_dat < 0.1: sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime( year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. st4 = Stream() st4 = client.get_waveforms(net, sta, '', cha, t1, t2) st4.detrend(type='linear') st4.detrend(type='demean') st4[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 lb_num = lb_num - 1 except: # give 2 seconds of data instead sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. 
st4 = Stream() st4 = client.get_waveforms(net, sta, '', cha, t1, t2) st4.detrend(type='linear') st4.detrend(type='demean') st4[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 lb_num = lb_num - 1 #%% LB05 try: sta = 'LB05' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 + day * 24 * 60 * 60 t2 = t1 + 23 * 60 * 60 + 59 * 60 + 59.999 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. st5 = Stream() st5 = client.get_waveforms(net, sta, '', cha, t1, t2) # st is a stream, we can operate normally as in obspy st5.detrend(type='linear') st5.detrend(type='demean') break_test = st5 break_test = break_test[0].filter("bandpass", freqmin=1, freqmax=10) sorted_data = st5.copy() sorted_data = abs(sorted_data[0].data) sorted_data.sort() mid_dat = sorted_data[int(len(sorted_data) / 2)] if sum(abs(st5[0].data) ) < 10 or st5[0].stats.npts < 7920000 or mid_dat < 0.1: sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime( year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. st5 = Stream() st5 = client.get_waveforms(net, sta, '', cha, t1, t2) st5.detrend(type='linear') st5.detrend(type='demean') st5[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 lb_num = lb_num - 1 except: # give 2 seconds of data instead sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. st5 = Stream() st5 = client.get_waveforms(net, sta, '', cha, t1, t2) st5.detrend(type='linear') st5.detrend(type='demean') st5[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 lb_num = lb_num - 1 #%% LB06 try: sta = 'LB06' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 + day * 24 * 60 * 60 t2 = t1 + 23 * 60 * 60 + 59 * 60 + 59.999 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. 
st6 = Stream() st6 = client.get_waveforms(net, sta, '', cha, t1, t2) # st is a stream, we can operate normally as in obspy st6.detrend(type='linear') st6.detrend(type='demean') break_test = st6 break_test = break_test[0].filter("bandpass", freqmin=1, freqmax=10) sorted_data = st6.copy() sorted_data = abs(sorted_data[0].data) sorted_data.sort() mid_dat = sorted_data[int(len(sorted_data) / 2)] if sum(abs(st6[0].data) ) < 10 or st6[0].stats.npts < 7920000 or mid_dat < 0.1: sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime( year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. st6 = Stream() st6 = client.get_waveforms(net, sta, '', cha, t1, t2) st6.detrend(type='linear') st6.detrend(type='demean') st6[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 lb_num = lb_num - 1 except: # give 2 seconds of blank data instead sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. st6 = Stream() st6 = client.get_waveforms(net, sta, '', cha, t1, t2) st6.detrend(type='linear') st6.detrend(type='demean') st6[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 lb_num = lb_num - 1 #%% LS01 try: sta = 'LS01' # STATION LS01 cha = 'EHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 + day * 24 * 60 * 60 t2 = t1 + 23 * 60 * 60 + 59 * 60 + 59.999 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. sts1 = Stream() sts1 = client.get_waveforms(net, sta, '', cha, t1, t2) # st is a stream, we can operate normally as in obspy sts1.detrend(type='linear') sts1.detrend(type='demean') break_test = sts1 break_test = break_test[0].filter("bandpass", freqmin=1, freqmax=10) sorted_data = sts1.copy() sorted_data = abs(sorted_data[0].data) sorted_data.sort() mid_dat = sorted_data[int(len(sorted_data) / 2)] if sum(abs(sts1[0].data) ) < 10 or sts1[0].stats.npts < 7920000 or mid_dat < 0.1: sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime( year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. 
sts1 = Stream() sts1 = client.get_waveforms(net, sta, '', cha, t1, t2) sts1.detrend(type='linear') sts1.detrend(type='demean') sts1[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 except: # give 2 seconds of data instead sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. sts1 = Stream() sts1 = client.get_waveforms(net, sta, '', cha, t1, t2) sts1.detrend(type='linear') sts1.detrend(type='demean') sts1[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 #%% LS02 try: sta = 'LS02' # STATION LS02 cha = 'EHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 + day * 24 * 60 * 60 t2 = t1 + 23 * 60 * 60 + 59 * 60 + 59.999 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. sts2 = Stream() sts2 = client.get_waveforms(net, sta, '', cha, t1, t2) # st is a stream, we can operate normally as in obspy sts2.detrend(type='linear') sts2.detrend(type='demean') break_test = sts2 break_test = break_test[0].filter("bandpass", freqmin=1, freqmax=10) sorted_data = sts2.copy() sorted_data = abs(sorted_data[0].data) sorted_data.sort() mid_dat = sorted_data[int(len(sorted_data) / 2)] if sum(abs(sts2[0].data) ) < 10 or sts2[0].stats.npts < 7920000 or mid_dat < 0.1: sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime( year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. sts2 = Stream() sts2 = client.get_waveforms(net, sta, '', cha, t1, t2) sts2.detrend(type='linear') sts2.detrend(type='demean') sts2[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 except: # give 2 seconds of data instead sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. 
sts2 = Stream() sts2 = client.get_waveforms(net, sta, '', cha, t1, t2) sts2.detrend(type='linear') sts2.detrend(type='demean') sts2[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 #%% LS03 try: sta = 'LS03' # STATION LS03 cha = 'EHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 + day * 24 * 60 * 60 t2 = t1 + 23 * 60 * 60 + 59 * 60 + 59.999 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. sts3 = Stream() sts3 = client.get_waveforms(net, sta, '', cha, t1, t2) # st is a stream, we can operate normally as in obspy sts3.detrend(type='linear') sts3.detrend(type='demean') break_test = sts3 break_test = break_test[0].filter("bandpass", freqmin=1, freqmax=10) sorted_data = sts3.copy() sorted_data = abs(sorted_data[0].data) sorted_data.sort() mid_dat = sorted_data[int(len(sorted_data) / 2)] if sum(abs(sts3[0].data) ) < 10 or sts3[0].stats.npts < 7920000 or mid_dat < 0.1 or sts3[ 0].stats.starttime.timestamp > 1442707190: sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime( year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. sts3 = Stream() sts3 = client.get_waveforms(net, sta, '', cha, t1, t2) sts3.detrend(type='linear') sts3.detrend(type='demean') sts3[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 except: # give 2 seconds of data instead sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. sts3 = Stream() sts3 = client.get_waveforms(net, sta, '', cha, t1, t2) sts3.detrend(type='linear') sts3.detrend(type='demean') sts3[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 #%% LS04 try: sta = 'LS04' # STATION LS04 cha = 'EHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 + day * 24 * 60 * 60 t2 = t1 + 23 * 60 * 60 + 59 * 60 + 59.999 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. 
sts4 = Stream() sts4 = client.get_waveforms(net, sta, '', cha, t1, t2) # st is a stream, we can operate normally as in obspy sts4.detrend(type='linear') sts4.detrend(type='demean') break_test = sts4 break_test = break_test[0].filter("bandpass", freqmin=1, freqmax=10) sorted_data = sts4.copy() sorted_data = abs(sorted_data[0].data) sorted_data.sort() mid_dat = sorted_data[int(len(sorted_data) / 2)] if sum(abs(sts4[0].data) ) < 10 or sts4[0].stats.npts < 7920000 or mid_dat < 0.1: sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime( year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. sts4 = Stream() sts4 = client.get_waveforms(net, sta, '', cha, t1, t2) sts4.detrend(type='linear') sts4.detrend(type='demean') sts4[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 except: # give 2 seconds of data instead sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. sts4 = Stream() sts4 = client.get_waveforms(net, sta, '', cha, t1, t2) sts4.detrend(type='linear') sts4.detrend(type='demean') sts4[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 #%% LS05 try: sta = 'LS05' # STATION LB0S cha = 'EHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 + day * 24 * 60 * 60 t2 = t1 + 23 * 60 * 60 + 59 * 60 + 59.999 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. sts5 = Stream() sts5 = client.get_waveforms(net, sta, '', cha, t1, t2) # st is a stream, we can operate normally as in obspy sts5.detrend(type='linear') sts5.detrend(type='demean') break_test = sts5 break_test = break_test[0].filter("bandpass", freqmin=1, freqmax=10) sorted_data = sts5.copy() sorted_data = abs(sorted_data[0].data) sorted_data.sort() mid_dat = sorted_data[int(len(sorted_data) / 2)] if sum(abs(sts5[0].data) ) < 10 or sts5[0].stats.npts < 7920000 or mid_dat < 0.1: sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime( year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. 
sts5 = Stream() sts5 = client.get_waveforms(net, sta, '', cha, t1, t2) sts5.detrend(type='linear') sts5.detrend(type='demean') sts5[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 except: # give 2 seconds of data instead sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. sts5 = Stream() sts5 = client.get_waveforms(net, sta, '', cha, t1, t2) sts5.detrend(type='linear') sts5.detrend(type='demean') sts5[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 #%% LS06 try: sta = 'LS06' # STATION LS06 cha = 'EHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 + day * 24 * 60 * 60 t2 = t1 + 23 * 60 * 60 + 59 * 60 + 59.999 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. sts6 = Stream() sts6 = client.get_waveforms(net, sta, '', cha, t1, t2) # st is a stream, we can operate normally as in obspy sts6.detrend(type='linear') sts6.detrend(type='demean') break_test = sts6 break_test = break_test[0].filter("bandpass", freqmin=1, freqmax=10) sorted_data = sts6.copy() sorted_data = abs(sorted_data[0].data) sorted_data.sort() mid_dat = sorted_data[int(len(sorted_data) / 2)] if sum(abs(sts6[0].data) ) < 10 or sts6[0].stats.npts < 7920000 or mid_dat < 0.1: sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime( year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. sts6 = Stream() sts6 = client.get_waveforms(net, sta, '', cha, t1, t2) sts6.detrend(type='linear') sts6.detrend(type='demean') sts6[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 except: # give 2 seconds of blank data instead sta = 'LB01' # STATION LB01 cha = 'HHZ' # CHANNEL - Vertical net = 'Z4' # Santiaguito volcano loc = '' # location, it depends mostly of which network you are in. client = Client( '138.253.112.23', 16022) # ip, port - ip's 138.253.113.19 or 138.253.112.23 t0 = UTCDateTime(year1, month1, day1, hour1, minute1, second1) #the format is year:day_of_the_year:month t1 = t0 t2 = t1 + 2 #UTCDateTime(year2, month2, day2, hour2, minute2, second2) # notice we have here 10 minutes, but we can select our times. sts6 = Stream() sts6 = client.get_waveforms(net, sta, '', cha, t1, t2) sts6.detrend(type='linear') sts6.detrend(type='demean') sts6[0].filter("bandpass", freqmin=0.1, freqmax=0.1) num = num - 1 #%% return all stations return (st1, st2, st3, st4, st5, st6, sts1, sts2, sts3, sts4, sts5, sts6, num, lb_num) #st7
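#%% Optional refactor sketch (illustrative, not part of the original script)
# The twelve station blocks above repeat one pattern: fetch a day of data from
# the wave server, detrend it, run a crude quality check (total absolute
# amplitude, number of samples, median absolute amplitude), and fall back to a
# short window from LB01 if the data are missing or fail the check. The helper
# below is a minimal sketch of that pattern: the names fetch_or_fallback and
# quality_ok are hypothetical, the thresholds mirror the ones used above, and
# the client is assumed to be ObsPy's Earthworm wave-server client (matching
# the Client(ip, port) calls above).
import numpy as np
from obspy.clients.earthworm import Client  # assumption: same Client(ip, port) as above


def quality_ok(st, min_abs_sum=10, min_npts=7920000, min_median=0.1):
    # Same idea as the inline checks above: reject near-empty or dead traces
    # based on total amplitude, sample count and median absolute amplitude.
    data = np.abs(st[0].data)
    return (data.sum() >= min_abs_sum and
            st[0].stats.npts >= min_npts and
            np.median(data) >= min_median)


def fetch_or_fallback(client, net, sta, cha, t1, t2, fallback_sta='LB01',
                      fallback_cha='HHZ', fallback_t0=None, fallback_len=2.0):
    # Try the requested station and window; on failure or poor quality return
    # a short placeholder window from the fallback station instead (the
    # original script uses the first two seconds of the overall period).
    try:
        st = client.get_waveforms(net, sta, '', cha, t1, t2)
        st.detrend(type='linear')
        st.detrend(type='demean')
        if quality_ok(st):
            return st, True
    except Exception:
        pass
    t0 = fallback_t0 if fallback_t0 is not None else t1
    st = client.get_waveforms(net, fallback_sta, '', fallback_cha,
                              t0, t0 + fallback_len)
    st.detrend(type='linear')
    st.detrend(type='demean')
    return st, False


# Hypothetical usage: sts1, ok = fetch_or_fallback(client, 'Z4', 'LS01', 'EHZ', t1, t2)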
def _template_gen(picks, st, length, swin='all', prepick=0.05, plot=False):
    r"""Function to generate a cut template in the obspy Stream class from \
    a given set of picks and data, also in an obspy Stream class. Should be \
    given pre-processed data (downsampled and filtered).

    :type picks: list of obspy.core.event.Pick
    :param picks: Picks to extract data around
    :type st: :class:`obspy.core.stream.Stream`
    :param st: Stream to extract templates from
    :type length: float
    :param length: Length of template in seconds
    :type swin: str
    :param swin: P, S or all, defaults to all
    :type prepick: float
    :param prepick: Length in seconds to extract before the pick time, \
        default is 0.05 seconds
    :type plot: bool
    :param plot: To plot the template or not, default is False

    :returns: obspy.Stream with the newly cut template.

    .. note:: By convention templates are generated with P-phases on the \
        vertical channel and S-phases on the horizontal channels; normal \
        seismograph naming conventions are assumed, where Z denotes vertical \
        and N, E, R, T, 1 and 2 denote horizontal channels, either oriented \
        or not. To this end we will **only** use Z channels if they have a \
        P-pick, and will use one or other horizontal channels **only** if \
        there is an S-pick on it.

    .. warning:: If there is no phase_hint included in picks, and swin=all, \
        all channels with picks will be used.
    """
    import copy
    from eqcorrscan.utils.EQcorrscan_plotting import pretty_template_plot as \
        tplot
    from obspy import Stream
    import warnings
    stations = []
    channels = []
    st_stachans = []
    for pick in picks:
        # Check to see that we are only taking the appropriate picks
        if swin == 'all':
            # Annoying compatibility with seisan two-character channel codes
            stations.append(pick.waveform_id.station_code)
            channels.append(pick.waveform_id.channel_code[0] +
                            pick.waveform_id.channel_code[-1])
        elif swin == 'P' and 'P' in pick.phase_hint.upper():
            # Use the 'in' statement to cope with phase names like 'PN' etc.
            stations.append(pick.waveform_id.station_code)
            channels.append(pick.waveform_id.channel_code[0] +
                            pick.waveform_id.channel_code[-1])
        elif swin == 'S' and 'S' in pick.phase_hint.upper():
            stations.append(pick.waveform_id.station_code)
            channels.append(pick.waveform_id.channel_code[0] +
                            pick.waveform_id.channel_code[-1])
        else:
            raise IOError('Phase type is not in [all, P, S]')
    for tr in st:
        st_stachans.append('.'.join([tr.stats.station, tr.stats.channel]))
    for i, station in enumerate(stations):
        if '.'.join([station, channels[i]]) not in st_stachans:
            warnings.warn('No data provided for ' + station + '.' +
                          channels[i])
    # Select which channels we actually have picks for
    for tr in st:
        if tr.stats.station in stations:
            # This is used to cope with seisan handling channel codes as
            # two-character codes; internally we will do the same.
            if len(tr.stats.channel) == 3:
                temp_channel = tr.stats.channel[0] + tr.stats.channel[2]
            elif len(tr.stats.channel) == 2:
                temp_channel = tr.stats.channel
            # Take all channels
            tr.stats.channel = temp_channel
            if 'st1' not in locals():
                st1 = Stream(tr)
            else:
                st1 += tr
    if 'st1' not in locals():
        msg = ('No data available for these picks or no picks match ' +
               'these data! Will not raise an error, but you should check ' +
               'your picks and waveforms.')
        warnings.warn(msg)
        return
    st = copy.deepcopy(st1)
    del st1
    if plot:
        stplot = st.copy()
    # Cut the data
    for tr in st:
        if 'starttime' in locals():
            del starttime
        if swin == 'all':
            for pick in picks:
                if not pick.phase_hint:
                    msg = 'Pick for ' + pick.waveform_id.station_code + '.' + \
                        pick.waveform_id.channel_code + ' has no phase ' + \
                        'hint given, you should not use this template for ' + \
                        'cross-correlation re-picking!'
                    warnings.warn(msg)
                    if pick.waveform_id.station_code == tr.stats.station and \
                            pick.waveform_id.channel_code[0] + \
                            pick.waveform_id.channel_code[-1] == \
                            tr.stats.channel:
                        starttime = pick.time - prepick
                else:
                    # If there is phase information then we should use our
                    # convention.
                    if pick.waveform_id.station_code == tr.stats.station and \
                            pick.waveform_id.channel_code[0] + \
                            pick.waveform_id.channel_code[-1] == \
                            tr.stats.channel and \
                            'P' in pick.phase_hint.upper():
                        starttime = pick.time - prepick
                    elif pick.waveform_id.station_code == tr.stats.station and \
                            tr.stats.channel[-1] in ['1', '2', 'N', 'E',
                                                     'R', 'T'] and \
                            'S' in pick.phase_hint.upper():
                        starttime = pick.time - prepick
        else:
            for pick in picks:
                if pick.waveform_id.station_code == tr.stats.station and \
                        swin in pick.phase_hint.upper():
                    starttime = pick.time - prepick
        if 'starttime' in locals():
            print("Cutting " + tr.stats.station + '.' + tr.stats.channel)
            tr.trim(starttime=starttime, endtime=starttime + length,
                    nearest_sample=False)
            print(tr.stats.starttime)
            print(tr.stats.endtime)
            if 'st1' not in locals():
                st1 = Stream(tr)
            else:
                st1 += tr
        else:
            print('No pick for ' + tr.stats.station + '.' + tr.stats.channel)
        # Ensure that the template is the correct length
        if len(tr.data) == (tr.stats.sampling_rate * length) + 1:
            tr.data = tr.data[0:-1]
    if plot:
        background = stplot.trim(
            st1.sort(['starttime'])[0].stats.starttime - 10,
            st1.sort(['starttime'])[-1].stats.endtime + 10)
        tplot(st1, background=background)
        del stplot
    del st
    # st1.plot(size=(800,600))
    return st1
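# A minimal usage sketch for _template_gen (not from the original module).
# The file names and pre-processing parameters below are placeholders; the
# function expects data that have already been filtered and downsampled, and
# picks given as obspy.core.event.Pick objects (e.g. read from QuakeML).
from obspy import read, read_events

cat = read_events('example_event.xml')        # hypothetical QuakeML file
picks = cat[0].picks
st = read('example_waveforms.mseed')          # hypothetical waveform file
st.detrend('linear')
st.filter('bandpass', freqmin=2.0, freqmax=10.0)   # illustrative pre-processing
st.resample(50.0)

template = _template_gen(picks, st, length=4.0, swin='all', prepick=0.1,
                         plot=False)
if template is not None:
    template.write('example_template.ms', format='MSEED')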
def process(self, cfg):
    # Preparatory steps
    if cfg.testrun:
        teststream = Stream(self.stream[0].copy())
        testtitle = ['Raw data']

    Fs = self.stream[0].stats.sampling_rate
    if Fs > cfg.Fs_new[-1]:
        self.add_antialias(Fs, cfg.Fs_new[-1] * cfg.Fs_antialias_factor)

    if cfg.instr_correction:
        self.add_inv(cfg.instr_correction_input, cfg.instr_correction_unit)

    self.check_nan_inf(cfg.verbose)
    if len(self.stream) == 0:
        return ()

    if cfg.wins_detrend:
        if cfg.verbose:
            print('* Detrend', file=self.ofid)
        self.detrend()

    if cfg.wins_demean:
        if cfg.verbose:
            print('* Demean', file=self.ofid)
        self.demean()

    # ToDo: Prettier event excluder
    if cfg.event_exclude:
        # Re-merging not necessary anymore, as slicing was moved to after
        # decimation (to avoid gaps and overlaps)
        # Re-merge for event exclusion
        # if cfg.verbose:
        #     print('* Re-Merging for event exclusion',
        #           file=self.ofid)
        self.stream._cleanup()
        if cfg.verbose:
            print('* Excluding high energy windows', file=self.ofid)
        # This is run twice
        self.event_exclude(cfg)
        self.event_exclude(cfg)

    # if cfg.wins:
    #     if cfg.verbose:
    #         print('* Slicing stream', file=self.ofid)
    #     self.stream = pp.slice_traces(self.stream,
    #                                   cfg.wins_len_sec, cfg.quality_minlengthsec,
    #                                   cfg.verbose, self.ofid)

    if Fs > cfg.Fs_new[-1]:
        if cfg.verbose:
            print('* Downsample', file=self.ofid)
        self.downsampling(cfg, True)

    if cfg.testrun:
        teststream += self.stream[0].copy()
        testtitle.append('After antialias, downsampling')

    if cfg.wins:
        if cfg.verbose:
            print('* Slicing stream', file=self.ofid)
        self.stream = pp.slice_traces(self.stream, cfg.wins_len_sec,
                                      cfg.quality_minlengthsec,
                                      cfg.verbose, self.ofid)

    if cfg.wins_taper is not None:
        if cfg.verbose:
            print('* Taper', file=self.ofid)
        self.taper(cfg.wins_taper_type, cfg.wins_taper)

    if cfg.testrun:
        teststream += self.stream[0].copy()
        testtitle.append('After detrend, event exclusion')

    if cfg.instr_correction:
        if cfg.verbose:
            print('* Remove response', file=self.ofid)
        self.remove_response(cfg.instr_correction_prefilt,
                             cfg.instr_correction_waterlevel,
                             cfg.instr_correction_unit,
                             cfg.verbose)

    if cfg.testrun:
        teststream += self.stream[0].copy()
        testtitle.append('After instrument correction')
        self.plot_test(teststream, testtitle)

    self.check_nan_inf(cfg.verbose)
    self.stream._cleanup()

    return ()
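# process() above reads its settings from a configuration object `cfg`. The
# sketch below is illustrative only: it lists the attributes the method
# actually accesses, but the values shown are placeholders, not defaults from
# the original project.
from types import SimpleNamespace

cfg = SimpleNamespace(
    verbose=True,
    testrun=False,
    Fs_new=[20.0, 1.0],              # target sampling rates; last entry is the final rate
    Fs_antialias_factor=0.4,         # anti-alias corner = Fs_new[-1] * this factor
    wins=True,                       # slice the stream into windows
    wins_len_sec=3600.0,
    quality_minlengthsec=0.0,
    wins_detrend=True,
    wins_demean=True,
    wins_taper=0.05,                 # taper width; None disables tapering
    wins_taper_type='cosine',
    event_exclude=False,             # exclude high-energy windows (applied twice)
    instr_correction=True,
    instr_correction_input='staxml',
    instr_correction_unit='VEL',
    instr_correction_prefilt=(0.005, 0.01, 8.0, 10.0),
    instr_correction_waterlevel=60.0,
)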
        fout.write('%s\t%.6f\t%.6f\t%.2f\n' % (stas[k], lon, lat, alt))
    fout.close()

if gps2sac:
    stanames = genfromtxt(station_file, usecols=0, dtype='S')
    coords = genfromtxt(station_file, usecols=[1, 2])
    for k in range(len(gps_files)):
        print(k)
        # Now read the data
        gps = genfromtxt(gps_files[k], skip_header=4)
        gps = gps[::-1]
        # Initialize obspy stream objects
        n = Stream(Trace())
        e = Stream(Trace())
        u = Stream(Trace())
        # Fill gaps with zeros
        t = gps[:, 0]
        dt = t[1] - t[0]
        print('dt=' + str(dt))
        gap_positions = where(diff(t) > dt + dt / 10)[0] + 1
        print(str(len(gap_positions) + 1) + ' segments (' +
              str(len(gap_positions)) + ' gaps) found')
        if len(gap_positions) > 0:
            # There are gaps
            for i in range(len(gap_positions)):
                if i == 0:
                    # Fill with data (first trace)
                    n[0].data = gps[0:gap_positions[0], 2]