# Assumed imports for this reader (uquake mirrors the obspy.core API):
import numpy as np
from datetime import datetime, timedelta

from uquake.core import Stream, Trace, UTCDateTime
from uquake.core.trace import Stats


def read_IMS_ASCII(path, net='', **kwargs):
    """
    Read an IMS_ASCII seismogram from a single station.

    :param path: path to file
    :return: uquake.core.stream.Stream
    """
    data = np.loadtxt(path, delimiter=',', skiprows=1)
    stats = Stats()

    with open(path) as fid:
        field = fid.readline().split(',')

    stats.sampling_rate = float(field[1])
    timetmp = datetime.fromtimestamp(float(field[5])) \
        + timedelta(seconds=float(field[6]) / 1e6)  # trigger time in seconds

    trgtime_UTC = UTCDateTime(timetmp)
    stats.starttime = trgtime_UTC - float(field[10]) / stats.sampling_rate
    stats.npts = len(data)
    stats.station = field[8]
    stats.network = net

    traces = []
    component = np.array(['X', 'Y', 'Z'])
    std = np.std(data, axis=0)  # per-component standard deviation (unused below)
    mstd = np.max(std)          # (unused below)

    for k, dt in enumerate(data.T):
        stats.channel = '%s' % component[k]
        traces.append(Trace(data=np.array(dt), header=stats))

    return Stream(traces=traces)
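# Runnable sketch: synthesize a file whose header line carries only the
# fields read_IMS_ASCII actually indexes (1, 5, 6, 8 and 10). A real
# IMS_ASCII header holds more metadata; every value below is a placeholder.
demo_header = ['0', '6000.0', '0', '0', '0',
               '1530859000', '250000', '0', 'STA01', '0', '100']
demo_rows = np.random.randn(512, 3)
with open('demo_ims.asc', 'w') as f:
    f.write(','.join(demo_header) + '\n')
    for row in demo_rows:
        f.write('%f,%f,%f\n' % tuple(row))

st = read_IMS_ASCII('demo_ims.asc', net='OT')
print(st)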
def python2obspy(self):
    from obspy.core.trace import Stats, Trace
    from obspy.core.utcdatetime import UTCDateTime

    s = Stats()
    s.network = self.network
    s.station = self.station
    s.location = self.location
    s.channel = self.channel
    s.sampling_rate = self.sampling_rate
    s.starttime = UTCDateTime(self.starttime)
    s.npts = len(self.data)

    # Work on a copy so popping 'CALIB' does not mutate self.misc_fields
    misc_fields = dict(self.misc_fields)
    if 'CALIB' in misc_fields:
        s.calib = misc_fields.pop('CALIB')
    s.update(misc_fields)

    return Trace(self.data[:], header=s)
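# Hedged usage sketch: a minimal stand-in record carrying the attributes that
# python2obspy reads (attribute names taken from the method body above; the
# values are arbitrary).
import numpy as np
from types import SimpleNamespace

rec = SimpleNamespace(network='XX', station='STA1', location='', channel='HHZ',
                      sampling_rate=100.0, starttime=0.0,
                      data=np.zeros(100), misc_fields={'CALIB': 1.0})
tr = python2obspy(rec)
assert tr.stats.calib == 1.0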
# Assumes (from the surrounding module): import os, and Stats, Trace, Stream
# from obspy.core.
def to_sac_and_mseed(self, export_path, station_number, force_without_loc):
    # Check if the files already exist
    export_path_sac = export_path + self.get_export_file_name() + ".sac"
    export_path_msd = export_path + self.get_export_file_name() + ".mseed"
    #export_path_wav = export_path + self.get_export_file_name() + ".wav"
    if os.path.exists(export_path_sac) and os.path.exists(export_path_msd):
        return

    # Check if the station location has been calculated
    if self.station_loc is None and not force_without_loc:
        print self.get_export_file_name() + ": skipping sac/mseed generation; waiting for the next ascent to compute location"
        return

    # Fill header info
    stats = Stats()
    stats.sampling_rate = self.decimated_fs
    stats.network = "MH"
    stats.station = station_number
    stats.starttime = self.date
    stats.sac = dict()
    if not force_without_loc:
        stats.sac["stla"] = self.station_loc.latitude
        stats.sac["stlo"] = self.station_loc.longitude
    stats.sac["stdp"] = self.depth
    stats.sac["user0"] = self.snr
    stats.sac["user1"] = self.criterion
    stats.sac["iztype"] = 9  # 9 == IB in the SAC format

    # Save data into a Stream object
    trace = Trace()
    trace.stats = stats
    trace.data = self.data
    stream = Stream(traces=[trace])

    # Save stream object
    print export_path_sac
    stream.write(export_path_sac, format='SAC')
    print export_path_msd
    stream.write(export_path_msd, format='MSEED')
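# Minimal runnable sketch of the Stats + stats.sac pattern used above, with
# synthetic data (every header value here is a made-up placeholder):
import numpy as np
from obspy.core.stream import Stream
from obspy.core.trace import Trace, Stats

sk_stats = Stats()
sk_stats.sampling_rate = 40.0
sk_stats.network = "MH"
sk_stats.station = "P0025"
sk_stats.sac = {"stla": -13.5, "stlo": -171.2, "iztype": 9}

sk_tr = Trace(data=np.zeros(400, dtype=np.float32), header=sk_stats)
Stream(traces=[sk_tr]).write("example.sac", format="SAC")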
def invert_raw():
    # mode, file_path and sampling_freq are module-level settings of the
    # original tool; sampling_freq must be declared global because it may be
    # re-assigned from a "*_freq" file below.
    global sampling_freq

    ######################################
    # Binary
    ######################################
    if mode == "Binary":
        catch_files = []
        files = glob.glob(file_path + "*")
        for file in files:
            catch = re.findall(
                r".*[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}_[0-9]{2}_[0-9]{2}\.[0-9]{6}",
                file)
            if len(catch) > 0:
                catch_files.append(file)

        ######################################
        # Freq file
        ######################################
        freq_file = glob.glob(file_path + "*_freq")
        if len(freq_file) > 1:
            print "Warning: more than one freq file in folder"
        if len(freq_file) == 0:
            print "Warning: no freq file discovered, using: " + str(sampling_freq)
        else:
            with open(freq_file[0], "r") as f:
                content = f.read()
            sampling_freq = float(content)

        print "Sampling used: " + str(sampling_freq)

        files_nb = len(catch_files)
        file_offset = 1
        for catch_file in catch_files:
            print catch_file
            print "File nb: " + str(file_offset) + "/" + str(files_nb)
            date = UTCDateTime(re.findall(
                r".*([0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}_[0-9]{2}_[0-9]{2}\.[0-9]{6})",
                catch_file)[0])
            rawdata = numpy.fromfile(catch_file, numpy.int32)

            ######################################
            # Plot plotly file
            ######################################
            # Add acoustic values to the graph
            #data_line = graph.Scattergl(x=[date + i / sampling_freq for i in range(0, len(rawdata))],
            #                            y=rawdata,
            #                            name="counts",
            #                            line=dict(color='blue', width=2),
            #                            mode='lines')
            #plotlydata = [data_line]
            #layout = graph.Layout(title="Plot",
            #                      xaxis=dict(title='Date', titlefont=dict(size=18)),
            #                      yaxis=dict(title='Counts', titlefont=dict(size=18)),
            #                      hovermode='closest')
            #plotly.plot({'data': plotlydata, 'layout': layout},
            #            filename=catch_file + ".html",
            #            auto_open=False)

            ######################################
            # Create SAC file
            ######################################
            # Fill header info
            stats = Stats()
            stats.sampling_rate = sampling_freq
            stats.network = "test"
            stats.station = 0
            stats.starttime = date
            stats.sac = dict()

            # Save data into a Stream object
            trace = Trace()
            trace.stats = stats
            trace.data = rawdata
            stream = Stream(traces=[trace])

            # Save stream object
            stream.write(catch_file + ".sac", format='SAC')
            stream.write(catch_file + ".mseed", format='MSEED')

            file_offset = file_offset + 1

    else:
        ######################################
        # Text
        ######################################
        # Example input (these values were present, commented out, in the
        # original; without them this branch has no filename/date/rawdata).
        # The ASCII counts are parsed as int32 so the SAC writer accepts them.
        filename = "tool_invert_raw/1553771378.490936"
        date = UTCDateTime(1553771378.490936)
        f = open(filename, 'r')
        rawdata = numpy.array(f.read().rstrip('\n').split('\n'),
                              dtype=numpy.int32)
        f.close()

        ######################################
        # Plot plotly file
        ######################################
        # Add acoustic values to the graph
        data_line = graph.Scattergl(
            x=[date + i / sampling_freq for i in range(0, len(rawdata))],
            y=rawdata,
            name="counts",
            line=dict(color='blue', width=2),
            mode='lines')
        plotlydata = [data_line]
        layout = graph.Layout(title="Plot",
                              xaxis=dict(title='Date', titlefont=dict(size=18)),
                              yaxis=dict(title='Counts', titlefont=dict(size=18)),
                              hovermode='closest')
        plotly.plot({'data': plotlydata, 'layout': layout},
                    filename=filename + ".html",
                    auto_open=False)

        ######################################
        # Create SAC file
        ######################################
        # Fill header info
        stats = Stats()
        stats.sampling_rate = sampling_freq
        stats.network = "test"
        stats.station = 0
        stats.starttime = date
        stats.sac = dict()

        # Save data into a Stream object
        trace = Trace()
        trace.stats = stats
        trace.data = rawdata
        stream = Stream(traces=[trace])

        # Save stream object
        stream.write(filename + ".sac", format='SAC')
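# Hedged sketch of the raw-binary ingestion step above, using a synthetic
# int32 file (the file name and header values are illustrative):
import numpy
from obspy.core.trace import Trace, Stats

numpy.arange(100, dtype=numpy.int32).tofile("counts.bin")
counts = numpy.fromfile("counts.bin", numpy.int32)

demo_stats = Stats()
demo_stats.sampling_rate = 40.0  # the tool reads this from a "*_freq" file
demo_tr = Trace(data=counts, header=demo_stats)
print(demo_tr)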
# Assumed imports (from the surrounding module): numpy as np,
# datetime.timedelta, dateutil.parser.parse, and Stats, Trace, Stream,
# UTCDateTime from uquake.core.
def read_TEXCEL_CSV(filename, **kwargs):
    """
    Reads a texcel csv file and returns a uquake Stream object.

    .. warning::
        This function should NOT be called directly, it registers via the
        uquake :func:`~uquake.core.stream.read` function, call this instead.

    :param filename: the path to the file
    :param kwargs:
    :return: ~uquake.core.stream.Stream
    """
    with open(filename) as fle:
        x = []
        y = []
        z = []

        for k, line in enumerate(fle):
            if k == 0:
                if 'MICROPHONE' in line:
                    offset = 9
                else:
                    offset = 8

            # header
            if k < 2:
                continue

            val = line.strip().split(',')

            # relative time
            if k == 3:
                rt0 = timedelta(seconds=float(val[0]))
            elif k == 6:
                # the station field is quoted in the file; eval strips the quotes
                station = str(eval(val[offset]))
            elif k == 7:
                date = val[offset]
            elif k == 8:
                date_time = date + " " + val[offset]
                starttime = parse(date_time) + rt0
            elif k == 9:
                site = val[offset]
            elif k == 10:
                location = val[offset]
            elif k == 17:
                sensitivity_x = float(val[offset])
                sensitivity_y = float(val[offset + 1])
                sensitivity_z = float(val[offset + 2])
            elif k == 18:
                range_x = float(val[offset])
                range_y = float(val[offset + 1])
                range_z = float(val[offset + 2])
            elif k == 19:
                trigger_x = float(val[offset])
                trigger_y = float(val[offset + 1])
                trigger_z = float(val[offset + 2])
            elif k == 20:
                si_x = float(val[offset])
                si_y = float(val[offset + 1])
                si_z = float(val[offset + 2])
            elif k == 21:
                sr_x = float(val[offset])
                sr_y = float(val[offset + 1])
                sr_z = float(val[offset + 2])

            # data columns 1-3 accompany every row after the first two
            x.append(float(val[1]))
            y.append(float(val[2]))
            z.append(float(val[3]))

    x = np.array(x)
    y = np.array(y)
    z = np.array(z)

    stats = Stats()
    stats.network = site
    stats.delta = si_x / 1000.0
    stats.npts = len(x)
    stats.location = location
    stats.station = station
    stats.starttime = UTCDateTime(starttime)

    stats.channel = 'radial'
    tr_x = Trace(data=x / 1000.0, header=stats)

    stats.delta = si_y / 1000.0
    stats.channel = 'transverse'
    tr_y = Trace(data=y / 1000.0, header=stats)

    stats.delta = si_z / 1000.0
    stats.channel = 'vertical'
    tr_z = Trace(data=z / 1000.0, header=stats)

    return Stream(traces=[tr_x, tr_y, tr_z])
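# Usage sketch: per the docstring, this reader registers as a plugin behind
# uquake's read(); the path and format name below are assumptions for
# illustration only:
#
#   from uquake.core import read
#   st = read('blast_0001.csv', format='TEXCEL_CSV')
#   print(st)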
def attach_obspy_trace_stats(self, kstnm, kinst, force_without_loc=False):
    '''Attaches attribute: obspy_trace_stats, an obspy.core.trace.Stats instance.

    obspy_trace_stats holds metadata common to both miniSEED and SAC formats.
    obspy_trace_stats.sac holds extra metadata only found in the SAC format.
    Floats are NOT converted to np.float32() in either case.

    NB: the SAC header value shown to the world (e.g., "sac.delta"), and the
    private SAC header written to disk (e.g., "sac._hf[0]"), differ in type.
    The relevant float header values that actually get written to disk with
    sac.write are stored in the private "._hf" attribute, which is not
    generated with initialization of the raw Stats() container. Therefore, if
    printing those values to, e.g., a text file, ensure the relevant F (float)
    fields are cast to np.float32 first. For example:

    >> from obspy.core.trace import Trace
    >> from obspy.io.sac.sactrace import SACTrace
    >> trace = Trace()
    >> sac = SACTrace.from_obspy_trace(trace)  # <-- called by sac.write (within stream.write)
    >> sac.delta = 1/20
    >> isinstance(sac.delta, float)        # <-- True: this is the public attr shown to the world
    >> isinstance(sac.delta, np.float32)   # <-- False
    >> isinstance(sac._hf[0], float)       # <-- False
    >> isinstance(sac._hf[0], np.float32)  # <-- True: this is the private attr written to disk

    For more detail see:
    http://www.adc1.iris.edu/files/sac-manual/manual/file_format.html

    Update function `events.write_metadata` if the fields in this method are
    changed.

    '''
    # Fill metadata common to SAC and miniSEED formats
    stats = Stats()
    stats.network = utils.network()
    stats.station = kstnm
    stats.location = "00"
    stats.channel = utils.band_code(self.decimated_fs) + "DH"  # SEED manual Appendix A
    stats.starttime = self.corrected_starttime
    stats.sampling_rate = self.decimated_fs
    stats.npts = len(self.processed_data)

    # Extra metadata, some of which is only written to SAC files
    keys = ['stla', 'stlo', 'stel', 'stdp', 'scale', 'cmpaz', 'cmpinc',
            'user0', 'user1', 'user2', 'user3', 'kinst', 'kuser0', 'kuser1',
            'kuser2']
    def_float = -12345.

    # Default SAC header (we may not fill all of these keys)
    stats.sac = dict.fromkeys(keys, def_float)

    # Fill station-location header fields
    if not force_without_loc:
        stats.sac["stla"] = self.station_loc.latitude
        stats.sac["stlo"] = self.station_loc.longitude

    # Elevation is 0 (our reference is truly sea level)
    stats.sac["stel"] = 0

    # Add scaling factor to convert digital counts to Pa
    stats.sac["scale"] = utils.sacpz_const()

    # Add dip (CMPINC; "component incidence") in SAC dip convention, using as guide:
    # https://github.com/iris-edu/mseed2sac/blob/master/doc/mseed2sac.md
    #
    # SAC dip convention: "degrees down from vertical up/outward",
    # i.e., BHN, BHE = 90, BHZ = 0
    #
    # SEED dip convention: "degrees down from horizontal",
    # i.e., BHN, BHE = 0, BHZ = -90
    stats.sac["cmpinc"] = 0  # SAC dip

    # Add azimuth: horizontal projection of component vector measured clockwise
    # from north. It is 0 for vertical components. Theoretically, BHN = 0,
    # BHE = 90.
    stats.sac["cmpaz"] = 0

    # NB: I checked how IRIS serves up hydrophone data (in MATLAB):
    # >> s = irisFetch.Stations('channel', '*', '*', '*', '?DH')
    #
    # For all 3233 channels from 2147 stations that were returned:
    # dip = -90, 0, or 90
    # azimuth = 0 or 360
    #
    # For dip = -90, I assume that is the SEED dip convention
    # For dip = +90, I do not know; I thought perhaps it might be some(thing
    # like a?) right-hand-rule convention, but not all +90 dips are associated
    # with 360 azimuth

    # REQ events do not record their depth at the time of acquisition, and
    # because the onboard detection algorithm was not triggered there are no
    # trigger parameters to report
    if not self.is_requested:
        stats.sac["stdp"] = self.depth  # meters (from external pressure sensor; down is positive)
        stats.sac["user0"] = self.snr
        stats.sac["user1"] = self.criterion
        stats.sac["user2"] = self.trig  # sample index

    # Clock drift correction, which is the 'Time correction' applied in the
    # 48-byte fixed header in utils.set_mseed_time_correction()
    stats.sac["user3"] = self.clockdrift_correction  # = self.mseed_time_correction

    # Generic instrument (e.g., '452.020')
    stats.sac['kinst'] = kinst

    # automaid version number
    stats.sac["kuser0"] = self.__version__

    # String describing detection/request status, and number of wavelet scales
    # transmitted (e.g., 'DET.WLT5')
    reqdet_scales = self.processed_file_name.split('.')[-2:]
    stats.sac['kuser1'] = '.'.join(reqdet_scales)

    # String detailing the type of (i)CDF24 transform: edge correction and
    # normalization
    stats.sac['kuser2'] = 'ec' + self.edges_correction + 'norm' + self.normalized

    # Attach Stats to events object
    self.obspy_trace_stats = stats
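# Hedged sketch of what a band-code helper such as utils.band_code may do,
# following SEED manual Appendix A for the common pass bands (illustration
# only, not the project's actual implementation; the corner-period criterion
# that Appendix A also applies is ignored here):
def band_code_sketch(sample_rate):
    if 1 < sample_rate < 10:
        return 'M'   # mid period
    if 10 <= sample_rate < 80:
        return 'B'   # broad band
    if 80 <= sample_rate < 250:
        return 'H'   # high broad band
    raise ValueError('sample rate outside sketched bands')

# e.g., a 40 Hz hydrophone channel would be named "BDH"
assert band_code_sketch(40) + 'DH' == 'BDH'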
def dorange(self):
    # load batches
    print "mkms: loading batches.."
    self.bdatas = []
    for i in self.ids:
        d = Dat()
        d.read(os.path.join(self.root, str(i) + '.DAT'))
        self.bdatas.append(d.bdata)

    # set up datastream
    print "mkms: setting up stream for %s.." % self.station,
    self.st = Stream()

    for bd in self.bdatas:
        for b in bd.batches:
            s = Stats()
            s.sampling_rate = self.sampling_rate
            s.npts = b.length
            s.network = self.network
            s.location = self.location
            s.station = self.station
            s.channel = self.channel
            s.starttime = UTCDateTime(b.ref / 1000000.0)

            t = Trace(data=numpy.array(b.samples_i, dtype=numpy.int32),
                      header=s)
            self.st.append(t)

    print "done."

    # generate file name
    self.name = self.st[0].id.replace('.', '_')
    self.start = self.st[0].stats.starttime
    self.name = self.start.strftime("%Y-%m-%d-%H%M-%S") + '.' + self.name

    if self.optplot:
        self.plot()

    if not self.optnowrite:
        print "mkms: writing %s.mseed.." % self.name,
        if not os.path.exists(self.destdir):
            os.makedirs(self.destdir)

        self.st.write(os.path.join(self.destdir, self.name + '.mseed'),
                      format='MSEED', encoding='INT32', byteorder=1,
                      flush=1, verbose=0)
        print "done."

        # write ids and refs
        idsf = open(os.path.join(self.destdir, self.name + '.ids'), 'w')
        refsf = open(os.path.join(self.destdir, self.name + '.refs'), 'w')

        for bd in self.bdatas:
            idsf.write("%d,%d\n" % (bd.id, 1 if bd.e_sdlag else 0))
            for b in bd.batches:
                refsf.write("%d,%d,%d,%d,%s,%s,%s,%s,%s,%d\n" %
                            (bd.id, b.no, b.ref, b.status,
                             b.latitude[:-2], b.latitude[-2:],
                             b.longitude[:-2], b.longitude[-2:],
                             b.checksum, 1 if b.checksum_pass else 0))

        idsf.close()
        refsf.close()

        return (self.name + '.mseed', idsf, refsf)
    else:
        print "mkms: would write %s.mseed (disabled)." % os.path.join(self.destdir, self.name)
        return None
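# Minimal runnable sketch of the Stats -> Trace -> Stream -> miniSEED chain
# used above, with synthetic int32 samples (all header values illustrative):
import numpy
from obspy.core.stream import Stream
from obspy.core.trace import Trace, Stats
from obspy.core.utcdatetime import UTCDateTime

hdr = Stats()
hdr.sampling_rate = 250.0
hdr.network, hdr.station, hdr.location, hdr.channel = 'GB', 'TEST1', '00', 'EHZ'
hdr.starttime = UTCDateTime(1400000000.0)

trc = Trace(data=numpy.zeros(1000, dtype=numpy.int32), header=hdr)
Stream(traces=[trc]).write('example.mseed', format='MSEED',
                           encoding='INT32', byteorder=1)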
if PlotUnit == 'VEL':
    # integrate acceleration if plotting velocity
    traceN.integrate(method='cumtrapz')
    traceE.integrate(method='cumtrapz')

plottheta = theta[thetacount] * 180 / np.pi
thetacount = thetacount + 1

# store stats
stationname = sta + '%04d' % i
channelnameN = cha + 'N'
channelnameE = cha + 'E'

# for NS components
statsN = Stats()
statsN.sampling_rate = 1.0 / sampling_rate_x  # sampling_rate_x holds the sample interval (s)
statsN.delta = sampling_rate_x
statsN.starttime = starttime
statsN.npts = len(traceN.data)
statsN.network = net
statsN.station = stationname
statsN.location = ''
statsN.channel = channelnameN
traceN.stats = statsN
traceN.stats.sac = obspy.core.AttribDict()
traceN.stats.sac.back_azimuth = plottheta  # use this as azimuth of station

# --- applying filters ---
traceN.filter('bandpass', freqmin=freqmin, freqmax=freqmax)
tN = traceN.stats.starttime
traceN.trim(starttime=tN, endtime=tN + trim_end_time)
traceN.taper(0.05, side='right')
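# Runnable sketch of the same processing chain applied to traceN above, on a
# synthetic acceleration trace (corner frequencies and window length are
# placeholders):
import numpy as np
from obspy.core.trace import Trace, Stats

syn_stats = Stats()
syn_stats.sampling_rate = 100.0
syn_stats.channel = 'HNN'

syn_tr = Trace(data=np.random.randn(2000), header=syn_stats)
syn_tr.integrate(method='cumtrapz')    # acceleration -> velocity
syn_tr.filter('bandpass', freqmin=0.1, freqmax=10.0)
syn_tr.trim(starttime=syn_tr.stats.starttime,
            endtime=syn_tr.stats.starttime + 10.0)
syn_tr.taper(0.05, side='right')
print(syn_tr)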