statsx = {
    # (leading header keys are truncated in this fragment)
    'station': 'RASPI',
    'location': '00',
    'channel': 'BH' + x,
    'npts': length,
    'sampling_rate': sampling_rate,
    'mseed': {'dataquality': 'D'},
    'starttime': starttime,
}

Xt = Trace(data=data[x], header=statsx)
Xt_filt = Xt.copy()
Xt_filt.filter('lowpass', freq=20.0, corners=2, zerophase=True)

stream = Stream(traces=[Xt_filt])
stream.plot(type='dayplot', outfile='dayplotFilter' + x + '.png',
            size=size, events=events)

stream = Stream(traces=[Xt])
stream.plot(type='dayplot', outfile='dayplot' + x + '.png',
            size=size, events=events)

# Remove all the downloaded and generated files
os.system('rm -rf /root/earthquaketemp')
os.system(
    'rclone copy /root google:earthquake/%s -vv -P --retries 1 --no-traverse '
    '--fast-list --transfers 10 --include=*.png --include=*.npy'
    % (process_date))
os.system('rm -rf /root/*.png')
os.system('rm -rf /root/*.npy')
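# The fragment above assumes `size` and `events` were defined earlier.  A
# minimal, hedged sketch of what they could look like (the event time and
# label below are made up; ObsPy's dayplot accepts either a Catalog or a
# list of dicts with 'time' and 'text' keys):
from obspy import UTCDateTime

size = (1024, 768)  # dayplot width and height in pixels
events = [{'time': UTCDateTime('2019-01-01T12:34:56'), 'text': 'example event'}]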
debug = True

files = glob.glob('/tr1/telemetry_days/XX_FBA2/2017/2017_011/00_EH*.seed')
if debug:
    print(files)

# Make a stream object to hold the data
st = Stream()
for curfile in files:
    if debug:
        print('Here is our current file: ' + curfile)
    st += read(curfile, starttime=stime, endtime=etime)
st.merge()
if debug:
    print(st)
st.plot()

# Here we can do a 10 volt test
st2 = st.copy()
nstime = UTCDateTime('2017-011T16:59:55.0')
netime = nstime + 20.
st2.trim(starttime=nstime, endtime=netime)
# st2 now has data for our first 10 V test

# Now we convert st2 into volts
# Make two empty lists to save our results
mminus = []
sminus = []
# This is the first voltage
for tr in st2:
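    # The loop body is truncated in the original snippet.  A hedged sketch of
    # what it presumably does, given the empty mminus/sminus lists above:
    # convert counts to volts and store the mean and standard deviation of
    # each trace.  The bit weight of 40 V per 2**26 counts is an assumption,
    # not a value taken from the original.
    tr.data = tr.data * (40. / 2.**26)
    mminus.append(tr.data.mean())
    sminus.append(tr.data.std())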
outdir = build_event_directory_for_nonlinloc(this_event[0], networks, stations,
                                             freqmin=1, freqmax=10,
                                             deconvolve=False,
                                             only_vertical_channel=False,
                                             time_before=60, time_after=300)

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# CHECK IF SEISMIC EVENT IS CONFIRMED OR REJECTED
sacfiles = glob.glob(outdir + "/*HZ*.sac")
st = Stream()
for sacfile in sacfiles:
    st1 = read(sacfile)
    for tr in st1:
        ind = np.where(tr.stats.station == stations)[0][0]
        tr.stats.distance = calc_vincenty_inverse(stlats[ind], stlons[ind],
                                                  this_event[2],
                                                  this_event[1])[0]
        st += tr

fig = plt.figure()
st.plot(type='section', method='full', orientation='vertical', time_down=True,
        linewidth=.25, grid_linewidth=.25, show=False, fig=fig)

ax = fig.axes[0]
transform = blended_transform_factory(ax.transData, ax.transAxes)
for tr in st:
    ax.text(tr.stats.distance / 1e3, 1.0, tr.stats.station, rotation=270,
            va="bottom", ha="center", transform=transform, zorder=10)

figManager = plt.get_current_fig_manager()
figManager.window.showMaximized()
plt.show()

flag = input("\n+ Is this a local earthquake? (yes/no): ")
while flag != "yes" and flag != "no":
    flag = input("+ Is this a local earthquake? (yes/no): ")
data = numpy.zeros([datapoints], dtype=numpy.int16)
starttime = UTCDateTime()

adc.start_adc_difference(0, gain=GAIN, data_rate=sps)
for x in range(datapoints):
    # sample = adc.read_adc_difference(0, gain=GAIN)
    sample = adc.get_last_result()
    data[x] = sample
    timenow = UTCDateTime()
    # print(sample, timenow)
adc.stop_adc()

stats = {
    'network': 'TV',
    'station': 'RASPI',
    'location': '00',
    'channel': 'BHZ',
    'npts': datapoints,
    'sampling_rate': sampling,
    'mseed': {'dataquality': 'D'},
    'starttime': starttime,
}

stream = Stream([Trace(data=data, header=stats)])
stream.write('test.mseed', format='MSEED', encoding='INT16', reclen=512)
stream.plot()
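# A quick, hedged sanity check (not part of the original): read the file just
# written back in with ObsPy and confirm the header and number of samples.
from obspy import read

st_check = read('test.mseed')
print(st_check)                               # one Trace with id TV.RASPI.00.BHZ
assert st_check[0].stats.npts == datapoints   # all samples made it to disk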
length = data.shape[0]

# Update the ObsPy stats structure for plots, from the data read
print("Updating trace stats...")
statsx.update({'npts': length})
statsx.update({'sampling_rate': int(sampling_rate / decimation)})
statsx.update({'starttime': starttime})

# ______________________________________________________________________________
# Generate the dayplot and write to MiniSEED format, for each component
print("Generating trace...")
statsx.update({'channel': 'BH' + str(comp) + "." + filename_date[4:6] + "-" +
               filename_date[6:8]})
Xt = Trace(data=data[:], header=statsx)
# Xt.filter('lowpass', freq=50, corners=2, zerophase=True)
del data

stream = Stream(traces=[Xt])
del Xt

# Plot output
print("Generating plot...")
outfile = os.path.join(
    plotdir, filename_date[0:8] + '-dayplotFilter' + str(comp) + '.png')
stream.plot(type='dayplot', outfile=outfile, size=size, events=cat_all)

print("Miniseed writing...")
outminiseed = os.path.join(
    miniseeddir, filename_date[0:8] + "-comp" + str(comp) + '.mseed')
stream.write(outminiseed, format='MSEED')
def Read_event(Year, jJul, Hour, Second, Station, plot):
    """Return a stream containing the 3 traces N, Z, E of an event (calib = 1).

    * input:
        - Station : station considered, e.g. '1'
        - Year    : year of the event, e.g. 15
        - jJul    : julian day
        - Hour    : hour of the event, e.g. '08'
        - plot    : if True the plot will be shown
    * output:
        - st : type Stream; Stream containing the 3 traces E, N, Z
    * examples:
        1. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        Station, Year, jJul, Hour = ConvertDatestr(B[j])
        ==> (Station, Year, jJul, Hour) = ('2', '15', '001', '00')
        st = Read_event(Year, jJul, Hour, 12, Station, True)
        2. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        for j in [1, 222344, 222345]:
            Station, Year, jJul, Hour = ConvertDatestr(B[j])
            print(ConvertDatestr(B[j]))
            if not os.path.exists('/home/claire/PHD/Working/Data/Wangrong_seismic_data/20%s/R%s.02/GSW0%s.%s.%s.%s.00.00.BHN.SAC'
                                  % (Year, jJul, Station, Year, jJul, str(Hour))):
                print('file not existing', jJul, Hour)
                continue
            Read_event(Year, jJul, Hour, 12, Station, True)
    """
    # 0. Normalise the date fields for file reading ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    if int(Second) >= 3600:
        print(Second)
        h = int(Second) // 3600
        Hour = int(Hour) + h
        Second = int(Second) % 3600
        if int(Hour) > 23:
            jJul = str(int(jJul) + 1)
            Hour = str(int(Hour) - 24)
        else:
            jJul = str(int(jJul))
        if int(Hour) < 10:
            Hour = '0' + str(int(Hour))
        if int(jJul) < 10:
            jJul = '00' + str(int(jJul))
        elif int(jJul) < 100:
            jJul = '0' + str(int(jJul))

    # 1. Reading files ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    st = Stream()
    trE = read('/home/burtin/DATA/LinTianShan/Seismic_Data/20%s/R%s.02/GSW0%s.%s.%s.%s.00.00.BHE.SAC'
               % (Year, jJul, Station, Year, jJul, str(Hour)))[0]
    trE.stats.calib = 1
    st.append(trE)
    trN = read('/home/burtin/DATA/LinTianShan/Seismic_Data/20%s/R%s.02/GSW0%s.%s.%s.%s.00.00.BHN.SAC'
               % (Year, jJul, Station, Year, jJul, str(Hour)))[0]
    trN.stats.calib = 1
    st.append(trN)
    trZ = read('/home/burtin/DATA/LinTianShan/Seismic_Data/20%s/R%s.02/GSW0%s.%s.%s.%s.00.00.BHZ.SAC'
               % (Year, jJul, Station, Year, jJul, str(Hour)))[0]
    trZ.stats.calib = 1
    st.append(trZ)

    # 2. Plotting waveform ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    if plot:
        fig = plt.figure()
        fig.canvas.set_window_title('Waveform_%s_%s' % (jJul, Hour))
        st.plot(fig=fig)
    return st
def Stream_Correctionall(st, plot):
    """Return a stream corrected for the instrument response, with calibration = 1.

    * inputs:
        - st   : type Stream; stream to be corrected
        - plot : if plot == True, plots of the uncorrected and corrected
                 waveforms will be shown
    * output:
        - st : type Stream; Stream containing the 3 traces E, N, Z corrected
               for the instrument response
    * example:
        stCorrected = Stream_Correctionall(st, False)
    """
    # 0. Set calib of every trace in the stream to 1 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    for tr in st:
        tr.stats.calib = 1
    st_copy = st.copy()
    stcorrect = Stream()

    # 1. Remove mean ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    st.detrend('demean')    # mean of the data is subtracted

    # 2. Detrend signal with 'simple' ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    st.detrend('simple')    # subtracts a linear function defined by the first/last sample of the trace

    # 3. Instrument response correction ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    for tr in st:
        Station = tr.stats.station.split('0')[1]
        if Station in ["1", "2", "3", "4", "5", "6"]:
            zeros = np.array([0.0 + 1j * 0.0, 0.0 + 1j * 0.0,
                              -392.0 + 1j * 0.0, -1960.0 + 1j * 0.0,
                              -1490.0 + 1j * 1740.0, -1490.0 - 1j * 1740.0])
            poles = np.array([-0.03691 + 0.03702j, -0.03691 - 0.03702j,
                              -343.080 + 1j * 0.0,
                              -370.0 + 467.0 * 1j, -370.0 - 467.0 * 1j,
                              -836.0 + 1522.0 * 1j, -836.0 - 1522.0 * 1j,
                              -4900.0 + 4700.0 * 1j, -4900.0 - 4700.0 * 1j,
                              -6900.0 + 0.0j, -15000 + 0.0 * 1j])
            paz_st = {'poles': poles, 'zeros': zeros,
                      'gain': 4.3449E17, 'sensitivity': 6.1358E+08}
        elif Station == '7':
            # A/D factor = 16.53 V / 2**24; sensitivity = Sv * G(=1) / AD
            paz_st = {'poles': [-1.48597E-1 + 1j * 1.48597E-1,
                                -1.48597E-1 + 1j * -1.48597E-1,
                                -2.46936E3 + 1j * 0.0,
                                -4.70635E1 + 1j * 0.0,
                                -3.36765E2 + 1j * -1.36655E2,
                                -3.36765E2 + 1j * 1.36655E2],
                      'zeros': [-3.16174E1 + 1j * 0.0,
                                0.0 + 1j * 0.0, 0.0 + 1j * 0.0],
                      'gain': 4.8053e+8,
                      'sensitivity': 2.4347e+9}
        tr.simulate(paz_remove=paz_st, water_level=1E-4,
                    simulate_sensitivity=False)
        stcorrect.append(tr)

    if plot:
        fig = plt.figure()
        plt.tick_params(axis='x', which='both',          # hide x ticks and labels
                        bottom=False, top=False, labelbottom=False)
        plt.tick_params(axis='y', which='both',          # hide y ticks and labels
                        left=False, right=False, labelleft=False)
        plt.title('Uncorrected')
        st_copy.plot(fig=fig)
        fig1 = plt.figure()
        plt.tick_params(axis='x', which='both',
                        bottom=False, top=False, labelbottom=False)
        plt.tick_params(axis='y', which='both',
                        left=False, right=False, labelleft=False)
        plt.title('Corrected')
        stcorrect.plot(fig=fig1)
    return stcorrect
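# A hedged usage sketch chaining the two helpers above (the station and date
# values are illustrative only, taken from the Read_event docstring example):
st = Read_event('15', '001', '00', 12, '2', plot=False)
st_corr = Stream_Correctionall(st, plot=True)
print(st_corr)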
def plot(self, filter=None, save=False, show=True, ttgrid=None):
    st = Stream()
    willy = seispy.burrow.Groundhog()
    distance = sorted([
        gps2dist_azimuth(self.lat, self.lon,
                         arrival.station.lat, arrival.station.lon)[0]
        for arrival in self.arrivals
    ])
    dmin, dmax = min(distance), max(distance)
    startlag = dmin / 6000. - 7
    endlag = dmax / 2000. + 5
    for arrival in self.arrivals:
        st += willy.fetch(arrival.station.name, arrival.channel.code,
                          starttime=self.time + startlag,
                          endtime=self.time + endlag)
    arrivals = sorted(
        self.arrivals,
        key=lambda arrival: (arrival.station.network,
                             arrival.station.name,
                             arrival.channel))
    if filter is not None:
        st.filter(*filter[0], **filter[1])
    st.trim(starttime=self.time + startlag + 2.)
    st.normalize()
    MAX_TRACES = 9
    ncol = int(ceil(len(st) / float(MAX_TRACES))) + 1
    nrow = int(ceil(len(st) / float(ncol - 1)))
    gs = GridSpec(nrow, ncol)
    gs.update(hspace=0, wspace=0)
    width = 1600
    height = width / float(ncol)
    fig = st.plot(size=(width, height), handle=True)
    row, col = 0, 0
    for i in range(len(fig.axes)):
        ax = fig.axes[i]
        arrival = arrivals[i]
        color = ("r" if arrival.phase == "P"
                 else "g" if arrival.phase == "S" else "b")
        ax.axvline(arrival.time.toordinal() +
                   arrival.time._get_hours_after_midnight() / 24.,
                   color=color, linewidth=2, alpha=0.75)
        if ttgrid is not None:
            r, theta, phi = sp.geometry.geo2sph(self.lat, self.lon, self.depth)
            try:
                predicted = self.time + ttgrid.get_tt(arrival.station.name,
                                                      arrival.phase,
                                                      r, theta, phi)
            except KeyError:
                continue
            ax.axvline(predicted.toordinal() +
                       predicted._get_hours_after_midnight() / 24.,
                       color=color, linewidth=2, linestyle="--", alpha=0.75)
        if row % nrow == 0:
            col += 1
            row = 0
        position = gs[row, col].get_position(fig)
        ax.set_position(position)
        ax.get_yaxis().set_visible(False)
        row += 1
    for ax in fig.axes[nrow - 1::nrow] + [fig.axes[-1]]:
        ax.set_xticklabels(ax.get_xticklabels(), visible=True, fontsize=10,
                           rotation=-15, horizontalalignment="left")
    gs.update(wspace=0.2)
    postl = gs[0].get_position(fig)
    posbl = gs[ncol * (nrow - 1)].get_position(fig)
    bbox_map = Bbox(((posbl.x0, posbl.y0), (posbl.x1, postl.y1)))
    ax = fig.add_axes(bbox_map)
    self.plot_map(ax=ax)
    fig.suptitle("%s (ID #%d)" % (self.time, self.evid))
    if save:
        plt.savefig("%s.png" % save, format="png")
    if show:
        plt.show()
    else:
        plt.close()
def _plot_command():
    """
    Command-line plotting interface
    """
    obs_types = [s for s in chan_maps]
    epilog = "--sfilt returns the first parenthetised subgroup, using \n"
    epilog += " python's re.search(). Some useful codes are: \n"
    epilog += "   '.': matches any character\n"
    epilog += "   '+': matches 1 or more repetitions of the preceding RE\n"
    epilog += "   '?': matches 0 or 1 repetitions of the preceding RE\n"
    epilog += "   '+?': like +, but non-greedy (don't match as many as possible)\n"
    epilog += "   '\\': escapes special characters, like '*', '-' and '/')\n"
    epilog += "   '()': delimit the subgroup to return\n\n"
    epilog += " Some examples are: \n"
    epilog += "   FILENAME                 | PATTERN        | RESULT\n"
    epilog += "   =========================+================+=============\n"
    epilog += "   haha-MOVA-OBS1-blah.lch  | '\-(.+)\-'     | MOVA-OBS1 \n"
    epilog += "   haha-MOVA-OBS1-blah.lch  | '\-(.+)\-.+\-' | MOVA \n"
    epilog += "   haha-MOVA-OBS1-blah.lch  | '\-.+\-(.+)\-' | OBS1 \n"
    epilog += "   MOVA/haha.raw.lch        | '([^\/]+)'     | MOVA \n"
    parser = argparse.ArgumentParser(
        description=__doc__, epilog=epilog,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("infiles", nargs="+", help="Input filename(s)")
    parser.add_argument("-s", "--start", dest="starttime", metavar="TIME",
                        default=0,
                        help="start time (ISO8601, or seconds after last file "
                             "start) (default: %(default)s)")
    parser.add_argument("-e", "--end", dest="endtime", metavar="TIME",
                        default=3600.,
                        help="end time (ISO8601, or seconds from start time) "
                             "(default: %(default)s)")
    parser.add_argument("-t", "--type", choices=obs_types, dest="obs_type",
                        metavar='TYPE', default='SPOBS2',
                        help="obs type. Allowed choices are " +
                             ", ".join(obs_types) + " (default: %(default)s)")
    parser.add_argument("--net", dest="network", default='NN',
                        help="network code (default: %(default)s)")
    parser.add_argument("--chan", dest="channel", default='*',
                        help="Plot only the given SEED channel/s (default: "
                             "%(default)s)")
    parser.add_argument("--equal_scale", action="store_true",
                        help="Force all traces equal y scale")
    parser.add_argument('-v', "--verbose", dest="verbose", action="store_true",
                        help="Print information about the first and last "
                             "read blocks")
    my_group = parser.add_mutually_exclusive_group(required=False)
    my_group.add_argument("--sta", dest="station", default='STA',
                          help="station code. A 2-digit counter will be "
                               "appended if more than one file is read. "
                               "(default: %(default)s)")
    my_group.add_argument("--sfilt", dest="station_filt",
                          help="regex filter to find station name in filename "
                               "(see examples below)")
    args = parser.parse_args()

    # Set/normalize start and end times
    endtime = _normalize_time_arg(args.endtime)
    if endtime == 0:
        endtime = 3600.
    if not args.starttime:
        # set starttime to the latest-starting file
        args.starttime = UTCDateTime(0)
        for infile in args.infiles:
            s, e = get_data_timelimits(infile)
            if s > args.starttime:
                args.starttime = s

    # Read file(s)
    station_code = None
    if args.station:
        if len(args.infiles) == 1:
            station_code = args.station
    stream = Stream()
    for i, infile in enumerate(args.infiles):
        if args.station_filt:
            # print(re.search(args.station_filt, infile))
            try:
                station_code = re.search(args.station_filt, infile).group(1)
            except Exception:
                print('no station code found using re.search("{}", "{}")'.format(
                    args.station_filt, infile))
                station_code = None
        if station_code is None:
            station_code = f'STA{i:02d}'
        s = read(infile,
                 _normalize_time_arg(args.starttime),
                 _normalize_time_arg(args.endtime),
                 network=args.network,
                 station=station_code,
                 obs_type=args.obs_type,
                 verbose=args.verbose)
        if s is not None:
            s = s.select(channel=args.channel)
            stream += s
        station_code = None

    if len(stream) > 0:
        stream.plot(size=(800, 600), equal_scale=args.equal_scale,
                    method='full')
    else:
        print('Nothing read, nothing plotted!')
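# A hedged usage sketch (not from the original): _plot_command() reads
# sys.argv, so it can be exercised programmatically by patching argv first.
# The filename and --sfilt pattern below come from the epilog examples above;
# the program name 'lcplot' is only a placeholder.
import sys

sys.argv = ['lcplot', 'haha-MOVA-OBS1-blah.lch',
            '--type', 'SPOBS2', '--net', 'NN', '--sfilt', r'\-.+\-(.+)\-']
_plot_command()    # would plot the file as network "NN", station "OBS1"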