def plot_dist_RMS(path, origin):
    """
    Function to read the origin-time RMS for every event in a database and
    plot it against distance from a given origin.

    :type path: str
    :param path: Database to read
    :type origin: tuple
    :param origin: Lat, Long and Depth of origin
    """
    import glob
    import matplotlib.pyplot as plt
    from pro import Sfile_util
    sfilelist = glob.glob(path+'/*/*/*.S*')
    RMS = []
    dist = []
    for sfile in sfilelist:
        header = Sfile_util.readheader(sfile)
        RMS.append(header.t_RMS)
        dist.append(dist_calc(origin, (header.latitude, header.longitude,
                                       header.depth)))
    # plt.semilogy(dist, RMS, c='red', marker='o', ls='None')
    plt.plot(dist, RMS, c='red', marker='o', ls='None')
    plt.xlabel('Distance (km)')
    plt.ylabel('RMS (s)')
    plt.xlim([0, 350])
    plt.ylim([0, 10])
    plt.title('RMS with distance from '+str(origin[0])+', '+str(origin[1]))
    plt.show()
    return
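
# Example usage (illustrative sketch only): the database path and origin
# below are hypothetical, and dist_calc is assumed to be defined elsewhere
# in this module, taking two (lat, long, depth) tuples and returning a
# distance in km.
#
# >>> plot_dist_RMS('REA/TEST_', (-43.5, 170.4, 5.0))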
def summary_table(path, output='csv'):
    """
    Function to generate a summary table of earthquake information.

    Can output either as a .csv or a .tex file.

    :type path: str
    :param path: Database directory
    :type output: str
    :param output: Either 'csv' or 'tex'
    """
    import glob
    from pro import Sfile_util
    # Check that the output format is recognised
    if output not in ('csv', 'tex'):
        print 'Unknown output format: '+output
        raise ValueError('Output format not recognised')
    sfilelist = glob.glob(path+'/*/*/*.S*')
    f = open('Summary.'+output, 'w')
    # Write header
    if output == 'csv':
        f.write('Date, Origin time (UTC), Latitude (deg), Longitude (deg), '+
                'Depth (km), Magnitude (seisan), Magnitude (local)\n')
    elif output == 'tex':
        # Raw strings stop python interpreting \b and \t as escape characters
        f.write(r'\begin{tabular}{c c c c c c c}'+'\n')
        f.write(r'\textbf{Date} & \textbf{Origin time (UTC)} & '+
                r'\textbf{Latitude (deg)} & \textbf{Longitude (deg)} & '+
                r'\textbf{Depth (km)} & \textbf{Magnitude (seisan)} & '+
                r'\textbf{Magnitude (local)}\\'+'\n')
        f.write(r'\hline'+'\n')
    # Write contents
    for sfile in sfilelist:
        EQ_info = Sfile_util.readheader(sfile)
        Mag_out, Mag_std = event_magnitude(sfile)
        date_str = str(EQ_info.time.year)+'/'+\
            str(EQ_info.time.month).zfill(2)+'/'+\
            str(EQ_info.time.day).zfill(2)
        # Write a two-digit fraction of a second (centiseconds) rather than
        # the raw microsecond count
        time_str = str(EQ_info.time.hour).zfill(2)+':'+\
            str(EQ_info.time.minute).zfill(2)+':'+\
            str(EQ_info.time.second).zfill(2)+'.'+\
            str(EQ_info.time.microsecond / 10000).zfill(2)
        if output == 'csv':
            f.write(date_str+', '+time_str+', '+
                    str(EQ_info.latitude)+', '+str(EQ_info.longitude)+', '+
                    str(EQ_info.depth)+', '+str(EQ_info.Mag_1)+', '+
                    str(Mag_out)+'\n')
        elif output == 'tex':
            f.write(date_str+' & '+time_str+' & '+
                    str(EQ_info.latitude)+' & '+str(EQ_info.longitude)+' & '+
                    str(EQ_info.depth)+' & '+str(EQ_info.Mag_1)+' & '+
                    str(Mag_out)+r'\\'+'\n')
    # Write end of table for latex
    if output == 'tex':
        f.write(r'\end{tabular}')
    f.close()
    print 'Written summary file: Summary.'+output
    return
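
# Example usage (illustrative sketch; the path is hypothetical). The 'tex'
# output is written as a bare tabular so it can be \input into a larger
# LaTeX document:
#
# >>> summary_table('REA/TEST_', output='csv')
# >>> summary_table('REA/TEST_', output='tex')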
def event_magnitude(sfile, min_sta=3):
    """
    Function to generate the local magnitude of a single event from a seisan
    s-file with amplitude picks in it.

    :type sfile: str
    :param sfile: Nordic type s-file name, with full path
    :type min_sta: int
    :param min_sta: Minimum number of amplitude picks required

    :return: Local magnitude, standard deviation of magnitude
    """
    import glob
    import numpy as np
    from pro import Sfile_util
    from par import mag_conv_par as mag_par
    # Check that the s-file exists
    if not glob.glob(sfile):
        raise NameError('Sfile does not exist: '+sfile)
    picks = Sfile_util.readpicks(sfile)
    Mag_out = []
    for pick in picks:
        if pick.phase == 'IAML':
            # Look up the station correction, defaulting to 1.0 if the
            # station is not in the correction list
            sta_cor = 1.0
            for station in mag_par.station_corrections:
                if pick.station == station[0]:
                    sta_cor = station[1]
            # Note, seisan stores half peak-to-peak amplitudes. mag_conv may
            # return nan if the earthquake is too far away or the frequency
            # is not set
            Magnitude = mag_conv(pick.amplitude*2, pick.distance, sta_cor,
                                 mag_par.frequency_dependent,
                                 pick.peri)
            if not np.isnan(Magnitude):
                Mag_out.append(Magnitude)
    # Require at least min_sta amplitude picks
    if len(Mag_out) >= min_sta:
        Mag_std = np.std(Mag_out)
        Mag_out = np.mean(Mag_out)  # Take the mean magnitude
        print Mag_out
        return Mag_out, Mag_std
    else:
        return np.nan, np.nan
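
# Example usage (illustrative sketch; the s-file name below is hypothetical
# but follows the SEISAN dd-hhmm-ssL.Syyyymm convention):
#
# >>> Ml, Ml_std = event_magnitude('REA/TEST_/2014/01/01-0101-10L.S201401')
# >>> # (nan, nan) is returned when there are fewer than min_sta amplitude
# >>> # picks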
                                 defaults.trigbase, defaults.routype)
    # Generate file list which can then be autoregistered
    # f1 = open('filenr.lis', 'w')
    # listno = 0
    # for wavefile in wavelist:
    #     listno += 1
    #     f1.write('#'+str(listno).rjust(3)+' '+wavefile+'\n')
    # f1.close()
    print 'Generating s-files'
    import ntpath
    sfilelist = []
    for wavepath in wavelist:
        wavefile = ntpath.basename(wavepath)
        # Copy the file locally so that we can cope with filename
        # concatenation
        shutil.copy(wavepath, wavefile)
        # Write to the local directory, otherwise fortran concatenation
        # errors are likely in the filename when running the defaults.picker
        # routine - the s-files are moved later in the script
        sfilelist.append(Sfile_util.blanksfile(wavefile, 'L', defaults.userID,
                                               '.', defaults.overwrite))
else:
    # wavelist = glob.glob('*-*-*-*-*.*_*_*')
    import ntpath
    sfilelist = glob.glob('*L.S*')
    print 'No detection routine selected, you just have continuous data'

################### Picking routines ###########################################
if defaults.picker == 'FP':
    print 'Will now run the filterpicker routine'
    # Run the filterpicker routine, adapted within rtquake and altered for use
    # here by Calum Chamberlain
    i = 0
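
# For reference, a minimal sketch of registering a single waveform file by
# hand, using the same blanksfile call as above (the waveform name and
# userID are hypothetical; arguments are wavefile, event type, userID,
# output directory and overwrite flag):
#
# >>> sfile = Sfile_util.blanksfile('2014-01-01-0101-00.NS_030_00', 'L',
# ...                               'CALU', '.', True)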
def plot_residuals(path):
    """
    Function to read in S-files and make a histogram of the pick residuals
    for each station.

    :type path: str
    :param path: Database directory (above the year directories)
    """
    import glob
    import matplotlib.pyplot as plt
    import numpy as np
    from pro import Sfile_util
    sfilelist = glob.glob(path+'/*/*/*.S*')
    # stachan_list holds [station, channel, P-residuals, S-residuals]
    stachan_list = []
    for sfile in sfilelist:
        picks = Sfile_util.readpicks(sfile)
        for pick in picks:
            # Ignore nan and large (> 4 s) residuals, and the 'HT' channel
            if np.isnan(pick.timeres) or abs(pick.timeres) > 4.0 or \
               pick.channel == 'HT':
                continue
            match = 0
            for stachan in stachan_list:
                if pick.station == stachan[0] and pick.channel == stachan[1]:
                    if pick.phase == 'P':
                        stachan[2].append(pick.timeres)
                    elif pick.phase == 'S':
                        stachan[3].append(pick.timeres)
                    match = 1
            if match == 0:
                print 'Found picks for: '+pick.station+' '+pick.channel
                if pick.phase == 'P':
                    stachan_list.append([pick.station, pick.channel,
                                         [pick.timeres], []])
                elif pick.phase == 'S':
                    stachan_list.append([pick.station, pick.channel,
                                         [], [pick.timeres]])
    # Print out some useful stats
    ppicks = 0
    spicks = 0
    presidual = 0
    sresidual = 0
    for stachan in stachan_list:
        ppicks += len(stachan[2])
        spicks += len(stachan[3])
        presidual += sum(stachan[2])
        sresidual += sum(stachan[3])
    print 'Total P-picks: '+str(ppicks)
    print 'Total S-picks: '+str(spicks)
    print 'Mean P residual: '+str(presidual/ppicks)
    print 'Mean S residual: '+str(sresidual/spicks)
    # Concatenate the picks for each station across channels: one subplot
    # per station
    stachan_list.sort()
    sta_list = []
    for stachan in stachan_list:
        if len(sta_list) == 0 or sta_list[-1][0] != stachan[0]:
            sta_list.append([stachan[0], 'all', list(stachan[2]),
                             list(stachan[3])])
        else:
            sta_list[-1][2] += stachan[2]
            sta_list[-1][3] += stachan[3]
    # squeeze=False keeps axes as an array even when there is one station
    fig, axes = plt.subplots(len(sta_list), 1, sharex=True, squeeze=False)
    print 'I have picks for '+str(len(sta_list))+' stations'
    axes = axes.ravel()
    i = 0
    for stachan in sta_list:
        print 'Plotting for station: '+stachan[0]
        if len(stachan[2]) != 0:
            n, bins, patches = axes[i].hist(stachan[2],
                                            bins=np.arange(-4.0, 4.0, 0.025),
                                            facecolor='Black', alpha=0.5)
            axes[i].text(0.85, 0.8, r'$\ P:\ \mu='+str(np.mean(stachan[2]))[0:4]+
                         r',\ \sigma='+str(np.std(stachan[2]))[0:4]+
                         r',\ n='+str(len(stachan[2]))+'$',
                         horizontalalignment='center',
                         verticalalignment='center',
                         transform=axes[i].transAxes)
        if len(stachan[3]) != 0:
            n, bins, patches = axes[i].hist(stachan[3],
                                            bins=np.arange(-4.0, 4.0, 0.025),
                                            facecolor='Red', alpha=0.75)
            axes[i].text(0.15, 0.8, r'$\ S:\ \mu='+str(np.mean(stachan[3]))[0:4]+
                         r',\ \sigma='+str(np.std(stachan[3]))[0:4]+
                         r',\ n='+str(len(stachan[3]))+'$',
                         horizontalalignment='center',
                         verticalalignment='center',
                         transform=axes[i].transAxes, color='Red')
        axes[i].set_ylabel(stachan[0])
        axes[i].yaxis.tick_right()
        axes[i].locator_params(axis='y', nbins=2)
        i += 1
    axes[i-1].set_xlabel('Pick residual (s)')
    plt.xlim(-2.0, 2.0)
    fig.subplots_adjust(hspace=0.25)
    fig.subplots_adjust(wspace=0)
    fig.text(0.94, 0.5, 'Number of picks', ha='center', va='center',
             rotation=270)
    plt.show()
    # plt.savefig('residuals.eps')
    return stachan_list
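
# Example usage (illustrative sketch; the path is hypothetical). Each entry
# of the returned list is [station, channel, P-residuals, S-residuals]:
#
# >>> stachans = plot_residuals('REA/TEST_')
# >>> for stachan in stachans:
# ...     print stachan[0], stachan[1], len(stachan[2]), len(stachan[3])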
def recalc_database(path, plot=True):
    """
    Overarching code to recalculate and plot all the magnitudes for a given
    database in seisan. Must be a database of s-files in a SEISAN REA
    structure.

    :type path: str
    :param path: Path to the top of the rea tree (above the year directories)
    :type plot: bool
    :param plot: Whether to plot the magnitude distributions

    :return: Event info, list of tuples (Mag_out, Mag_in, Date, Location)
    """
    import glob
    import warnings
    import numpy as np
    from par import mag_conv_par as mag_par
    from pro import Sfile_util
    from obspy import UTCDateTime
    if not glob.glob(path):
        raise NameError('Path does not exist '+path)
    sfilelist = glob.glob(path+'/*/*/*.S??????')
    sfilelist.sort()
    Mag_in = []       # Pre-existing local magnitudes
    Mag_out = []      # Recalculated local magnitudes
    Mag_clipped = []  # Pre-existing magnitudes for the recalculated events
    Date = []
    Event_info = []
    for sfile in sfilelist:
        print 'Working on sfile: '+sfile
        try:
            header = Sfile_util.readheader(sfile)
        except IndexError:
            warnings.warn(sfile+' is corrupt')
            continue  # Skip corrupt s-files rather than aborting
        Date.append(header.time)
        if header.time == UTCDateTime(0):
            raise ValueError(sfile+' has 0 date')
        # Only want to take the local magnitudes
        if not np.isnan(header.Mag_1) and header.Mag_1_type == 'L':
            Mag_in.append(header.Mag_1)
        Magnitude = np.nan
        # Do not recalculate explosions (ev_id == 'E')
        if header.Mag_1_type == 'L' and header.ev_id != 'E':
            Magnitude = event_magnitude(sfile)[0]
        if not np.isnan(Magnitude):
            Mag_out.append(Magnitude)
            Mag_clipped.append(header.Mag_1)
            Event_info.append([Magnitude, header.Mag_1, header.time,
                               (header.latitude, header.longitude,
                                header.depth)])
    if plot:
        import matplotlib.pyplot as plt
        try:
            fig, ax1 = plt.subplots()
            # Plot histograms of the old and new magnitudes
            bins = np.arange(-1, 7, 0.2)
            n, bins, patches = ax1.hist(Mag_in, bins, facecolor='Black',
                                        alpha=0.5,
                                        label='Previous, n='+str(len(Mag_in)))
            n, bins, patches = ax1.hist(Mag_clipped, bins, facecolor='Black',
                                        alpha=0.7,
                                        label='Previous (recalculated subset), n='+
                                        str(len(Mag_clipped)))
            n, bins, patches = ax1.hist(Mag_out, bins, facecolor='Red',
                                        alpha=0.7,
                                        label='Recalculated, n='+str(len(Mag_out)))
            plt.legend()
            ax1.set_ylabel('Number of events')
            ax1.set_ylim([0, max(n)+0.5*max(n)])
            plt.xlabel('Local Magnitude $M_L$')
            # Overlay the cumulative number of events above each magnitude
            Mag_out = np.sort(Mag_out)
            cdf = np.arange(len(Mag_out))/float(len(Mag_out))
            cdf = ((cdf*-1.0)+1.0)*len(Mag_out)
            ax2 = ax1.twinx()
            ax2.plot(Mag_out, np.log10(cdf), 'r', linewidth=2.0,
                     label='Recalculated')
            Mag_in = np.sort(Mag_in)
            cdf = np.arange(len(Mag_in))/float(len(Mag_in))
            cdf = ((cdf*-1.0)+1.0)*len(Mag_in)
            ax2.plot(Mag_in, np.log10(cdf), 'k', linewidth=2.0,
                     label='Previous')
            ax2.set_ylabel('$Log_{10}$ of cumulative density')
            plt.show()
        except AttributeError:
            print '\nError plotting'
    return Event_info
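
# Example usage (illustrative sketch; the path is hypothetical). Event_info
# entries are [Mag_out, Mag_in, Date, (lat, long, depth)]:
#
# >>> Event_info = recalc_database('REA/TEST_', plot=False)
# >>> for Mag_new, Mag_old, origin_time, location in Event_info:
# ...     print origin_time, Mag_old, Mag_new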