def plot_dist_RMS(path, origin, xlim=(0, 350), ylim=(0, 10)):
    """
    Read every S-file header in a database and plot origin-time RMS
    against epicentral distance from a given reference origin.

    :type path: str
    :param path: Database to convert (top of a SEISAN REA tree)
    :type origin: tuple
    :param origin: Lat, Long and Depth of origin
    :type xlim: tuple
    :param xlim: Distance-axis limits in km, defaults to (0, 350)
    :type ylim: tuple
    :param ylim: RMS-axis limits in seconds, defaults to (0, 10)
    """
    import glob
    import matplotlib.pyplot as plt
    from pro import Sfile_util
    sfilelist = glob.glob(path + '/*/*/*.S*')
    RMS = []
    dist = []
    for sfile in sfilelist:
        header = Sfile_util.readheader(sfile)
        RMS.append(header.t_RMS)
        dist.append(dist_calc(origin, (header.latitude, header.longitude,
                                       header.depth)))
    plt.plot(dist, RMS, c='red', marker='o', ls='None')
    plt.xlabel('Distance in km')
    plt.ylabel('RMS (s)')
    plt.xlim(list(xlim))
    plt.ylim(list(ylim))
    plt.title('RMS with distance from ' + str(origin[0]) + ', ' +
              str(origin[1]))
    plt.show()
    return
def summary_table(path, output='csv'):
    """
    Generate a summary table of earthquake information for a database.

    Writes 'Summary.csv' or 'Summary.tex' in the working directory, one
    row per S-file found under path.

    :type path: str
    :param path: Database directory
    :type output: str
    :param output: Either 'csv' or 'tex'

    :raises ValueError: If output is not 'csv' or 'tex'
    """
    import glob
    from pro import Sfile_util
    # Check that the output format is recognised before doing any work
    if output not in ['csv', 'tex']:
        print(output)
        raise ValueError('Output format not recognised')
    sfilelist = glob.glob(path + '/*/*/*.S*')
    f = open('Summary.' + output, 'w')
    # Write header
    if output == 'csv':
        f.write('Date, Origin time (UTC), Latitude (deg), Longitude (deg),'
                ' Depth (km), Magnitude(seisan), Magnitude(local)\n')
    elif output == 'tex':
        # Backslashes must be doubled: the original single-backslash
        # strings turned '\b' and '\t' into escape characters and '\\ '
        # into a single backslash, corrupting the LaTeX output.
        f.write('\\begin{table}{c c c c c c c}\n')
        f.write('\\textbf{Date} & \\textbf{Origin time (UTC)} & '
                '\\textbf{Latitude (deg)} & \\textbf{Longitude (deg)} & '
                '\\textbf{Depth (km)} & \\textbf{Magnitude(seisan)} & '
                '\\textbf{Magnitude (local)}\\\\ \n')
        f.write('\\hline\n')
    # Write contents, one row per S-file
    for sfile in sfilelist:
        EQ_info = Sfile_util.readheader(sfile)
        Mag_out, Mag_std = event_magnitude(sfile)
        t = EQ_info.time
        date_str = (str(t.year) + '/' + str(t.month).zfill(2) + '/' +
                    str(t.day).zfill(2))
        # Microseconds run to six digits, so zfill(6) keeps the decimal
        # fraction correct (the original zfill(2) did not pad properly).
        time_str = (str(t.hour).zfill(2) + ':' + str(t.minute).zfill(2) +
                    ':' + str(t.second).zfill(2) + '.' +
                    str(t.microsecond).zfill(6))
        fields = [date_str, time_str, str(EQ_info.latitude),
                  str(EQ_info.longitude), str(EQ_info.depth),
                  str(EQ_info.Mag_1), str(Mag_out)]
        if output == 'csv':
            f.write(', '.join(fields) + '\n')
        elif output == 'tex':
            f.write(' & '.join(fields) + '\\\\ \n')
    # Write end of table for latex
    if output == 'tex':
        f.write('\\end{table}')
    f.close()
    print('Written summary file: Summary.' + output)
    return
def recalc_database(path, plot=True):
    """
    Overarching code to recalculate and plot all the magnitudes for a
    given database in seisan. Must be a database of S-files in a SEISAN
    REA structure.

    :type path: str
    :param path: Path to the top of the rea tree (above the year
        directories)
    :type plot: bool
    :param plot: Whether to plot histograms and cumulative densities

    :return: Event info, list of lists [Mag_out, Mag_in, Date, Location]

    :raises NameError: If path does not exist
    :raises ValueError: If an S-file has a zero (epoch) origin time
    """
    import glob
    import warnings
    import numpy as np
    from pro import Sfile_util
    from obspy import UTCDateTime
    if not glob.glob(path):
        raise NameError('Path does not exist ' + path)
    sfilelist = glob.glob(path + '/*/*/*.S??????')
    sfilelist.sort()
    Mag_in = []  # Only want to take the local magnitudes
    Mag_out = []
    Mag_clipped = []
    Date = []
    Event_info = []
    for sfile in sfilelist:
        print('Working on sfile: ' + sfile + '\r')
        try:
            # Read the header once and reuse it; the original re-read
            # the S-file for every single attribute access.
            header = Sfile_util.readheader(sfile)
        except IndexError:
            warnings.warn(sfile + ' is corrupt')
            # NOTE(review): break aborts the whole database on the first
            # corrupt file; 'continue' may be intended - confirm.
            break
        Date.append(header.time)
        if header.time == UTCDateTime(0):
            raise ValueError(sfile + ' has 0 date')
        if not np.isnan(header.Mag_1) and header.Mag_1_type == 'L':
            Mag_in.append(header.Mag_1)
        Magnitude = np.nan
        # Only recalculate local magnitudes for non-explosion events
        if header.Mag_1_type == 'L' and header.ev_id != 'E':
            Magnitude = event_magnitude(sfile)[0]
        if not np.isnan(Magnitude):
            Mag_out.append(Magnitude)
            Mag_clipped.append(header.Mag_1)
            Event_info.append([Magnitude, header.Mag_1, header.time,
                               (header.latitude, header.longitude,
                                header.depth)])
    if plot:
        import matplotlib.pyplot as plt
        try:
            fig, ax1 = plt.subplots()
            # Histograms: all previous, previous-with-recalc, recalculated
            bins = np.arange(-1, 7, 0.2)
            n, bins, patches = ax1.hist(
                Mag_in, bins, facecolor='Black', alpha=0.5,
                label='Previous, n=' + str(len(Mag_in)))
            n, bins, patches = ax1.hist(
                Mag_clipped, bins, facecolor='Black', alpha=0.7,
                label='Previous, n=' + str(len(Mag_clipped)))
            n, bins, patches = ax1.hist(
                Mag_out, bins, facecolor='Red', alpha=0.7,
                label='Recalculated, n=' + str(len(Mag_out)))
            plt.legend()
            ax1.set_ylabel('Number of events')
            ax1.set_ylim([0, max(n) + 0.5 * max(n)])
            plt.xlabel('Local Magnitude $M_L$')
            # Cumulative density of recalculated magnitudes on twin axis
            Mag_out = np.sort(Mag_out)
            cdf = np.arange(len(Mag_out)) / float(len(Mag_out))
            cdf = ((cdf * -1.0) + 1.0) * len(Mag_out)
            ax2 = ax1.twinx()
            ax2.plot(Mag_out, np.log10(cdf), 'r', linewidth=2.0,
                     label='Recalculated')
            # Cumulative density of previous magnitudes
            Mag_in = np.sort(Mag_in)
            cdf = np.arange(len(Mag_in)) / float(len(Mag_in))
            cdf = ((cdf * -1.0) + 1.0) * len(Mag_in)
            ax2.plot(Mag_in, np.log10(cdf), 'k', linewidth=2.0,
                     label='Previous')
            ax2.set_ylabel('$Log_{10}$ of cumulative density')
            plt.show()
            return Event_info
        except AttributeError:
            print('\nError plotting')
            return Event_info
    else:
        return Event_info