def ffpscatter(passed_staev, all_events=False):
    """
    Scatter-plot band value (index [5]) against measured dT (index [2]).

    :param passed_staev: nested lists of station/event records; each innermost
        record is indexed with [5] (x axis) and [2] (y axis).
        For all_events=False the nesting is passed_staev[station][record],
        for all_events=True it is passed_staev[event][station][record]
        (inferred from the original indexing -- TODO confirm against callers)
    :param all_events: when True, plot every event and additionally export the
        collected band/dT columns to MATLAB via py2mat_mod.
    :return: None (shows a matplotlib figure as a side effect)
    """
    if not all_events:
        # single event: two nesting levels, plot only
        for station in passed_staev:
            for record in station:
                plt.plot(record[5], record[2], 'ko')
        plt.show()
    else:
        # all events: three nesting levels; also collect columns for export
        ls_all_passed_band = []
        ls_all_passed_dt = []
        for event in passed_staev:
            for station in event:
                for record in station:
                    plt.plot(record[5], record[2], 'ko')
                    ls_all_passed_band.append(record[5])
                    ls_all_passed_dt.append(record[2])
        plt.ion()
        # local import keeps the MATLAB bridge optional for the simple path
        import py2mat_mod
        py2mat_mod.py2mat(ls_all_passed_band,
                          'dispersion_all_passed_band',
                          'dispersion_all_passed_band')
        py2mat_mod.py2mat(ls_all_passed_dt,
                          'dispersion_all_passed_dt',
                          'dispersion_all_passed_dt')
        plt.show()
# [0] in all_passed_staev[j][0] shows the current band # in general we have a loop over bands and in each step there is just # one band that we are working with which is accessible by [0] for k in range(len(all_passed_staev[j][0])): if not all_passed_staev[j][0][k] == []: if not nr_cc: t_shift_array.append(all_passed_staev[j][0][k][2]) else: # keep the name as t_shift_array to not change the whole script! # However, if nr_cc is selected, it will be number of stations vs # cross correlation coefficient t_shift_array.append(all_passed_staev[j][0][k][4]) print 'Length of all passed data: %s' % len(t_shift_array) import py2mat_mod py2mat_mod.py2mat(t_shift_array, 't_shift_array_%s' % bands[i], 't_shift_array_%s' % bands[i]) nr_dt(t_shift_array, num_bands=len(bands), enum=i, leg=str(band_period[str(bands[i])]) + 's', line_plot=line_plot) if nr_cc: #Pdiff #plt.vlines(x=0.8, ymin=0.0, ymax=80000, lw=2, linestyle='--') #plt.xlim(-1.1, 1.1) #plt.ylim(ymax=80000) plt.vlines(x=0.8, ymin=0.0, ymax=215000, lw=2, linestyle='--') plt.xlim(-1.1, 1.1) plt.ylim(ymax=10000) plt.xlabel('xcorrelation factor', fontsize = 'xx-large', weight = 'bold') plt.ylabel('nr of data', fontsize = 'xx-large', weight = 'bold') plt.xticks(np.arange(-1.0, 1.1, 0.2), fontsize = 'xx-large', weight = 'bold') plt.yticks(fontsize = 'xx-large', weight = 'bold')
# ------------------ IMPORT ---------------------------------
import numpy as np

from py2mat_mod import py2mat

# ------------------- INPUT ---------------------------------
# comma-separated table, one row per target point:
# columns: lat, lon, rtarg, ecorr, tau, telev
ell_corr_file = 'ell_ccor.dataset1_wri_example'
# -----------------------------------------------------------

data = np.loadtxt(ell_corr_file, delimiter=',')

# Split the table into per-column Python lists.
# Vectorized column slices replace the original per-row append loop;
# element values are identical (numpy scalars either way).
lat = list(data[:, 0])
lon = list(data[:, 1])
rtarg = list(data[:, 2])
ecorr = list(data[:, 3])
tau = list(data[:, 4])
telev = list(data[:, 5])

# export each column to MATLAB under its own variable/file name
py2mat(lat, 'lat', 'lat')
py2mat(lon, 'lon', 'lon')
py2mat(rtarg, 'rtarg', 'rtarg')
py2mat(ecorr, 'ecorr', 'ecorr')
py2mat(tau, 'tau', 'tau')
py2mat(telev, 'telev', 'telev')
# --- fragment: begins inside an enclosing loop over events (index i); ---
# --- indentation reconstructed, outer scope not visible here ---
# key is the negated integer event depth (presumably stored negative,
# giving positive km keys -- TODO confirm against the event catalogue)
counter[str(-int(events[i]['depth']))] += 1
counting += 1

print 'Number of all used events: %s' % counting

# one bar per depth bin, centred on the depth key
for i in counter.keys():
    #if 100 <= counter[i]:
    #plt.bar(int(i)-0.5, counter[i], 1, log=True)
    plt.bar(int(i)-0.5, counter[i], 1)

# sorted [depth, count] pairs for the MATLAB export
counter_list = []
for i in counter.keys():
    counter_list.append([int(i), counter[i]])
counter_list.sort()

import py2mat_mod
py2mat_mod.py2mat(counter_list, 'counter_list', 'counter_list')

plt.ion()
plt.xlim(xmin=-2, xmax=100)
plt.ylim(ymax=1100)
plt.xlabel('Depth (km)', size=36, weight='bold')
plt.ylabel('Number of events', size=36, weight='bold')
plt.xticks(size=32, weight='bold')
plt.yticks(size=32, weight='bold')
# NOTE(review): title hard-codes counts from one particular dataset
plt.title('Depth=10km (1002 events) Depth=33km (254 events)\n',
          size=36, weight='bold')
plt.show()

# --------------------- TRASH --------------------------
#for i in range(len(events)):
# --- fragment: begins inside an enclosing loop over events; indentation ---
# --- reconstructed. Near-duplicate of the depth-histogram section above ---
counting += 1

print 'Number of all used events: %s' % counting

# one bar per depth bin, centred on the depth key
for i in counter.keys():
    #if 100 <= counter[i]:
    #plt.bar(int(i)-0.5, counter[i], 1, log=True)
    plt.bar(int(i) - 0.5, counter[i], 1)

# sorted [depth, count] pairs for the MATLAB export
counter_list = []
for i in counter.keys():
    counter_list.append([int(i), counter[i]])
counter_list.sort()

import py2mat_mod
py2mat_mod.py2mat(counter_list, 'counter_list', 'counter_list')

plt.ion()
plt.xlim(xmin=-2, xmax=100)
plt.ylim(ymax=1100)
plt.xlabel('Depth (km)', size=36, weight='bold')
plt.ylabel('Number of events', size=36, weight='bold')
plt.xticks(size=32, weight='bold')
plt.yticks(size=32, weight='bold')
# NOTE(review): title hard-codes counts from one particular dataset
plt.title('Depth=10km (1002 events) Depth=33km (254 events)\n',
          size=36, weight='bold')
plt.show()

# --------------------- TRASH --------------------------
#for i in range(len(events)):
# --- fragment: begins at the tail of an enclosing loop that fills ---
# --- all_passed_staev; indentation reconstructed ---
all_passed_staev.append(passed_staev)

t_shift_array = []
for j in range(len(all_passed_staev)):
    # [0] in all_passed_staev[j][0] shows the current band
    # in general we have a loop over bands and in each step there is just
    # one band that we are working with which is accessible by [0]
    # [6]: epicentral distance
    for k in range(len(all_passed_staev[j][0])):
        if not all_passed_staev[j][0][k] == []:
            t_shift_array.append(all_passed_staev[j][0][k][6])

print 'Length of all passed data: %s' % len(t_shift_array)

# export the epicentral distances to MATLAB, named after the current band
import py2mat_mod
py2mat_mod.py2mat(t_shift_array,
                  't_shift_array_%s' % bands[i],
                  'xcorr_epi_t_shift_array_%s' % bands[i])

# distance histogram, one curve per band
nr_dt(t_shift_array,
      num_bands=len(bands),
      enum=i,
      leg=str(band_period[str(bands[i])]) + 's')

# NOTE(review): x-limits are hard-coded per phase; 97-160 (commented)
# looks like a Pdiff window, 30-90 the active teleseismic window
#plt.xlim(97.0, 160.0)
plt.xlim(30.0, 90.0)
plt.xlabel('Epicentral Distance / degree', fontsize = 'xx-large', weight = 'bold')
#plt.ylabel('nr of data', fontsize = 'xx-large', weight = 'bold')
plt.ylabel('% of data (xcorr>=0.8)', fontsize = 'xx-large', weight = 'bold')
plt.xticks(fontsize = 'xx-large', weight = 'bold')
plt.yticks(fontsize = 'xx-large', weight = 'bold')
#plt.title(fontsize = 'xx-large', weight = 'bold')
#plt.legend(prop={'size':22})
#plt.legend(prop={'size':22}, loc=8)
plt.show()
# --- fragment: begins inside an enclosing loop that accumulates per-event ---
# --- station counts; indentation reconstructed ---
nw_all = np.append(nw_all, len(passed_staev_epi))

# bin each event magnitude to the nearest 0.5 and accumulate, per bin,
# mag_dic[k] = [number of waveforms, number of events]
for j in range(len(mag_all)):
    for k in mag_dic:
        if abs(uf.round_to(mag_all[j], 0.5) - float(k)) < 0.1:
            mag_dic[k][0] += nw_all[j]
            mag_dic[k][1] += 1
            break

# sorted [magnitude, #waveforms, #events] rows for the MATLAB export
mag_sta_list = []
for md in mag_dic:
    mag_sta_list.append([float(md), mag_dic[md][0], mag_dic[md][1]])
mag_sta_list.sort()

import py2mat_mod
py2mat_mod.py2mat(mag_sta_list,
                  'mag_sta_%s' % bands[i],
                  'mag_sta_%s' % bands[i])

plt.ion()
plt.figure()
plt.subplot(2, 1, 2)
# bar = average number of waveforms per event in each non-empty magnitude bin
for j in mag_dic:
    if mag_dic[j][1] > 0.1:
        plt.bar(left=float(j)-0.05, width=0.1, height=mag_dic[j][0]/mag_dic[j][1])
        #plt.bar(left=float(j)-0.05, width=0.1, height=mag_dic[j][0])
plt.xlabel('Magnitude', fontsize='xx-large', weight='bold')
plt.ylabel('#waveforms/#events', fontsize='xx-large', weight='bold')
plt.xticks(fontsize='xx-large', weight='bold')
plt.yticks(fontsize='xx-large', weight='bold')
plt.legend()
plt.show()
def meanall_ffplot(per, all_dt_mean, all_a_mean, all_tt_single): """ Plot mean dT and observed A for all the stations in all events """ meanall_dt = [] meanall_a = [] for j in range(len(all_dt_mean[0][0])): weight_tt = 0 weight_aa = 0 tt = 0 aa = 0 for i in range(len(all_dt_mean)): tt += all_dt_mean[i][0][j] * all_dt_mean[i][1] weight_tt += all_dt_mean[i][1] # some strange observed A exist... # this is a very simple way and naive to go around it! if not all_a_mean[i][0][j] > 10: aa += all_a_mean[i][0][j] * all_a_mean[i][1] weight_aa += all_a_mean[i][1] else: print 'Error: %s for observed A' % all_a_mean[i][0][j] meanall_dt.append(tt/weight_tt) #meanall_a.append(aa/weight_aa) ### A TEST plt.ion() plt.figure() plt.subplot(1, 1, 1) all_tt_mean = [] all_tt_std = [] for i in range(len(all_tt_single)): all_tt_mean.append(np.mean(all_tt_single[i])) all_tt_std.append(np.std(all_tt_single[i])) plt.xlabel('Dominant Period', fontsize='x-large', weight='bold') x = [2.7, 3.7, 5.3, 7.5, 10.6, 15.0, 21.2, 30.0] plt.xlim(xmin=0.0) plt.vlines(x, 0.0, 1.6, linestyle='--') # !!! change these according to your case! plt.xlim(1.7, 31) plt.ylim(0.0, 1.6) plt.xticks(x, fontsize='x-large', weight='bold') plt.yticks(fontsize='x-large', weight='bold') plt.plot(per, all_tt_mean, lw=3, color='black', label='Mean') plt.plot(per, all_tt_std, lw=3, color='red', label='STD') plt.legend(loc=5, prop={'size': 32}) plt.show() ### FINISH A TEST plt.figure() plt.subplot(1, 1, 1) plt.ylabel('Time difference (dT)', fontsize='x-large', weight='bold') plt.xlabel('Dominant Period', fontsize='x-large', weight='bold') x = [2.7, 3.7, 5.3, 7.5, 10.6, 15.0, 21.2, 30.0] plt.xlim(xmin=0.0) #plt.ylim(ymin=0.25, ymax=0.65) plt.vlines(x, 0.0, 0.65, linestyle='--') plt.ylim(0.0, 0.65) plt.xlim(1.7, 31.) 
plt.xticks(x, fontsize='x-large', weight='bold') plt.yticks(fontsize='x-large', weight='bold') #pltitle = evname #pltitle += '\nxcorr >= %s' %(xcorr_limit) #pltitle = '#station-event pairs (dT): %s\n' %(weight_tt) #pltitle += '#station-event pairs (A): %s' %(weight_aa) pltitle = '#station-event pairs (dT): %s' % weight_tt plt.title(pltitle, fontsize='x-large', weight='bold') # writing meanall_dt for later usages! import pickle meanall_file = open(os.path.join('.', 'statistics', 'meanall_dt'), 'w') print "start pickling the meanall_file in %s..." % meanall_file pickle.dump(meanall_dt, meanall_file) print "DONE" print '\n\n==========' print 'bands:' print per print 'mean values:' print meanall_dt import py2mat_mod py2mat_mod.py2mat(per, 'dispersion_period', 'dispersion_period') py2mat_mod.py2mat(meanall_dt, 'dispersion_meanall_dt', 'dispersion_meanall_dt') plt.plot(per, meanall_dt, linewidth=3) #plt.subplot(2, 1, 2) #plt.ylabel('Observed Amplitude', fontsize = 'x-large', weight = 'bold') #plt.xlabel('Dominant Period', fontsize = 'x-large', weight = 'bold') #plt.xticks(fontsize = 'x-large', weight = 'bold') #plt.yticks(fontsize = 'x-large', weight = 'bold') #plt.plot(per, meanall_a, linewidth=3) plt.show()