def getMajor(curr_Mc, **kwargs):
    """
    :param curr_Mc: low-cut magnitude
    :kwargs min, max: magnitude range of the major shocks
                      (defaults: 6.0 and None)
    :return: - aCluster: list of clusters; each cluster is a catalog that
                         contains a major shock and its offspring;
                         'sel_p', 'sel_c' select the related NND data in dNND
             - dNND:     nearest-neighbor distances;
                         'aEqID_p': parent ID of each pair
                         'aEqID_c': child ID of each pair
    """
    import os
    import sys
    import numpy as np
    # ------------------------------my modules-------------------------------
    sys.path.append('/auto/home/lhuang/PycharmProjects/clustering-analysis-master')
    import src.data_utils as data_utils
    from src.EqCat import EqCat  # EqCat was used below but never imported

    eqCat = EqCat()    # original catalog
    eqCatMc = EqCat()  # catalog cut at the current completeness magnitude
    # =================================1======================================
    # dir, file, params
    # ========================================================================
    data_dir = './data'  # TODO: .. or .
    file_in = 'hs_1981_2018_all.mat'
    eqCat.loadMatBin(os.path.join(data_dir, file_in))
    eqCat.toCart_coordinates()
    eqCatMc.copy(eqCat)
    # renamed from min/max to avoid shadowing the builtins
    f_min = kwargs.get('min', 6.0)
    f_max = kwargs.get('max', None)
    eqCatMc.selectEvents(f_min, f_max, 'Mag')
    # load nearest-neighbor distances
    NND_file = './data/%s_NND_Mc_%.1f_HD.mat' % (file_in.split('.')[0], curr_Mc)  # TODO: .. or .
    dNND = data_utils.loadmat(NND_file)  # , struct_as_record=True)

    aCluster = np.array([])
    for i in range(eqCatMc.size()):
        cat = EqCat()
        cat.copy(eqCat)
        sel_c = dNND['aEqID_p'] == eqCatMc.data['N'][i]  # pairs whose parent is the major shock (its offspring)
        sel_p = dNND['aEqID_c'] == eqCatMc.data['N'][i]  # pairs whose child is the major shock
        sel = np.logical_or(sel_p, sel_c)
        cat.selEventsFromID(dNND['aEqID_c'][sel], repeats=True)
        cat.data['sel_p'] = sel_p
        cat.data['sel_c'] = sel_c
        aCluster = np.append(aCluster, cat)
        print("major earthquake: %.1f" % cat.data['Time'][0], "mag:", cat.data['Mag'][0])
    print("Total Ms: %d" % aCluster.shape[0])
    return aCluster, dNND
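# Minimal usage sketch for getMajor, mirroring the call in the stacking
# script at the end of this section; it assumes the Mc = 2.5 NND file
# exists under ./data.
if __name__ == '__main__':
    aCluster, dNND = getMajor(2.5, min=6.0, max=6.5)  # major shocks with M between 6.0 and 6.5
    print('number of clusters:', aCluster.shape[0])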
eta_0_file = '%s/%s_Mc_%.1f_eta_0.txt' % (data_dir, file_in, f_Mc)
# load eta_0 value
if os.path.isfile(eta_0_file):
    print('load eta_0 from file')
    f_eta_0 = np.loadtxt(eta_0_file, dtype=float)
    print(f_eta_0)
else:
    print('could not find eta_0 file', eta_0_file, 'use value from dPar', dPar['eta_0'])
    f_eta_0 = dPar['eta_0']
# cut below current completeness
eqCatMc.copy(eqCat)
eqCatMc.selectEvents(f_Mc, None, 'Mag')
print('current catalog size: ', eqCatMc.size())
# load nearest neighbor distances
NND_file = '%s_NND_Mc_%.1f.mat' % (os.path.basename(file_in).split('.')[0], f_Mc)
dNND = dataIO.loadmat(os.path.join(data_dir, NND_file))
print(dNND.keys())
dNND['aNND'] = np.log10(dNND['aNND'])
#==================================3=============================================
# "declustering" step
# (see the eta_0 split sketch after this fragment)
#================================================================================
#catChild, catPar = create_parent_child_cat( projCat, dNND)
catChild.copy(eqCatMc)
catParent.copy(eqCatMc)
catChild.selEventsFromID(dNND['aEqID_c'], repeats=True)
catParent.selEventsFromID(dNND['aEqID_p'], repeats=True)
print('tot. ev', eqCatMc.size(),
      'parents', np.unique(catParent.data['N']).shape[0],
      'children', np.unique(catChild.data['N']).shape[0])
#==================================4=============================================
# spanning tree
#================================================================================
plt.figure(1)
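# Hedged sketch (not in the original fragment): with dNND['aNND'] already in
# log10, the threshold f_eta_0 splits the pairs into clustered and background
# populations; the stacking script below applies the same cut at -5.
sel_cl = dNND['aNND'] <= f_eta_0   # clustered (triggered) pairs
sel_bg = ~sel_cl                   # background pairs
print('clustered pairs:', sel_cl.sum(), 'background pairs:', sel_bg.sum())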
plot_file = 'plots/k_r_distr_hist.png'
#TODO: 'plots/k_r_distr_rate_smoothed.png', 'plots/k_r_distr_rate.png'
dPar = {'Mc': 2.5,
        'minMag': 4.0,  # minimum magnitude of mainshocks
        'maxMag': 8.0,  # maximum magnitude of mainshocks
        'k': 2,
        'HD_binsize': 0.1,
        'lambda_binsize': 0.08,
        # fitting coefficients
        'n': -1.35, 'c': 1.0, 'q': 0.35, 'd': 1.2, 'gamma': 0.6,
        }
# =================================1==============================================
# load stacked mainshock/aftershock sets
# ================================================================================
dCluster = data_utils.loadmat(input_file)
a_MS_mag = dCluster['a_MS_mag']
a_AS_dist = dCluster['a_AS_dist']
### use different colors to discriminate the different sets
index = np.arange(0, len(a_MS_mag), 1)
norm = co.Normalize(vmin=0, vmax=len(a_MS_mag) - 1)
cmap = plt.cm.RdYlGn
pointcolors = plt.cm.ScalarMappable(norm, cmap)
cols = pointcolors.to_rgba(index)
plt.figure()
ax = plt.subplot()
#index = [6]  # for testing, plot only one set
for i in index:
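    # Hedged sketch of the truncated loop body: histogram of aftershock
    # distances for set i, using the names defined above; the exact binning
    # and plotting calls are assumptions.
    aBins = np.arange(0, a_AS_dist[i].max() + dPar['HD_binsize'], dPar['HD_binsize'])
    aHist, aEdges = np.histogram(a_AS_dist[i], aBins)
    ax.plot(0.5 * (aEdges[:-1] + aEdges[1:]), aHist,
            color=cols[i], label='<m>=%.2f' % a_MS_mag[i])
ax.legend()
plt.savefig(plot_file)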
}
#=================================2==============================================
# load data, select events
#================================================================================
eqCat.loadMatBin(os.path.join(data_dir, file_in))
print('total no. of events', eqCat.size())
eqCat.selectEvents(dPar['a_Mc'][0], None, 'Mag')
#eqCat.selectEvents( tmin, tmax, 'Time')
print('no. of events after initial selection', eqCat.size())
iMc = 0
for f_Mc in dPar['a_Mc']:
    # load file with IDs of events within each family
    clust_file = file_in.replace('all.mat', 'Mc_%.1f_clusters.mat' % f_Mc)
    dClust = data_utils.loadmat(os.path.join(data_dir, clust_file))
    # cut below current completeness
    eqCatMc.copy(eqCat)
    eqCatMc.selectEvents(f_Mc, None, 'Mag')
    n_aboveMc = eqCatMc.size()
    print('current catalog size: ', eqCatMc.size())
    #=================================1==========================================
    # singles are counted as MS with 0 AS
    #============================================================================
    print('total number of clusters', len(dClust.keys()),
          'no. of BG events', dClust['0'].shape[0])
    a_ID_single = dClust['0']  # IDs of BG events
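    # Hedged sketch (dClust layout assumed from the lines above): every key
    # other than '0' holds the event IDs of one family; counting the members
    # besides the mainshock gives its number of AS, with singles counted as 0.
    a_nAS = np.zeros(len(dClust.keys()) - 1)
    for iCl, sCl in enumerate(k for k in dClust.keys() if k != '0'):
        a_nAS[iCl] = dClust[sCl].flatten().shape[0] - 1  # family members minus the MS
    print('mean no. of AS per family:', a_nAS.mean())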
        # fractal dimension and b-value for eq. (1)
        'D': 1.6,  # TODO: these values should be constrained independently
        'b': 1.0,
        #=================plotting==============
        'eta_binsize': .2,
        'nnd_binsize': .2,
        'hd_binsize': .2,
        'xmin': 0, 'xmax': 1.0,
        'eta_0': -5.0,
        }
#=================================1==============================================
# load data, select events
#================================================================================
asSets = data_utils.loadmat('data/stack_events_2.5_7_atep1.mat')
a_MS_mag = asSets['a_MS_mag']
a_AS_dist = asSets['a_AS_dist']
i = 0
for f_MSmag, dist in zip(a_MS_mag, a_AS_dist):
    # ==================================3=========================================
    # plot distance decay
    # ============================================================================
    name = '<m>=%.2f' % f_MSmag
    fig1 = plt.figure(1)
    ax1 = plt.subplot(111)
    ax1.set_title(name)
    for k in [20, 50, 100, 200]:
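        # Hedged sketch of the truncated inner loop: a k-th nearest-neighbor
        # estimate of the linear event density along distance; larger k gives
        # stronger smoothing. The estimator itself is an assumption.
        aR = np.sort(dist)
        aRate = k / (aR[k:] - aR[:-k])    # events per unit distance
        aRmid = 0.5 * (aR[k:] + aR[:-k])  # midpoint of each k-window
        ax1.loglog(aRmid, aRate, label='k=%d' % k)
    ax1.legend()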
eqCat.selectEvents(dPar['aMc'][0], None, 'Mag')
#eqCat.selector( tmin, tmax, 'Time')
print('no. of events after initial selection', eqCat.size())
#################################################
curr_Mc = dPar['aMc'][0]
k = dPar['k'][2]
# cut below current completeness
eqCatMc.copy(eqCat)
eqCatMc.selectEvents(curr_Mc, None, 'Mag')
print('current catalog size: ', eqCatMc.size())
# load nearest neighbor distances
NND_file = 'data/%s_NND_Mc_%.1f_HD.mat' % (file_in.split('.')[0], curr_Mc)
dNND = data_utils.loadmat(NND_file)  # , struct_as_record=True)
# ================================================================================
# all event pairs
# ================================================================================
catChild.copy(eqCatMc)
catParent.copy(eqCatMc)
# catChild, catPar = create_parent_child_cat( projCat, dNND)
catChild.selEventsFromID(dNND['aEqID_c'], repeats=True)
catParent.selEventsFromID(dNND['aEqID_p'], repeats=True)
print('before::: size of parent catalog', catParent.size(),
      'size of offspring cat', catChild.size())
# ================================================================================
# bigger parent event pairs
# ================================================================================
# select event pairs with a parent event larger than M_pt (see sketch below)
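# Hedged sketch of the step announced above; M_pt is a hypothetical threshold
# name, and the boolean-mask pattern mirrors getMajor earlier in this section.
M_pt = 4.0                            # assumed parent-magnitude cut
sel = catParent.data['Mag'] >= M_pt   # one entry per pair, parent-ordered
catChild.copy(eqCatMc)
catParent.copy(eqCatMc)
catChild.selEventsFromID(dNND['aEqID_c'][sel], repeats=True)
catParent.selEventsFromID(dNND['aEqID_p'][sel], repeats=True)
print('after::: size of parent catalog', catParent.size(),
      'size of offspring cat', catChild.size())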
mins = np.arange(3.5, 7.0, 0.5)  # TODO: min 2.6, 3.0, 3.5
maxs = mins + 0.5
a_MS_mag = []   # average mainshock magnitude of each set
a_AS_dist = []  # aftershock haversine-distance distribution of each set
# load earthquake sets, mainshocks in [f_min, f_max]
# (f_min/f_max avoid shadowing the builtins min/max)
for f_min, f_max in zip(mins, maxs):
    aCluster, dNND = getMajor(dPar['Mc'], min=f_min, max=f_max)
    aMag = np.zeros(len(aCluster))
    aDist = np.array([])
    for i, cluster in enumerate(aCluster):
        aMag[i] = cluster.data['Mag'][0]
        sel_c = cluster.data['sel_c']  # offspring of the major shock
        aNND = dNND['aNND'][sel_c]
        aHD = dNND['aHD'][sel_c]
        sel_cl = np.log10(aNND) <= -5  # keep clustered pairs, log10(eta_0) = -5
        aHD = aHD[sel_cl]
        aDist = np.concatenate((aDist, aHD), axis=0)
    a_MS_mag.append(np.mean(aMag))
    a_AS_dist.append(aDist)
scipy.io.savemat('data/stack_events.mat',
                 {'a_MS_mag': a_MS_mag, 'a_AS_dist': a_AS_dist},
                 do_compression=True)
# sanity check: reload the stacked sets
dCluster = data_utils.loadmat('data/stack_events.mat')
print(dCluster['a_MS_mag'])
print(dCluster['a_AS_dist'])