def getMajor(curr_Mc, **kwargs):
    """
    :param curr_Mc: low-cut magnitude
    :keyword min:   minimum magnitude of a major shock (default 6.0)
    :keyword max:   maximum magnitude of a major shock (default None)
    :return: - aCluster: list of clusters; each cluster is a catalog that contains
                         a major shock and its offspring
                         'sel_p', 'sel_c' are used to select the related NND data in dNND
             - dNND: nearest-neighbor distances
                     'aEqID_p': parent's ID of the pair
                     'aEqID_c': child's ID of the pair
    """
    import numpy as np
    import os
    import sys
    # ------------------------------my modules--------------------------------------
    sys.path.append('/auto/home/lhuang/PycharmProjects/clustering-analysis-master')
    import src.data_utils as data_utils
    from src.EqCat import EqCat

    eqCat = EqCat()    # original catalog
    eqCatMc = EqCat()  # this catalog will be modified with each Mc iteration
    # =================================1==============================================
    #                            dir, file, params
    # ================================================================================
    data_dir = './data'  # Todo: .. or .
    file_in = 'hs_1981_2018_all.mat'
    eqCat.loadMatBin(os.path.join(data_dir, file_in))
    eqCat.toCart_coordinates()
    eqCatMc.copy(eqCat)
    # magnitude window for major shocks; avoid shadowing the built-ins min/max
    Mmin = kwargs.get('min', 6.0)
    Mmax = kwargs.get('max', None)
    eqCatMc.selectEvents(Mmin, Mmax, 'Mag')

    # load nearest-neighbor distances
    NND_file = './data/%s_NND_Mc_%.1f_HD.mat' % (file_in.split('.')[0], curr_Mc)  # Todo: .. or .
    dNND = data_utils.loadmat(NND_file)  # , struct_as_record=True)

    aCluster = np.array([])
    for i in range(eqCatMc.size()):
        cat = EqCat()
        cat.copy(eqCat)
        sel_c = dNND['aEqID_p'] == eqCatMc.data['N'][i]  # pairs whose parent is the major shock
        sel_p = dNND['aEqID_c'] == eqCatMc.data['N'][i]  # pair whose child is the major shock
        sel = np.logical_or(sel_p, sel_c)
        cat.selEventsFromID(dNND['aEqID_c'][sel], repeats=True)
        cat.data['sel_p'] = sel_p
        cat.data['sel_c'] = sel_c
        aCluster = np.append(aCluster, cat)
        print("major earthquake: %.1f" % cat.data['Time'][0], "mag:", cat.data['Mag'][0])
    print("Total Ms: %d" % aCluster.shape[0])
    return aCluster, dNND
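# -------------------------------------------------------------------------------
# Minimal usage sketch (an assumption, not part of the original script): call
# getMajor() for one Mc cut and report each major-shock cluster. Only attributes
# that getMajor() itself uses ('Time', 'Mag', size()) are touched, and the NND
# .mat file for the chosen Mc is assumed to exist in ./data.
# -------------------------------------------------------------------------------
if __name__ == '__main__':
    aCluster, dNND = getMajor(4.0, min=6.5)  # majors with M >= 6.5 at Mc = 4.0
    for cluster in aCluster:
        print('major at t=%.3f, M%.1f, %d events in cluster' % (
            cluster.data['Time'][0], cluster.data['Mag'][0], cluster.size()))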
    'aMc': np.array([3.1, 3.2, 3.3, 3.4, 3.6, 3.7, 3.8, 3.9]),  # np.array([3.0, 4.0]), np.array([2.0, 2.5, 3.0, 3.5])
    # fractal dimension and b for eq. (1)
    'D': 1.6,  # TODO: - these values should be constrained independently
    'b': 1.0,
    # =================plotting==============
    'eta_binsize': .3,
    'xmin': -13, 'xmax': 0,
}
#=================================2==============================================
#                       load data, select events
#================================================================================
eqCat.loadMatBin(os.path.join(dir_in, file_in))
print('total no. of events', eqCat.size())
eqCat.selectEvents(dPar['aMc'][0], None, 'Mag')
#eqCat.selector( tmin, tmax, 'Time')
print('no. of events after initial selection', eqCat.size())
#=================================3==============================================
#                       to cartesian coordinates
#================================================================================
# two ways to do the distance computation:
#   1 project into equal-distance azimuthal coordinates, compute Cartesian distance in 3D
#   2 get surface distance from lon, lat (haversine), use Pythagoras to include depth
eqCat.toCart_coordinates(projection='aeqd')

for dPar['Mc'] in dPar['aMc']:
    print('-------------- current Mc:', dPar['Mc'], '---------------------')
    # select magnitude range
    eqCat.selectEvents(dPar['Mc'], None, 'Mag')
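# -------------------------------------------------------------------------------
# Hedged sketch (not in the original script) of the eq. (1) rescaling that 'D'
# and 'b' parameterize, mirroring the getLanders() implementation further below:
# T = dt * 10^(-b*m/2), R = dr^D * 10^(-b*m/2), eta = T * R; log10(eta) is what
# gets binned with 'eta_binsize' between 'xmin' and 'xmax'.
# -------------------------------------------------------------------------------
def rescaled_eta(dt, dr, mag_parent, D=1.6, b=1.0):
    T = dt * 10 ** (-b * mag_parent / 2.0)   # rescaled inter-event time
    R = dr ** D * 10 ** (-b * mag_parent / 2.0)  # rescaled inter-event distance
    return T * R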
#                            dir, file, params
#================================================================================
data_dir = 'data'
plot_dir = 'plots'
file_in = 'hs_1981_2011_all.mat'
dPar = {
    'a_Mc': np.array([3.0, 4.0]),  # np.array([2.0, 2.5, 3.0, 3.5]),
    # separate clustered and background
    'eta_0': -5.0,  # run 2_eta_0.py; if the file exists, load this value from the ASCII file instead
}
#=================================2==============================================
#                       load data, select events
#================================================================================
eqCat.loadMatBin(os.path.join(data_dir, file_in))
print('total no. of events', eqCat.size())
eqCat.selectEvents(dPar['a_Mc'][0], None, 'Mag')
#eqCat.selectEvents( tmin, tmax, 'Time')
print('no. of events after initial selection', eqCat.size())

iMc = 0
for f_Mc in dPar['a_Mc']:
    # load eta_0 value
    eta_0_file = '%s/%s_Mc_%.1f_eta_0.txt' % (data_dir, file_in, f_Mc)
    if os.path.isfile(eta_0_file):
        print('load eta_0 from file')
        f_eta_0 = np.loadtxt(eta_0_file, dtype=float)
        print(f_eta_0)
    else:
        print('could not find eta_0 file', eta_0_file, 'use value from dPar', dPar['eta_0'])
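# -------------------------------------------------------------------------------
# Hedged sketch (an assumption, not taken from 2_eta_0.py): writing an eta_0
# ASCII file in the single-float format and naming convention this script
# expects to read back with np.loadtxt.
# -------------------------------------------------------------------------------
f_Mc_example, eta_0_example = 3.0, -5.2
np.savetxt('%s/%s_Mc_%.1f_eta_0.txt' % (data_dir, file_in, f_Mc_example),
           np.array([eta_0_example]), fmt='%10.3f')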
#print dir( dataUtils)
#=================================1==============================================
#                            dir, file, params
#================================================================================
dir_in = 'data'
file_in = 'hs_1981_2011_all.mat'
#xmin, xmax = -122, -114
#ymin, ymax = 34, 38
Mmin, Mmax = 3, None
tmin, tmax = 1990, 2018
#=================================2==============================================
#                       load data, select events
#================================================================================
os.chdir(dir_in)
eqCat.loadMatBin(file_in)
print('total no. of events', eqCat.size())
eqCat.selectEvents(Mmin, Mmax, 'Mag')
eqCat.selectEvents(tmin, tmax, 'Time')
print('no. of events after initial selection', eqCat.size())
#=================================3==============================================
#                       test plot  TODO: use basemap
#================================================================================
plt.figure()
plt.scatter(eqCat.data['Lon'], eqCat.data['Lat'],
            s=np.exp(eqCat.data['Mag']), c=eqCat.data['Mag'], linewidth=0)
plt.savefig(file_in.replace('mat', 'png'))
plt.show()
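# -------------------------------------------------------------------------------
# Hedged sketch (not in the original script) addressing the basemap TODO with
# cartopy instead, assuming cartopy is installed; otherwise the plain scatter
# above is sufficient. The '_map.png' output name is just for illustration.
# -------------------------------------------------------------------------------
import cartopy.crs as ccrs
ax = plt.axes(projection=ccrs.PlateCarree())
ax.coastlines(resolution='50m')
sc = ax.scatter(eqCat.data['Lon'], eqCat.data['Lat'],
                s=np.exp(eqCat.data['Mag']), c=eqCat.data['Mag'],
                linewidth=0, transform=ccrs.PlateCarree())
plt.colorbar(sc, label='Magnitude')
plt.savefig(file_in.replace('.mat', '_map.png'))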
#=================================1==============================================
#                       load data, select events
#================================================================================
curr_Mc = dPar['aMc'][0]
klist = [20, 20, 20, 20, 20]  # 150
minklst = [50]  # 80
namelist = ['Baja', 'Northridge', 'HectorMine', 'Landers', 'JoshuaTree']

for name, k in zip(namelist, klist):
    # load nearest-neighbor distances
    NND_file = 'data/NND_%s.mat' % name
    dNND = data_utils.loadmat(NND_file)  # , struct_as_record=True)
    # load Landers/Joshua Tree earthquake catalog
    cat_file = 'data/cat_%s.mat' % name
    eqCat.loadMatBin(cat_file)
    # ================================2===========================================
    #            compute space-time-magnitude distance, histogram
    # ============================================================================
    # select only the clustering event pairs
    sel_cl = np.log10(dNND['aNND']) <= -5  # -4.7
    # haversine distances of the clustered pairs
    HD = dNND['aHD'][sel_cl]
    print('catalog size: ', len(HD))
    #print('*************************', HD)
    ## rupture length l of the mainshock
    #l = 10 ** (-3.22 + 0.69 * eqCat.data['Mag'][0]) / 2   # unit: given by Wells and Coppersmith
    #l = 10 ** (0.44 * eqCat.data['Mag'][0] - 2)           # given by AGU paper
    #L = 1.99 * 10 ** (0.49 * eqCat.data['Mag'][0] - 3)    # calc by myself according to AGU
    ## Rl
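    # ----------------------------------------------------------------------------
    # Hedged continuation sketch (an assumption, not the original loop body):
    # histogram log10 of the clustered haversine distances, as announced in the
    # banner above; the 0.2 bin width mirrors 'hd_binsize' used elsewhere here.
    # ----------------------------------------------------------------------------
    aBins = np.arange(-1.0, 3.0, 0.2)
    aHist, _ = np.histogram(np.log10(HD[HD > 0]), aBins)
    print(name, 'log10(HD) histogram:', aHist)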
import os

from src.seis_utils import *
#import src.dataIO_utils
#------------------------------my modules--------------------------------------
from src.EqCat import *

eqCat = EqCat()
#=================================1==============================================
#                            dir, file, params
#================================================================================
dir_in = 'data'
file_in = 'hs_1981_2018_all.txt'
#file_in = 'test_HS.txt'
#=================================2==============================================
#                               load data
#================================================================================
os.chdir(dir_in)
eqCat.loadEqCat(file_in, 'HS_reloc', removeColumn=[24, 25, 26])
print(eqCat.size())
print(sorted(eqCat.data.keys()))
#=================================3==============================================
#                  test plot and save to .mat binary
#================================================================================
eqCat.saveMatBin(file_in.replace('txt', 'mat'))
newEqCat = EqCat()
newEqCat.loadMatBin(file_in.replace('txt', 'mat'))
print(newEqCat.size())
print(sorted(newEqCat.data.keys()))
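# -------------------------------------------------------------------------------
# Hedged sanity check (an assumption, not part of the original script): verify
# that the txt -> .mat round trip preserved the catalog, assuming 'Time' is
# among the loaded columns as in the other scripts here.
# -------------------------------------------------------------------------------
import numpy as np
assert newEqCat.size() == eqCat.size()
assert np.allclose(newEqCat.data['Time'], eqCat.data['Time'])
print('txt -> mat round trip OK')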
def getLanders():
    eqCat = EqCat()    # original catalog
    eqCatMc = EqCat()  # this catalog will be modified with each Mc iteration
    catLanders = EqCat()
    catCoso = EqCat()
    # =================================1==============================================
    #                            dir, file, params
    # ================================================================================
    data_dir = '../data'
    plot_dir = '../plots'
    file_in = 'hs_1981_2011_all.mat'
    dPar = {
        'a_Mc': np.array([3.0, 4.0]),  # np.array([2.0, 2.5, 3.0, 3.5]),
        # separate clustered and background
        'eta_0': -5.0,
        'testPlot': True,
        'D': 1.6,  # TODO: - these values should be constrained independently
        'b': 1.0,
    }
    curr_Mc = dPar['a_Mc'][0]
    # =================================2==============================================
    #                       load data, select events
    # ================================================================================
    eqCat.loadMatBin(os.path.join(data_dir, file_in))
    print('total no. of events', eqCat.size())
    eqCat.selectEvents(curr_Mc, None, 'Mag')
    #eqCat.toCart_coordinates()
    #eqCat.selector( tmin, tmax, 'Time')
    print('no. of events after initial selection', eqCat.size())

    # Landers mainshock: M >= 7 in 1992 near (-117..-116 E, 34..35 N)
    catLanders.copy(eqCat)
    catCoso.copy(eqCat)
    catLanders.selectEvents(7.0, None, 'Mag')
    catLanders.selectEvents(34, 35, 'Lat')
    catLanders.selectEvents(-117, -116, 'Lon')
    catLanders.selectEvents(1992, 1993, 'Time')
    #print("===========Landers Info============\n", catLanders.data)

    # Coso events within ~0.1 yr after the Landers origin time
    catCoso.selectEvents(catLanders.data['Time'][0], catLanders.data['Time'][0] + 0.1, 'Time')
    catCoso.selectEvents(-118.5, -117, 'Lon')
    catCoso.selectEvents(35.5, 36.5, 'Lat')
    #print("===========Coso Info===============\nLon\tLat\tMag\tTime\t")
    #for lon, lat, mag, time in zip(catCoso.data['Lon'], catCoso.data['Lat'], catCoso.data['Mag'], catCoso.data['Time']):
    #    print("%.3f\t%.3f\t%.2f\t%.8f\t" % (lon, lat, mag, time))

    aEta = np.array([])
    aT = np.array([])
    aR = np.array([])
    catAll = EqCat()
    catAll.merge(catLanders, catCoso)
    catAll.toCart_coordinates()
    #print(catAll.data)
    for x, y, time in zip(catAll.data['X'][1:], catAll.data['Y'][1:], catAll.data['Time'][1:]):
        # note: the loop coordinates are overridden with a fixed 3 km offset from
        # the mainshock, so r below is evaluated for a constant epicentral distance
        x = catAll.data['X'][0]
        y = catAll.data['Y'][0] + 3
        t = (time - catAll.data['Time'][0]) * 10 ** (-dPar['b'] * catAll.data['Mag'][0] / 2)
        print("distance:", ((x - catAll.data['X'][0]) ** 2 + (y - catAll.data['Y'][0]) ** 2) ** 0.5)
        r = ((x - catAll.data['X'][0]) ** 2 + (y - catAll.data['Y'][0]) ** 2) ** (dPar['D'] / 2) \
            * 10 ** (-dPar['b'] * catAll.data['Mag'][0] / 2)
        eta = r * t
        aEta = np.append(aEta, np.log10(eta))
        aT = np.append(aT, np.log10(t))
        aR = np.append(aR, np.log10(r))

    print("===========Nearest Neighbor Distance===============\nr\tt\teta\t")
    for r, t, eta in zip(aR, aT, aEta):  # (catCoso.data['Time']-catLanders.data['Time'][0])*365.25*24*3600
        print("%f\t%f\t%f" % (r, t, eta))
    return catAll, aT, aR, aEta
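# -------------------------------------------------------------------------------
# Minimal usage sketch (an assumption, not part of the original module): run
# getLanders() and look at the rescaled time/distance pairs it returns.
# -------------------------------------------------------------------------------
if __name__ == '__main__':
    import matplotlib.pyplot as plt
    catAll, aT, aR, aEta = getLanders()
    sc = plt.scatter(aT, aR, c=aEta)
    plt.xlabel('log10(T)')
    plt.ylabel('log10(R)')
    plt.colorbar(sc, label='log10(eta)')
    plt.show()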
    # fractal dimension and b for eq. (1)
    'D': 1.6,  # TODO: - these values should be constrained independently
    'b': 1.0,
    # =================plotting==============
    'eta_binsize': .2,
    'nnd_binsize': .2,
    'hd_binsize': .2,
    'xmin': 0, 'xmax': 1.0,
    'eta_0': -5.0,
}
#=================================1==============================================
#                       load data, select events
#================================================================================
eqCat.loadMatBin(os.path.join('data', file_in))
curr_Mc = dPar['aMc'][0]
klist = [30, 30, 30]  # 150
minklst = [50]  # 80
namelist = ['Northridge', ]  # 'Baja', 'HectorMine', 'Landers'

for name, k in zip(namelist, klist):
    # load nearest-neighbor distances
    NND_file = 'data/NND_%s.mat' % name
    dNND = data_utils.loadmat(NND_file)  # , struct_as_record=True)
    # load Landers/Joshua Tree earthquake catalog
    cat_file = 'data/cat_%s.mat' % name
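    # ----------------------------------------------------------------------------
    # Hedged continuation sketch (an assumption, not the original loop body):
    # as in the similar loop above, select clustered pairs with 'eta_0' and pull
    # the corresponding haversine distances.
    # ----------------------------------------------------------------------------
    sel_cl = np.log10(dNND['aNND']) <= dPar['eta_0']
    HD = dNND['aHD'][sel_cl]
    print(name, 'no. of clustered pairs:', len(HD))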