Example #1
def getMajor(curr_Mc,**kwargs):
    """
    :param curr_Mc: low-cut magnitude
    :return:
        - aCluster: list of clusters
          each cluster is a catalog contains a major shock and its offsprings
          'sel_p','sel_c' is used to select related NND data in aNND
        - aNND: nearest neighbor distance
          'aEqID_p': parent's ID of the pair
          'aEqID_c': child's ID of the pair
    """
    import numpy as np
    import os

    # ------------------------------my modules--------------------------------------
    import sys
    sys.path.append('/auto/home/lhuang/PycharmProjects/clustering-analysis-master')
    import src.data_utils as data_utils
    from src.EqCat import EqCat  # catalog class (module path assumed from the repo layout above)


    eqCat = EqCat()  # original catalog
    eqCatMc = EqCat()  # this catalog will be modified with each Mc iteration

    # =================================1==============================================
    #                            dir, file, params
    # ================================================================================
    data_dir = './data' # Todo: .. or .
    file_in = 'hs_1981_2018_all.mat'

    eqCat.loadMatBin(os.path.join(data_dir, file_in))
    eqCat.toCart_coordinates()
    eqCatMc.copy(eqCat)
    # magnitude range of candidate major shocks (avoid shadowing built-in min/max)
    f_MagMin = kwargs.get('min', 6.0)
    f_MagMax = kwargs.get('max', None)
    eqCatMc.selectEvents(f_MagMin, f_MagMax, 'Mag')

    # load nearest neighbor distances
    NND_file = './data/%s_NND_Mc_%.1f_HD.mat' % (file_in.split('.')[0], curr_Mc)# Todo: .. or .
    dNND = data_utils.loadmat(NND_file)  # ,  struct_as_record=True)

    aCluster = np.array([])
    for i in range(eqCatMc.size()):
        cat = EqCat()
        cat.copy(eqCat)
        # pairs where the current event is the parent (sel_c: its offspring)
        # or the child (sel_p: the link to its own parent)
        sel_c = dNND['aEqID_p'] == eqCatMc.data['N'][i]
        sel_p = dNND['aEqID_c'] == eqCatMc.data['N'][i]
        sel = np.logical_or(sel_p, sel_c)
        cat.selEventsFromID(dNND['aEqID_c'][sel],repeats=True)
        cat.data['sel_p'] = sel_p
        cat.data['sel_c'] = sel_c
        aCluster = np.append(aCluster, cat)
        print("major earthquake: %.1f" % cat.data['Time'][0], "mag:", cat.data['Mag'][0])
    print("Total Ms: %d" % aCluster.shape[0])
    return aCluster,dNND
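
A minimal usage sketch (illustrative only; the completeness value and magnitude cut-offs are assumptions, and the summary loop is not part of the original):

# hypothetical driver for getMajor()
aCluster, dNND = getMajor(3.0, min=6.0, max=None)
for cluster in aCluster:
    # index 0 of each cluster catalog is the major shock itself
    n_offspring = cluster.data['sel_c'].sum()
    print('M%.1f at t=%.2f, %d offspring pairs' % (cluster.data['Mag'][0], cluster.data['Time'][0], n_offspring))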
Example #2
#eqCat.selectEvents( tmin, tmax, 'Time')
print( 'no. of events after initial selection', eqCat.size())

iMc = 0
for f_Mc in dPar['a_Mc']:
    eta_0_file = '%s/%s_Mc_%.1f_eta_0.txt'%(data_dir, file_in, f_Mc)
    # load eta_0 value
    if os.path.isfile( eta_0_file):
        print( 'load eta_0 from file')
        f_eta_0 = np.loadtxt( eta_0_file, dtype = float)
        print( f_eta_0)
    else:
        print( 'could not find eta_0 file', eta_0_file, 'use value from dPar', dPar['eta_0'])
        f_eta_0 = dPar['eta_0']
    # cut below current completeness
    eqCatMc.copy( eqCat)
    eqCatMc.selectEvents( f_Mc, None, 'Mag')
    print( 'current catalog size: ',eqCatMc.size())
    # load nearest neighbor distances
    NND_file = '%s_NND_Mc_%.1f.mat'%(os.path.basename( file_in).split('.')[0], f_Mc)
    dNND = dataIO.loadmat( os.path.join( data_dir, NND_file))
    print( dNND.keys())
    dNND['aNND'] = np.log10( dNND['aNND'])
    #==================================3=============================================
    #                          "declustering" step
    #================================================================================  
    #catChild, catPar = create_parent_child_cat( projCat, dNND)
    catChild.copy( eqCat)
    catParent.copy( eqCat)
    catChild.selEventsFromID( dNND['aEqID_c'], eqCatMc, repeats = True)
    catParent.selEventsFromID( dNND['aEqID_p'], eqCatMc, repeats = True)
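
The snippet breaks off after building the parent and child catalogs. A minimal sketch of how the eta_0 threshold is then typically applied (an assumed continuation using the variables above, not the original code): since dNND['aNND'] was already converted to log10, child events whose nearest-neighbor distance is at or above f_eta_0 are treated as independent background events, the rest as triggered offspring.

    # assumed continuation: split pairs into background vs. triggered at eta_0
    sel_independent = dNND['aNND'] >= f_eta_0
    sel_triggered = ~sel_independent
    print( 'fraction of triggered events', sel_triggered.sum()/float(sel_triggered.shape[0]))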
Example #3
#                            load data, select events
#================================================================================
eqCat.loadMatBin(os.path.join(data_dir, file_in))
print('total no. of events', eqCat.size())
eqCat.selectEvents(dPar['a_Mc'][0], None, 'Mag')
#eqCat.selectEvents( tmin, tmax, 'Time')
print('no. of events after initial selection', eqCat.size())

iMc = 0
for f_Mc in dPar['a_Mc']:
    # load file with IDs of events within family
    clust_file = file_in.replace('all.mat', 'Mc_%.1f_clusters.mat' % (f_Mc))
    dClust = data_utils.loadmat(os.path.join(data_dir, clust_file), )

    # cut below current completeness
    eqCatMc.copy(eqCat)
    eqCatMc.selectEvents(f_Mc, None, 'Mag')
    n_aboveMc = eqCatMc.size()
    print('current catalog size: ', eqCatMc.size())

    #=================================1==========================================================================
    #                     singles are counted as MS with 0 AS
    #============================================================================================================
    print('total number of clusters', len(dClust.keys()),
          'no. of BG events', dClust['0'].shape[0])
    a_ID_single = dClust['0']

    # IDs of BG events
    a_iSel = np.zeros(eqCatMc.size(), dtype=int)
    a_mag_single = np.zeros(len(a_ID_single))
    a_N_AS_single = np.zeros(len(a_ID_single))
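
The snippet ends while the arrays for the single (background) events are being allocated. A sketch of how they might be filled (an assumed continuation; using the 'N' ID field as in Example #1 is an assumption here): each single is recorded as a mainshock with its magnitude taken from the Mc-cut catalog and an aftershock count of zero.

    # assumed continuation: record singles as mainshocks with zero aftershocks
    for i, ev_ID in enumerate(a_ID_single):
        sel_ev = eqCatMc.data['N'] == ev_ID
        a_mag_single[i] = eqCatMc.data['Mag'][sel_ev][0]
        # a_N_AS_single[i] stays at 0: singles have no associated aftershocks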
Example #4
          }

#================================================================================
#                      load data, event selection
#================================================================================
eqCat.loadMatBin(  os.path.join( dir_in, file_in))
print( 'total no. of events', eqCat.size())
eqCat.selectEvents( dPar['aMc'][0], None, 'Mag')
#eqCat.selectEvents( tmin, tmax, 'Time')
print( 'no. of events after initial selection', eqCat.size())
# project to equi-distant coordinate system for cartesian distances
eqCat.toCart_coordinates( projection = 'eqdc')
for f_Mc in dPar['aMc']:
    print( '-------------- current Mc:', f_Mc, '---------------------')
    # select magnitude range
    eqCatMc.copy( eqCat)
    eqCatMc.selectEvents( f_Mc, None, 'Mag')
    print( 'catalog size after MAG selection', eqCatMc.size())
    # this dictionary is used in module: clustering
    dConst = {'Mc' : f_Mc,
               'b' : dPar['b'],
               'D' : dPar['D']}

    #=============================2===================================================
    #                    randomize catalog
    #=================================================================================
    a_Eta_0 = np.zeros( dPar['nBoot'])
    for i_Bs in range( dPar['nBoot']):

        ranCat.copy( eqCatMc)
        ranCat.data['X']     = np.random.uniform( eqCatMc.data['X'].min(), eqCatMc.data['X'].max(), size = eqCatMc.size())
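
The bootstrap loop is cut off after randomizing X. An assumed continuation (sketch only, not the original code) would randomize the remaining coordinates in the same way before recomputing nearest-neighbor distances on the randomized catalog to estimate eta_0:

        # assumed continuation: randomize Y and Time uniformly over the observed ranges
        ranCat.data['Y']    = np.random.uniform( eqCatMc.data['Y'].min(),    eqCatMc.data['Y'].max(),    size = eqCatMc.size())
        ranCat.data['Time'] = np.random.uniform( eqCatMc.data['Time'].min(), eqCatMc.data['Time'].max(), size = eqCatMc.size())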
Example #5
def getLanders():
    # imports assumed from the surrounding script (same modules as in Example #1)
    import os
    import numpy as np
    from src.EqCat import EqCat
    eqCat = EqCat()  # original catalog
    eqCatMc = EqCat()  # this catalog will be modified with each Mc iteration
    catLanders = EqCat()
    catCoso = EqCat()

    # =================================1==============================================
    #                            dir, file, params
    # ================================================================================
    data_dir = '../data'
    plot_dir = '../plots'
    file_in = 'hs_1981_2011_all.mat'

    dPar = {
        'a_Mc': np.array([3.0, 4.0]),  # np.array( [2.0, 2.5, 3.0, 3.5]),
        # separate clustered and background
        'eta_0': -5.0,
        'testPlot': True,
        'D': 1.6,  # TODO: these values should be constrained independently
        'b': 1.0,
    }

    curr_Mc = dPar['a_Mc'][0]
    # =================================2==============================================
    #                            load data, select events
    # ================================================================================
    eqCat.loadMatBin(os.path.join(data_dir, file_in))
    print('total no. of events', eqCat.size())
    eqCat.selectEvents(curr_Mc, None, 'Mag')
    #eqCat.toCart_coordinates()
    # eqCat.selector( tmin, tmax, 'Time')
    print('no. of events after initial selection', eqCat.size())

    catLanders.copy(eqCat)
    catCoso.copy(eqCat)

    catLanders.selectEvents(7.0, None, 'Mag')
    catLanders.selectEvents(34, 35, 'Lat')
    catLanders.selectEvents(-117, -116, 'Lon')
    catLanders.selectEvents(1992, 1993, 'Time')
    #print("===========Landers Info============\n",catLanders.data)

    catCoso.selectEvents(catLanders.data['Time'][0],
                         catLanders.data['Time'][0] + 0.1, 'Time')
    catCoso.selectEvents(-118.5, -117, 'Lon')
    catCoso.selectEvents(35.5, 36.5, 'Lat')
    #print("===========Coso Info===============\nLon\tLat\tMag\tTime\t")
    #for lon,lat,mag,time in zip(catCoso.data['Lon'],catCoso.data['Lat'],catCoso.data['Mag'],catCoso.data['Time']):
    #    print("%.3f\t%.3f\t%.2f\t%.8f\t"%(lon,lat,mag,time))

    aEta = np.array([])
    aT = np.array([])
    aR = np.array([])

    catAll = EqCat()
    catAll.merge(catLanders, catCoso)
    catAll.toCart_coordinates()

    #print(catAll.data)

    for x, y, time in zip(catAll.data['X'][1:], catAll.data['Y'][1:],
                          catAll.data['Time'][1:]):
        # NOTE: the loop coordinates are immediately overridden with a fixed point
        # offset by +3 in Y from the mainshock, so r is identical for every pair
        x = catAll.data['X'][0]
        y = catAll.data['Y'][0] + 3
        t = (time - catAll.data['Time'][0]) * 10**(-dPar['b'] *
                                                   catAll.data['Mag'][0] / 2)
        print("distance:", ((x - catAll.data['X'][0])**2 +
                            (y - catAll.data['Y'][0])**2)**0.5)
        r = ((x - catAll.data['X'][0])**2 + (y - catAll.data['Y'][0])**2)**(
            dPar['D'] / 2) * 10**(-dPar['b'] * catAll.data['Mag'][0] / 2)
        eta = r * t
        aEta = np.append(aEta, np.log10(eta))
        aT = np.append(aT, np.log10(t))
        aR = np.append(aR, np.log10(r))
    print("===========Nearest Neighbor Distance===============\nr\tt\teta\t")
    for r, t, eta in zip(
            aR, aT, aEta
    ):  #(catCoso.data['Time']-catLanders.data['Time'][0])*365.25*24*3600
        print("%f\t%f\t%f" % (r, t, eta))

    return catAll, aT, aR, aEta
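
The loop above evaluates the magnitude-rescaled space-time (nearest-neighbor) metric pair by pair. For reference, a minimal standalone sketch of that metric (the helper name and arguments are illustrative, not from the original source); t and r are the rescaled time and distance and eta is their product:

def rescaled_eta(dx_km, dy_km, dt_yr, mag_parent, b=1.0, D=1.6):
    # magnitude-rescaled time, distance and their product (eta), as in the loop above
    t = dt_yr * 10 ** (-b * mag_parent / 2.0)
    r = (dx_km ** 2 + dy_km ** 2) ** (D / 2.0) * 10 ** (-b * mag_parent / 2.0)
    return t, r, t * r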
    # --------------------------------------------------------------------------------
    # NOTE: the lines below follow an unconditional return and reference names
    # ('name', 'NND_file', 'data_utils') that are defined elsewhere in the source
    # file; they appear to belong to a separate routine.
    # --------------------------------------------------------------------------------
    # NND_file = 'data/%s_NND_Mc_%.1f_HD.mat'%( file_in.split('.')[0], curr_Mc)
    dNND = data_utils.loadmat(NND_file)  # ,  struct_as_record=True)

        # load Landers/Joshua Tree earthquake catalog
        cat_file = 'data/cat_%s.mat' % name
        eqCat.loadMatBin(cat_file)
        # ================================================================================
        #                       calculate interval time for each pair
        # ================================================================================
        adTime = eqCat.data['Time'][1:]-eqCat.data['Time'][0]
        aNND = dNND['aNND']
        sel_cluster = np.log10(dNND['aNND']) <= -5
        aClTime = adTime[sel_cluster]
        adNND = aNND[sel_cluster] + 5
        cluster = EqCat()
        cluster.copy(eqCat)
        print('all clustering aftershocks:',len(aClTime))
        if len(aClTime)<=200:
            print("***************************************************")
            print("%s, insufficient offsprings." % name)
        else:
            print("***************************************************")
            print("****%s, sufficient offsprings." % name)
            sel_time = aClTime <= 3/365.25
            sel_time1 = aClTime > 3/365.25
            aClTime = aClTime[sel_time]
            aClTime *= 365.25*24 # --> hr
            print('min time interval:',min(aClTime),'max time interval:' ,max(aClTime))
            print('earthquakes within 72h:',len(aClTime))
            # ================================================================================
            #                 plot map
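            # The snippet is cut off at the "plot map" header. A purely hypothetical
            # sketch of such a map (matplotlib usage and the event selection below are
            # assumptions, not the original code):
            import matplotlib.pyplot as plt
            plt.figure(figsize=(6, 6))
            # epicenters of the clustered offspring (offset by 1: pairs exclude the first event)
            plt.scatter(eqCat.data['Lon'][1:][sel_cluster],
                        eqCat.data['Lat'][1:][sel_cluster], s=5, label='clustered')
            plt.plot(eqCat.data['Lon'][0], eqCat.data['Lat'][0], 'r*', ms=12, label='mainshock')
            plt.xlabel('Longitude')
            plt.ylabel('Latitude')
            plt.legend()
            plt.show()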