else:
        # ========================================
        # Restart branch: reset time to the previous run's time, reset the
        # output paths to the previous run's path names, and reload the
        # cross-correlation dictionaries from a partial pickle file.
        # ========================================
        print 
        # `res` is the user's 1-based menu selection into `pickle_list`
        # (both defined earlier in the file, outside this view)
        PART_PICKLE = pickle_list[int(res)-1]
        # drop the 12-character '.part.pickle' suffix to recover the base
        # output path of the interrupted run
        OUTFILESPATH = PART_PICKLE[:-12]
        out_basename = os.path.basename(OUTFILESPATH)
        
        
        print "Opening {} partial file for restart ... ".format(out_basename)

        # re-initialising .part.pickle collection of cross-correlations
        xc = pscrosscorr.load_pickled_xcorr(PART_PICKLE)
        
                    
        # Walk every station-pair entry of the nested cross-correlation
        # dict. The body is a placeholder (`a=5` has no effect); the real
        # inspection code (help/endday printing) is commented out.
        for key in xc.keys():
            for key2 in xc[key].keys():
                #help(xc[key][key2])
                #print xc[key][key2].endday
                a=5
                
                
        #most recent day last endday of list
        #read in metadata to find latest time slot. Then assign this to FIRSTDAY
        # 'metadata.pickle' lives in the same directory as OUTFILESPATH:
        # stripping the basename from the path leaves the directory prefix
        METADATA_PATH = '{}metadata.pickle'.format(OUTFILESPATH.\
                  replace(os.path.basename(OUTFILESPATH), ""))
      
        # NOTE(review): metadata is loaded with the same pickle loader used
        # for cross-correlations -- presumably a generic unpickler; confirm.
        metadata = pscrosscorr.load_pickled_xcorr(METADATA_PATH)
# Beispiel #2  -- scraping artifact (example separator from the source
# website), commented out so it is not parsed as code
# 0             -- scraping artifact (vote count)
    # Selection index out of range: abort with an explanatory message.
    # (Message fixed: was "choose one a number betwen".)
    raise Exception("You must choose a number between {} and {}"\
    .format(1, len(pickle_list)))

else:
    # Valid selection: resolve the chosen pickle file and its output paths.
    PICKLE_PATH = pickle_list[int(res) - 1]
    # drop the 7-character '.pickle' extension to get the base path
    OUTFILESPATH = PICKLE_PATH[:-7]
    out_basename = os.path.basename(OUTFILESPATH)
    OUTPATH = os.path.dirname(OUTFILESPATH)
    # distance plots are written into an 'XCORR_PLOTS' subfolder
    OUTFOLDERS = os.path.join(OUTPATH, 'XCORR_PLOTS')

    print "\nOpening {} file to process ... ".format(OUTFOLDERS)

    print out_basename
    print "\nOpening {} file to process ... ".format(out_basename)
    # re-initialising .part.pickle collection of cross-correlations
    xc = pscrosscorr.load_pickled_xcorr(PICKLE_PATH)

    # optimizing time-scale: max time = max distance / vmin (vmin = 2.5 km/s)
    # NOTE(review): max(...) is used here, but an analogous block later in
    # this file computes the same quantity with min(...) -- confirm which
    # is intended.
    maxdist = max([xc[s1][s2].dist() for s1, s2 in xc.pairs()])
    maxt = max(CROSSCORR_TMAX, maxdist / 2.5)

    if plot_distance:
        #for central_freq in central_frequencies:
        #plot distance plot of cross-correlations
        #plot distance plot of cross-correlations

        # linear-stack distance plot, saved as <basename>_linear.png
        xc.plot(plot_type='distance', xlim=(-maxt, maxt),
                outfile=out_basename + '_linear'\
                + '.png', showplot=False, stack_type='linear', fill=True)

        xc.plot(plot_type='distance', xlim=(-maxt, maxt),
# Beispiel #3  -- scraping artifact (example separator); the line below is a
# stray continuation from a truncated snippet, commented out as residue:
# 0
#                  replace(os.path.basename(OUTFILESPATH), ""))

    else:
        # ========================================
        # Restart branch: reset time to the previous run's time, reset the
        # output paths to the previous run's path names, and reload the
        # cross-correlation dictionaries from a partial pickle file.
        # ========================================
        print
        # `res` is the user's 1-based menu selection into `pickle_list`
        PART_PICKLE = pickle_list[int(res) - 1]
        # drop the 12-character '.part.pickle' suffix to recover the base path
        OUTFILESPATH = PART_PICKLE[:-12]
        out_basename = os.path.basename(OUTFILESPATH)

        print "Opening {} partial file for restart ... ".format(out_basename)

        # re-initialising .part.pickle collection of cross-correlations
        xc = pscrosscorr.load_pickled_xcorr(PART_PICKLE)

        # Placeholder loop over all station pairs; `a = 5` has no effect and
        # the real inspection code is commented out.
        for key in xc.keys():
            for key2 in xc[key].keys():
                #help(xc[key][key2])
                #print xc[key][key2].endday
                a = 5

        #most recent day last endday of list
        #read in metadata to find latest time slot. Then assign this to FIRSTDAY
        # 'metadata.pickle' lives in the same directory as OUTFILESPATH
        METADATA_PATH = '{}metadata.pickle'.format(OUTFILESPATH.\
                  replace(os.path.basename(OUTFILESPATH), ""))

        metadata = pscrosscorr.load_pickled_xcorr(METADATA_PATH)
        #print "metadata: ", metadata[-5:]
        #re-assign FIRSTDAY variable to where the data was cut off

# release the loop/comprehension variable `f` left over from an earlier
# listing step (defined outside this view)
del f
#create list of pickle files to process FTAN for
# An empty response or "0" means "all": keep every pickle file except
# editor backups (names ending in '~'). Otherwise `res` is a
# whitespace-separated list of 1-based indices into pickle_list.
if not res or res == "0":
    pickle_files = [f for f in pickle_list if f[-1] != '~']
else:
    pickle_files = [pickle_list[int(i)-1] for i in res.split()]

#usersuffix = raw_input("\nEnter suffix to append: [none]\n").strip()
usersuffix = ""

# processing each set of cross-correlations
for pickle_file in pickle_files:
    print "\nOpening pickle file ... " #+ os.path.basename(pickle_file)
    # time how long the (potentially large) pickle takes to load
    file_opent0 = dt.datetime.now()
    xc = pscrosscorr.load_pickled_xcorr(pickle_file)
    delta = (dt.datetime.now() - file_opent0).total_seconds()
    print "\nThe file took {:.1f} seconds to open.".format(delta)

    # copying the suffix of cross-correlations file
    # (everything between 'xcorr_' and the extension)
    suffix = os.path.splitext(os.path.basename(pickle_file))[0].replace('xcorr_', '')
    if usersuffix:
        suffix = '_'.join([suffix, usersuffix])

    # Performing the two-step FTAN, exporting the figures to a
    # pdf file (one page per cross-correlation) and the clean
    # dispersion curves to a binary file using module pickle.
    #
    # The file are saved in dir *FTAN_DIR* (defined in configuration file) as:
    # <prefix>_<suffix>.pdf and <prefix>_<suffix>.pickle
# Beispiel #5  -- scraping artifact (example separator), commented out
# 0             -- scraping artifact (vote count)
    # Selection index out of range: abort with an explanatory message.
    # (Message fixed: was "choose one a number betwen".)
    raise Exception("You must choose a number between {} and {}"\
    .format(1, len(pickle_list)))
    
else:
    # Valid selection: resolve the chosen pickle file and its output paths.
    PICKLE_PATH = pickle_list[int(res)-1]
    # drop the 7-character '.pickle' extension to get the base path
    OUTFILESPATH = PICKLE_PATH[:-7]
    out_basename = os.path.basename(OUTFILESPATH)        
    OUTPATH = os.path.dirname(OUTFILESPATH)    
    # SNR plots go into an 'SNR_PLOTS' subfolder of the output directory
    OUT_SNR = os.path.join(OUTPATH, 'SNR_PLOTS')


    print "\nOpening {} file to process ... ".format(OUT_SNR)


    # re-initialising .part.pickle collection of cross-correlations
    xc = pscrosscorr.load_pickled_xcorr(PICKLE_PATH)
                            
    # optimizing time-scale: max time = max distance / vmin (vmin = 2.5 km/s)
    # NOTE(review): min(...) caps the window at CROSSCORR_TMAX here, while
    # the analogous block earlier in this file uses max(...) -- confirm
    # which is intended.
    maxdist = max([xc[s1][s2].dist() for s1, s2 in xc.pairs()])
    maxt = min(CROSSCORR_TMAX, maxdist / 2.5)
    
    #plot distance plot of cross-correlations
    #xc.plot(plot_type='distance', xlim=(-maxt, maxt), 
    #outfile="/home/boland/Desktop/something1342.png", showplot=False)
    
    #plot individual cross-correlations
    #xc.plot(plot_type='classic', xlim=(-maxt, maxt), 
    #        outfile="/home/boland/Desktop/something1342.png", showplot=False)
            
        
    #xc.plot_SNR(plot_type='all', outfile=OUT_SNR, 
# release the loop/comprehension variable `f` left over from an earlier
# listing step (defined outside this view)
del f
#create list of pickle files to process FTAN for
# An empty response or "0" means "all": keep every pickle file except
# editor backups (names ending in '~'). Otherwise `res` is a
# whitespace-separated list of 1-based indices into pickle_list.
if not res or res == "0":
    pickle_files = [f for f in pickle_list if f[-1] != '~']
else:
    pickle_files = [pickle_list[int(i)-1] for i in res.split()]

#usersuffix = raw_input("\nEnter suffix to append: [none]\n").strip()
usersuffix = ""

# processing each set of cross-correlations
for pickle_file in pickle_files:
    print "\nOpening pickle file ... " #+ os.path.basename(pickle_file)
    # time how long the (potentially large) pickle takes to load
    file_opent0 = dt.datetime.now()
    # NOTE(review): a `global` statement at module scope is a no-op in
    # Python -- harmless but redundant
    global xc
    xc = pscrosscorr.load_pickled_xcorr(pickle_file)
    delta = (dt.datetime.now() - file_opent0).total_seconds()
    print "\nThe file took {:.1f} seconds to open.".format(delta)
    # discard the timing value (negligible benefit for a single float)
    del delta
    # copying the suffix of cross-correlations file
    # (everything between 'xcorr_' and the extension)
    suffix = os.path.splitext(os.path.basename(pickle_file))[0].replace('xcorr_', '')
    if usersuffix:
        suffix = '_'.join([suffix, usersuffix])

    # Performing the two-step FTAN, exporting the figures to a
    # pdf file (one page per cross-correlation) and the clean
    # dispersion curves to a binary file using module pickle.
    #
    # The file are saved in dir *FTAN_DIR* (defined in configuration file) as:
    # <prefix>_<suffix>.pdf and <prefix>_<suffix>.pickle