# ===== Example #1 =====
                location.append(chan1.location_code)
                nsta += 1
    if oput_CSV:
        noise_module.make_stationlist_CSV(inv, direc)

##################################
########DOWNLOAD SECTION##########
##################################

#--------MPI---------
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()

if rank == 0:
    # Root rank builds the list of time-chunk boundaries covering
    # [start_date, end_date] at a step of inc_hours.
    all_chunck = noise_module.get_event_list(start_date[0], end_date[0],
                                             inc_hours)
    # BUG FIX: all_chunck is a list; `all_chunck < 1` compares a list to
    # an int, which raises TypeError in Python 3. Check its length instead.
    if len(all_chunck) < 1:
        raise ValueError('Abort! no data chunck between %s and %s' %
                         (start_date[0], end_date[0]))
    # N boundary timestamps delimit N-1 processable chunks.
    splits = len(all_chunck) - 1
else:
    # Non-root ranks receive these via the broadcasts below.
    splits, all_chunck = [None for _ in range(2)]

# broadcast the variables (collective: every rank executes both calls)
splits = comm.bcast(splits, root=0)
all_chunck = comm.bcast(all_chunck, root=0)
extra = splits % size
#--------MPI: loop through each time chunck--------
for ick in range(rank, splits + size - extra, size):
    if ick < splits:
# ===== Example #2 =====
#if auto_corr and method=='coherence':
#    raise ValueError('Please set method to decon: coherence cannot be applied when auto_corr is wanted!')

#---------MPI-----------
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
#-----------------------

#-------form a station pair to loop through-------
if rank == 0:
    # Only the root rank touches the filesystem: it creates the output
    # directory and enumerates the FFT input files.
    if not os.path.isdir(CCFDIR):
        os.mkdir(CCFDIR)

    # Sorted for a deterministic processing order across runs.
    sfiles = sorted(glob.glob(os.path.join(FFTDIR, '*.h5')))
    # Day boundaries between start_date and end_date stepped by inc_days;
    # one work item per entry in the loop below.
    day = noise_module.get_event_list(start_date, end_date, inc_days)
    splits = len(day)

    if not sfiles:
        raise IOError('Abort! No FFT data in %s' % FFTDIR)
else:
    # Placeholders on non-root ranks, filled by the broadcasts below.
    splits, sfiles, day = [None for _ in range(3)]

#------split the common variables------
# Collective broadcasts: all ranks must execute them in the same order.
splits = comm.bcast(splits, root=0)
day = comm.bcast(day, root=0)
sfiles = comm.bcast(sfiles, root=0)
# Remainder used to pad the round-robin loop so every rank iterates the
# same number of times (required for collective calls inside the loop).
extra = splits % size

for ii in range(rank, splits + size - extra, size):
# ===== Example #3 =====
# number of singular vectors/values to keep — TODO confirm against the
# downstream SVD usage (not visible here)
NSV = 2

# Pick the input ASDF files: all stations (the `sta` list built earlier)
# or a single hard-wired file for quick testing.
if allstation:
    h5files = sta
else:
    h5files = [
        '/Users/chengxin/Documents/Harvard/Kanto_basin/Mesonet_BW/STACK1/E.ABHM/E.ABHM_E.OHSM.h5'
    ]
nsta = len(h5files)

for ista in range(nsta):
    h5file = h5files[ista]

    #--------assume continous recordings for each stacked segments---------
    tlist = noise_module.get_event_list(start_date, end_date, stack_days)
    tags_allstack = ['Allstacked']
    for ii in range(len(tlist) - 1):
        tags_allstack.append('F' + tlist[ii].replace('_', '') + 'T' +
                             tlist[ii + 1].replace('_', ''))
    nstacks = len(tags_allstack)

    #-------open ASDF file to read data-----------
    with pyasdf.ASDFDataSet(h5file, mode='r') as ds:
        slist = ds.auxiliary_data.list()

        #------loop through the reference waveforms------
        if slist[0] == 'Allstacked':

            #------useful parameters from ASDF file------
            rlist = ds.auxiliary_data[slist[0]].list()