Example #1
def do_clustering_setup_and_run(opdict):

  base_path=opdict['base_path']
  verbose=opdict['verbose']

  # stations
  stations_filename=os.path.join(base_path,'lib',opdict['stations'])

  # output directory
  output_dir=os.path.join(base_path,'out',opdict['outdir'])

  # data
  data_dir=os.path.join(base_path,'data',opdict['datadir'])
  data_glob=opdict['dataglob']
  data_files=glob.glob(os.path.join(data_dir,data_glob))
  data_files.sort()

  # location file
  locdir=os.path.join(base_path,'out',opdict['outdir'],'loc')
  loc_filename=os.path.join(locdir,'locations.dat')

  # file containing correlation values
  coeff_file=os.path.join(locdir,opdict['xcorr_corr'])
  # Read correlation values
  b=BinaryFile(coeff_file)
  coeff=b.read_binary_file()

  # file containing time delays
  delay_file=os.path.join(locdir,opdict['xcorr_delay'])

  # INPUT PARAMETERS
  nbmin=int(opdict['nbsta'])
  if nbmin > len(coeff.keys()):
    raise Exception('the minimum number of stations cannot be greater than the number of stations!')
  event=len(coeff.values()[0])
  tplot=float(opdict['clus']) # threshold for which we save and plot 
  cluster_file="%s/cluster-%s-%s"%(locdir,str(tplot),str(nbmin))

  corr=[opdict['clus']]
  #corr=np.arange(0,1.1,0.1)
  for threshold in corr:
    threshold=float(threshold)
    nbsta=compute_nbsta(event,coeff,threshold)

    CLUSTER = do_clustering(event,nbsta,nbmin)

    if threshold == tplot:

      print "----------------------------------------------"
      print "THRESHOLD : ",threshold," # STATIONS : ",nbmin
      print "# CLUSTERS : ",len(CLUSTER)
      print CLUSTER

      c=BinaryFile(cluster_file)
      c.write_binary_file(CLUSTER)
      print "Written in %s"%cluster_file

      if verbose: # PLOT
        # Read location file
        locs=read_locs_from_file(loc_filename)
        # Read station file 
        stations=read_stations_file(stations_filename)

        # Look at the waveforms 
        plot_traces(CLUSTER, delay_file, coeff, locs, stations, data_dir, data_files, threshold)
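A minimal usage sketch for this example. The opdict keys below are exactly the ones the function reads; the paths and values are illustrative assumptions, and the waveloc helpers it calls (BinaryFile, compute_nbsta, do_clustering, plot_traces, read_locs_from_file, read_stations_file) are assumed to be importable from the project.

# Hypothetical opdict for do_clustering_setup_and_run; all values are placeholders.
opdict = {
    'base_path': '/path/to/waveloc_run',  # run directory (assumption)
    'verbose': True,
    'stations': 'coord_stations.dat',     # station file under lib/ (assumption)
    'outdir': 'RUN1',                     # output directory under out/
    'datadir': 'RUN1',                    # data directory under data/
    'dataglob': '*filt.mseed',            # glob matching the waveform files (assumption)
    'xcorr_corr': 'coeff_corr.dat',       # correlation-coefficient file in out/<outdir>/loc/
    'xcorr_delay': 'coeff_delay.dat',     # time-delay file in out/<outdir>/loc/
    'nbsta': 4,                           # minimum number of stations per event pair
    'clus': 0.8,                          # correlation threshold used to save and plot
}

do_clustering_setup_and_run(opdict)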
Example #2
def do_clustering_setup_and_run(opdict):

    base_path = opdict['base_path']
    verbose = opdict['verbose']

    # stations
    stations_filename = os.path.join(base_path, 'lib', opdict['stations'])

    # output directory
    output_dir = os.path.join(base_path, 'out', opdict['outdir'])

    # data
    data_dir = os.path.join(base_path, 'data', opdict['datadir'])
    data_glob = opdict['dataglob']
    data_files = glob.glob(os.path.join(data_dir, data_glob))
    data_files.sort()

    # location file
    locdir = os.path.join(base_path, 'out', opdict['outdir'], 'loc')
    loc_filename = os.path.join(locdir, 'locations.dat')

    # file containing correlation values
    coeff_file = os.path.join(locdir, opdict['xcorr_corr'])
    # Read correlation values
    b = BinaryFile(coeff_file)
    coeff = b.read_binary_file()

    # file containing time delays
    delay_file = os.path.join(locdir, opdict['xcorr_delay'])

    # INPUT PARAMETERS
    nbmin = int(opdict['nbsta'])
    if nbmin > len(coeff.keys()):
        raise Exception(
            'the minimum number of stations cannot be greater than the '
            'number of stations!'
        )
    event = len(coeff.values()[0])
    tplot = float(opdict['clus'])  # threshold for which we save and plot
    cluster_file = "%s/cluster-%s-%s" % (locdir, str(tplot), str(nbmin))

    corr = [opdict['clus']]
    #corr=np.arange(0,1.1,0.1)
    for threshold in corr:
        threshold = float(threshold)
        nbsta = compute_nbsta(event, coeff, threshold)

        CLUSTER = do_clustering(event, nbsta, nbmin)

        if threshold == tplot:

            print "----------------------------------------------"
            print "THRESHOLD : ", threshold, " # STATIONS : ", nbmin
            print "# CLUSTERS : ", len(CLUSTER)
            print CLUSTER

            c = BinaryFile(cluster_file)
            c.write_binary_file(CLUSTER)
            print "Written in %s" % cluster_file

            if verbose:  # PLOT
                # Read location file
                locs = read_locs_from_file(loc_filename)
                # Read station file
                stations = read_stations_file(stations_filename)

                # Look at the waveforms
                plot_traces(CLUSTER, delay_file, coeff, locs, stations,
                            data_dir, data_files, threshold)
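The cluster dictionary written by this example can be read back with the same BinaryFile helper used above for the correlation coefficients. A short sketch, assuming the threshold and station count used in the run (the file name follows the cluster-<tplot>-<nbmin> pattern built by the example); the path components are placeholders.

# Hypothetical read-back of the clustering result.
cluster_file = os.path.join('out', 'RUN1', 'loc', 'cluster-0.8-4')  # placeholder path
c = BinaryFile(cluster_file)
CLUSTER = c.read_binary_file()
for key in sorted(CLUSTER):
    # each entry holds the event indexes forming one cluster
    print key, CLUSTER[key]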
Example #3
def do_clustering_setup_and_run(opdict):
    """
    Performs clustering by applying the depth-first search algorithm and saves
    the result (a dictionary containing the event indexes forming each cluster)
    in a binary file.
    The correlation-value threshold and the minimum number of stations at which
    this threshold must be reached to form a cluster both need to be defined in
    the options dictionary.

    :param opdict: Dictionary of waveloc options

    """

    base_path = opdict['base_path']
    verbose = opdict['verbose']

    # stations
    stations_filename = os.path.join(base_path, 'lib', opdict['stations'])

    # data
    data_dir = os.path.join(base_path, 'data', opdict['datadir'])
    data_glob = opdict['dataglob']
    data_files = glob.glob(os.path.join(data_dir, data_glob))
    data_files.sort()

    # location file
    locdir = os.path.join(base_path, 'out', opdict['outdir'], 'loc')
    loc_filename = os.path.join(locdir, 'locations.dat')

    # file containing correlation values
    coeff_file = os.path.join(locdir, opdict['xcorr_corr'])
    # Read correlation values
    b = BinaryFile(coeff_file)
    coeff = b.read_binary_file()

    # INPUT PARAMETERS
    nbmin = int(opdict['nbsta'])
    if nbmin > len(coeff.keys()):
        raise Exception('the minimum number of stations cannot be greater '
                        'than the number of stations!')
    event = len(coeff.values()[0])
    tplot = float(opdict['clus'])  # threshold for which we save and plot
    cluster_file = "%s/cluster-%s-%s" % (locdir, str(tplot), str(nbmin))

    corr = [opdict['clus']]
    #corr = np.arange(0, 1.1, 0.1)
    for threshold in corr:
        threshold = float(threshold)
        nbsta = compute_nbsta(event, coeff, threshold)

        CLUSTER = do_clustering(event, nbsta, nbmin)

        if threshold == tplot:

            print "----------------------------------------------"
            print "THRESHOLD : ", threshold, " # STATIONS : ", nbmin
            print "# CLUSTERS : ", len(CLUSTER)
            print CLUSTER

            c = BinaryFile(cluster_file)
            c.write_binary_file(CLUSTER)
            print "Written in %s" % cluster_file

            if verbose:  # PLOT
                # Read location file
                locs = read_locs_from_file(loc_filename)
                # Read station file
                stations = read_stations_file(stations_filename)

                # Look at the waveforms
                #plot_traces(CLUSTER, delay_file, coeff, locs,
                #            data_dir, data_files, threshold)

                # Plot graphs
                plot_graphs(locs, stations, nbsta, CLUSTER, nbmin, threshold)
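The docstring above describes the underlying algorithm: two events are linked when their correlation reaches the threshold on at least nbmin stations, and clusters are the connected components found by a depth-first search over that graph. The functions below are a hypothetical stand-in for compute_nbsta and do_clustering, written only to illustrate that idea; they assume coeff[sta][i][j] holds the correlation between events i and j at station sta, and the real waveloc implementations may differ in naming and details.

# Illustrative sketch only -- not the waveloc code.
def compute_nbsta_sketch(n_events, coeff, threshold):
    # nbsta[i][j] = number of stations where the correlation between
    # events i and j reaches the threshold
    nbsta = [[0] * n_events for _ in range(n_events)]
    for sta in coeff:
        for i in range(n_events):
            for j in range(n_events):
                if i != j and coeff[sta][i][j] >= threshold:
                    nbsta[i][j] += 1
    return nbsta

def do_clustering_sketch(n_events, nbsta, nbmin):
    # Depth-first search over the graph whose edges are event pairs with
    # at least nbmin stations above the threshold; every connected component
    # containing more than one event becomes a cluster.
    visited = [False] * n_events
    clusters = {}
    for start in range(n_events):
        if visited[start]:
            continue
        visited[start] = True
        stack, component = [start], []
        while stack:
            i = stack.pop()
            component.append(i)
            for j in range(n_events):
                if not visited[j] and nbsta[i][j] >= nbmin:
                    visited[j] = True
                    stack.append(j)
        if len(component) > 1:
            clusters[len(clusters) + 1] = sorted(component)
    return clusters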
Example #4
File: kurtogram.py  Project: amaggi/waveloc
def do_kurtogram_setup_and_run(opdict):
    """
    Run the kurtogram analysis using the parameters contained in the
    WavelocOptions.opdict.

    :param opdict: Dictionary containing the waveloc parameters and options.
    """

    base_path = opdict['base_path']

    # data
    data_dir = os.path.join(base_path, 'data', opdict['datadir'])
    data_glob = opdict['dataglob']
    data_files = glob.glob(os.path.join(data_dir, data_glob))
    data_files.sort()

    kurt_glob = opdict['kurtglob']
    kurt_files = glob.glob(os.path.join(data_dir, kurt_glob))
    kurt_files.sort()

    # output directory
    out_dir = os.path.join(base_path, 'out', opdict['outdir'])

    # location file
    locdir = os.path.join(out_dir, 'loc')
    locfile = os.path.join(locdir, 'locations.dat')
    # Read locations
    locs = read_locs_from_file(locfile)

    # create a file containing the best filtering parameters for each event and
    # each station
    kurto_file = os.path.join(out_dir, 'kurto')

    tdeb = utcdatetime.UTCDateTime(opdict['starttime'])
    tfin = utcdatetime.UTCDateTime(opdict['endtime'])

    # write filenames in a dictionary
    kurtdata = {}
    for filename in kurt_files:
        try:
            wf = Waveform()
            wf.read_from_file(filename)
            sta = wf.station
            kurtdata[sta] = filename
        except UserWarning:
            logging.info('No data around %s for file %s.' %
                         (tdeb.isoformat(), filename))

    data = {}
    for filename in data_files:
        try:
            wf = Waveform()
            wf.read_from_file(filename)
            sta = wf.station
            data[sta] = filename
        except UserWarning:
            logging.info('No data around %s for file %s.' %
                         (tdeb.isoformat(), filename))

    # ------------------------------------------------------------------------
    # Create an empty dictionary that will contain the filtering parameters
    param = {}

    for station in sorted(data):

        wf1 = Waveform()
        wf1.read_from_file(data[station], starttime=tdeb, endtime=tfin)

        wf2 = Waveform()
        wf2.read_from_file(kurtdata[station], starttime=tdeb, endtime=tfin)

        info = {}
        info['data_file'] = data[station]
        info['station'] = station
        info['tdeb_data'] = wf1.starttime
        info['tdeb_kurt'] = wf2.starttime
        info['kurt_file'] = kurtdata[station]
        info['data_ini'] = wf1.values
        info['kurt_ini'] = wf2.values
        info['dt'] = wf1.dt
        info['filter'] = []

        logging.info('Processing station %s' % info['station'])

        if opdict['new_kurtfile']:
            new_filename = 'filt_kurtogram'
            new_kurt_filename = \
                os.path.join("%s%s" % (data[station].split(data_glob[1:])[0],
                                       new_filename))
            info['new_kurt_file'] = new_kurt_filename
            trace_kurt_fin = Waveform()
            trace_kurt_fin.read_from_file(new_kurt_filename)
            info['new_kurt'] = trace_kurt_fin.values

        for loc in locs:
            origin_time = loc['o_time']
            if opdict['verbose']:
                print "******************************************************"
                logging.info(origin_time)

            if origin_time > tdeb and origin_time < tfin:
                info = kurto(origin_time, info, opdict)
            else:
                continue

        info['filter'] = np.matrix(info['filter'])
        sta = info['station']
        param[sta] = info['filter']

        if 'new_kurt_file' in info:
            trace_kurt_fin.values[:] = info['new_kurt']
            trace_kurt_fin.write_to_file_filled(info['new_kurt_file'],
                                                format='MSEED', fill_value=0)

    # Write the dictionary 'param' to a binary file
    if os.path.isfile(kurto_file):
        ans = raw_input('%s file already exists. Do you really want to '
                        'replace it? (y or n):\n' % kurto_file)
        if ans != 'y':
            kurto_file = "%s_1" % kurto_file

    a = BinaryFile(kurto_file)
    a.write_binary_file(param)

    # read and plot the file you have just written
    read_kurtogram_frequencies(kurto_file)
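As with the clustering examples, the function is driven entirely by opdict. The keys below are the ones this example reads; the values are illustrative assumptions only.

# Hypothetical opdict for do_kurtogram_setup_and_run; all values are placeholders.
opdict = {
    'base_path': '/path/to/waveloc_run',    # run directory (assumption)
    'datadir': 'RUN1',                      # data directory under data/
    'dataglob': '*filt.mseed',              # glob for the raw waveform files (assumption)
    'kurtglob': '*kurt.mseed',              # glob for the kurtosis waveform files (assumption)
    'outdir': 'RUN1',                       # output directory under out/
    'starttime': '2010-10-14T00:00:00.0Z',  # analysis window start (assumption)
    'endtime': '2010-10-14T16:00:00.0Z',    # analysis window end (assumption)
    'new_kurtfile': False,                  # also write a filtered kurtosis trace per station
    'verbose': False,
}

do_kurtogram_setup_and_run(opdict)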
Example #5
def do_kurtogram_setup_and_run(opdict):
    """
    Run the kurtogram analysis using the parameters contained in the
    WavelocOptions.opdict.

    :param opdict: Dictionary containing the waveloc parameters and options.
    """

    base_path = opdict['base_path']

    # data
    data_dir = os.path.join(base_path, 'data', opdict['datadir'])
    data_glob = opdict['dataglob']
    data_files = glob.glob(os.path.join(data_dir, data_glob))
    data_files.sort()

    kurt_glob = opdict['kurtglob']
    kurt_files = glob.glob(os.path.join(data_dir, kurt_glob))
    kurt_files.sort()

    # output directory
    out_dir = os.path.join(base_path, 'out', opdict['outdir'])

    # location file
    locdir = os.path.join(out_dir, 'loc')
    locfile = os.path.join(locdir, 'locations.dat')
    # Read locations
    locs = read_locs_from_file(locfile)

    # create a file containing the best filtering parameters for each event and
    # each station
    kurto_file = os.path.join(out_dir, 'kurto')

    tdeb = utcdatetime.UTCDateTime(opdict['starttime'])
    tfin = utcdatetime.UTCDateTime(opdict['endtime'])

    # write filenames in a dictionary
    kurtdata = {}
    for filename in kurt_files:
        try:
            wf = Waveform()
            wf.read_from_file(filename)
            sta = wf.station
            kurtdata[sta] = filename
        except UserWarning:
            logging.info('No data around %s for file %s.' %
                         (tdeb.isoformat(), filename))

    data = {}
    for filename in data_files:
        try:
            wf = Waveform()
            wf.read_from_file(filename)
            sta = wf.station
            data[sta] = filename
        except UserWarning:
            logging.info('No data around %s for file %s.' %
                         (tdeb.isoformat(), filename))

    # ------------------------------------------------------------------------
    # Create an empty dictionary that will contain the filtering parameters
    param = {}

    for station in sorted(data):

        wf1 = Waveform()
        wf1.read_from_file(data[station], starttime=tdeb, endtime=tfin)

        wf2 = Waveform()
        wf2.read_from_file(kurtdata[station], starttime=tdeb, endtime=tfin)

        info = {}
        info['data_file'] = data[station]
        info['station'] = station
        info['tdeb_data'] = wf1.starttime
        info['tdeb_kurt'] = wf2.starttime
        info['kurt_file'] = kurtdata[station]
        info['data_ini'] = wf1.values
        info['kurt_ini'] = wf2.values
        info['dt'] = wf1.dt
        info['filter'] = []

        logging.info('Processing station %s' % info['station'])

        if opdict['new_kurtfile']:
            new_filename = 'filt_kurtogram'
            new_kurt_filename = \
                os.path.join("%s%s" % (data[station].split(data_glob[1:])[0],
                                       new_filename))
            info['new_kurt_file'] = new_kurt_filename
            trace_kurt_fin = Waveform()
            trace_kurt_fin.read_from_file(new_kurt_filename)
            info['new_kurt'] = trace_kurt_fin.values

        for loc in locs:
            origin_time = loc['o_time']
            if opdict['verbose']:
                print "******************************************************"
                logging.info(origin_time)

            if origin_time > tdeb and origin_time < tfin:
                info = kurto(origin_time, info, opdict)
            else:
                continue

        info['filter'] = np.matrix(info['filter'])
        sta = info['station']
        param[sta] = info['filter']

        if 'new_kurt_file' in info:
            trace_kurt_fin.values[:] = info['new_kurt']
            trace_kurt_fin.write_to_file_filled(info['new_kurt_file'],
                                                format='MSEED',
                                                fill_value=0)

    # Write the dictionary 'param' to a binary file
    if os.path.isfile(kurto_file):
        ans = raw_input('%s file already exists. Do you really want to '
                        'replace it? (y or n):\n' % kurto_file)
        if ans != 'y':
            kurto_file = "%s_1" % kurto_file

    a = BinaryFile(kurto_file)
    a.write_binary_file(param)

    # read and plot the file you have just written
    read_kurtogram_frequencies(kurto_file)
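The 'param' dictionary written at the end of the run can be loaded again with BinaryFile, which is presumably what read_kurtogram_frequencies does internally. A short sketch, assuming the out/<outdir>/kurto path used above; the path is a placeholder.

# Hypothetical read-back of the kurtogram parameter file.
kurto_file = os.path.join('out', 'RUN1', 'kurto')  # placeholder path
a = BinaryFile(kurto_file)
param = a.read_binary_file()
for sta in sorted(param):
    # param[sta] is the np.matrix of filtering parameters accumulated by kurto()
    print sta, param[sta].shape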