Example #1
def volume_plot(stationpath, database, limits):
    """
    Function to read in station information from a file and earthquake info
    from sfiles.

    :type stationpath: str
    :type database: str
    """
    from eqcorrscan.utils import Sfile_util
    import glob

    sfiles = glob.glob(database + "/*/*/*")
    eqlocs = []
    for sfile in sfiles:
        try:
            # Read the header once per s-file rather than three times
            header = Sfile_util.readheader(sfile)
            eqlocs += [(header.latitude, header.longitude, header.depth)]
        except Exception:
            # Skip s-files that cannot be parsed
            continue
    stalocs = []
    with open(stationpath, "r") as f:
        for line in f:
            # Station file columns appear to be: lon, lat, ..., elevation (m)
            parts = line.split(",")
            stalocs += [(float(parts[1]), float(parts[0]), float(parts[4]) / 1000)]
    from utils import EQcorrscan_plotting

    EQcorrscan_plotting.threeD_seismplot(stalocs, eqlocs, limits)
    return
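A minimal usage sketch for the function above. The station file, database
path, and limits below are hypothetical placeholders: the CSV columns are
inferred from the parsing above (lon, lat, ..., elevation in metres), the
glob pattern implies a seisan-style year/month directory tree, and limits
is passed straight through to threeD_seismplot.

# Hypothetical inputs, purely illustrative
station_file = "stations.csv"   # CSV: lon, lat, ..., elevation (m)
sfile_database = "REA/MYDB_"    # Holds year/month directories of s-files
plot_limits = None              # Placeholder; forwarded to threeD_seismplot
volume_plot(station_file, sfile_database, plot_limits)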
Example #2
                                for tr in stream)
            stream = Stream(stream)
            print stream
    if not Prep:
        #stream_copy=stream.copy() # Keep the stream safe
        print "Running the detection routine"
        # Check that the data are okay
        detect_templates, detect_nodes = bright_lights.brightness(
            stations, nodes, lags, stream, brightdef.threshold,
            brightdef.thresh_type, brightdef.coherance, instance,
            matchdef, templatedef)
        del detect_templates  # Delete templates from memory to conserve RAM!
        #stream=stream_copy
        nodesout += detect_nodes
        if Split:
            plotting.threeD_gridplot(nodesout, save=brightdef.plotsave,
                                     savefile='Detected_nodes_' +
                                     str(instance) + '.png')
        else:
            plotting.threeD_gridplot(nodesout, save=brightdef.plotsave,
                                     savefile='Detected_nodes.png')

    else:
        for tr in stream:
            # Build the dated output name once rather than repeating the
            # expression for the print and the write
            outfile = 'test_data/' + tr.stats.station + '-' +\
                tr.stats.channel + '-' +\
                str(tr.stats.starttime.year) + '-' +\
                str(tr.stats.starttime.month).zfill(2) + '-' +\
                str(tr.stats.starttime.day).zfill(2) + '-processed.ms'
            print "Writing data as: " + outfile
            tr.write(outfile, format='MSEED')
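The same dated name can be built more readably with a format string; a small
sketch of an equivalent helper (the helper name is ours, not EQcorrscan's):

def processed_name(tr, outdir='test_data'):
    """Build the dated output filename for a processed trace."""
    t = tr.stats.starttime
    return '%s/%s-%s-%04d-%02d-%02d-processed.ms' % (
        outdir, tr.stats.station, tr.stats.channel, t.year, t.month, t.day)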
Example #3
                sys.path.insert(0, path[:-5])
                from par import template_gen_par as templatedef
                from par import bright_lights_par as brightdef
                from core import bright_lights
                from utils import EQcorrscan_plotting as plotting
                # Use the brightness function to search for possible templates
                # First read in the travel times
                print 'Reading in the original grids'
                stations, allnodes, alllags = \
                    bright_lights._read_tt(brightdef.nllpath,
                                           brightdef.stations, brightdef.phase)
                # Resample the grid to allow us to run it quickly!
                print 'Cutting the grid'
                stations, nodes, lags = bright_lights._resample_grid(stations, allnodes,
                                                     alllags,
                                                     brightdef.volume,
                                                     brightdef.resolution)
                # Remove lags that have a similar network moveout, e.g. the sum of the
                # differences in moveouts is small.
                print "Removing simlar lags"
                stations, nodes, lags = bright_lights._rm_similarlags(stations, nodes, lags,
                                                      brightdef.nodesimthresh)
                print "Plotting new grid"
                plotting.threeD_gridplot(nodes)
                dailycoherence = coherence_test(stream, stations, nodes, lags,
                                                templatedef.length)
            else:
                raise IOError("No traces read in for this day, are they processed?")
        else:
            raise IOError("I only know --coherence at the moment")
Example #4
def find_peaks2(arr, thresh, trig_int, debug=0, maxwidth=10,
                starttime=UTCDateTime('1970-01-01'), samp_rate=1.0):
    """
    Function to determine peaks in an array of data using scipy find_peaks_cwt,
    works fast in certain cases, but for match_filter cccsum peak finding,
    find_peaks2_short works better.  Test it out and see which works best for
    your application.

    :type arr: ndarray
    :param arr: 1-D numpy array is required
    :type thresh: float
    :param thresh: The threshold below which will be considered noise and peaks\
    will not be found in.
    :type trig_int: int
    :param trig_int: The minimum difference in samples between triggers,\
    if multiple peaks within this window this code will find the highest.
    :type debug: int
    :param debug: Optional, debug level 0-5
    :type maxwidth: int
    :param maxwidth: Maximum peak width to look for in samples

    :return: peaks, locs: Lists of peak values and locations.

    """
    from scipy.signal import find_peaks_cwt
    # Clip everything below the threshold to the threshold value
    image = np.copy(arr)
    image = np.abs(image)
    image[image < thresh] = thresh
    # If the number of samples in the image is prime this method will be
    # really slow, so pad the end with a zero to make the length non-prime
    if is_prime(len(image)):
        image = np.append(image, 0.0)
        print 'Input array has a prime number of samples, appending a zero'
        print len(image)
    if len(image[image > thresh]) == 0:
        print 'No values over threshold found'
        return []
    if debug > 0:
        print 'Found ' + str(len(image[image > thresh])) +\
                ' samples above the threshold'
    initial_peaks = []
    peaks = []
    # Find the peaks
    print 'Finding peaks'
    peakinds = find_peaks_cwt(image, np.arange(1, maxwidth))
    initial_peaks = [(image[peakind], peakind) for peakind in peakinds]
    # Sort initial peaks according to amplitude
    print 'Sorting peaks'
    peaks_sort = sorted(initial_peaks, key=lambda amplitude: amplitude[0],
                        reverse=True)
    # Debugging
    if debug >= 4:
        for peak in initial_peaks:
            print peak
    if initial_peaks:
        peaks.append(peaks_sort[0])  # Definitely take the biggest peak
        if debug > 3:
            print 'Added the biggest peak of ' + str(peaks[0][0]) +\
                    ' at sample ' + str(peaks[0][1])
        if len(initial_peaks) > 1:
            if debug > 3:
                print 'Multiple peaks found, checking them now to see if they overlap'
            # Loop through the remaining peaks in descending amplitude order.
            # If the next highest amplitude peak is within trig_int of any
            # peak already in peaks then we don't want it, else we add it
            for next_peak in peaks_sort:
                if debug > 3:
                    print next_peak
                for peak in peaks:
                    # Use add as a switch: only append next_peak if it is
                    # still True after checking against every accepted peak
                    add = False
                    if abs(next_peak[1] - peak[1]) < trig_int:
                        if debug > 3:
                            print 'Difference in time is ' +\
                                    str(next_peak[1] - peak[1])
                            print 'Which is less than ' + str(trig_int)
                        add = False
                        # Exit the loop: this peak overlaps an accepted one
                        break
                    else:
                        add = True
                if add:
                    if debug > 3:
                        print 'Adding peak of ' + str(next_peak[0]) +\
                                ' at sample ' + str(next_peak[1])
                    peaks.append(next_peak)
                elif debug > 3:
                    print 'I did not add peak of ' + str(next_peak[0]) +\
                            ' at sample ' + str(next_peak[1])

        if debug >= 3:
            from utils import EQcorrscan_plotting
            EQcorrscan_plotting.peaks_plot(image, starttime, samp_rate, True,
                                           peaks, 'debug_output/peaks_' +
                                           str(starttime.year) + '-' +
                                           str(starttime.month) + '-' +
                                           str(starttime.day) + '.pdf')
        peaks = sorted(peaks, key=lambda time: time[1], reverse=False)
        return peaks
    else:
        print 'No peaks for you!'
        return peaks
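A quick usage sketch on synthetic data. The function is called as defined
above; in the EQcorrscan source these routines live in a findpeaks utility
module, so the commented import path is an assumption:

import numpy as np
# Assumed import path, for illustration only:
# from eqcorrscan.utils.findpeaks import find_peaks2

np.random.seed(42)
data = np.random.randn(8640)  # Background noise (8640 is not prime)
data[2000] = 20.0             # Two spikes within one trigger window...
data[2010] = 15.0
data[6000] = 18.0             # ...and one isolated spike
peaks = find_peaks2(data, thresh=10.0, trig_int=50)
# Expect two (value, sample) tuples, near samples 2000 and 6000: the 20.0
# swallows the nearby 15.0, and the 18.0 stands alone
print(peaks)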
Example #5
def find_peaks_dep(arr, thresh, trig_int, debug=0,
                   starttime=UTCDateTime('1970-01-01'), samp_rate=1.0):
    """
    Function to determine peaks in an array of data above a certain threshold.

    Depreciated peak-finding routine, very slow, but accurate.  If all else fails
    this one should work.

    :type arr: ndarray
    :param arr: 1-D numpy array is required
    :type thresh: float
    :param thresh: The threshold below which will be considered noise and peaks\
    will not be found in.
    :type trig_int: int
    :param trig_int: The minimum difference in samples between triggers,\
    if multiple peaks within this window this code will find the highest.

    :return: peaks, locs: Lists of peak values and locations.
    """

    # Perform some checks
    if trig_int < 3:
        raise ValueError('Trigger interval must be greater than two samples '
                         'to find maxima')
    # Find peaks in the absolute values above the threshold
    sig = np.abs(arr) - thresh
    true_peaks = []
    for i in xrange(int(trig_int), int(len(sig) - trig_int), int(trig_int)):
        # Define a moving window containing data from +/- the trigger interval
        window = sig[i - trig_int:i + trig_int]
        peaks = []
        locs = []
        for j in xrange(1, len(window) - 1):
            # Find all turning points within the window
            if window[j] > 0.0 and window[j] > window[j + 1] and\
               window[j] > window[j - 1]:
                peaks.append(window[j])
                locs.append(i - trig_int + j)
        # Keep only the maximum peak in each window
        if peaks:
            true_peaks.append((np.max(np.array(peaks)),
                               locs[np.argmax(np.array(peaks))]))
    # Get unique values
    peaks = sorted(list(set(true_peaks)), key=lambda loc: loc[1])
    # Where peaks fall within trig_int of each other, keep only the highest
    for i in xrange(1, len(peaks) - 1):
        if peaks[i + 1][1] - peaks[i][1] < trig_int:
            if peaks[i][0] < peaks[i + 1][0]:
                peaks[i] = peaks[i + 1]
            else:
                peaks[i + 1] = peaks[i]
        elif peaks[i][1] - peaks[i - 1][1] < trig_int:
            if peaks[i][0] < peaks[i - 1][0]:
                peaks[i] = peaks[i - 1]
            else:
                peaks[i - 1] = peaks[i]
    peaks = sorted(list(set(peaks)), key=lambda loc: loc[1])
    if debug >= 3:
        from utils import EQcorrscan_plotting
        EQcorrscan_plotting.peaks_plot(arr, starttime, samp_rate, True, peaks,
                                       'debug_output/peaks_' +
                                       str(starttime.year) + '-' +
                                       str(starttime.month) + '-' +
                                       str(starttime.day) + '.pdf')
    return peaks
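Since the docstring of find_peaks2 suggests testing the routines against
each other, a minimal comparison sketch on synthetic data (note that
find_peaks_dep reports amplitude minus threshold, not raw amplitude):

import numpy as np

np.random.seed(0)
data = np.random.randn(10000)
data[3000] = 25.0  # One unambiguous spike
slow = find_peaks_dep(data, thresh=10.0, trig_int=100)
fast = find_peaks2(data, thresh=10.0, trig_int=100)
# Both should recover a single peak at (or very near) sample 3000; the
# reported values differ (about 15.0 versus 25.0) for the reason above
print(slow)
print(fast)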
Example #6
def find_peaks2_short(arr, thresh, trig_int, debug=0,
                      starttime=UTCDateTime('1970-01-01'), samp_rate=1.0):
    """
    Function to determine peaks in an array of data above a certain threshold.
    Uses a mask to remove data below threshold and finds peaks in what is left.

    :type arr: ndarray
    :param arr: 1-D numpy array is required
    :type thresh: float
    :param thresh: The threshold below which will be considered noise and peaks\
    will not be found in.
    :type trig_int: int
    :param trig_int: The minimum difference in samples between triggers,\
    if multiple peaks within this window this code will find the highest.
    :type debug: int
    :param debug: Optional, debug level 0-5

    :return: peaks, locs: Lists of peak values and locations.

    """
    from scipy import ndimage
    # Set everything below the threshold to zero
    image = np.copy(arr)
    image = np.abs(image)
    image[image < thresh] = 0
    if len(image[image > thresh]) == 0:
        print 'No values over threshold found'
        return []
    if debug > 0:
        print 'Found ' + str(len(image[image > thresh])) +\
                ' samples above the threshold'
    initial_peaks = []
    peaks = []
    # Find the peaks: label each contiguous run of above-threshold samples
    # and keep the maximum of each run.  Work on the absolute-value trace so
    # that negative-going peaks are handled correctly
    labeled_image, number_of_objects = ndimage.label(image)
    peak_slices = ndimage.find_objects(labeled_image)
    for peak_slice in peak_slices:
        window = image[peak_slice[0].start:peak_slice[0].stop]
        initial_peaks.append((max(window),
                              peak_slice[0].start + np.argmax(window)))
    # Sort initial peaks according to amplitude
    peaks_sort = sorted(initial_peaks, key=lambda amplitude: amplitude[0],
                        reverse=True)
    # Debugging
    if debug >= 4:
        for peak in initial_peaks:
            print peak
    if initial_peaks:
        peaks.append(peaks_sort[0])  # Definitely take the biggest peak
        if debug > 3:
            print 'Added the biggest peak of ' + str(peaks[0][0]) +\
                    ' at sample ' + str(peaks[0][1])
        if len(initial_peaks) > 1:
            if debug > 3:
                print 'Multiple peaks found, checking them now to see if they overlap'
            # Loop through the remaining peaks in descending amplitude order.
            # If the next highest amplitude peak is within trig_int of any
            # peak already in peaks then we don't want it, else we add it
            for next_peak in peaks_sort:
                if debug > 3:
                    print next_peak
                for peak in peaks:
                    # Use add as a switch: only append next_peak if it is
                    # still True after checking against every accepted peak
                    add = False
                    if abs(next_peak[1] - peak[1]) < trig_int:
                        if debug > 3:
                            print 'Difference in time is ' +\
                                    str(next_peak[1] - peak[1])
                            print 'Which is less than ' + str(trig_int)
                        add = False
                        # Exit the loop: this peak overlaps an accepted one
                        break
                    else:
                        add = True
                if add:
                    if debug > 3:
                        print 'Adding peak of ' + str(next_peak[0]) +\
                                ' at sample ' + str(next_peak[1])
                    peaks.append(next_peak)
                elif debug > 3:
                    print 'I did not add peak of ' + str(next_peak[0]) +\
                            ' at sample ' + str(next_peak[1])

        if debug >= 3:
            from utils import EQcorrscan_plotting
            EQcorrscan_plotting.peaks_plot(image, starttime, samp_rate, True,
                                           peaks, 'debug_output/peaks_' +
                                           str(starttime.year) + '-' +
                                           str(starttime.month) + '-' +
                                           str(starttime.day) + '.pdf')
        peaks = sorted(peaks, key=lambda time: time[1], reverse=False)
        return peaks
    else:
        print 'No peaks for you!'
        return peaks
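The masking-and-labelling trick above is easier to see on a toy array; a
minimal sketch using only scipy.ndimage, nothing EQcorrscan-specific:

import numpy as np
from scipy import ndimage

trace = np.array([0, 0, 3, 7, 4, 0, 0, 0, 5, 9, 2, 0])
mask = np.where(trace > 2, trace, 0)  # Zero out sub-threshold samples
labels, n_runs = ndimage.label(mask)  # Two contiguous runs -> n_runs == 2
for sl in ndimage.find_objects(labels):
    window = trace[sl[0]]
    # Report (absolute index, max value) of each run, as the function does
    print('%d %d' % (sl[0].start + int(np.argmax(window)), window.max()))
# Prints: "3 7" then "9 9"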
Example #7
    stations, nodes, lags = _resample_grid(stations, nodes, lags,
                                           brightdef.volume,
                                           brightdef.resolution)
    # Keep only the channels whose station appears in the grid
    for station in stations:
        if 'template_dummy' not in locals():
            template_dummy = template.select(station=station)
        else:
            template_dummy += template.select(station=station)
    template = template_dummy
    # Align one row of lags with each template channel, matched by station
    for tr in template:
        for i in xrange(len(stations)):
            if tr.stats.station == stations[i]:
                if 'alllags' not in locals():
                    alllags = [lags[i]]
                else:
                    alllags = np.concatenate((alllags, [lags[i]]), axis=0)
    lags = alllags
    print 'Lags is shaped: ' + str(np.shape(lags))
    print 'I have ' + str(len(template)) + ' channels of data'
    # Indexing may be an issue: stations are not currently checked to match
    # between the data and the lags
    possible_locations = moveout_check(template, nodes, lags,
                                       defaults.threshold, defaults.threshtype,
                                       tempdef.lowcut, tempdef.highcut,
                                       tempdef.filter_order)
    from utils import EQcorrscan_plotting as plotting
    if len(possible_locations) != 0:
        plotting.threeD_gridplot(possible_locations)
    else:
        raise ValueError("No possible location found")
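The station-to-lags alignment above leans on a locals() check; a cleaner
sketch of the same logic using a dictionary, assuming stations and lags are
index-aligned as in the snippet (the helper name is ours, for illustration):

import numpy as np

def align_lags(template, stations, lags):
    """Return one row of lags per template channel, matched by station."""
    lag_by_station = {sta: lags[i] for i, sta in enumerate(stations)}
    rows = [lag_by_station[tr.stats.station] for tr in template
            if tr.stats.station in lag_by_station]
    return np.array(rows)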