Example #1
    def test_str_accepted(self):
        """ ensure a str of the xcorr function can be passed as well """
        old_default = corr.get_array_xcorr()
        old_default_stream = corr.get_stream_xcorr()
        with corr.set_xcorr('numpy'):
            func = corr.get_array_xcorr()
            assert func is corr.numpy_normxcorr
        assert corr.get_array_xcorr() == old_default
        assert corr.get_stream_xcorr() == old_default_stream
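The test above relies on corr.set_xcorr working as a context manager. A minimal standalone sketch of that pattern, assuming eqcorrscan.utils.correlate is importable under the alias corr used throughout these examples:

from eqcorrscan.utils import correlate as corr

# Inside the block get_array_xcorr() resolves to the 'numpy' backend;
# on exit the previously registered default is restored automatically.
with corr.set_xcorr('numpy'):
    cc_func = corr.get_array_xcorr()

restored_default = corr.get_array_xcorr()  # back to the original default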
Example #2
    def test_using_custom_function_doesnt_change_default(self):
        """ ensure a custom function will not change the default """
        def func(templates, streams, pads):
            pass

        default = corr.get_array_xcorr(None)

        corr.get_array_xcorr(func)

        assert corr.get_array_xcorr(None) is default
Example #3
def array_ccs_low_amp(array_template, array_stream, pads):
    """ Use each function stored in the normxcorr cache to correlate the
     templates and arrays, return a dict with keys as func names and values
     as the cc calculated by said function.
     This specifically tests low amplitude streams as raised in issue #181."""
    out = {}
    arr_stream = array_stream * 10e-8
    for name in list(corr.XCORR_FUNCS_ORIGINAL.keys()):
        func = corr.get_array_xcorr(name)
        print("Running {0} with low-variance".format(name))
        _log_handler.reset()
        cc, _ = time_func(func, name, array_template, arr_stream, pads)
        out[name] = (cc, copy.deepcopy(log_messages['warning']))
        if "fftw" in name:
            print("Running fixed len fft")
            _log_handler.reset()
            fft_len = next_fast_len(
                max(len(array_stream) // 4, len(array_template)))
            cc, _ = time_func(func,
                              name,
                              array_template,
                              array_stream,
                              pads,
                              fft_len=fft_len)
            out[name + "_fixed_len"] = (cc,
                                        copy.deepcopy(log_messages['warning']))
    return out
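The fixture above depends on test helpers (time_func, _log_handler, log_messages). A hedged sketch of the same looping pattern outside the test suite, using only names that appear in the examples; the synthetic arrays and sizes are purely illustrative:

import numpy as np
from eqcorrscan.utils import correlate as corr

templates = np.random.randn(4, 200).astype(np.float32)  # 4 synthetic templates
stream = np.random.randn(2000).astype(np.float32)       # synthetic continuous data
pads = [0, 0, 0, 0]

results = {}
for name in corr.XCORR_FUNCS_ORIGINAL.keys():
    func = corr.get_array_xcorr(name)
    # Index [0] keeps just the correlation array, as in the normxcorr2 wrapper below.
    results[name] = func(templates, stream, pads)[0]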
Example #4
def cross_chan_coherence(st1,
                         st2,
                         allow_shift=False,
                         shift_len=0.2,
                         i=0,
                         xcorr_func='time_domain'):
    """
    Calculate cross-channel coherency.

    Determine the cross-channel coherency between two streams of multichannel
    seismic data.

    :type st1: obspy.core.stream.Stream
    :param st1: Stream one
    :type st2: obspy.core.stream.Stream
    :param st2: Stream two
    :type allow_shift: bool
    :param allow_shift:
        Whether to allow the optimum alignment to be found for coherence,
        defaults to `False` for strict coherence
    :type shift_len: float
    :param shift_len: Seconds to shift by, only used if `allow_shift=True`
    :type i: int
    :param i: index used for parallel async processing, returned unaltered
    :type xcorr_func: str, callable
    :param xcorr_func:
        The method for performing correlations. Accepts either a string or
        callable. See :func:`eqcorrscan.utils.correlate.register_array_xcorr`
        for more details.

    :returns:
        Cross-channel coherence (float, normalized by the number of channels),
        and i (int), returned unaltered from the input.
    :rtype: tuple
    """
    cccoh = 0.0
    kchan = 0
    array_xcorr = get_array_xcorr(xcorr_func)
    for tr in st1:
        tr2 = st2.select(station=tr.stats.station, channel=tr.stats.channel)
        if len(tr2) > 0 and tr.stats.sampling_rate != \
                tr2[0].stats.sampling_rate:
            warnings.warn('Sampling rates do not match, not using: %s.%s' %
                          (tr.stats.station, tr.stats.channel))
        if len(tr2) > 0 and allow_shift:
            index, corval = xcorr(tr, tr2[0],
                                  int(shift_len * tr.stats.sampling_rate))
            cccoh += corval
            kchan += 1
        elif len(tr2) > 0:
            min_len = min(len(tr.data), len(tr2[0].data))
            cccoh += array_xcorr(np.array([tr.data[0:min_len]]),
                                 tr2[0].data[0:min_len], [0])[0][0][0]
            kchan += 1
    if kchan:
        cccoh /= kchan
        return np.round(cccoh, 6), i
    else:
        warnings.warn('No matching channels')
        return 0, i
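A minimal usage sketch for the function above. Two copies of obspy's bundled example stream stand in for two events recorded on the same channels, so the expected coherence is close to 1.0:

from obspy import read

st1 = read()  # obspy's built-in example stream
st2 = read()  # identical copy, purely for illustration
coherence, idx = cross_chan_coherence(st1, st2, allow_shift=False,
                                      xcorr_func='numpy')
print("Cross-channel coherence: {0}".format(coherence))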
Example #5
def array_ccs(array_template, array_stream, pads):
    """ Use each function stored in the normxcorr cache to correlate the
     templates and arrays, return a dict with keys as func names and values
     as the cc calculated by said function"""
    out = {}

    for name in list(corr.XCORR_FUNCS_ORIGINAL.keys()):
        func = corr.get_array_xcorr(name)
        print("Running %s" % name)
        cc, _ = time_func(func, name, array_template, array_stream, pads)
        out[name] = cc
    return out
Example #6
def array_ccs_low_amp(array_template, array_stream, pads):
    """ Use each function stored in the normxcorr cache to correlate the
     templates and arrays, return a dict with keys as func names and values
     as the cc calculated by said function.
     This specifically tests low amplitude streams as raised in issue #181."""
    out = {}
    for name in list(corr.XCORR_FUNCS_ORIGINAL.keys()):
        func = corr.get_array_xcorr(name)
        print("Running %s" % name)
        cc, _ = time_func(func, name, array_template, array_stream * 10e-8,
                          pads)
        out[name] = cc
    return out
Example #7
def corr_cluster(trace_list, thresh=0.9):
    """
    Group traces based on correlations above threshold with the stack.

    Will run twice, once with 80% of your threshold to remove large
    outliers that would negatively affect the stack, then again with your
    threshold.

    :type trace_list: list
    :param trace_list:
        List of :class:`obspy.core.stream.Trace` to compute similarity between
    :type thresh: float
    :param thresh: Correlation threshold, between -1 and 1.

    :returns:
        :class:`numpy.ndarray` of bool of whether that trace correlates well
        enough (above your given threshold) with the stack.

    .. note::
        We recommend that you align the data before computing the clustering,
        e.g., the P-arrival on all templates for the same channel should
        appear at the same time in the trace.  See the
        :func:`eqcorrscan.utils.stacking.align_traces` function for a way to do
        this.
    """
    init_thresh = thresh * .8
    stack = stacking.linstack([Stream(tr) for tr in trace_list])[0]
    output = np.array([False] * len(trace_list))
    group1 = []
    array_xcorr = get_array_xcorr()
    for i, tr in enumerate(trace_list):
        cc = array_xcorr(np.array([tr.data]), stack.data, [0])[0][0][0]
        if cc > init_thresh:
            output[i] = True
            group1.append(tr)
    if len(group1) == 0:
        Logger.warning('Nothing made it past the first 80% threshold')
        return output
    stack = stacking.linstack([Stream(tr) for tr in group1])[0]
    group2 = []
    for i, tr in enumerate(trace_list):
        if array_xcorr(
                np.array([tr.data]), stack.data, [0])[0][0][0] > thresh:
            group2.append(tr)
            output[i] = True
        else:
            output[i] = False
    return output
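A small usage sketch with synthetic traces; the data are purely illustrative and cast to float32 to suit the default correlation backend:

import numpy as np
from obspy import Trace

rng = np.random.RandomState(42)
base = rng.randn(200)
# Three similar traces plus one pure-noise outlier; corr_cluster should
# flag the first three as True and the outlier as False.
trace_list = [Trace(data=(base + 0.1 * rng.randn(200)).astype(np.float32))
              for _ in range(3)]
trace_list.append(Trace(data=rng.randn(200).astype(np.float32)))
keep = corr_cluster(trace_list, thresh=0.9)
print(keep)  # numpy array of bools, one entry per input trace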
Example #8
def normxcorr2(template, image):
    """
    Thin wrapper to eqcorrscan.utils.correlate functions.

    :type template: numpy.ndarray
    :param template: Template array
    :type image: numpy.ndarray
    :param image:
        Image to scan the template through.  The order of these
        matters: if you put the template after the image you will get a
        reversed correlation matrix.

    :return:
        New :class:`numpy.ndarray` of the correlation values for the
        correlation of the image with the template.
    :rtype: numpy.ndarray

    .. note::
        If your data contain gaps these must be padded with zeros before
        using this function. The `eqcorrscan.utils.pre_processing` functions
        will provide gap-filled data in the appropriate format.  Note that if
        you pad your data with zeros before filtering or resampling the gaps
        will not be all zeros after filtering. This will result in the
        calculation of spurious correlations in the gaps.
    """
    array_xcorr = get_array_xcorr()
    # Check that we have been passed numpy arrays
    if type(template) != np.ndarray or type(image) != np.ndarray:
        Logger.error(
            'You have not provided numpy arrays, I will not convert them')
        return 'NaN'
    if len(template) > len(image):
        ccc = array_xcorr(
            templates=np.array([image]).astype(np.float32),
            stream=template.astype(np.float32), pads=[0],
            threaded=False)[0][0]
    else:
        ccc = array_xcorr(
            templates=np.array([template]).astype(np.float32),
            stream=image.astype(np.float32), pads=[0], threaded=False)[0][0]
    ccc = ccc.reshape((1, len(ccc)))
    return ccc
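A short, self-contained sketch of calling normxcorr2, embedding a synthetic template part-way through a longer noise image (illustrative data only):

import numpy as np

template = np.sin(np.linspace(0, 10 * np.pi, 100)).astype(np.float32)
image = np.random.randn(1000).astype(np.float32)
image[400:500] += 5 * template  # bury the template at sample 400
ccc = normxcorr2(template, image)
# ccc has shape (1, len(image) - len(template) + 1); the peak should sit
# close to index 400 where the template was inserted.
print(ccc.shape, ccc.argmax())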
Example #9
def template_remove(tr, template, cc_thresh, windowlength, interp_len):
    """
    Looks for instances of template in the trace and removes the matches.

    :type tr: obspy.core.trace.Trace
    :param tr: Trace to remove spikes from.
    :type template: obspy.core.trace.Trace
    :param template: Spike template to look for in data.
    :type cc_thresh: float
    :param cc_thresh: Cross-correlation threshold (between -1 and 1).
    :type windowlength: float
    :param windowlength: Length of window to look for spikes in, in seconds.
    :type interp_len: float
    :param interp_len: Window length to remove and fill in seconds.

    :returns: tr, works in place.
    :rtype: :class:`obspy.core.trace.Trace`
    """
    _interp_len = int(tr.stats.sampling_rate * interp_len)
    if _interp_len < len(template.data):
        Logger.warning('Interp_len is less than the length of the template, '
                       'will use the length of the template!')
        _interp_len = len(template.data)
    if isinstance(template, Trace):
        template = np.array([template.data])
    with Timer() as t:
        normxcorr = get_array_xcorr("fftw")
        cc, _ = normxcorr(stream=tr.data.astype(np.float32),
                          templates=template.astype(np.float32),
                          pads=[0])
        peaks = find_peaks2_short(arr=cc.flatten(),
                                  thresh=cc_thresh,
                                  trig_int=windowlength *
                                  tr.stats.sampling_rate)
        for peak in peaks:
            tr.data = _interp_gap(data=tr.data,
                                  peak_loc=peak[1] + int(0.5 * _interp_len),
                                  interp_len=_interp_len)
    Logger.info("Despiking took: {0:.4f} s".format(t.secs))
    return tr
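A hedged usage sketch for the despiking routine above; the "spike" template is simply cut out of obspy's example trace, and the thresholds and window lengths are illustrative:

from obspy import read

tr = read()[0]  # obspy's built-in example trace (100 Hz)
spike = tr.copy().trim(tr.stats.starttime + 1, tr.stats.starttime + 1.2)
cleaned = template_remove(tr, template=spike, cc_thresh=0.8,
                          windowlength=0.5, interp_len=0.3)
# template_remove works in place, so tr and cleaned refer to the same Trace.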
Example #10
def array_ccs(array_template, array_stream, pads):
    """ Use each function stored in the normxcorr cache to correlate the
     templates and arrays, return a dict with keys as func names and values
     as the cc calculated by said function"""
    out = {}

    for name in list(corr.XCORR_FUNCS_ORIGINAL.keys()):
        func = corr.get_array_xcorr(name)
        print("Running %s" % name)
        cc, _ = time_func(func, name, array_template, array_stream, pads)
        out[name] = cc
        if "fftw" in name:
            print("Running fixed len fft")
            fft_len = next_fast_len(
                max(len(array_stream) // 4, len(array_template)))
            cc, _ = time_func(func,
                              name,
                              array_template,
                              array_stream,
                              pads,
                              fft_len=fft_len)
            out[name + "_fixed_len"] = cc
    return out
Example #11
    def test_str_accepted(self):
        """ ensure a str of the xcorr function can be passed as well """
        with corr.set_xcorr('numpy'):
            func = corr.get_array_xcorr()
            assert func is corr.numpy_normxcorr