Code example #1
    def detect(self, stream, threshold, threshold_type, trig_int, plot=False,
               plotdir=None, daylong=False, parallel_process=True,
               xcorr_func=None, concurrency=None, cores=None,
               ignore_length=False, ignore_bad_data=False, group_size=None,
               overlap="calculate", full_peaks=False, save_progress=False,
               process_cores=None, **kwargs):
        """
        Detect using a Tribe of templates within a continuous stream.

        :type stream: `obspy.core.stream.Stream`
        :param stream: Continuous data to detect within using the Template.
        :type threshold: float
        :param threshold:
            Threshold level, if using `threshold_type='MAD'` then this will be
            the multiple of the median absolute deviation.
        :type threshold_type: str
        :param threshold_type:
            The type of threshold to be used, can be MAD, absolute or
            av_chan_corr.  See Note on thresholding below.
        :type trig_int: float
        :param trig_int:
            Minimum gap between detections from one template in seconds.
            If multiple detections occur within trig_int of one-another, the
            one with the highest cross-correlation sum will be selected.
        :type plot: bool
        :param plot: Turn plotting on or off.
        :type plotdir: str
        :param plotdir:
            The path to save plots to. If `plotdir=None` (default) then the
            figure will be shown on screen.
        :type daylong: bool
        :param daylong:
            Set to True to use the
            :func:`eqcorrscan.utils.pre_processing.dayproc` routine, which
            performs additional checks and is more efficient for day-long data
            over other methods.
        :type parallel_process: bool
        :param parallel_process: Whether to run pre-processing in parallel.
        :type xcorr_func: str or callable
        :param xcorr_func:
            A str of a registered xcorr function or a callable for implementing
            a custom xcorr function. For more information see:
            :func:`eqcorrscan.utils.correlate.register_array_xcorr`
        :type concurrency: str
        :param concurrency:
            The type of concurrency to apply to the xcorr function. Options are
            'multithread', 'multiprocess', 'concurrent'. For more details see
            :func:`eqcorrscan.utils.correlate.get_stream_xcorr`
        :type cores: int
        :param cores: Number of workers for processing and detection.
        :type ignore_length: bool
        :param ignore_length:
            If using daylong=True, then dayproc will check that the data
            are present for at least 80% of the day. If you do not want this
            check (which will raise an error if too much data are missing)
            then set ignore_length=True.  This is not recommended!
        :type ignore_bad_data: bool
        :param ignore_bad_data:
            If False (default), errors will be raised if data are excessively
            gappy or are mostly zeros. If True then no error will be raised,
            but an empty trace will be returned (and not used in detection).
        :type group_size: int
        :param group_size:
            Maximum number of templates to run at once; use this to reduce
            memory consumption. If unset, all templates will be run together.
        :type overlap: float
        :param overlap:
            Either None, "calculate" or a float of number of seconds to
            overlap detection streams by.  This is to counter the effects of
            the delay-and-stack in calculating cross-correlation sums. Setting
            overlap = "calculate" will work out the appropriate overlap based
            on the maximum lags within templates.
        :type full_peaks: bool
        :param full_peaks: See `eqcorrscan.utils.findpeaks.find_peaks2_short`
        :type save_progress: bool
        :param save_progress:
            Whether to save the resulting party at every data step or not.
            Useful for long-running processes.
        :type process_cores: int
        :param process_cores:
            Number of processes to use for pre-processing (if different to
            `cores`).

        :return:
            :class:`eqcorrscan.core.match_filter.Party` of Families of
            detections.

        .. Note::
            When using the "fftw" correlation backend the length of the fft
            can be set. See :mod:`eqcorrscan.utils.correlate` for more info.

        .. Note::
            `stream` must not be pre-processed. If your data contain gaps
            you should *NOT* fill those gaps before using this method.
            The pre-process functions (called within) will fill the gaps
            internally prior to processing, process the data, then re-fill
            the gaps with zeros to ensure correlations are not incorrectly
            calculated within gaps. If your data have gaps you should pass a
            merged stream without the `fill_value` argument
            (e.g.: `stream = stream.merge()`).

        .. note::
            **Data overlap:**

            Internally this routine shifts and trims the data according to the
            offsets in the template (e.g. if trace 2 starts 2 seconds after
            trace 1 in the template then the continuous data will be shifted
            by 2 seconds to align peak correlations prior to summing).
            Because of this, detections at the start and end of continuous
            data streams **may be missed**.  The maximum time-period that
            might be missing detections is the maximum offset in the template.

            To work around this, if you are conducting matched-filter
            detections through long-duration continuous data, we suggest
            using some overlap (a few seconds, on the order of the maximum
            offset in the templates) in the continuous data.  You will then
            need to post-process the detections (which should be done anyway
            to remove duplicates).  See below note for how `overlap` argument
            affects data internally if `stream` is longer than the processing
            length.

        .. Note::
            If `stream` is longer than processing length, this routine will
            ensure that data overlap between loops, which will lead to no
            missed detections at data start-stop points (see above note).
            This will result in end-time not being strictly
            honoured, so detections may occur after the end-time set.  This is
            because data must be run in the correct process-length.

        .. note::
            **Thresholding:**

            **MAD** threshold is calculated as the:

            .. math::

                threshold {\\times} (median(abs(cccsum)))

            where :math:`cccsum` is the cross-correlation sum for a given
            template.

            **absolute** threshold is a true absolute threshold based on the
            cccsum value.

            **av_chan_corr** is based on the mean values of single-channel
            cross-correlations assuming all data are present as required for
            the template, e.g.:

            .. math::

                av\_chan\_corr\_thresh=threshold \\times (cccsum /
                len(template))

            where :math:`template` is a single template from the input and the
            length is the number of channels within this template.
        """
        party = Party()
        template_groups = group_templates(self.templates)
        # now we can compute the detections for each group
        for group in template_groups:
            group_party = _group_detect(
                templates=group, stream=stream.copy(), threshold=threshold,
                threshold_type=threshold_type, trig_int=trig_int,
                plot=plot, group_size=group_size, pre_processed=False,
                daylong=daylong, parallel_process=parallel_process,
                xcorr_func=xcorr_func, concurrency=concurrency, cores=cores,
                ignore_length=ignore_length, overlap=overlap, plotdir=plotdir,
                full_peaks=full_peaks, process_cores=process_cores,
                ignore_bad_data=ignore_bad_data, arg_check=False, **kwargs)
            party += group_party
            if save_progress:
                party.write("eqcorrscan_temporary_party")
        if len(party) > 0:
            for family in party:
                if family is not None:
                    family.detections = family._uniq().detections
        return party
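A minimal usage sketch for the `detect` method above, assuming a `Tribe` instance named `tribe` has already been constructed and that the continuous data sit in a hypothetical file `day_of_data.mseed`; threshold, trigger interval and core counts are illustrative only:

from obspy import read

# Merged but otherwise un-processed continuous data (gaps left unfilled,
# as required by the pre-processing note in the docstring).
st = read("day_of_data.mseed").merge()

# threshold=8.0 with threshold_type="MAD" means a detection must exceed
# eight times the median absolute deviation of the cross-correlation sum.
party = tribe.detect(
    stream=st, threshold=8.0, threshold_type="MAD", trig_int=6.0,
    daylong=True, parallel_process=True, cores=4, overlap="calculate")
print(len(party), "families with detections")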
Code example #2
    def relative_magnitudes(self,
                            stream,
                            pre_processed,
                            process_cores=1,
                            ignore_bad_data=False,
                            parallel=False,
                            min_cc=0.4,
                            **kwargs):
        """
        Compute relative magnitudes for the detections.

        Works in place on events in the Family

        :type stream: obspy.core.stream.Stream
        :param stream:
            All the data needed to cut from - can be a gappy Stream.
        :type pre_processed: bool
        :param pre_processed:
            Whether the stream has been pre-processed or not to match the
            templates. See note below.
        :type parallel: bool
        :param parallel: Turn parallel processing on or off.
        :type process_cores: int
        :param process_cores:
            Number of processes to use for pre-processing (if different to
            `cores`).
        :type ignore_bad_data: bool
        :param ignore_bad_data:
            If False (default), errors will be raised if data are excessively
            gappy or are mostly zeros. If True then no error will be raised,
            but an empty trace will be returned (and not used in detection).
        :type min_cc: float
        :param min_cc: Minimum correlation for magnitude to be computed.
        :param kwargs:
            Keyword arguments passed to `utils.mag_calc.relative_mags`

        .. Note::
            Note on pre-processing: You can provide a pre-processed stream,
            which may be beneficial for detections over large time periods
            (the stream can have gaps, which reduces memory usage).  However,
            in this case the processing steps are not checked, so you must
            ensure that the template in the Family has the same sampling
            rate and filtering as the stream.
            If pre-processing has not been done then the data will be
            processed according to the parameters in the template.
        """
        template_groups = group_templates(
            [_f.template for _f in self.families])
        for template_group in template_groups:
            family = [
                _f for _f in self.families if _f.template == template_group[0]
            ][0]
            processed_stream = family._process_streams(
                stream=stream,
                pre_processed=pre_processed,
                process_cores=process_cores,
                parallel=parallel,
                ignore_bad_data=ignore_bad_data)
            for template in template_group:
                family = [
                    _f for _f in self.families if _f.template == template
                ][0]
                family.relative_magnitudes(stream=processed_stream,
                                           pre_processed=True,
                                           min_cc=min_cc,
                                           parallel=parallel,
                                           process_cores=process_cores,
                                           ignore_bad_data=ignore_bad_data,
                                           **kwargs)
        return self.get_catalog()
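A hedged calling sketch for this method, assuming an existing Party-like object `party` holding detection families (not shown above) and a raw, gappy continuous stream `st`; the parameter values are illustrative:

# pre_processed=False: the data are filtered and resampled per-template
# internally, as described in the pre-processing note in the docstring.
catalog = party.relative_magnitudes(
    stream=st, pre_processed=False, parallel=True, process_cores=2,
    min_cc=0.5)
# The returned catalog carries the events with relative magnitudes attached.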
Code example #3
File: party.py  Project: zjzzqs/EQcorrscan
    def lag_calc(self,
                 stream,
                 pre_processed,
                 shift_len=0.2,
                 min_cc=0.4,
                 min_cc_from_mean_cc_factor=None,
                 horizontal_chans=['E', 'N', '1', '2'],
                 vertical_chans=['Z'],
                 cores=1,
                 interpolate=False,
                 plot=False,
                 plotdir=None,
                 parallel=True,
                 process_cores=None,
                 ignore_length=False,
                 ignore_bad_data=False,
                 export_cc=False,
                 cc_dir=None,
                 **kwargs):
        """
        Compute picks based on cross-correlation alignment.

        Works in-place on events in Party.

        :type stream: obspy.core.stream.Stream
        :param stream:
            All the data needed to cut from - can be a gappy Stream.
        :type pre_processed: bool
        :param pre_processed:
            Whether the stream has been pre-processed or not to match the
            templates. See note below.
        :type shift_len: float
        :param shift_len:
            Shift length allowed for the pick in seconds, will be plus/minus
            this amount - default=0.2
        :type min_cc: float
        :param min_cc:
            Minimum cross-correlation value to be considered a pick,
            default=0.4.
        :type min_cc_from_mean_cc_factor: float
        :param min_cc_from_mean_cc_factor:
            If set to a value other than None, then the minimum cross-
            correlation value for a trace is set individually for each
            detection based on:
            min(detect_val / n_chans * min_cc_from_mean_cc_factor, min_cc).
        :type horizontal_chans: list
        :param horizontal_chans:
            List of channel endings for horizontal-channels, on which S-picks
            will be made.
        :type vertical_chans: list
        :param vertical_chans:
            List of channel endings for vertical-channels, on which P-picks
            will be made.
        :type cores: int
        :param cores:
            Number of cores to use in parallel processing, defaults to one.
        :type interpolate: bool
        :param interpolate:
            Interpolate the correlation function to achieve sub-sample
            precision.
        :type plot: bool
        :param plot:
            Whether to generate a plot for every detection; defaults to False.
        :type plotdir: str
        :param plotdir:
            The path to save plots to. If `plotdir=None` (default) then the
            figure will be shown on screen.
        :type export_cc: bool
        :param export_cc:
            Whether to output a NumPy binary file for every detection;
            defaults to False.
        :type cc_dir: str
        :param cc_dir:
            Path to saving folder, NumPy files will be output here.
        :type parallel: bool
        :param parallel: Turn parallel processing on or off.
        :type process_cores: int
        :param process_cores:
            Number of processes to use for pre-processing (if different to
            `cores`).
        :type ignore_length: bool
        :param ignore_length:
            If using daylong=True, then dayproc will check that the data
            are present for at least 80% of the day. If you do not want this
            check (which will raise an error if too much data are missing)
            then set ignore_length=True.  This is not recommended!
        :type ignore_bad_data: bool
        :param ignore_bad_data:
            If False (default), errors will be raised if data are excessively
            gappy or are mostly zeros. If True then no error will be raised,
            but an empty trace will be returned (and not used in detection).

        :returns:
            Catalog of events with picks.  No origin information is included.
            These events can then be written out via
            :func:`obspy.core.event.Catalog.write`, or to Nordic Sfiles using
            :func:`eqcorrscan.utils.sfile_util.eventtosfile` and located
            externally.
        :rtype: obspy.core.event.Catalog

        .. Note::
            Note on pre-processing: You can provide a pre-processed stream,
            which may be beneficial for detections over large time periods
            (the stream can have gaps, which reduces memory usage).  However,
            in this case the processing steps are not checked, so you must
            ensure that all the templates in the Party have the same sampling
            rate and filtering as the stream.
            If pre-processing has not been done then the data will be
            processed according to the parameters in the templates. In this
            case templates will be grouped by processing parameters and run
            with similarly processed data, so the templates do not all need
            to share the same processing parameters.

        .. Note::
            Picks are corrected for the template pre-pick time.
        """
        process_cores = process_cores or cores
        template_groups = group_templates([
            _f.template for _f in self.families if len(_f) > 0
        ])  # Fix for #341
        catalog = Catalog()
        for template_group in template_groups:
            family = [
                _f for _f in self.families if _f.template == template_group[0]
            ][0]
            group_seed_ids = {
                tr.id
                for template in template_group for tr in template.st
            }
            template_stream = Stream()
            for seed_id in group_seed_ids:
                net, sta, loc, chan = seed_id.split('.')
                template_stream += stream.select(network=net,
                                                 station=sta,
                                                 location=loc,
                                                 channel=chan)
            # Process once and only once for each group.
            processed_stream = family._process_streams(
                stream=template_stream,
                pre_processed=pre_processed,
                process_cores=process_cores,
                parallel=parallel,
                ignore_bad_data=ignore_bad_data,
                ignore_length=ignore_length,
                select_used_chans=False)
            for template in template_group:
                family = [
                    _f for _f in self.families if _f.template == template
                ][0]
                catalog += family.lag_calc(
                    stream=processed_stream,
                    pre_processed=True,
                    shift_len=shift_len,
                    min_cc=min_cc,
                    min_cc_from_mean_cc_factor=min_cc_from_mean_cc_factor,
                    horizontal_chans=horizontal_chans,
                    vertical_chans=vertical_chans,
                    cores=cores,
                    interpolate=interpolate,
                    plot=plot,
                    plotdir=plotdir,
                    export_cc=export_cc,
                    cc_dir=cc_dir,
                    parallel=parallel,
                    process_cores=process_cores,
                    ignore_bad_data=ignore_bad_data,
                    ignore_length=ignore_length,
                    **kwargs)
        return catalog
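A minimal calling sketch for `lag_calc` on a Party instance `party` (assumed to have been built beforehand) with a raw continuous stream `st`; the values below are illustrative, not recommendations:

repicked_catalog = party.lag_calc(
    stream=st, pre_processed=False, shift_len=0.5, min_cc=0.4,
    horizontal_chans=['E', 'N', '1', '2'], vertical_chans=['Z'],
    cores=4, interpolate=True, export_cc=False)
# Each event in repicked_catalog carries cross-correlation derived picks,
# corrected for the template pre-pick time, but no origin information.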
Code example #4
    def lag_calc(self,
                 stream,
                 pre_processed,
                 shift_len=0.2,
                 min_cc=0.4,
                 horizontal_chans=['E', 'N', '1', '2'],
                 vertical_chans=['Z'],
                 cores=1,
                 interpolate=False,
                 plot=False,
                 plotdir=None,
                 parallel=True,
                 process_cores=None,
                 ignore_bad_data=False,
                 relative_magnitudes=False,
                 **kwargs):
        """
        Compute picks based on cross-correlation alignment.

        Works in-place on events in Party.

        :type stream: obspy.core.stream.Stream
        :param stream:
            All the data needed to cut from - can be a gappy Stream.
        :type pre_processed: bool
        :param pre_processed:
            Whether the stream has been pre-processed or not to match the
            templates. See note below.
        :type shift_len: float
        :param shift_len:
            Shift length allowed for the pick in seconds, will be plus/minus
            this amount - default=0.2
        :type min_cc: float
        :param min_cc:
            Minimum cross-correlation value to be considered a pick,
            default=0.4.
        :type horizontal_chans: list
        :param horizontal_chans:
            List of channel endings for horizontal-channels, on which S-picks
            will be made.
        :type vertical_chans: list
        :param vertical_chans:
            List of channel endings for vertical-channels, on which P-picks
            will be made.
        :type cores: int
        :param cores:
            Number of cores to use in parallel processing, defaults to one.
        :type interpolate: bool
        :param interpolate:
            Interpolate the correlation function to achieve sub-sample
            precision.
        :type plot: bool
        :param plot:
            Whether to generate a plot for every detection; defaults to False.
        :type plotdir: str
        :param plotdir:
            The path to save plots to. If `plotdir=None` (default) then the
            figure will be shown on screen.
        :type parallel: bool
        :param parallel: Turn parallel processing on or off.
        :type process_cores: int
        :param process_cores:
            Number of processes to use for pre-processing (if different to
            `cores`).
        :type relative_magnitudes: bool
        :param relative_magnitudes:
            Whether to calculate relative magnitudes or not. See
            :func:`eqcorrscan.utils.mag_calc.relative_magnitude` for more
            information. Keyword arguments `noise_window`, `signal_window` and
            `min_snr` can be passed as additional keyword arguments to pass
            through to `eqcorrscan.utils.mag_calc.relative_magnitude`.
        :type ignore_bad_data: bool
        :param ignore_bad_data:
            If False (default), errors will be raised if data are excessively
            gappy or are mostly zeros. If True then no error will be raised,
            but an empty trace will be returned (and not used in detection).

        :returns:
            Catalog of events with picks.  No origin information is included.
            These events can then be written out via
            :func:`obspy.core.event.Catalog.write`, or to Nordic Sfiles using
            :func:`eqcorrscan.utils.sfile_util.eventtosfile` and located
            externally.
        :rtype: obspy.core.event.Catalog

        .. Note::
            Note on pre-processing: You can provide a pre-processed stream,
            which may be beneficial for detections over large time periods
            (the stream can have gaps, which reduces memory usage).  However,
            in this case the processing steps are not checked, so you must
            ensure that all the templates in the Party have the same sampling
            rate and filtering as the stream.
            If pre-processing has not been done then the data will be
            processed according to the parameters in the templates. In this
            case templates will be grouped by processing parameters and run
            with similarly processed data, so the templates do not all need
            to share the same processing parameters.

        .. Note::
            Picks are corrected for the template pre-pick time.
        """
        process_cores = process_cores or cores
        template_groups = group_templates(
            [_f.template for _f in self.families])
        catalog = Catalog()
        for template_group in template_groups:
            family = [
                _f for _f in self.families if _f.template == template_group[0]
            ][0]
            group_seed_ids = {
                tr.id
                for template in template_group for tr in template.st
            }
            template_stream = Stream()
            for seed_id in group_seed_ids:
                net, sta, loc, chan = seed_id.split('.')
                template_stream += stream.select(network=net,
                                                 station=sta,
                                                 location=loc,
                                                 channel=chan)
            processed_stream = family._process_streams(
                stream=template_stream,
                pre_processed=pre_processed,
                process_cores=process_cores,
                parallel=parallel,
                ignore_bad_data=ignore_bad_data,
                select_used_chans=False)
            for template in template_group:
                family = [
                    _f for _f in self.families if _f.template == template
                ][0]
                catalog += family.lag_calc(
                    stream=processed_stream,
                    pre_processed=True,
                    shift_len=shift_len,
                    min_cc=min_cc,
                    horizontal_chans=horizontal_chans,
                    vertical_chans=vertical_chans,
                    cores=cores,
                    interpolate=interpolate,
                    plot=plot,
                    plotdir=plotdir,
                    parallel=parallel,
                    process_cores=process_cores,
                    ignore_bad_data=ignore_bad_data,
                    relative_magnitudes=relative_magnitudes,
                    **kwargs)
        return catalog
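This variant also exposes `relative_magnitudes`. A hedged sketch of combining re-picking and relative magnitudes in one call, again assuming a Party `party` and raw stream `st`; `noise_window`, `signal_window` and `min_snr` are forwarded to `eqcorrscan.utils.mag_calc.relative_magnitude` as described in the docstring, and the window values below are purely illustrative assumptions:

catalog = party.lag_calc(
    stream=st, pre_processed=False, shift_len=0.2, min_cc=0.4,
    relative_magnitudes=True,
    # Forwarded kwargs (illustrative values): windows in seconds relative to
    # the pick used for noise and signal amplitude estimates, and a minimum
    # signal-to-noise ratio for a magnitude to be computed.
    noise_window=(-20.0, -1.0), signal_window=(-0.5, 20.0), min_snr=1.5)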