# Example no. 1
def _day_loop(detection_streams, template, min_cc, interpolate=False,
              cores=False, debug=0):
    """
    Function to loop through multiple detections for one template.

    Designed to run for the same day of data for I/O simplicity, but as you
    are passing stream objects it could run for all the detections ever, as
    long as you have the RAM!

    :type detection_streams: list
    :param detection_streams: List of all the detections for this template that
        you want to compute the optimum pick for. Individual things in list
        should be of obspy.core.stream.Stream type.
    :type template: obspy.core.stream.Stream
    :param template: The original template used to detect the detections passed
    :type min_cc: float
    :param min_cc: Minimum cross-correlation value to be allowed for a pick.
    :type interpolate: bool
    :param interpolate: Interpolate the correlation function to achieve \
        sub-sample precision.
    :type cores: int
    :param cores: Number of worker processes to use; the default of False
        uses all available CPU cores.
    :type debug: int
    :param debug: Debug output level, passed through to _channel_loop.

    :returns: Catalog object containing Event objects for each detection
              created by this template.
    :rtype: obspy.core.event.Catalog
    """
    from multiprocessing import Pool, cpu_count
    # Used to run detections in parallel
    from obspy.core.event import Catalog
    # Guard against an empty detection list: num_cores would otherwise be
    # clamped to 0 below, and Pool(processes=0) raises ValueError.
    if not detection_streams:
        return Catalog()
    if not cores:
        num_cores = cpu_count()
    else:
        num_cores = cores
    # Never spawn more workers than there are detections to process.
    if num_cores > len(detection_streams):
        num_cores = len(detection_streams)
    pool = Pool(processes=num_cores)
    # Parallelize generation of events for each detection:
    # results is a list of (i, event class)
    results = [pool.apply_async(_channel_loop, args=(detection_streams[i],
                                                     template, min_cc,
                                                     interpolate, i, debug))
               for i in range(len(detection_streams))]
    pool.close()
    events_list = [p.get() for p in results]
    pool.join()
    events_list.sort(key=lambda tup: tup[0])  # Sort based on i.
    temp_catalog = Catalog()
    temp_catalog.events = [event_tup[1] for event_tup in events_list]
    return temp_catalog
# Example no. 2
def _day_loop(detection_streams,
              template,
              min_cc,
              detections,
              horizontal_chans,
              vertical_chans,
              interpolate,
              cores,
              parallel,
              debug=0):
    """
    Function to loop through multiple detections for one template.

    Designed to run for the same day of data for I/O simplicity, but as you
    are passing stream objects it could run for all the detections ever, as
    long as you have the RAM!

    :type detection_streams: list
    :param detection_streams:
        List of all the detections for this template that you want to compute
        the optimum pick for. Individual things in list should be of
        :class:`obspy.core.stream.Stream` type.
    :type template: obspy.core.stream.Stream
    :param template: The original template used to detect the detections passed
    :type min_cc: float
    :param min_cc: Minimum cross-correlation value to be allowed for a pick.
    :type detections: list
    :param detections:
        List of detections to associate events with an input detection.
    :type horizontal_chans: list
    :param horizontal_chans:
        List of channel endings for horizontal-channels, on which S-picks will
        be made.
    :type vertical_chans: list
    :param vertical_chans:
        List of channel endings for vertical-channels, on which P-picks will
        be made.
    :type interpolate: bool
    :param interpolate:
        Interpolate the correlation function to achieve sub-sample precision.
    :type cores: int
    :param cores:
        Number of worker processes to use; a falsy value uses all available
        CPU cores.
    :type parallel: bool
    :param parallel:
        Whether to process detections in a multiprocessing pool or serially.
    :type debug: int
    :param debug: debug output level 0-5.

    :returns:
        Catalog object containing Event objects for each detection created by
        this template.
    :rtype: :class:`obspy.core.event.Catalog`
    """
    # Nothing to do; also avoids Pool(processes=0) below.
    if len(detection_streams) == 0:
        return Catalog()
    if not cores:
        num_cores = cpu_count()
    else:
        num_cores = cores
    # Never spawn more workers than there are detections to process.
    if num_cores > len(detection_streams):
        num_cores = len(detection_streams)
    if parallel:
        pool = Pool(processes=num_cores)
        debug_print('Made pool of %i workers' % num_cores, 4, debug)
        # Parallel generation of events for each detection:
        # results will be a list of (i, event class)
        results = [
            pool.apply_async(
                _channel_loop, (detection_streams[i], ), {
                    'template': template,
                    'min_cc': min_cc,
                    'detection_id': detections[i].id,
                    'interpolate': interpolate,
                    'i': i,
                    'pre_lag_ccsum': detections[i].detect_val,
                    'detect_chans': detections[i].no_chans,
                    'horizontal_chans': horizontal_chans,
                    'vertical_chans': vertical_chans,
                    # Pass debug level through, matching the serial branch
                    # below (previously dropped when running in parallel).
                    'debug': debug
                }) for i in range(len(detection_streams))
        ]
        pool.close()
        events_list = [p.get() for p in results]
        pool.join()
        events_list.sort(key=lambda tup: tup[0])  # Sort based on index.
    else:
        events_list = []
        for i in range(len(detection_streams)):
            events_list.append(
                _channel_loop(detection=detection_streams[i],
                              template=template,
                              min_cc=min_cc,
                              detection_id=detections[i].id,
                              interpolate=interpolate,
                              i=i,
                              pre_lag_ccsum=detections[i].detect_val,
                              detect_chans=detections[i].no_chans,
                              horizontal_chans=horizontal_chans,
                              vertical_chans=vertical_chans,
                              debug=debug))
    # Collect the (index, event) tuples into a catalog, ordered by index.
    temp_catalog = Catalog()
    temp_catalog.events = [event_tup[1] for event_tup in events_list]
    return temp_catalog