Example 1
 def setUpClass(cls):
     samp_rate = 50
     cls.t_length = .75
     # Make some synthetic templates
     templates, data, seeds = generate_synth_data(nsta=5,
                                                  ntemplates=5,
                                                  nseeds=10,
                                                  samp_rate=samp_rate,
                                                  t_length=cls.t_length,
                                                  max_amp=10,
                                                  max_lag=15,
                                                  phaseout="both",
                                                  jitter=0,
                                                  noise=False,
                                                  same_phase=True)
     # Rename channels
     channel_mapper = {"SYN_Z": "HHZ", "SYN_H": "HHN"}
     for tr in data:
         tr.stats.channel = channel_mapper[tr.stats.channel]
     for template in templates:
         for tr in template:
             tr.stats.channel = channel_mapper[tr.stats.channel]
     cls.party = Party()
     t = 0
     data_start = data[0].stats.starttime
     for template, template_seeds in zip(templates, seeds):
         template_name = "template_{0}".format(t)
         detections = []
         for i, sample in enumerate(template_seeds["time"]):
             det = Detection(template_name=template_name,
                             detect_time=data_start + (sample / samp_rate),
                             detect_val=template_seeds["SNR"][i] /
                             len(data),
                             no_chans=len(data),
                             chans=[(tr.stats.station, tr.stats.channel)
                                    for tr in data],
                             threshold=0.0,
                             threshold_input=0.0,
                             threshold_type="abs",
                             typeofdet="ccc")
             det._calculate_event(template_st=template,
                                  estimate_origin=False)
             detections.append(det)
         # Make a fully formed Template
         _template = Template(name=template_name,
                              st=template,
                              lowcut=2.0,
                              highcut=15.0,
                              samp_rate=samp_rate,
                              filt_order=4,
                              process_length=86400,
                              prepick=10. / samp_rate,
                              event=None)
         family = Family(template=_template, detections=detections)
         cls.party += family
         t += 1
     cls.data = data
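A quick way to exercise the Party assembled above is to walk its families and compare detection counts against the synthetic seeds. This is a minimal sketch rather than part of the test class: the helper name check_synthetic_party is hypothetical, party and seeds stand in for cls.party and the seeds returned by generate_synth_data, and it assumes the Family and Template attributes mirror their constructor arguments.

def check_synthetic_party(party, seeds):
    """Hypothetical check: one Detection per synthetic seed in each Family."""
    # Families were appended in the same order as the templates/seeds above
    for family, template_seeds in zip(party.families, seeds):
        assert len(family.detections) == len(template_seeds["time"])
        # Every Detection carries the name of its parent template
        for detection in family.detections:
            assert detection.template_name == family.template.name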
Example 2
def mseed_2_Party(wav_dir, cat, temp_cat, lowcut, highcut, filt_order,
                  process_length, prepick):
    """
    Take waveforms and catalog and create a Party object
    :param wav_dir:
    :param cat:
    :return:
    """

    partay = Party()
    # Get templates first
    temp_tup = [(ev, str(ev.resource_id).split('/')[-1].split('_')[0])
                for ev in cat
                if str(ev.resource_id).split('/')[-1].split('_')[-1] == 'self']
    temp_evs, temp_ids = zip(*temp_tup)
    temp_evs = list(temp_evs)
    wav_files = ['%s/%s.mseed' % (wav_dir, str(ev.resource_id).split('/')[-1])
                 for ev in temp_evs]
    temp_wavs = [read(wav) for wav in wav_files if os.path.isfile(wav)]
    for temp_wav, temp_ev in zip(temp_wavs, temp_evs):
        # Create a Template object, assign it to a Family and then to the Party
        tid = str(temp_ev.resource_id).split('/')[-1].split('_')[0]
        temp_matches = [ev for ev in temp_cat
                        if str(ev.resource_id).split('/')[-1] == tid]
        if temp_matches:
            temp_ev = temp_matches[0]
        tmp = Template(name=tid, st=temp_wav, lowcut=lowcut, highcut=highcut,
                       samp_rate=temp_wav[0].stats.sampling_rate,
                       filt_order=filt_order, process_length=process_length,
                       prepick=prepick, event=temp_ev)
        fam_det_evs = [ev for ev in cat
                       if str(ev.resource_id).split('/')[-1].split('_')[-1] != 'self'
                       and str(ev.resource_id).split('/')[-1].split('_')[0] == tid]
        fam_dets = [Detection(template_name=str(ev.resource_id).split('/')[-1].split('_')[0],
                              detect_time=UTCDateTime([com.text.split('=')[-1]
                                                       for com in ev.comments
                                                       if com.text.split('=')[0] == 'det_time'][0]),
                              no_chans=len(ev.picks),
                              chans=[pk.waveform_id.station_code
                                     for pk in ev.picks],
                              detect_val=float([com.text.split('=')[-1]
                                                for com in ev.comments
                                                if com.text.split('=')[0] == 'detect_val'][0]),
                              threshold=float([com.text.split('=')[-1]
                                               for com in ev.comments
                                               if com.text.split('=')[0] == 'threshold'][0]),
                              typeofdet='corr',
                              threshold_type='MAD',
                              threshold_input=8.0,
                              event=ev, id=str(ev.resource_id).split('/')[-1])
                    for ev in fam_det_evs]
        fam_cat = Catalog(events=[det.event for det in fam_dets])
        fam = Family(template=tmp, detections=fam_dets, catalog=fam_cat)
        partay.families.append(fam)
    return partay
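A hedged usage sketch for the helper above. The file names, directory and filter values are placeholders rather than values from the original code, and the catalogues are expected to follow the '<template-id>_self' / '<template-id>_<n>' resource-id convention the function relies on.

from obspy import read_events

# Placeholder inputs: one catalogue holding templates and detections, one
# holding only the template events, plus a directory of matching .mseed files.
full_cat = read_events("detections_and_templates.xml")
template_cat = read_events("templates.xml")

party = mseed_2_Party(wav_dir="waveforms", cat=full_cat, temp_cat=template_cat,
                      lowcut=2.0, highcut=10.0, filt_order=4,
                      process_length=86400, prepick=0.1)
for family in party.families:
    print(family.template.name, len(family.detections))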
Example 3
def _detect(detector,
            st,
            threshold,
            trig_int,
            moveout=0,
            min_trig=0,
            process=True,
            extract_detections=False,
            cores=1):
    """
    Detect within continuous data using the subspace method.

    Not to be called directly, use the detector.detect method.

    :type detector: eqcorrscan.core.subspace.Detector
    :param detector: Detector to use.
    :type st: obspy.core.stream.Stream
    :param st: Un-processed stream to detect within using the subspace \
        detector
    :type threshold: float
    :param threshold: Threshold value for detections between 0-1
    :type trig_int: float
    :param trig_int: Minimum trigger interval in seconds.
    :type moveout: float
    :param moveout: Maximum allowable moveout window for non-multiplexed,
        network detection.  See note.
    :type min_trig: int
    :param min_trig: Minimum number of stations exceeding threshold for \
        non-multiplexed, network detection. See note.
    :type process: bool
    :param process: Whether or not to process the stream according to the \
        parameters defined by the detector.  Default is to process the \
        data (True).
    :type extract_detections: bool
    :param extract_detections: Whether to extract waveforms for each \
        detection or not, if True will return detections and streams.
    :type cores: int
    :param cores: Number of cores to use for processing.

    :return: list of detections
    :rtype: list of eqcorrscan.core.match_filter.Detection
    """
    detections = []
    # First process the stream
    if process:
        Logger.info('Processing Stream')
        stream, stachans = _subspace_process(
            streams=[st.copy()],
            lowcut=detector.lowcut,
            highcut=detector.highcut,
            filt_order=detector.filt_order,
            sampling_rate=detector.sampling_rate,
            multiplex=detector.multiplex,
            stachans=detector.stachans,
            parallel=True,
            align=False,
            shift_len=None,
            reject=False,
            cores=cores)
    else:
        # Check the sampling rate at the very least
        for tr in st:
            if not tr.stats.sampling_rate == detector.sampling_rate:
                raise ValueError('Sampling rates do not match.')
        stream = [st]
        stachans = detector.stachans
    outtic = time.perf_counter()  # time.clock was removed in Python 3.8
    # If multiplexed, how many samples do we increment by?
    if detector.multiplex:
        Nc = len(detector.stachans)
    else:
        Nc = 1
    # Here do all ffts
    fft_vars = _do_ffts(detector, stream, Nc)
    Logger.info('Computing detection statistics')
    Logger.info('Preallocating stats matrix')
    stats = np.zeros(
        (len(stream[0]), (len(stream[0][0]) // Nc) - (fft_vars[4] // Nc) + 1))
    for det_freq, data_freq_sq, data_freq, i in zip(fft_vars[0], fft_vars[1],
                                                    fft_vars[2],
                                                    np.arange(len(stream[0]))):
        # Calculate det_statistic in frequency domain
        stats[i] = _det_stat_freq(det_freq, data_freq_sq, data_freq,
                                  fft_vars[3], Nc, fft_vars[4], fft_vars[5])
    Logger.info('Stats matrix is shape %s' % str(stats.shape))
    trig_int_samples = detector.sampling_rate * trig_int
    Logger.info('Finding peaks')
    peaks = []
    for i in range(len(stream[0])):
        peaks.append(
            findpeaks.find_peaks2_short(arr=stats[i],
                                        thresh=threshold,
                                        trig_int=trig_int_samples))
    if not detector.multiplex:
        # Conduct network coincidence triggering
        peaks = findpeaks.coin_trig(peaks=peaks,
                                    samp_rate=detector.sampling_rate,
                                    moveout=moveout,
                                    min_trig=min_trig,
                                    stachans=stachans,
                                    trig_int=trig_int)
    else:
        peaks = peaks[0]
    if len(peaks) > 0:
        for peak in peaks:
            detecttime = st[0].stats.starttime + \
                (peak[1] / detector.sampling_rate)
            rid = ResourceIdentifier(id=detector.name + '_' + str(detecttime),
                                     prefix='smi:local')
            ev = Event(resource_id=rid)
            cr_i = CreationInfo(author='EQcorrscan',
                                creation_time=UTCDateTime())
            ev.creation_info = cr_i
            # All detection info in Comments for lack of a better idea
            thresh_str = 'threshold=' + str(threshold)
            ccc_str = 'detect_val=' + str(peak[0])
            used_chans = 'channels used: ' +\
                ' '.join([str(pair) for pair in detector.stachans])
            ev.comments.append(Comment(text=thresh_str))
            ev.comments.append(Comment(text=ccc_str))
            ev.comments.append(Comment(text=used_chans))
            for stachan in detector.stachans:
                tr = st.select(station=stachan[0], channel=stachan[1])
                if tr:
                    net_code = tr[0].stats.network
                else:
                    net_code = ''
                pick_tm = detecttime
                wv_id = WaveformStreamID(network_code=net_code,
                                         station_code=stachan[0],
                                         channel_code=stachan[1])
                ev.picks.append(Pick(time=pick_tm, waveform_id=wv_id))
            detections.append(
                Detection(template_name=detector.name,
                          detect_time=detecttime,
                          no_chans=len(detector.stachans),
                          detect_val=peak[0],
                          threshold=threshold,
                          typeofdet='subspace',
                          threshold_type='abs',
                          threshold_input=threshold,
                          chans=detector.stachans,
                          event=ev))
    outtoc = time.perf_counter()
    Logger.info('Detection took %s seconds' % str(outtoc - outtic))
    if extract_detections:
        detection_streams = extract_from_stream(st, detections)
        return detections, detection_streams
    return detections
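As the docstring notes, _detect is internal and is reached through Detector.detect, which forwards the same arguments. A minimal sketch, assuming a detector previously written to disk and a day of continuous data; the file names are placeholders and the Detector read/detect interface is assumed to match the EQcorrscan release this snippet comes from.

from obspy import read
from eqcorrscan.core.subspace import Detector

detector = Detector().read("my_detector.h5")   # assumed: a detector built and saved earlier
st = read("continuous_day.mseed")              # un-processed continuous data
detections = detector.detect(st=st, threshold=0.5, trig_int=2.0,
                             moveout=2.0, min_trig=3, process=True, cores=4)
for det in detections:
    print(det.detect_time, det.detect_val)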
Example 4
def _find_detections(cum_net_resp, nodes, threshold, thresh_type, samp_rate,
                     realstations, length):
    """
    Find detections within the cumulative network response.

    :type cum_net_resp: numpy.ndarray
    :param cum_net_resp: Array of cumulative network response for nodes
    :type nodes: list
    :param nodes: Nodes associated with the source of energy in the \
        cum_net_resp
    :type threshold: float
    :param threshold: Threshold value
    :type thresh_type: str
    :param thresh_type: Either MAD (Median Absolute Deviation) or abs \
        (absolute) or RMS (Root Mean Squared)
    :type samp_rate: float
    :param samp_rate: Sampling rate in Hz
    :type realstations: list
    :param realstations:
        List of stations used to make the cumulative network response, will be
        reported in the :class:`eqcorrscan.core.match_filter.Detection`
    :type length: float
    :param length: Maximum length of peak to look for in seconds

    :returns:
        Detections as :class:`eqcorrscan.core.match_filter.Detection` objects.
    :rtype: list
    """
    cum_net_resp = np.nan_to_num(cum_net_resp)  # Force no NaNs
    if np.isnan(cum_net_resp).any():
        raise ValueError("Nans present")
    print('Median of data is: ' + str(np.median(cum_net_resp)))
    print('RMS of data is: ' + str(np.sqrt(np.mean(np.square(cum_net_resp)))))
    print('MAD of data is: ' + str(np.median(np.abs(cum_net_resp))))
    if thresh_type == 'MAD':
        thresh = (np.median(np.abs(cum_net_resp)) * threshold)
    elif thresh_type == 'abs':
        thresh = threshold
    elif thresh_type == 'RMS':
        thresh = _rms(cum_net_resp) * threshold
    else:
        raise ValueError('thresh_type must be one of MAD, abs or RMS')
    print('Threshold is set to: ' + str(thresh))
    print('Max of data is: ' + str(max(cum_net_resp)))
    peaks = findpeaks.find_peaks2_short(cum_net_resp,
                                        thresh,
                                        length * samp_rate,
                                        debug=0)
    detections = []
    if peaks:
        for peak in peaks:
            node = nodes[peak[1]]
            detections.append(
                Detection(template_name=str(node[0]) + '_' + str(node[1]) +
                          '_' + str(node[2]),
                          detect_time=peak[1] / samp_rate,
                          no_chans=len(realstations),
                          detect_val=peak[0],
                          threshold=thresh,
                          typeofdet='brightness',
                          chans=realstations,
                          id=str(node[0]) + '_' + str(node[1]) + '_' +
                          str(node[2]) + str(peak[1] / samp_rate),
                          threshold_type=thresh_type,
                          threshold_input=threshold))
    else:
        detections = []
    print('I have found ' + str(len(peaks)) + ' possible detections')
    return detections
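The three thresh_type options scale the input threshold differently. The snippet below only reproduces that arithmetic on a toy array so the scaling is explicit; it does not call EQcorrscan, and _rms is replaced by the equivalent root-mean-square expression printed above.

import numpy as np

cum_net_resp = np.random.randn(1000)  # toy cumulative network response
threshold = 8.0

thresh_mad = np.median(np.abs(cum_net_resp)) * threshold             # 'MAD' scaling
thresh_rms = np.sqrt(np.mean(np.square(cum_net_resp))) * threshold   # 'RMS' scaling
thresh_abs = threshold                                               # 'abs': used as given
print(thresh_mad, thresh_rms, thresh_abs)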
Example 5
 def setUpClass(cls):
     cls.testing_path = os.path.join(
         os.path.abspath(os.path.dirname(__file__)), 'test_data', 'REA',
         'TEST_')
     cls.template = from_sfile(sfile=os.path.join(cls.testing_path,
                                                  '21-1412-02L.S201309'),
                               lowcut=5,
                               highcut=15,
                               samp_rate=40,
                               filt_order=4,
                               length=3,
                               swin='all',
                               prepick=0.05)
     cls.detection = from_sfile(sfile=os.path.join(cls.testing_path,
                                                   '21-1759-04L.S201309'),
                                lowcut=5,
                                highcut=15,
                                samp_rate=40,
                                filt_order=4,
                                length=4,
                                swin='all',
                                prepick=0.55)
     cls.template_spicks = from_sfile(sfile=os.path.join(
         cls.testing_path, '18-2120-53L.S201309'),
                                      lowcut=5,
                                      highcut=15,
                                      samp_rate=40,
                                      filt_order=4,
                                      length=3,
                                      swin='all',
                                      prepick=0.05)
     cls.detection_spicks = from_sfile(sfile=os.path.join(
         cls.testing_path, '18-2350-08L.S201309'),
                                       lowcut=5,
                                       highcut=15,
                                       samp_rate=40,
                                       filt_order=4,
                                       length=4,
                                       swin='all',
                                       prepick=0.55)
     detection_event = read_event(
         os.path.join(cls.testing_path, '21-1759-04L.S201309'))
     detection_spicks_event = read_event(
         os.path.join(cls.testing_path, '18-2350-07L.S201309'))
     cls.detections = [
         Detection(detect_time=detection_event.origins[0].time,
                   detect_val=2.0,
                   no_chans=5,
                   threshold=1.9,
                   typeofdet='corr',
                   event=detection_event,
                   template_name='test_template',
                   threshold_type='MAD',
                   threshold_input=8.0),
         Detection(detect_time=detection_spicks_event.origins[0].time,
                   detect_val=2.0,
                   no_chans=5,
                   threshold=1.9,
                   typeofdet='corr',
                   event=detection_spicks_event,
                   template_name='test_template',
                   threshold_type='MAD',
                   threshold_input=8.0)
     ]
     tstart = min(tr.stats.starttime for tr in cls.template)
     cls.delays = {}
     for tr in cls.template:
         cls.delays.update({
             tr.stats.station + '.' + tr.stats.channel:
             tr.stats.starttime - tstart
         })
     warnings.simplefilter("always")
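The delays dictionary built at the end of setUpClass records each trace's start time relative to the earliest trace in the template. A standalone illustration with two synthetic traces; the station names, channel and offset are made up.

import numpy as np
from obspy import Trace, UTCDateTime

t0 = UTCDateTime(2013, 9, 21, 14, 12, 2)
template = [
    Trace(data=np.zeros(100), header={"station": "STA1", "channel": "HHZ",
                                      "starttime": t0}),
    Trace(data=np.zeros(100), header={"station": "STA2", "channel": "HHZ",
                                      "starttime": t0 + 1.5}),
]
tstart = min(tr.stats.starttime for tr in template)
delays = {tr.stats.station + '.' + tr.stats.channel: tr.stats.starttime - tstart
          for tr in template}
# delays == {"STA1.HHZ": 0.0, "STA2.HHZ": 1.5}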
Example 6
 def setUpClass(cls):
     cls.testing_path = os.path.join(
         os.path.abspath(os.path.dirname(__file__)), 'test_data', 'REA',
         'TEST_')
     cls.wave_path = os.path.join(
         os.path.abspath(os.path.dirname(__file__)), 'test_data', 'WAV',
         'TEST_')
     key_dict = [{
         'name': 'template',
         'sfile': '21-1412-02L.S201309'
     }, {
         'name': 'detection',
         'sfile': '21-1759-04L.S201309'
     }, {
         'name': 'template_spicks',
         'sfile': '18-2120-53L.S201309'
     }, {
         'name': 'detection_spicks',
         'sfile': '18-2350-08L.S201309'
     }]
     for item in key_dict:
         st = read(
             os.path.join(
                 cls.wave_path,
                 readwavename(os.path.join(cls.testing_path,
                                           item['sfile']))[0]))
         for tr in st:
             tr.stats.channel = tr.stats.channel[0] + tr.stats.channel[-1]
         item.update({
             'st': st,
             'sfile': os.path.join(cls.testing_path, item['sfile'])
         })
         setattr(
             cls, item['name'],
             from_meta_file(meta_file=item['sfile'],
                            lowcut=5,
                            highcut=15,
                            samp_rate=40,
                            filt_order=4,
                            length=3,
                            swin='all',
                            prepick=0.05,
                            st=item['st'])[0])
     detection_event = read_events(
         os.path.join(cls.testing_path, '21-1759-04L.S201309'))[0]
     detection_spicks_event = read_events(
         os.path.join(cls.testing_path, '18-2350-07L.S201309'))[0]
     cls.detections = [
         Detection(detect_time=detection_event.origins[0].time,
                   detect_val=2.0,
                   no_chans=5,
                   threshold=1.9,
                   typeofdet='corr',
                   event=detection_event,
                   template_name='test_template',
                   threshold_type='MAD',
                   threshold_input=8.0),
         Detection(detect_time=detection_spicks_event.origins[0].time,
                   detect_val=2.0,
                   no_chans=5,
                   threshold=1.9,
                   typeofdet='corr',
                   event=detection_spicks_event,
                   template_name='test_template',
                   threshold_type='MAD',
                   threshold_input=8.0)
     ]
     tstart = min(tr.stats.starttime for tr in cls.template)
     cls.delays = {}
     for tr in cls.template:
         cls.delays.update({
             tr.stats.station + '.' + tr.stats.channel:
             tr.stats.starttime - tstart
         })
     warnings.simplefilter("always")
Example 7
    def test_real_time_plotting(self):
        """Test the real-time plotter - must be run interactively."""

        seed_list = [
            "NZ.INZ.10.HHZ", "NZ.JCZ.10.HHZ", "NZ.FOZ.11.HHZ", "NZ.MSZ.10.HHZ",
            "NZ.PYZ.10.HHZ", "NZ.DCZ.10.HHZ", "NZ.WVZ.10.HHZ"
        ]
        client = Client("GEONET")
        inv = client.get_stations(network=seed_list[0].split(".")[0],
                                  station=seed_list[0].split(".")[1],
                                  location=seed_list[0].split(".")[2],
                                  channel=seed_list[0].split(".")[3])
        for seed_id in seed_list[1:]:
            net, sta, loc, chan = seed_id.split('.')
            inv += client.get_stations(network=net,
                                       station=sta,
                                       channel=chan,
                                       location=loc)

        now = UTCDateTime.now()
        template_cat = client.get_events(starttime=now - 3600, endtime=now)
        tribe = Tribe(templates=[
            Template(event=event, name=event.resource_id.id.split("/")[-1])
            for event in template_cat
        ])
        template_names = cycle([t.name for t in tribe])

        buffer_capacity = 1200
        rt_client = RealTimeClient(server_url="link.geonet.org.nz",
                                   buffer_capacity=buffer_capacity)
        for seed_id in seed_list:
            net, station, _, selector = seed_id.split(".")
            rt_client.select_stream(net=net,
                                    station=station,
                                    selector=selector)

        rt_client.background_run()
        while len(rt_client.buffer) < 7:
            # Wait until we have some data
            time.sleep(SLEEP_STEP)

        detections = []
        plotter = EQcorrscanPlot(rt_client=rt_client,
                                 plot_length=60,
                                 tribe=tribe,
                                 inventory=inv,
                                 update_interval=1000,
                                 detections=detections)
        plotter.background_run()

        duration = 0
        step = 5
        while duration < MAX_DURATION:
            detections.append(
                Detection(
                    template_name=next(template_names),
                    detect_time=UTCDateTime.now(),
                    no_chans=999,
                    detect_val=999,
                    threshold=999,
                    threshold_type="MAD",
                    threshold_input=999,
                    typeofdet="unreal",
                    event=Event(picks=[
                        Pick(time=UTCDateTime.now(),
                             waveform_id=WaveformStreamID(seed_string=seed_id))
                        for seed_id in seed_list
                    ])))
            time.sleep(step)
            duration += step
        rt_client.background_stop()
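Across the examples above the Detection constructor always receives the same core arguments; the sketch below pulls them into a minimal standalone call. The values are placeholders, chans and event are left at their defaults as in Examples 5 and 6, and the import path is assumed to be the usual eqcorrscan.core.match_filter location.

from obspy import UTCDateTime
from eqcorrscan.core.match_filter import Detection

det = Detection(template_name="test_template",   # placeholder values throughout
                detect_time=UTCDateTime(2019, 1, 1),
                detect_val=2.0,
                no_chans=5,
                threshold=1.9,
                threshold_type="MAD",
                threshold_input=8.0,
                typeofdet="corr")
print(det.template_name, det.detect_time)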