Example #1
def make_pick(pick_str, origin_time):
    """ Creates an ObsPy Pick object from a line of STP
    phase output.

    Sample pick_str:
    CI    CLC HHZ --   35.8157  -117.5975   775.0 P c. i  1.0    6.46   1.543
    """

    fields = pick_str.split()
    if len(fields) != 13:
        raise ValueError('Invalid STP phase output')

    new_pick = Pick()
    (net, sta, chan, loc) = fields[:4]
    new_pick.waveform_id = WaveformStreamID(network_code=net,
                                            station_code=sta,
                                            channel_code=chan,
                                            location_code=loc)

    # Determine polarity from first motion.
    polarity = POLARITY_MAPPING[fields[8][0]]
    if polarity == '':
        polarity = POLARITY_MAPPING[fields[8][1]]
    if polarity != '':
        new_pick.polarity = polarity

    # Determine signal onset.
    if fields[9] == 'i':
        new_pick.onset = 'impulsive'
    elif fields[9] == 'e':
        new_pick.onset = 'emergent'
    else:
        new_pick.onset = 'questionable'

    # Determine time error from STP quality.
    # Use Jiggle standard and assume sample rate of 100 sps.
    quality = float(fields[10])
    if quality == 0.0:
        new_pick.time_errors = QuantityError(lower_uncertainty=0.03)
    elif quality <= 0.3:
        new_pick.time_errors = QuantityError(upper_uncertainty=0.03)
    elif quality <= 0.5:
        new_pick.time_errors = QuantityError(upper_uncertainty=0.02)
    elif quality <= 0.8:
        new_pick.time_errors = QuantityError(upper_uncertainty=0.01)
    elif quality == 1.0:
        new_pick.time_errors = QuantityError(upper_uncertainty=0.0)

    # Determine pick time.
    offset = float(fields[12])
    new_pick.time = origin_time + offset

    return new_pick
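
A minimal usage sketch (not from the original project): it assumes the obspy imports below and a hypothetical POLARITY_MAPPING consistent with the code above, then parses the sample line from the docstring.

from obspy import UTCDateTime
from obspy.core.event import Pick, WaveformStreamID, QuantityError

# Hypothetical first-motion table; the real one lives in the source project
POLARITY_MAPPING = {'c': 'positive', 'd': 'negative', '.': ''}

line = ('CI    CLC HHZ --   35.8157  -117.5975   775.0 '
        'P c. i  1.0    6.46   1.543')
pick = make_pick(line, UTCDateTime('2020-01-01T00:00:00'))
print(pick.waveform_id.get_seed_string(), pick.polarity, pick.onset)
# CI.CLC.--.HHZ positive impulsive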
Example #2
 def test_quantity_error_equality(self):
     """
     Comparisons between empty quantity errors and None should return True.
     Non-empty quantity errors should return False.
     """
     err1 = QuantityError()
     self.assertEqual(err1, None)
     err2 = QuantityError(uncertainty=10)
     self.assertNotEqual(err2, None)
     self.assertNotEqual(err2, err1)
     err3 = QuantityError(uncertainty=10)
     self.assertEqual(err3, err2)
Example #3
 def test_quantity_error_equality(self):
     """
     Comparisons between empty quantity errors and None should return True.
     Non-empty quantity errors should return False.
     """
     err1 = QuantityError()
     assert err1 == None  # NOQA needs to be ==
     err2 = QuantityError(uncertainty=10)
     assert err2 is not None
     assert err2 != err1
     err3 = QuantityError(uncertainty=10)
     assert err3 == err2
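
Both variants above rely on the same behaviour: an empty QuantityError compares equal to None and is falsy, while any populated field makes it unequal and truthy. A quick sketch, assuming a reasonably recent obspy:

from obspy.core.event import QuantityError

print(QuantityError() == None)  # True: empty error behaves like None
print(bool(QuantityError()))    # False: empty error is falsy
print(QuantityError(uncertainty=10) == QuantityError(uncertainty=10))  # True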
Example #4
 def test_nordic_write(self):
     """
     Test calculating amplitudes
     """
     compare_file = str(self.data_path / "test.nordic")
     otime = UTCDateTime('2019-05-19T06:09:48')
     wid = WaveformStreamID(network_code='4G', station_code='STAT')
     wid.channel_code = 'SHZ'
     Ppick = Pick(time=otime + 2.2,
                  phase_hint='P',
                  waveform_id=wid,
                  evaluation_mode='automatic',
                  time_errors=QuantityError(0.01))
     Parrival = Arrival(pick_id=Ppick.resource_id, time_weight=0)
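     # nb: the same WaveformStreamID object is reused for every pick below,
     # so later channel_code changes are visible from the earlier picks too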
     wid.channel_code = 'SH1'
     Spick = Pick(time=otime + 3.5,
                  phase_hint='S',
                  waveform_id=wid,
                  evaluation_mode='automatic',
                  time_errors=QuantityError(0.05))
     wid.channel_code = 'SH2'
     Sarrival = Arrival(pick_id=Spick.resource_id, time_weight=1)
     Apick = Pick(time=otime + 4.0,
                  phase_hint='IAML',
                  waveform_id=wid,
                  evaluation_mode='automatic',
                  time_errors=QuantityError(0.1))
     Amp = Amplitude(generic_amplitude=410.e-9,
                     type='IAML',
                     unit='m',
                     period=0.22,
                     magnitude_hint='ML',
                     category='period',
                     pick_id=Apick.resource_id,
                     waveform_id=Apick.waveform_id)
     picks = [Ppick, Spick, Apick]
     amps = [Amp]
     arrivals = [Parrival, Sarrival]
     PSPicker.save_nordic_event(picks,
                                otime,
                                '.',
                                'test.nordic',
                                amps,
                                arrivals=arrivals,
                                debug=False)
     self.assertTextFilesEqual('test.nordic',
                               compare_file,
                               ignore_lines=[1])
     Path("test.nordic").unlink()
     for p in Path(".").glob('run_*.log'):
         p.unlink()
Example #5
def add_picks(tr, method, prev_picks, pick_tol=0.025):
    wav_id = WaveformStreamID(station_code=tr.stats.station,
                              channel_code=tr.stats.channel,
                              network_code=tr.stats.network)
    scnl, tpicks, polarity, snr, uncert = get_picks(tr,
                                                    picker=method,
                                                    show_plot=False)
    for ind, tpick in enumerate(tpicks):
        p = Pick(time=tpick,
                 waveform_id=wav_id,
                 time_errors=QuantityError(uncertainty=uncert[ind]),
                 method_id=method,
                 comments=[Comment(text="SNR = %f" % snr[ind])])
        # Check if there is a previous pick within the tolerance threshold
        if prev_picks:
            prev_tpick = np.array([pick.time for pick in prev_picks])
            diffs = np.abs(prev_tpick - p.time)
            if diffs.min() < pick_tol:
                ix = diffs.argmin()
                if prev_picks[ix].time < p.time:
                    # Within pick_tol of an earlier pick: keep the previous pick
                    continue  # Don't add pick
                else:
                    # Within pick_tol of a later pick: replace it with this one
                    prev_picks.remove(prev_picks[ix])
                    prev_picks.append(p)
            else:
                # No previous pick within tolerance: keep this pick as well
                prev_picks.append(p)
        else:
            # No previous picks yet: start the list with this one
            prev_picks = [p]

    return prev_picks
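
The de-duplication above reduces to a nearest-neighbour test on pick times; the same step in isolation (hypothetical times, numpy and obspy assumed):

import numpy as np
from obspy import UTCDateTime

prev_times = [UTCDateTime(0) + 1.0, UTCDateTime(0) + 2.0]
new_time = UTCDateTime(0) + 2.01
diffs = np.abs(np.array(prev_times) - new_time)
print(diffs.min() < 0.025, diffs.argmin())  # True 1 -> clashes with the second pick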
Example #6
    def _time_error(self):
        """
        Return approximate pick time errors corresponding to pick weight

        Errors are multiplied by 2 for self.phase_guess == 'S'
        :returns: time_errors
        :rtype: obspy QuantityError
        """
        if not isinstance(self.weight, int):
            return None
        elif self.sampling_rate is None:
            return None

        assert self.phase_guess in ('P', 'S'), \
            f"phase_guess '{self.phase_guess}' not in 'PS'"

        if self.weight == 0:
            uncertainty = 2. / self.sampling_rate
        elif self.weight == 1:
            uncertainty = 8. / self.sampling_rate
        elif self.weight == 2:
            uncertainty = 32. / self.sampling_rate
        elif self.weight == 3:
            uncertainty = 128. / self.sampling_rate
        else:
            uncertainty = 2000. / self.sampling_rate
        if self.phase_guess == 'S':
            uncertainty *= 2.
        return QuantityError(uncertainty)
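
A quick sanity check of the weight-to-uncertainty mapping above, assuming a sampling rate of 100 sps (an S-phase guess doubles each value):

for weight, samples in zip(range(5), (2., 8., 32., 128., 2000.)):
    print(weight, samples / 100.0)
# 0 0.02 / 1 0.08 / 2 0.32 / 3 1.28 / 4 20.0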
Example #7
def assign_stefan_picks(cat, name_map, pk_file, uncert_cutoff):
    """
    Take output from Stefans Spicker and add to catalog
    :param cat:
    :param name_map:
    :param pk_file:
    :param uncert_cutoff:
    :return:
    """

    boreholes = ['NS12', 'NS13', 'NS14', 'THQ2']  # For channel naming
    alph = make_alph()
    picks = make_pk_dict(name_map, pk_file)
    for ev in cat:
        print('For ev: %s' % str(ev.resource_id))
        if ev.resource_id in picks:
            for pk in picks[ev.resource_id]:
                # (Sigh) Build the datetime from the time string...
                o_time = ev.preferred_origin().time
                hour = int(pk['time'].split(':')[0])
                minute = int(pk['time'].split(':')[1])
                second = int(pk['time'].split(':')[2].split('.')[0])
                sta_nm = '{}{}{}'.format(pk['sta'][:2],
                                         str(alph[pk['sta'][2]]),
                                         str(alph[pk['sta'][3]]))
                if sta_nm in boreholes:
                    chan_nm = 'EH1'
                else:
                    chan_nm = 'EHE'
                if len(pk['time'].split(':')[2].split('.')) == 1:
                    microsecond = 0
                else:
                    microsecond = int(
                        pk['time'].split(':')[2].split('.')[1]) * 1000
                pk_time = UTCDateTime(year=o_time.year,
                                      month=o_time.month,
                                      day=o_time.day,
                                      hour=hour,
                                      minute=minute,
                                      second=second,
                                      microsecond=microsecond)
                if pk['sta'][0] == 'N' or pk['sta'][0] == 'R':
                    wv_id = WaveformStreamID(station_code=sta_nm,
                                             channel_code=chan_nm)
                else:
                    wv_id = WaveformStreamID(station_code=pk['sta'],
                                             channel_code=chan_nm)
                if float(pk['error']) < uncert_cutoff:
                    uncert = QuantityError(uncertainty=float(pk['error']))
                    new_pk = Pick(time=pk_time,
                                  waveform_id=wv_id,
                                  phase_hint='S',
                                  time_errors=uncert)
                    ev.picks.append(new_pk)
    return cat
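
The time-string handling in this and the later assign_stefan_picks variant assumes "HH:MM:SS" with an optional ".mmm" fraction; the same parse as a compact sketch (hypothetical input):

from obspy import UTCDateTime

o_time = UTCDateTime('2015-03-02T12:00:00')
hh, mm, rest = '12:34:56.789'.split(':')
ss, _, frac = rest.partition('.')
pk_time = UTCDateTime(year=o_time.year, month=o_time.month, day=o_time.day,
                      hour=int(hh), minute=int(mm), second=int(ss),
                      microsecond=int(frac) * 1000 if frac else 0)
print(pk_time)  # 2015-03-02T12:34:56.789000Z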
Example #8
def write_pdf_to_dataset(predict,
                         dataset_list,
                         dataset_output_dir,
                         remove_dir=False):
    if remove_dir:
        shutil.rmtree(dataset_output_dir, ignore_errors=True)
    os.makedirs(dataset_output_dir, exist_ok=True)

    print("Output file:")
    with tqdm(total=len(dataset_list)) as pbar:
        for i, prob in enumerate(predict):
            try:
                trace = read(dataset_list[i]).traces[0]

            except IndexError:
                break

            trace_length = trace.data.size
            pdf = prob.reshape(trace_length)

            if pdf.max():
                trace.pdf = pdf / pdf.max()
            else:
                trace.pdf = pdf
            pdf_picks = get_picks_from_pdf(trace)

            if trace.picks:
                for val_pick in trace.picks:
                    for pre_pick in pdf_picks:
                        pre_pick.evaluation_mode = "automatic"

                        residual = get_time_residual(val_pick, pre_pick)
                        pre_pick.time_errors = QuantityError(residual)

                        if is_close_pick(val_pick, pre_pick, delta=0.1):
                            pre_pick.evaluation_status = "confirmed"
                        elif is_close_pick(val_pick, pre_pick, delta=1):
                            pre_pick.evaluation_status = "rejected"

            else:
                trace.picks = []
                for pre_pick in pdf_picks:
                    pre_pick.evaluation_mode = "automatic"

            trace.picks.extend(pdf_picks)
            time_stamp = trace.stats.starttime.isoformat()
            trace.write(os.path.join(dataset_output_dir,
                                     time_stamp + trace.get_id() + ".pkl"),
                        format="PICKLE")
            pbar.update()
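
Note that QuantityError's first positional argument is uncertainty, so QuantityError(residual) above stores the time residual as a symmetric uncertainty; the explicit equivalent is:

pre_pick.time_errors = QuantityError(uncertainty=residual)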
Example #9
 def test_quantity_error_warn_on_non_default_key(self):
     """
     """
     err = QuantityError()
     with warnings.catch_warnings(record=True) as w:
         warnings.simplefilter("always")
         err.uncertainty = 0.01
         err.lower_uncertainty = 0.1
         err.upper_uncertainty = 0.02
         err.confidence_level = 80
         self.assertEqual(len(w), 0)
         # setting a typoed or custom field should warn!
         err.confidence_levle = 80
         self.assertEqual(len(w), 1)
Example #11
def assign_stefan_picks(cat,
                        pk_file,
                        uncert_cutoff,
                        name_map=None,
                        temps=False,
                        temp_sac_dir=False):
    """
    Take output from Stefan's Spicker and add the picks to the catalog (in place)
    :param cat: Catalog which we want to populate with S-picks
    :param pk_file: File including all of the S-picks
    :param uncert_cutoff: Cutoff for the pick error in seconds
    :param name_map: In the case of detections, we need to map new eids back
        to the originals based on this file provided by Stefan
    :param temps: Whether we are using template resource_ids rather than
        detection resource_ids
    :param temp_sac_dir: Directory of self detections for templates. This is
        so that we can map the self_detection name (which may be the basis
        for rids in a catalog) to the basic template name.
    :return:
    """

    boreholes = ['NS12', 'NS13', 'NS14', 'THQ2']  # For channel naming
    alph = make_alph()
    picks = make_pk_dict(pk_file, name_map)
    if temps and temp_sac_dir:
        self_names = [
            nm.split('/')[-1] for nm in glob('{}/*'.format(temp_sac_dir))
        ]
        temp_map = {
            ResourceIdentifier('smi:local/{}'.format(nm)):
            ResourceIdentifier('smi:local/{}'.format(nm.split('_')[0]))
            for nm in self_names
        }
    for ev in cat:
        print('For ev: %s' % str(ev.resource_id))
        if temps and temp_sac_dir:
            if ev.resource_id in temp_map:
                eid = temp_map[ev.resource_id]
            else:
                print('Event not in SAC directory')
                continue
        else:
            eid = ev.resource_id
        print(eid)
        if eid in picks:
            for pk in picks[eid]:
                # Build the datetime from the time string...
                o_time = ev.preferred_origin().time
                hour = int(pk['time'].split(':')[0])
                minute = int(pk['time'].split(':')[1])
                second = int(pk['time'].split(':')[2].split('.')[0])
                sta_nm = '{}{}{}'.format(pk['sta'][:2],
                                         str(alph[pk['sta'][2]]),
                                         str(alph[pk['sta'][3]]))
                if sta_nm in boreholes:
                    chan_nm = 'EH1'
                else:
                    chan_nm = 'EHE'
                if len(pk['time'].split(':')[2].split('.')) == 1:
                    microsecond = 0
                else:
                    microsecond = int(
                        pk['time'].split(':')[2].split('.')[1]) * 1000
                pk_time = UTCDateTime(year=o_time.year,
                                      month=o_time.month,
                                      day=o_time.day,
                                      hour=hour,
                                      minute=minute,
                                      second=second,
                                      microsecond=microsecond)
                if pk['sta'][0] == 'N' or pk['sta'][0] == 'R':
                    wv_id = WaveformStreamID(station_code=sta_nm,
                                             channel_code=chan_nm)
                else:
                    wv_id = WaveformStreamID(station_code=pk['sta'],
                                             channel_code=chan_nm)
                if float(pk['error']) < uncert_cutoff:
                    uncert = QuantityError(uncertainty=float(pk['error']))
                    new_pk = Pick(time=pk_time,
                                  waveform_id=wv_id,
                                  phase_hint='S',
                                  time_errors=uncert)
                    ev.picks.append(new_pk)
        else:
            print('id not in picks')
    return cat
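
The temp_map built above strips everything after the first underscore from a self-detection name to recover the template id; because ResourceIdentifier compares and hashes by its id string, lookups with freshly built ids work (hypothetical names):

from obspy.core.event import ResourceIdentifier

self_names = ['2012p001_self', '2012p002_self']
temp_map = {ResourceIdentifier('smi:local/' + nm):
            ResourceIdentifier('smi:local/' + nm.split('_')[0])
            for nm in self_names}
print(temp_map[ResourceIdentifier('smi:local/2012p001_self')])
# ResourceIdentifier(id="smi:local/2012p001")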
Example #12
def main(st, fname, verbose=False):
    fs = st[0].stats.sampling_rate

    # Detect STA/LTA for all geodes, with minimum number of stations included
    proc1 = time.time()
    detection_list, cft_stream = network_detection(st, cft_return=True)
    proc2 = time.time()
    Logger.info("Network detection search done in %f s." % (proc2 - proc1))
    Logger.info("Number of network detections = %d" % len(detection_list))

    # Get picks and stats, iterating detection by detection, then station by station
    # Buffer window before and after detection
    buffer1 = 3.0  # 0.2
    buffer2 = 10.0

    # Load ERT data
    ert_surveys_file = "survey_times_ERT.csv"
    dateparse = lambda x: datetime.strptime(x, '%Y-%m-%d %H:%M:%S')  # assumes: from datetime import datetime
    ert_surveys = pd.read_csv(ert_surveys_file,
                              parse_dates=["time_local_start"],
                              date_parser=dateparse)
    ert_surveys["time_local_start"] = ert_surveys[
        "time_local_start"].dt.tz_localize("America/Edmonton",
                                           ambiguous="infer")
    ert_surveys["time_utc_start"] = ert_surveys[
        "time_local_start"].dt.tz_convert(None)
    ert_surveys["time_utc_end"] = ert_surveys["time_utc_start"] + pd.Timedelta(
        25, unit="m")
    ert_surveys["time_utc_end"] = pd.to_datetime(ert_surveys["time_utc_end"])
    ert_surveys["time_utc_start"] = pd.to_datetime(
        ert_surveys["time_utc_start"])

    catalog = Catalog()
    # Loop over each STA/LTA detection
    for detection in detection_list:

        # Skip if detection happens during ERT survey
        tmin = detection["time"]._get_datetime()
        is_ert_on = ert_surveys.loc[
            (ert_surveys['time_utc_start'] <= tmin) &
            (ert_surveys['time_utc_end'] >= tmin)].shape[0] > 0
        if is_ert_on:
            Logger.warning("Skip false detection during ERT survey.")
            continue

        Logger.info("DETECTION TIME: %s\n\t DURATION_SEC: %f" %
                    (detection["time"], detection["duration"]))
        det_start = detection["time"]
        det_end = detection["time"] + detection["duration"]

        # Detection stream
        det_st = st.slice(starttime=det_start - buffer1,
                          endtime=det_end + buffer2)
        det_st.detrend()
        det_st_to_save = det_st.copy()
        t_plt = det_st[0].times("matplotlib")
        t_utc = det_st[0].times("utcdatetime")
        det_cft = cft_stream.slice(starttime=det_start - buffer1,
                                   endtime=det_end + buffer2)

        # Stations in detection stream
        station_list = list(set(detection["stations"]))
        station_list.sort()

        # Check if frequencies within window are anomalous
        highf_ratio_threshold = 0.6
        for station in station_list:
            tmp = det_st.select(station=station).copy()
            nbad = 0
            for tr in tmp:
                ratio = highf_ratio(data=tr.data, sampling_rate=fs)
                if ratio > highf_ratio_threshold:
                    nbad += 1
            if nbad > 0:
                Logger.warning(
                    "Removing station %s because for %d traces, ratio of "
                    "frequencies above %f is above %f"
                    % (station, nbad, 0.25 * fs, highf_ratio_threshold))
                for tr in tmp:
                    det_st.remove(tr)

        # Stations remaining in the detection stream after the frequency check
        station_list = sorted({tr.stats.station for tr in det_st})

        if len(station_list) < 4:
            Logger.warning(
                "Only %d stations left, less than 4, so skipping this detection"
                % len(station_list))
            continue

        # Search window for phase around STA/LTA detection time
        idet_start = (np.abs(t_utc - det_start)).argmin()
        idet_end = (np.abs(t_utc - det_end)).argmin()
        idx_search_max = range(idet_start, idet_end)

        # Analyze stations one by one
        pol_st = Stream()
        event_phases = []
        for ista, station in enumerate(station_list):

            # Select waveform and STA-LTA streams
            sta_st = det_st.select(station=station).copy()
            network = sta_st[0].stats.network
            sta_st.detrend()
            sta_cft = det_cft.select(station=station).copy()
            sta_cft_stack = (sta_cft.select(channel="DPZ")[0].data +
                             sta_cft.select(channel="DPN")[0].data +
                             sta_cft.select(channel="DPE")[0].data) / 3

            # Polarization properties
            tpol, pol_dict, pol_st_sta = modified_polarization_analysis(
                sta_st, dominant_period=DOM_PERIOD, interpolate=True)
            pol_st += pol_st_sta

            # Energy response curve for pick detection
            per = peak_eigenvalue_ratio(pol_dict["eigenvalue1"],
                                        win_len=int(2 * DOM_PERIOD * fs))
            per = eps_smooth(per, w=int(EPS_WINLEN * fs))
            jer = joint_energy_ratio(sta_cft_stack, t_plt, per, tpol)

            # Extract phases
            sta_phases = get_phases(response_curve=jer,
                                    idx_search_max=idx_search_max,
                                    time=t_utc,
                                    pol=pol_dict,
                                    verbose=False)
            if sta_phases:

                # Now do some quality control
                snr_threshold = 2.5
                win_len_s = 0.2
                sta_phases["station"] = station
                sta_phases["network"] = network

                if sta_phases["P"]["arrival_time"]:
                    arr_time = sta_phases["P"]["arrival_time"] - 0.02

                    snr, channel = get_snr_phase(sta_st,
                                                 time=arr_time,
                                                 win_len_s=win_len_s,
                                                 verbose=False,
                                                 tnoise=None)
                    Logger.info("SNR for P pick %s.%s..%s: %f \t at t = %s" %
                                (network, station, channel, snr, arr_time))
                    if snr < snr_threshold:
                        #Logger.info("P pick below SNR threshold of %f" % snr_threshold)
                        sta_phases["P"]["arrival_time"] = None
                    else:
                        sta_phases["P"]["SNR"] = snr
                        sta_phases["P"]["channel"] = channel

                if sta_phases["S"]["arrival_time"]:
                    arr_time = sta_phases["S"]["arrival_time"] - 0.02
                    if sta_phases["P"]["arrival_time"]:
                        tnoise = sta_phases["P"]["arrival_time"] - 0.02
                    else:
                        tnoise = None
                    snr, channel = get_snr_phase(sta_st.select(),
                                                 time=arr_time,
                                                 win_len_s=win_len_s,
                                                 verbose=False,
                                                 tnoise=tnoise)

                    Logger.info("SNR for S pick %s.%s..%s: %f \t at t = %s" %
                                (network, station, channel, snr, arr_time))
                    if snr < snr_threshold:
                        Logger.info("S pick below SNR threshold of %f" %
                                    snr_threshold)
                        sta_phases["S"]["arrival_time"] = None
                    else:
                        sta_phases["S"]["SNR"] = snr
                        sta_phases["S"]["channel"] = channel

                Logger.info("Station %s: t_P = %s\tt_S = %s" %
                            (station, sta_phases["P"]["arrival_time"],
                             sta_phases["S"]["arrival_time"]))
                event_phases.append(sta_phases)
            else:
                Logger.info("No phase found for station %s" % station)
            # End of for loop over stations

        if not event_phases:
            Logger.info("No picks found at all for this detection.")
            continue
        else:
            nump = len([p for p in event_phases if p["P"]["arrival_time"]])
            nums = len([p for p in event_phases if p["S"]["arrival_time"]])
            Logger.info("Number of initial picks before MCCC: P = %d, S = %d" %
                        (nump, nums))
        if nump + nums == 0:
            Logger.info("No picks found at all for this detection.")
            continue
        # if verbose:
        #     plot_phases(event_phases, det_st)
        #     wadati_plot(event_phases, det_st)

        # Align with mccc
        Logger.info("Refining picks with MCCC")
        event_phases = align_mccc(event_phases=event_phases,
                                  stream=det_st,
                                  verbose=False)

        nump = len([p for p in event_phases if p["P"]["arrival_time"]])
        nums = len([p for p in event_phases if p["S"]["arrival_time"]])
        if nump == 0 and nums == 0:
            Logger.warning("No remaining picks after MCCC!")
            continue
        elif nump + nums < 5:
            Logger.info("Less than 5 picks remaining. Skipping event.")
            continue
        if verbose:
            Logger.info("Number of picks after MCCC: P = %d, S = %d" %
                        (nump, nums))
            wadati_plot(event_phases, det_st)
            plot_phases(event_phases, det_st)

        # Update polarization statistics
        Logger.info("Updating polarization attributes")
        phase_len_tol = int(10 * DOM_PERIOD * fs)
        for i, staph in enumerate(event_phases):
            sta_st = det_st.select(station=staph["station"]).copy()
            t = sta_st[0].times("utcdatetime")
            tpol, pol_dict, _ = modified_polarization_analysis(
                sta_st, dominant_period=DOM_PERIOD, interpolate=True)
            tp = staph["P"]["arrival_time"]
            if tp:
                idxP = np.argmin(np.abs(t - tp))
                stats = pol_window_stats(pol_dict,
                                         idxP,
                                         phase_len_tol,
                                         show_stats=False)
                event_phases[i]["P"]["pol_stats"] = stats
            ts = staph["S"]["arrival_time"]
            if ts:
                idxS = np.argmin(np.abs(t - ts))
                stats = pol_window_stats(pol_dict,
                                         idxS,
                                         phase_len_tol,
                                         show_stats=False)
                event_phases[i]["S"]["pol_stats"] = stats

        # Convert to obspy Picks and Event
        event_picks = []
        for i, staph in enumerate(event_phases):
            event_picks += sta_phases_to_pick(staph=staph)
        event = Event(picks=event_picks)

        # Estimate average event distance using available pairs of P and S picks
        r_med = distance_from_tstp(event.picks, min_estim=1)
        if not r_med:  # We cannot estimate r, hence magnitude
            Logger.warning(
                "Couldn't estimate hypocentral distance from ts-tp. No magnitude calculation."
            )
            # Add event to catalog
            if verbose:
                Logger.info(
                    "Adding event to catalog: *******************************************"
                )
                Logger.info(event)
            catalog.events.append(event)
            stfilepath = os.path.join("detections_waveforms",
                                      det_start.strftime("%Y%m%d"))
            if not os.path.exists(stfilepath):
                os.mkdir(stfilepath)
            det_st_to_save.write(os.path.join(
                stfilepath,
                "bhdetect_%s.mseed" % det_start.strftime("%Y%m%d%H%M%S")),
                                 format="MSEED")

            continue

        # Calculate magnitudes
        Logger.info("Computing magnitudes...")
        magtime_contriblist = []
        magspec_contriblist = []
        for ista, station in enumerate(station_list):
            sta_picks = [
                p for p in event.picks if p.waveform_id.station_code == station
            ]
            r = distance_from_tstp(sta_picks, min_estim=2)
            if not r:
                r = r_med
            ts = get_pick(event.picks, station, "S")
            if not ts:  # No ts pick
                Logger.warning("There is no S pick for station %s." % station)
                continue
            sta_st = det_st.select(station=station).copy()
            sta_st.detrend()

            # Estimate coda
            tp = get_pick(event.picks, station, "P")
            if not tp:
                tsig = ts - 0.5
            else:
                tsig = tp - 0.02
            tcoda, s_len, snr = get_coda_duration(sta_st.copy(),
                                                  tsig=tsig,
                                                  ts=ts,
                                                  win_len_s=0.2)
            if not tcoda:
                if verbose:
                    Logger.info(
                        "Couldn't calculate coda duration for station %s skipping..."
                        % station)
                continue

            # Save coda info
            amp = Amplitude(generic_amplitude=tcoda,
                            snr=snr,
                            type="END",
                            category="duration",
                            unit="s",
                            magnitude_hint="Md")
            event.amplitudes.append(amp)

            # Estimate energy flux
            if tp:
                Logger.info("Calculating energy flux fr station %s" % station)
                epsilonS = 0
                for tr in sta_st.copy():
                    tr_cut = tr.trim(starttime=ts, endtime=ts + (ts - tp)).data
                    cumsum_u2 = scipy.integrate.cumtrapz(tr_cut**2,
                                                         dx=tr.stats.delta)
                    epsilonS += cumsum_u2[-1]
                amp = Amplitude(generic_amplitude=epsilonS,
                                snr=snr,
                                type="A",
                                category="integral",
                                unit="other",
                                time_window=TimeWindow(begin=ts - tp,
                                                       end=2 * (ts - tp),
                                                       reference=tp),
                                waveform_id=WaveformStreamID(
                                    network_code=tr.stats.network,
                                    station_code=tr.stats.station))
                event.amplitudes.append(amp)

            # Estimate Mw for each component
            Mw_spec_sta = []
            Mw_time_sta = []
            Q_spec_sta = []
            fc_spec_sta = []
            for tr in sta_st:
                # Cut noise window and S waveform
                noise_len = s_len
                taper_perc = 0.1
                trnoise = tr.copy()
                trnoise.trim(starttime=tsig - (1 + taper_perc) * noise_len,
                             endtime=tsig - taper_perc * noise_len)
                trnoise.taper(type="hann",
                              max_percentage=taper_perc,
                              side="both")
                tr.trim(starttime=ts - taper_perc * s_len,
                        endtime=ts + (1 + taper_perc) * s_len)
                tr.taper(type="hann", max_percentage=taper_perc, side="both")

                # Check SNR
                snr_trace = np.median(tr.slice(starttime=ts, endtime=ts + s_len).data) / \
                            np.median(trnoise.data)

                if snr_trace < 3:
                    Logger.info(
                        "SNR < 3, skipping trace for magnitude calculation.")
                    # Poor SNR, skip trace
                    continue

                # Displacement waveform
                trdisp = tr.copy()
                trdisp.integrate()
                trdisp.detrend()

                # Estimate magnitude: time method
                Mw_time, M0_time, omega0_time = estimate_magnitude_time(
                    trdisp, r, disp=False)
                Mw_time_sta.append(Mw_time)

                # Estimate magnitude: spectral method
                Mw_o, M0_o, omega0_o, fc_o, Q_o = estimate_magnitude_spectral(
                    trdisp, r, omega0_time, trnoise=None, disp=False)
                if not Mw_o:
                    Logger.warning("No magnitude found due to errors.")
                    continue
                elif fc_o < 2 or Q_o > 40 or Q_o < 1:  # reject implausible Qs (sandstone ~31, shale ~10)
                    # Reject spectral estimate
                    Logger.warning(
                        "Rejecting spectral estimate with: fc = %f, Q = %f" %
                        (fc_o, Q_o))
                    continue
                else:
                    Mw_spec_sta.append(Mw_o)
                    Q_spec_sta.append(Q_o)
                    fc_spec_sta.append(fc_o)

            # Now get average for station as a whole
            Logger.info(
                "Found %d estimates of Mw using time method for station %s." %
                (len(Mw_time_sta), station))
            Logger.info(
                "Found %d estimates of Mw using spectral method for station %s."
                % (len(Mw_spec_sta), station))
            if Mw_time_sta:
                smagt = StationMagnitude(
                    mag=np.mean(Mw_time_sta),
                    mag_errors=QuantityError(uncertainty=np.std(Mw_time_sta)),
                    station_magnitude_type="Mw_time",
                    comments=[Comment(text="snr = %f" % snr)])
                event.station_magnitudes.append(smagt)
                contrib = StationMagnitudeContribution(
                    station_magnitude_id=smagt.resource_id, weight=snr)
                magtime_contriblist.append(contrib)
                Logger.info("Magnitude time estimate = %f" %
                            np.mean(Mw_time_sta))

            if Mw_spec_sta:
                smags = StationMagnitude(
                    mag=np.mean(Mw_spec_sta),
                    mag_errors=QuantityError(uncertainty=np.std(Mw_spec_sta)),
                    station_magnitude_type="Mw_spectral",
                    comments=[
                        Comment(text="Q_mean = %f, Q_std = %f" %
                                (np.mean(Q_spec_sta), np.std(Q_spec_sta))),
                        Comment(text="Fc_mean = %f, Fc_std = %f" %
                                (np.mean(fc_spec_sta), np.std(fc_spec_sta))),
                        Comment(text="snr = %f" % snr)
                    ])
                event.station_magnitudes.append(smags)
                contrib = StationMagnitudeContribution(
                    station_magnitude_id=smags.resource_id, weight=snr)
                magspec_contriblist.append(contrib)
                Logger.info("Magnitude spectral estimate = %f" %
                            np.mean(Mw_spec_sta))
                Logger.info("Fc = %f, Q = %f" %
                            (np.mean(fc_spec_sta), np.mean(Q_spec_sta)))

            # End of for loop over stations

        # Get magnitude for event
        if magspec_contriblist:
            Logger.info(
                "Found %d station estimates of Mw using spectral method." %
                len(magspec_contriblist))
            wave_num = 0
            wave_den = 0
            val_list = []
            for m in magspec_contriblist:
                mval = [
                    sm.mag for sm in event.station_magnitudes
                    if sm.resource_id == m.station_magnitude_id
                ][0]
                wave_num += mval * m.weight
                wave_den += m.weight
                val_list.append(mval)
            mag = wave_num / wave_den
            mags = Magnitude(
                mag=mag,
                mag_errors=QuantityError(uncertainty=np.std(val_list)),
                magnitude_type="Mw_spectral",
                station_count=len(magspec_contriblist),
                station_magnitude_contributions=magspec_contriblist)
            event.magnitudes.append(mags)
            Logger.info(
                "Event magnitude estimate using spectral method: Mw = %f" %
                mags.mag)
        if magtime_contriblist:
            Logger.info("Found %d station estimates of Mw using time method." %
                        len(magtime_contriblist))
            wave_num = 0
            wave_den = 0
            val_list = []
            for m in magtime_contriblist:
                mval = [
                    sm.mag for sm in event.station_magnitudes
                    if sm.resource_id == m.station_magnitude_id
                ][0]
                wave_num += mval * m.weight
                wave_den += m.weight
                val_list.append(mval)
            mag = wave_num / wave_den
            magt = Magnitude(
                mag=mag,
                mag_errors=QuantityError(uncertainty=np.std(val_list)),
                magnitude_type="Mw_time",
                station_count=len(magtime_contriblist),
                station_magnitude_contributions=magtime_contriblist)
            event.magnitudes.append(magt)
            Logger.info("Event magnitude estimate using time method: Mw = %f" %
                        magt.mag)

        # Add event to catalog
        if verbose:
            Logger.info(
                "Adding event to catalog: *******************************************"
            )
            Logger.info(event)
        catalog.events.append(event)
        stfilepath = os.path.join("detections_waveforms",
                                  det_start.strftime("%Y%m%d"))
        if not os.path.exists(stfilepath):
            os.mkdir(stfilepath)
        det_st_to_save.write(os.path.join(
            stfilepath,
            "bhdetect_%s.mseed" % det_start.strftime("%Y%m%d%H%M%S")),
                             format="MSEED")

    if len(catalog) > 0:
        # Decluster
        declustered_catalog = decluster_bh(catalog, trig_int=2.0)
        if not os.path.exists(os.path.split(fname)[0]):
            os.mkdir(os.path.split(fname)[0])
        declustered_catalog.write(fname, format="QUAKEML")
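
The network magnitudes above are SNR-weighted means of the station magnitudes; the combination step in isolation (hypothetical values, numpy assumed):

import numpy as np

station_mags = np.array([1.2, 1.4, 1.1])  # per-station Mw estimates
weights = np.array([8.0, 3.0, 5.0])       # per-station SNR used as weight
mag = float(np.sum(station_mags * weights) / np.sum(weights))
print(round(mag, 3), round(float(np.std(station_mags)), 3))  # 1.206 0.125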