def fromParams(self, id, time, lat, lon, depth, magnitude, mag_type=None):
        """Create a ScalarEvent (subclass of Event).

        Args:
            id (str):
                Desired ID for the event, usually ComCat ID.
            time (UTCDateTime):
                Origin time of the event.
            lat (float):
                Latitude of origin.
            lon (float):
                Longitude of origin.
            depth (float):
                Depth of origin in **kilometers**.
            magnitude (float):
                Magnitude of earthquake.
            mag_type (str, optional):
                Magnitude type of earthquake.
        """
        if isinstance(time, str):
            try:
                time = UTCDateTime(time)
            except Exception as e:
                fmt = 'Can\'t make UTCDateTime from string "%s" - error "%s"'
                raise TypeError(fmt % (time, str(e))) from e

        origin = Origin(
            resource_id=id, time=time, longitude=lon, latitude=lat, depth=depth * 1000
        )

        self.origins = [origin]
        magnitude = Magnitude(resource_id=id, mag=magnitude, magnitude_type=mag_type)
        self.magnitudes = [magnitude]
        self.resource_id = id
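
A minimal usage sketch for this method; the values are the ones exercised by the test_scalar example further down, and ScalarEvent itself comes from gmprocess:

from obspy import UTCDateTime

event = ScalarEvent()
event.fromParams("usp000hat0", UTCDateTime("2010-04-06 22:15:01.580"),
                 2.383, 97.048, 31.0, 7.8, mag_type="Mwc")
print(event.magnitude, event.depth_km)  # 7.8 31.0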
Example #2
from datetime import datetime

from obspy import UTCDateTime
from obspy.core.event import Event, Magnitude, Origin, ResourceIdentifier

# ETYPE_MAPPING and MAGTYPE_MAPPING are module-level dicts (not shown on
# this page) mapping STP event-type and magnitude-type codes to QuakeML terms.


def make_event(catalog_entry):
    """Create an ObsPy Event object from a line of STP event output."""
    fields = catalog_entry.split()

    evid = fields[0]
    etype = fields[1]
    # fields[2] is not used
    origin_time = UTCDateTime(
        datetime.strptime(fields[3], "%Y/%m/%d,%H:%M:%S.%f"))

    lat = float(fields[4])
    lon = float(fields[5])
    depth = float(fields[6])
    mag = float(fields[7])
    magtype = fields[8]

    res_id = ResourceIdentifier(id=evid)
    origin = Origin(latitude=lat, longitude=lon, depth=depth, time=origin_time)

    magnitude = Magnitude(mag=mag, magnitude_type=MAGTYPE_MAPPING[magtype])
    event = Event(resource_id=res_id,
                  event_type=ETYPE_MAPPING[etype],
                  origins=[origin],
                  magnitudes=[magnitude])
    return event
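
A hedged usage sketch: the STP line and both mapping dicts below are illustrative stand-ins, since the real field codes and mappings are defined elsewhere in the module:

# Hypothetical mapping contents; the real module defines its own.
ETYPE_MAPPING = {"le": "earthquake"}
MAGTYPE_MAPPING = {"l": "ML"}

line = "39126079 le 1.0 2020/01/01,12:34:56.78 34.1230 -117.4560 7.80 3.20 l"
event = make_event(line)
print(event.origins[0].time, event.magnitudes[0].mag)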
Example #3
def get_event_object(dict_or_id):
    """Get ObsPy Event object using event ID or dictionary (see get_event_dict).

    Args:
        dict_or_id (str or dict):
            Event ID that can be found in ComCat, or an event dictionary.

    Returns:
        Event: ObsPy Event object.
    """
    if isinstance(dict_or_id, str):
        event_dict = get_event_dict(dict_or_id)
    elif isinstance(dict_or_id, dict):
        event_dict = dict_or_id.copy()
    else:
        raise TypeError('Unknown input parameter to get_event_object()')

    origin = Origin()
    origin.resource_id = event_dict['id']
    origin.latitude = event_dict['lat']
    origin.longitude = event_dict['lon']
    origin.depth = event_dict['depth']

    magnitude = Magnitude(mag=event_dict['magnitude'])
    event = Event()
    event.resource_id = event_dict['id']
    event.origins = [origin]
    event.magnitudes = [magnitude]

    return event
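
A minimal usage sketch, assuming an event dictionary with the keys this function reads; note that ObsPy's Origin.depth is in meters, so 'depth' is assumed to be in meters here:

event_dict = {
    "id": "usp000hat0",   # hypothetical values
    "lat": 2.383,
    "lon": 97.048,
    "depth": 31000.0,     # meters, per ObsPy's Origin.depth convention
    "magnitude": 7.8,
}
event = get_event_object(event_dict)
print(event.origins[0].depth, event.magnitudes[0].mag)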
Example #4
def test_get_mag_src():
    mag = Magnitude()
    mag.resource_id.id = "gcmt1000"
    assert get_mag_src(mag) == "gcmt"
    mag.resource_id.id = "us1000"
    assert get_mag_src(mag) == "us"
    mag.resource_id.id = "duputel1000"
    assert get_mag_src(mag) == "duputel"
    mag.resource_id.id = "at1000"
    assert get_mag_src(mag) == "at"
    mag.resource_id.id = "pt1000"
    assert get_mag_src(mag) == "pt"
    mag.resource_id.id = "ak1000"
    assert get_mag_src(mag) == "ak"
    mag.resource_id.id = "pr1000"
    assert get_mag_src(mag) == "pr"
    mag.resource_id.id = "none1000"
    assert get_mag_src(mag) == "unknown"
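
The helper under test is not shown on this page. A minimal sketch that would satisfy these assertions (hypothetical; the real gmprocess implementation may differ):

def get_mag_src(mag):
    """Guess the contributing network from a Magnitude resource id."""
    for src in ("gcmt", "us", "duputel", "at", "pt", "ak", "pr"):
        if mag.resource_id.id.startswith(src):
            return src
    return "unknown"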
Example #6
import os

import pkg_resources
from obspy import UTCDateTime, read_events
from obspy.core.event import Event, Magnitude, Origin

# ScalarEvent is gmprocess's scalar wrapper around ObsPy's Event; its exact
# import path depends on the gmprocess version.


def test_scalar():
    eid = "usp000hat0"
    time = UTCDateTime("2010-04-06 22:15:01.580")
    lat = 2.383
    lon = 97.048
    depth = 31.0
    mag = 7.8
    mag_type = "Mwc"

    event = ScalarEvent()
    origin = Origin(resource_id=eid,
                    time=time,
                    latitude=lat,
                    longitude=lon,
                    depth=depth * 1000)
    magnitude = Magnitude(mag=mag, magnitude_type=mag_type)
    event.origins = [origin]
    event.magnitudes = [magnitude]

    assert event.id == eid
    assert event.time == time
    assert event.latitude == lat
    assert event.longitude == lon
    assert event.depth_km == depth
    assert event.magnitude == mag
    assert event.magnitude_type == mag_type

    subdir = os.path.join("data", "testdata", "usp000hat0_quakeml.xml")
    quakeml = pkg_resources.resource_filename("gmprocess", subdir)
    catalog = read_events(quakeml)
    tevent = catalog.events[0]
    event = ScalarEvent.fromEvent(tevent)
    assert event.id == "quakeml:us.anss.org/origin/pde20100406221501580_31"
    assert event.time == time
    assert event.latitude == lat
    assert event.longitude == lon
    assert event.depth_km == depth
    assert event.magnitude == mag
    assert event.magnitude_type == mag_type

    event = ScalarEvent()
    event.fromParams(eid, time, lat, lon, depth, mag, mag_type)
    assert isinstance(event, Event)
    assert event.origins[0].resource_id == eid
    assert event.origins[0].time == time
    assert event.origins[0].latitude == lat
    assert event.origins[0].longitude == lon
    assert event.origins[0].depth == depth * 1000
    assert event.magnitudes[0].mag == mag
    assert event.magnitudes[0].magnitude_type == mag_type

    tevent = Event()
    origin = Origin(resource_id=eid,
                    time=time,
                    longitude=lon,
                    latitude=lat,
                    depth=depth * 1000)
    magnitude = Magnitude(resource_id=eid, mag=mag, magnitude_type=mag_type)
    tevent.origins = [origin]
    tevent.magnitudes = [magnitude]
    event2 = ScalarEvent.fromEvent(tevent)
    assert isinstance(event2, Event)
    assert event2.origins[0].resource_id == eid
    assert event2.origins[0].time == time
    assert event2.origins[0].latitude == lat
    assert event2.origins[0].longitude == lon
    assert event2.origins[0].depth == depth * 1000
    assert event2.magnitudes[0].mag == mag
    assert event2.magnitudes[0].magnitude_type == mag_type
Example #7
def test_scalar():
    eid = 'usp000hat0'
    time = UTCDateTime('2010-04-06 22:15:01.580')
    lat = 2.383
    lon = 97.048
    depth = 31.0
    mag = 7.8

    event = ScalarEvent()
    origin = Origin(resource_id=eid,
                    time=time,
                    latitude=lat,
                    longitude=lon,
                    depth=depth * 1000)
    magnitude = Magnitude(mag=mag)
    event.origins = [origin]
    event.magnitudes = [magnitude]

    assert event.id == eid
    assert event.time == time
    assert event.latitude == lat
    assert event.longitude == lon
    assert event.depth_km == depth
    assert event.magnitude == mag

    subdir = os.path.join('data', 'testdata', 'usp000hat0_quakeml.xml')
    quakeml = pkg_resources.resource_filename('gmprocess', subdir)
    catalog = read_events(quakeml)
    tevent = catalog.events[0]
    event = ScalarEvent.fromEvent(tevent)
    assert event.id == 'quakeml:us.anss.org/origin/pde20100406221501580_31'
    assert event.time == time
    assert event.latitude == lat
    assert event.longitude == lon
    assert event.depth_km == depth
    assert event.magnitude == mag

    event = ScalarEvent()
    event.fromParams(eid, time, lat, lon, depth, mag)
    assert isinstance(event, Event)
    assert event.origins[0].resource_id == eid
    assert event.origins[0].time == time
    assert event.origins[0].latitude == lat
    assert event.origins[0].longitude == lon
    assert event.origins[0].depth == depth * 1000
    assert event.magnitudes[0].mag == mag

    tevent = Event()
    origin = Origin(resource_id=eid,
                    time=time,
                    longitude=lon,
                    latitude=lat,
                    depth=depth * 1000)
    magnitude = Magnitude(resource_id=eid, mag=mag)
    tevent.origins = [origin]
    tevent.magnitudes = [magnitude]
    event2 = ScalarEvent.fromEvent(tevent)
    assert isinstance(event2, Event)
    assert event2.origins[0].resource_id == eid
    assert event2.origins[0].time == time
    assert event2.origins[0].latitude == lat
    assert event2.origins[0].longitude == lon
    assert event2.origins[0].depth == depth * 1000
    assert event2.magnitudes[0].mag == mag
Example #8
    def get_results(self):
        cids = []
        clusters = []
        results_file = "{}/{}".format(
            self.hypoDD_control.control_directory,
            self.hypoDD_control.relocated_hypocenters_output)
        residuals_file = "{}/{}".format(
            self.hypoDD_control.control_directory,
            self.hypoDD_control.data_residual_output)
        with open(results_file, "r") as f:
            for line in f:
                num = line.split()
                evid = num[0]
                lat = float(num[1])
                lon = float(num[2])
                dep = 1000 * float(num[3])  # km to m
                errx = num[7]
                erry = num[8]
                errz = num[9]
                yr = int(num[10])
                mo = int(num[11])
                dy = int(num[12])
                hr = int(num[13])
                mi = int(num[14])
                sc = float(num[15])
                mag = float(num[16])
                nccp = num[17]
                nccs = num[18]
                nctp = num[19]
                ncts = num[20]
                rcc = num[21]
                rct = num[22]
                cid = num[23]
                if cid not in cids:
                    cids.append(cid)
                    clusters.append(Cluster())
                    clusters[-1].hypoDD_id = cid
                    clusters[-1].successful_relocation = True
                    clusters[-1].catalog = Catalog()
                    clusters[-1].event_ids = []
                origin = Origin()
                isec = int(math.floor(sc))
                micsec = int((sc - isec) * 1000000)
                origin.time = UTCDateTime(yr, mo, dy, hr, mi, isec, micsec)
                origin.longitude = lon
                origin.latitude = lat
                origin.depth = dep
                origin.method_id = "hypoDD"
                # TODO (@ogalanis): Add time/location errors (when
                # appropriate). Add quality and origin_uncertainty. Add
                # arrivals.
                event = Event()
                event.creation_info = CreationInfo()
                event.creation_info.author = __package__
                event.creation_info.version = info.__version__
                event.origins = [origin]
                # Event has no "magnitude" attribute; magnitudes is a list.
                event.magnitudes = [Magnitude(mag=mag)]
                idx = cids.index(cid)
                clusters[idx].catalog.events.append(event)
                clusters[idx].event_ids.append(evid)

        if self.hypoDD_control.cid != 0 :
            my_list = []
            clusters[0].connectedness = Connectedness()
            with open(residuals_file, "r") as f:
                for line in f:
                    num = line.split()
                    evid_1 = num[2]
                    evid_2 = num[3]
                    obs_type = num[4]
                    if obs_type == "1":
                        my_list = clusters[0].connectedness.cross_corr_P
                    elif obs_type == "2":
                        my_list = clusters[0].connectedness.cross_corr_S
                    elif obs_type == "3":
                        my_list = clusters[0].connectedness.catalog_P
                    elif obs_type == "4":
                        my_list = clusters[0].connectedness.catalog_S
                    else:
                        continue
                    pair = {evid_1, evid_2}
                    match = [x for x in my_list if {x[0], x[1]} == pair]
                    if match:
                        match[0][2] += 1
                    else:
                        my_list.append([evid_1, evid_2, 1])

        return clusters
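
A small self-contained sketch of the seconds handling above: hypoDD writes origin seconds as a float, while UTCDateTime takes integer seconds and microseconds separately.

import math
from obspy import UTCDateTime

sc = 34.5  # fractional seconds as read from the relocation output
isec = int(math.floor(sc))
micsec = int((sc - isec) * 1000000)
print(UTCDateTime(2020, 1, 2, 3, 4, isec, micsec))  # 2020-01-02T03:04:34.500000Z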
Example #9
def main(st, fname, verbose=False):
    fs = st[0].stats.sampling_rate

    # Detect STA/LTA for all geodes, with minimum number of stations included
    proc1 = time.time()
    detection_list, cft_stream = network_detection(st, cft_return=True)
    proc2 = time.time()
    Logger.info("Network detection search done in %f s." % (proc2 - proc1))
    Logger.info("Number of network detections = %d" % len(detection_list))

    # Get picks and stats, iterating detection by detection, then station by station
    # Buffer window before and after detection
    buffer1 = 3.0  # 0.2
    buffer2 = 10.0

    # Load ERT data
    ert_surveys_file = "survey_times_ERT.csv"
    # Use the stdlib datetime here; pd.datetime was removed in pandas 1.0.
    dateparse = lambda x: datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
    ert_surveys = pd.read_csv(ert_surveys_file,
                              parse_dates=["time_local_start"],
                              date_parser=dateparse)
    ert_surveys["time_local_start"] = ert_surveys[
        "time_local_start"].dt.tz_localize("America/Edmonton",
                                           ambiguous="infer")
    ert_surveys["time_utc_start"] = ert_surveys[
        "time_local_start"].dt.tz_convert(None)
    ert_surveys["time_utc_end"] = ert_surveys["time_utc_start"] + pd.Timedelta(
        25, unit="m")
    ert_surveys["time_utc_end"] = pd.to_datetime(ert_surveys["time_utc_end"])
    ert_surveys["time_utc_start"] = pd.to_datetime(
        ert_surveys["time_utc_start"])

    catalog = Catalog()
    # Loop over each STA/LTA detection
    for detection in detection_list:

        # Skip if detection happens during ERT survey
        tmin = detection["time"].datetime  # public accessor for the datetime
        is_ert_on = ert_surveys.loc[
            (ert_surveys['time_utc_start'] <= tmin)
            & (ert_surveys['time_utc_end'] >= tmin)].shape[0] > 0
        if is_ert_on:
            Logger.warning("Skip false detection during ERT survey.")
            continue

        Logger.info("DETECTION TIME: %s\n\t DURATION_SEC: %f" %
                    (detection["time"], detection["duration"]))
        det_start = detection["time"]
        det_end = detection["time"] + detection["duration"]

        # Detection stream
        det_st = st.slice(starttime=det_start - buffer1,
                          endtime=det_end + buffer2)
        det_st.detrend()
        det_st_to_save = det_st.copy()
        t_plt = det_st[0].times("matplotlib")
        t_utc = det_st[0].times("utcdatetime")
        det_cft = cft_stream.slice(starttime=det_start - buffer1,
                                   endtime=det_end + buffer2)

        # Stations in detection stream
        station_list = list(set(detection["stations"]))
        station_list.sort()

        # Check if frequencies within window are anomalous
        highf_ratio_threshold = 0.6
        for station in station_list:
            tmp = det_st.select(station=station).copy()
            nbad = 0
            for tr in tmp:
                ratio = highf_ratio(data=tr.data, sampling_rate=fs)
                if ratio > highf_ratio_threshold:
                    nbad += 1
            if nbad > 0:
                Logger.warning(
                    "Removing station %s because for %d traces, ratio of frequencies above %f is above %f"
                    % (station, nbad, 0.25 * fs, highf_ratio_threshold))
                for tr in tmp:
                    det_st.remove(tr)

        # Stations in detection stream
        station_list = list(set(detection["stations"]))
        station_list.sort()

        if len(station_list) < 4:
            Logger.warning(
                "Only %d stations left, less than 4, so skipping this detection"
                % len(station_list))
            continue

        # Search window for phase around STA/LTA detection time
        idet_start = (np.abs(t_utc - det_start)).argmin()
        idet_end = (np.abs(t_utc - det_end)).argmin()
        idx_search_max = range(idet_start, idet_end)

        # Analyze stations one by one
        pol_st = Stream()
        event_phases = []
        for ista, station in enumerate(station_list):

            # Select waveform and STA-LTA streams
            sta_st = det_st.select(station=station).copy()
            network = sta_st[0].stats.network
            sta_st.detrend()
            sta_cft = det_cft.select(station=station).copy()
            sta_cft_stack = (sta_cft.select(channel="DPZ")[0].data +
                             sta_cft.select(channel="DPN")[0].data +
                             sta_cft.select(channel="DPE")[0].data) / 3

            # Polarization properties
            tpol, pol_dict, pol_st_sta = modified_polarization_analysis(
                sta_st, dominant_period=DOM_PERIOD, interpolate=True)
            pol_st += pol_st_sta

            # Energy response curve for pick detection
            per = peak_eigenvalue_ratio(pol_dict["eigenvalue1"],
                                        win_len=int(2 * DOM_PERIOD * fs))
            per = eps_smooth(per, w=int(EPS_WINLEN * fs))
            jer = joint_energy_ratio(sta_cft_stack, t_plt, per, tpol)

            # Extract phases
            sta_phases = get_phases(response_curve=jer,
                                    idx_search_max=idx_search_max,
                                    time=t_utc,
                                    pol=pol_dict,
                                    verbose=False)
            if sta_phases:

                # Now do some quality control
                snr_threshold = 2.5
                win_len_s = 0.2
                sta_phases["station"] = station
                sta_phases["network"] = network

                if sta_phases["P"]["arrival_time"]:
                    arr_time = sta_phases["P"]["arrival_time"] - 0.02

                    snr, channel = get_snr_phase(sta_st,
                                                 time=arr_time,
                                                 win_len_s=win_len_s,
                                                 verbose=False,
                                                 tnoise=None)
                    Logger.info("SNR for P pick %s.%s..%s: %f \t at t = %s" %
                                (network, station, channel, snr, arr_time))
                    if snr < snr_threshold:
                        #Logger.info("P pick below SNR threshold of %f" % snr_threshold)
                        sta_phases["P"]["arrival_time"] = None
                    else:
                        sta_phases["P"]["SNR"] = snr
                        sta_phases["P"]["channel"] = channel

                if sta_phases["S"]["arrival_time"]:
                    arr_time = sta_phases["S"]["arrival_time"] - 0.02
                    if sta_phases["P"]["arrival_time"]:
                        tnoise = sta_phases["P"]["arrival_time"] - 0.02
                    else:
                        tnoise = None
                    snr, channel = get_snr_phase(sta_st.select(),
                                                 time=arr_time,
                                                 win_len_s=win_len_s,
                                                 verbose=False,
                                                 tnoise=tnoise)

                    Logger.info("SNR for S pick %s.%s..%s: %f \t at t = %s" %
                                (network, station, channel, snr, arr_time))
                    if snr < snr_threshold:
                        Logger.info("S pick below SNR threshold of %f" %
                                    snr_threshold)
                        sta_phases["S"]["arrival_time"] = None
                    else:
                        sta_phases["S"]["SNR"] = snr
                        sta_phases["S"]["channel"] = channel

                Logger.info("Station %s: t_P = %s\tt_S = %s" %
                            (station, sta_phases["P"]["arrival_time"],
                             sta_phases["S"]["arrival_time"]))
                event_phases.append(sta_phases)
            else:
                Logger.info("No phase found for station %s" % station)
            # End of for loop over stations

        if not event_phases:
            Logger.info("No picks found at all for this detection.")
            continue
        else:
            nump = len([p for p in event_phases if p["P"]["arrival_time"]])
            nums = len([p for p in event_phases if p["S"]["arrival_time"]])
            Logger.info("Number of initial picks before MCCC: P = %d, S = %d" %
                        (nump, nums))
        if nump + nums == 0:
            Logger.info("No picks found at all for this detection.")
            continue
        # if verbose:
        #     plot_phases(event_phases, det_st)
        #     wadati_plot(event_phases, det_st)

        # Align with mccc
        Logger.info("Refining picks with MCCC")
        event_phases = align_mccc(event_phases=event_phases,
                                  stream=det_st,
                                  verbose=False)

        nump = len([p for p in event_phases if p["P"]["arrival_time"]])
        nums = len([p for p in event_phases if p["S"]["arrival_time"]])
        if nump == 0 and nums == 0:
            Logger.warning("No remaining picks after MCCC!")
            continue
        elif nump + nums < 5:
            Logger.info("Less than 5 picks remaining. Skipping event.")
            continue
        if verbose:
            Logger.info("Number of picks after MCCC: P = %d, S = %d" %
                        (nump, nums))
            wadati_plot(event_phases, det_st)
            plot_phases(event_phases, det_st)

        # Update polarization statistics
        Logger.info("Updating polarization attributes")
        phase_len_tol = int(10 * DOM_PERIOD * fs)
        for i, staph in enumerate(event_phases):
            sta_st = det_st.select(station=staph["station"]).copy()
            t = sta_st[0].times("utcdatetime")
            tpol, pol_dict, _ = modified_polarization_analysis(
                sta_st, dominant_period=DOM_PERIOD, interpolate=True)
            tp = staph["P"]["arrival_time"]
            if tp:
                idxP = np.argmin(np.abs(t - tp))
                stats = pol_window_stats(pol_dict,
                                         idxP,
                                         phase_len_tol,
                                         show_stats=False)
                event_phases[i]["P"]["pol_stats"] = stats
            ts = staph["S"]["arrival_time"]
            if ts:
                idxS = np.argmin(np.abs(t - ts))
                stats = pol_window_stats(pol_dict,
                                         idxS,
                                         phase_len_tol,
                                         show_stats=False)
                event_phases[i]["S"]["pol_stats"] = stats

        # Convert to obspy Picks and Event
        event_picks = []
        for i, staph in enumerate(event_phases):
            event_picks += sta_phases_to_pick(staph=staph)
        event = Event(picks=event_picks)

        # Estimate average event distance using available pairs of P and S picks
        r_med = distance_from_tstp(event.picks, min_estim=1)
        if not r_med:  # We cannot estimate r, hence magnitude
            Logger.warning(
                "Couldn't estimate hypocentral distance from ts-tp. No magnitude calculation."
            )
            # Add event to catalog
            if verbose:
                Logger.info(
                    "Adding event to catalog: *******************************************"
                )
                Logger.info(event)
            catalog.events.append(event)
            stfilepath = os.path.join("detections_waveforms",
                                      det_start.strftime("%Y%m%d"))
            if not os.path.exists(stfilepath):
                os.mkdir(stfilepath)
            det_st_to_save.write(os.path.join(
                stfilepath,
                "bhdetect_%s.mseed" % det_start.strftime("%Y%m%d%H%M%S")),
                                 format="MSEED")

            continue

        # Calculate magnitudes
        Logger.info("Computing magnitudes...")
        magtime_contriblist = []
        magspec_contriblist = []
        for ista, station in enumerate(station_list):
            sta_picks = [
                p for p in event.picks if p.waveform_id.station_code == station
            ]
            r = distance_from_tstp(sta_picks, min_estim=2)
            if not r:
                r = r_med
            ts = get_pick(event.picks, station, "S")
            if not ts:  # No ts pick
                Logger.warning("There is no S pick for station %s." % station)
                continue
            sta_st = det_st.select(station=station).copy()
            sta_st.detrend()

            # Estimate coda
            tp = get_pick(event.picks, station, "P")
            if not tp:
                tsig = ts - 0.5
            else:
                tsig = tp - 0.02
            tcoda, s_len, snr = get_coda_duration(sta_st.copy(),
                                                  tsig=tsig,
                                                  ts=ts,
                                                  win_len_s=0.2)
            if not tcoda:
                if verbose:
                    Logger.info(
                        "Couldn't calculate coda duration for station %s, skipping..."
                        % station)
                continue

            # Save coda info
            amp = Amplitude(generic_amplitude=tcoda,
                            snr=snr,
                            type="END",
                            category="duration",
                            unit="s",
                            magnitude_hint="Md")
            event.amplitudes.append(amp)

            # Estimate energy flux
            if tp:
                Logger.info("Calculating energy flux fr station %s" % station)
                epsilonS = 0
                for tr in sta_st.copy():
                    tr_cut = tr.trim(starttime=ts, endtime=ts + (ts - tp)).data
                    cumsum_u2 = scipy.integrate.cumtrapz(tr_cut**2,
                                                         dx=tr.stats.delta)
                    epsilonS += cumsum_u2[-1]
                amp = Amplitude(generic_amplitude=epsilonS,
                                snr=snr,
                                type="A",
                                category="integral",
                                unit="other",
                                time_window=TimeWindow(begin=ts - tp,
                                                       end=2 * (ts - tp),
                                                       reference=tp),
                                waveform_id=WaveformStreamID(
                                    network_code=tr.stats.network,
                                    station_code=tr.stats.station))
                event.amplitudes.append(amp)

            # Estimate Mw for each component
            Mw_spec_sta = []
            Mw_time_sta = []
            Q_spec_sta = []
            fc_spec_sta = []
            for tr in sta_st:
                # Cut noise window and S waveform
                noise_len = s_len
                taper_perc = 0.1
                trnoise = tr.copy()
                trnoise.trim(starttime=tsig - (1 + taper_perc) * noise_len,
                             endtime=tsig - taper_perc * noise_len)
                trnoise.taper(type="hann",
                              max_percentage=taper_perc,
                              side="both")
                tr.trim(starttime=ts - taper_perc * s_len,
                        endtime=ts + (1 + taper_perc) * s_len)
                tr.taper(type="hann", max_percentage=taper_perc, side="both")

                # Check SNR
                snr_trace = np.median(tr.slice(starttime=ts, endtime=ts + s_len).data) / \
                            np.median(trnoise.data)

                if snr_trace < 3:
                    Logger.info(
                        "SNR < 3, skipping trace for magnitude calculation.")
                    # Poor SNR, skip trace
                    continue

                # Displacement waveform
                trdisp = tr.copy()
                trdisp.integrate()
                trdisp.detrend()

                # Estimate magnitude: time method
                Mw_time, M0_time, omega0_time = estimate_magnitude_time(
                    trdisp, r, disp=False)
                Mw_time_sta.append(Mw_time)

                # Estimate magnitude: spectral method
                Mw_o, M0_o, omega0_o, fc_o, Q_o = estimate_magnitude_spectral(
                    trdisp, r, omega0_time, trnoise=None, disp=False)
                if not Mw_o:
                    Logger.warning("No magnitude found due to errors.")
                    continue
                elif fc_o < 2 or Q_o > 40 or Q_o < 1:  # plausible Qs bounds: sandstone ~31, shale ~10
                    # Reject spectral estimate
                    Logger.warning(
                        "Rejecting spectral estimate with: fc = %f, Q = %f" %
                        (fc_o, Q_o))
                    continue
                else:
                    Mw_spec_sta.append(Mw_o)
                    Q_spec_sta.append(Q_o)
                    fc_spec_sta.append(fc_o)

            # Now get average for station as a whole
            Logger.info(
                "Found %d estimates of Mw using time method for station %s." %
                (len(Mw_time_sta), station))
            Logger.info(
                "Found %d estimates of Mw using spectral method for station %s."
                % (len(Mw_spec_sta), station))
            if Mw_time_sta:
                smagt = StationMagnitude(
                    mag=np.mean(Mw_time_sta),
                    mag_errors=QuantityError(uncertainty=np.std(Mw_time_sta)),
                    station_magnitude_type="Mw_time",
                    comments=[Comment(text="snr = %f" % snr)])
                event.station_magnitudes.append(smagt)
                contrib = StationMagnitudeContribution(
                    station_magnitude_id=smagt.resource_id, weight=snr)
                magtime_contriblist.append(contrib)
                Logger.info("Magnitude time estimate = %f" %
                            np.mean(Mw_time_sta))

            if Mw_spec_sta:
                smags = StationMagnitude(
                    mag=np.mean(Mw_spec_sta),
                    mag_errors=QuantityError(uncertainty=np.std(Mw_spec_sta)),
                    station_magnitude_type="Mw_spectral",
                    comments=[
                        Comment(text="Q_mean = %f, Q_std = %f" %
                                (np.mean(Q_spec_sta), np.std(Q_spec_sta))),
                        Comment(text="Fc_mean = %f, Fc_std = %f" %
                                (np.mean(fc_spec_sta), np.std(fc_spec_sta))),
                        Comment(text="snr = %f" % snr)
                    ])
                event.station_magnitudes.append(smags)
                contrib = StationMagnitudeContribution(
                    station_magnitude_id=smags.resource_id, weight=snr)
                magspec_contriblist.append(contrib)
                Logger.info("Magnitude spectral estimate = %f" %
                            np.mean(Mw_spec_sta))
                Logger.info("Fc = %f, Q = %f" %
                            (np.mean(fc_spec_sta), np.mean(Q_spec_sta)))

            # End of for loop over stations

        # Get magnitude for event
        if magspec_contriblist:
            Logger.info(
                "Found %d station estimates of Mw using spectral method." %
                len(magspec_contriblist))
            wave_num = 0
            wave_den = 0
            val_list = []
            for m in magspec_contriblist:
                mval = [
                    sm.mag for sm in event.station_magnitudes
                    if sm.resource_id == m.station_magnitude_id
                ][0]
                wave_num += mval * m.weight
                wave_den += m.weight
                val_list.append(mval)
            mag = wave_num / wave_den
            mags = Magnitude(
                mag=mag,
                mag_errors=QuantityError(uncertainty=np.std(val_list)),
                magnitude_type="Mw_spectral",
                station_count=len(magspec_contriblist),
                station_magnitude_contributions=magspec_contriblist)
            event.magnitudes.append(mags)
            Logger.info(
                "Event magnitude estimate using spectral method: Mw = %f" %
                mags.mag)
        if magtime_contriblist:
            Logger.info("Found %d station estimates of Mw using time method." %
                        len(magtime_contriblist))
            wave_num = 0
            wave_den = 0
            val_list = []
            for m in magtime_contriblist:
                mval = [
                    sm.mag for sm in event.station_magnitudes
                    if sm.resource_id == m.station_magnitude_id
                ][0]
                wave_num += mval * m.weight
                wave_den += m.weight
                val_list.append(mval)
            mag = wave_num / wave_den
            magt = Magnitude(
                mag=mag,
                mag_errors=QuantityError(uncertainty=np.std(val_list)),
                magnitude_type="Mw_time",
                station_count=len(magtime_contriblist),
                station_magnitude_contributions=magtime_contriblist)
            event.magnitudes.append(magt)
            Logger.info("Event magnitude estimate using time method: Mw = %f" %
                        magt.mag)

        # Add event to catalog
        if verbose:
            Logger.info(
                "Adding event to catalog: *******************************************"
            )
            Logger.info(event)
        catalog.events.append(event)
        stfilepath = os.path.join("detections_waveforms",
                                  det_start.strftime("%Y%m%d"))
        if not os.path.exists(stfilepath):
            os.mkdir(stfilepath)
        det_st_to_save.write(os.path.join(
            stfilepath,
            "bhdetect_%s.mseed" % det_start.strftime("%Y%m%d%H%M%S")),
                             format="MSEED")

    if len(catalog) > 0:
        # Decluster
        declustered_catalog = decluster_bh(catalog, trig_int=2.0)
        if not os.path.exists(os.path.split(fname)[0]):
            os.mkdir(os.path.split(fname)[0])
        declustered_catalog.write(fname, format="QUAKEML")
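
A minimal driver sketch (the input file and output path are hypothetical; main() only needs an ObsPy Stream and a QuakeML destination):

from obspy import read

st = read("borehole_geodes_day.mseed")  # hypothetical continuous recording
main(st, fname="catalogs/bhdetect_20200101.xml", verbose=False)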