Example #1
def read_event_json_files(eventfiles):
    """Read event.json file and return ScalarEvent object.

    Args:
        eventfiles (list):
            Event.json files to be read.
    Returns:
        list: ScalarEvent objects.

    """
    events = []
    for eventfile in eventfiles:
        with open(eventfile, "rt", encoding="utf-8") as f:
            event = json.load(f)
            try:
                origintime = datetime.fromtimestamp(
                    event["properties"]["time"] / 1000.0, pytz.utc)
                evdict = {
                    "id": event["id"],
                    "time": origintime.strftime("%Y-%m-%dT%H:%M:%S.%f"),
                    "lat": event["geometry"]["coordinates"][1],
                    "lon": event["geometry"]["coordinates"][0],
                    "depth": event["geometry"]["coordinates"][2],
                    "magnitude": event["properties"]["mag"],
                    "magnitude_type": event["properties"]["magType"],
                }
                event = get_event_object(evdict)

            except BaseException:
                # Not ComCat GeoJSON; assume the file already holds a
                # flattened event dictionary that get_event_object accepts.
                event = get_event_object(event)

            events.append(event)
    return events
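
For reference, a minimal sketch of the ComCat-style GeoJSON dictionary that the happy path above expects. The field names are taken from the code; the ID and values are invented:

event = {
    "id": "us1000abcd",  # hypothetical ComCat event ID
    "properties": {
        "time": 1551430242615,  # milliseconds since the epoch, hence / 1000.0
        "mag": 7.0,
        "magType": "mww",
    },
    "geometry": {
        # GeoJSON coordinate order: [longitude, latitude, depth (km)]
        "coordinates": [-70.1516, -14.7007, 267.0],
    },
}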
Example #2
def parse_event_file(eventfile):
    """Parse text file containing basic event information.

    Files can contain:
        - one column, in which case that column
          contains ComCat event IDs.
        - seven columns, in which case those columns should be:
          - id: Any string (no spaces).
          - time: Any ISO standard for date/time.
          - lat: Earthquake latitude in decimal degrees.
          - lon: Earthquake longitude in decimal degrees.
          - depth: Earthquake depth in kilometers.
          - magnitude: Earthquake magnitude.
          - magnitude_type: Earthquake magnitude type.

    NB: THERE SHOULD NOT BE ANY HEADERS ON THIS FILE!

    Args:
        eventfile (str):
            Path to event text file

    Returns:
        list: ScalarEvent objects constructed from list of event information.

    """
    df = pd.read_csv(eventfile, sep=",", header=None)
    nrows, ncols = df.shape
    events = []
    if ncols == 1:
        df.columns = ["eventid"]
        for idx, row in df.iterrows():
            event = get_event_object(row["eventid"])
            events.append(event)
    elif ncols == 7:
        df.columns = [
            "id",
            "time",
            "lat",
            "lon",
            "depth",
            "magnitude",
            "magnitude_type",
        ]
        df["time"] = pd.to_datetime(df["time"])
        for idx, row in df.iterrows():
            rowdict = row.to_dict()
            event = get_event_object(rowdict)
            events.append(event)
    else:
        return None
    return events
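
A minimal usage sketch for the seven-column branch, assuming parse_event_file and its gmprocess dependencies are importable (the row values are invented):

import tempfile

# One seven-column row in the order the function assigns:
# id, time, lat, lon, depth, magnitude, magnitude_type. No header line.
row = "eq001,2019-03-01T08:50:42,-14.7007,-70.1516,267.0,7.0,mww\n"
with tempfile.NamedTemporaryFile("wt", suffix=".csv", delete=False) as f:
    f.write(row)
events = parse_event_file(f.name)  # returns a one-element list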
Example #3
def test_event():
    subdir = os.path.join('data', 'testdata', 'vcr_event_test.yaml')
    tape_file = pkg_resources.resource_filename('gmprocess', subdir)
    with vcr.use_cassette(tape_file):
        eid = 'us1000j96d'  # M7.0 Peru Mar 1 2019
        edict = get_event_dict(eid)
        tdict = {
            'id': 'us1000j96d',
            'time': UTCDateTime(2019, 3, 1, 8, 50, 42, 615000),
            'lat': -14.7007,
            'lon': -70.1516,
            'depth': 267,
            'magnitude': 7.0,
            'magnitude_type': 'mww'
        }
        for key, value in tdict.items():
            v1 = edict[key]
            assert value == v1

        event = get_event_object(eid)
        assert event.id == eid
        assert event.magnitude == tdict['magnitude']
        assert event.magnitude_type == tdict['magnitude_type']
        assert event.time == tdict['time']
        assert event.latitude == tdict['lat']
        assert event.longitude == tdict['lon']
        assert event.depth == tdict['depth'] * 1000
        assert event.depth_km == tdict['depth']
Example #4
def test_event():
    subdir = os.path.join("data", "testdata", "vcr_event_test.yaml")
    tape_file = pkg_resources.resource_filename("gmprocess", subdir)
    with vcr.use_cassette(tape_file):
        eid = "us1000j96d"  # M7.0 Peru Mar 1 2019
        edict = get_event_dict(eid)
        tdict = {
            "id": "us1000j96d",
            "time": UTCDateTime(2019, 3, 1, 8, 50, 42, 615000),
            "lat": -14.7007,
            "lon": -70.1516,
            "depth": 267,
            "magnitude": 7.0,
            "magnitude_type": "mww",
        }
        for key, value in tdict.items():
            v1 = edict[key]
            assert value == v1

        event = get_event_object(eid)
        assert event.id == eid
        assert event.magnitude == tdict["magnitude"]
        assert event.magnitude_type == tdict["magnitude_type"]
        assert event.time == tdict["time"]
        assert event.latitude == tdict["lat"]
        assert event.longitude == tdict["lon"]
        assert event.depth == tdict["depth"] * 1000
        assert event.depth_km == tdict["depth"]
Example #5
def test_zero_crossings():
    datapath = os.path.join("data", "testdata", "zero_crossings")
    datadir = pkg_resources.resource_filename("gmprocess", datapath)
    sc = StreamCollection.from_directory(datadir)
    sc.describe()

    conf = get_config()

    update = {
        "processing": [
            {"detrend": {"detrending_method": "demean"}},
            {"check_zero_crossings": {"min_crossings": 1}},
        ]
    }
    update_dict(conf, update)

    edict = {
        "id": "ak20419010",
        "time": UTCDateTime("2018-11-30T17:29:29"),
        "lat": 61.346,
        "lon": -149.955,
        "depth": 46.7,
        "magnitude": 7.1,
    }
    event = get_event_object(edict)
    test = process_streams(sc, event, conf)
    for st in test:
        for tr in st:
            assert tr.hasParameter("ZeroCrossingRate")
    np.testing.assert_allclose(
        test[0][0].getParameter("ZeroCrossingRate")["crossing_rate"],
        0.008888888888888889,
        atol=1e-5,
    )
Example #6
def test_fit_spectra():
    config = get_config()
    datapath = os.path.join('data', 'testdata', 'demo', 'ci38457511', 'raw')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)
    event = get_event_object('ci38457511')
    sc = StreamCollection.from_directory(datadir)
    for st in sc:
        st = signal_split(st, event)
        end_conf = config['windows']['signal_end']
        st = signal_end(st,
                        event_time=event.time,
                        event_lon=event.longitude,
                        event_lat=event.latitude,
                        event_mag=event.magnitude,
                        **end_conf)
        st = compute_snr(st, 30)
        st = get_corner_frequencies(st,
                                    method='constant',
                                    constant={
                                        'highpass': 0.08,
                                        'lowpass': 20.0
                                    })

    for st in sc:
        spectrum.fit_spectra(st, event)
Example #7
def test_fit_spectra():
    config = get_config()
    datapath = os.path.join("data", "testdata", "demo", "ci38457511", "raw")
    datadir = pkg_resources.resource_filename("gmprocess", datapath)
    event = get_event_object("ci38457511")
    sc = StreamCollection.from_directory(datadir)
    for st in sc:
        st = signal_split(st, event)
        end_conf = config["windows"]["signal_end"]
        st = signal_end(st,
                        event_time=event.time,
                        event_lon=event.longitude,
                        event_lat=event.latitude,
                        event_mag=event.magnitude,
                        **end_conf)
        st = compute_snr(st, 30)
        st = get_corner_frequencies(st,
                                    event,
                                    method="constant",
                                    constant={
                                        "highpass": 0.08,
                                        "lowpass": 20.0
                                    })

    for st in sc:
        spectrum.fit_spectra(st, event)
Example #8
def events_from_directory(directory):
    events = []
    eventfiles = get_event_files(directory)
    if len(eventfiles):
        events = read_event_json_files(eventfiles)
    else:
        eventids = [f for f in os.listdir(directory) if not f.startswith('.')]
        for eventid in eventids:
            try:
                event = get_event_object(eventid)
                events.append(event)

                # If the event ID has been updated, make sure to rename
                # the source folder and issue a warning to the user
                if event.id != eventid:
                    old_dir = os.path.join(directory, eventid)
                    new_dir = os.path.join(directory, event.id)
                    os.rename(old_dir, new_dir)
                    logging.warning('Directory %s has been renamed to %s.' %
                                    (old_dir, new_dir))
            except BaseException:
                # Lookup can fail for bad or unknown event IDs.
                logging.warning('Could not get info for event id: %s' %
                                eventid)

    return events
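
The two branches above correspond to two directory layouts; a sketch of each (directory and event names invented):

# Layout A: event.json files found by get_event_files() are read directly.
#     data/us1000j96d/event.json
# Layout B: no event.json files, so each non-hidden entry is treated as a
# ComCat event ID and resolved over the network with get_event_object().
#     data/ci38457511/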
Example #9
def update_eq_options(wdir, proj, label, imc, eqid):
    if any(val is None for val in locals().values()):
        return [], None, [], None, [], []
    df_eq_imc = get_eq_imc_df(wdir, proj, label, eqid, imc)
    imt_options = get_options(df_eq_imc, IMT_REGEX)
    imt_options.append({'label': 'Pass/Fail', 'value': 'Pass/Fail'})
    imt = imt_options[0]['value']
    dist_options = get_options(df_eq_imc, DIST_REGEX)
    dist = dist_options[0]['value']
    model_options = get_model_options(MODELS_DICT, imc, imt)
    rep = repr(get_event_object(eqid))
    return imt_options, imt, dist_options, dist, model_options, rep
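
This reads like a plotly/Dash callback body; a direct-call sketch follows, with all argument values invented:

imt_opts, imt, dist_opts, dist, model_opts, rep = update_eq_options(
    wdir="./workspace", proj="default", label="default",
    imc="rotd50", eqid="us1000j96d")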
Example #10
def read_event_json_files(eventfiles):
    """Read event.json file and return ScalarEvent object.

    Args:
        eventfiles (list):
            Event.json files to be read.
    Returns:
        list: ScalarEvent objects.

    """
    events = []
    for eventfile in eventfiles:
        with open(eventfile, 'rt', encoding='utf-8') as f:
            eventdict = json.load(f)
            event = get_event_object(eventdict)
            events.append(event)
    return events
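
Unlike Example #1, this variant assumes the JSON already holds a flattened event dictionary rather than ComCat GeoJSON. A sketch of such a file's contents as a Python dict, with invented values and keys matching the evdict built in Example #1:

eventdict = {
    "id": "eq001",
    "time": "2019-03-01T08:50:42",
    "lat": -14.7007,
    "lon": -70.1516,
    "depth": 267.0,
    "magnitude": 7.0,
    "magnitude_type": "mww",
}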
Example #11
def _test_trim_multiple_events():
    datapath = os.path.join("data", "testdata", "multiple_events")
    datadir = pkg_resources.resource_filename("gmprocess", datapath)
    sc = StreamCollection.from_directory(os.path.join(datadir, "ci38457511"))
    origin = get_event_object("ci38457511")
    df, catalog = create_travel_time_dataframe(
        sc, os.path.join(datadir, "catalog.csv"), 5, 0.1, "iasp91"
    )
    for st in sc:
        st.detrend("demean")
        remove_response(st, None, None)
        st = corner_frequencies.from_constant(st)
        lowpass_filter(st)
        highpass_filter(st)
        signal_split(st, origin)
        signal_end(
            st,
            origin.time,
            origin.longitude,
            origin.latitude,
            origin.magnitude,
            method="model",
            model="AS16",
        )
        cut(st, 2)
        trim_multiple_events(
            st, origin, catalog, df, 0.2, 0.7, "B14", {"vs30": 760}, {"rake": 0}
        )

    num_failures = sum([1 if not st.passed else 0 for st in sc])
    assert num_failures == 2

    failure = sc.select(station="WRV2")[0][0].getParameter("failure")
    assert failure["module"] == "trim_multiple_events"
    assert failure["reason"] == (
        "A significant arrival from another event "
        "occurs within the first 70.0 percent of the "
        "signal window"
    )

    for tr in sc.select(station="JRC2")[0]:
        np.testing.assert_almost_equal(
            tr.stats.endtime, UTCDateTime("2019-07-06T03:20:56.368300Z")
        )
Example #12
def test_check_clipping():
    data_files, _ = read_data_dir("clipping_samples", "hv70907436", "*.mseed")
    data_files.sort()
    origin = get_event_object("hv70907436")
    streams = []
    for f in data_files:
        streams += read_data(f)

    codes = ["HV.TOUO", "HV.MOKD", "HV.MLOD", "HV.HOVE", "HV.HUAD", "HV.HSSD"]
    passed = []
    for code in codes:
        traces = []
        for ss in streams:
            tcode = f"{ss[0].stats.network}.{ss[0].stats.station}"
            if tcode == code:
                traces.append(ss[0])
        st = StationStream(traces)
        check_clipping(st, origin)
        passed.append(st.passed)

    assert np.all(~np.array(passed))
Example #13
def test_zero_crossings():
    datapath = os.path.join('data', 'testdata', 'zero_crossings')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)
    sc = StreamCollection.from_directory(datadir)
    sc.describe()

    conf = get_config()

    update = {
        'processing': [{
            'detrend': {
                'detrending_method': 'demean'
            }
        }, {
            'check_zero_crossings': {
                'min_crossings': 1
            }
        }]
    }
    update_dict(conf, update)

    edict = {
        'id': 'ak20419010',
        'time': UTCDateTime('2018-11-30T17:29:29'),
        'lat': 61.346,
        'lon': -149.955,
        'depth': 46.7,
        'magnitude': 7.1
    }
    event = get_event_object(edict)
    test = process_streams(sc, event, conf)
    for st in test:
        for tr in st:
            assert tr.hasParameter('ZeroCrossingRate')
    np.testing.assert_allclose(
        test[0][0].getParameter('ZeroCrossingRate')['crossing_rate'],
        0.008888888888888889,
        atol=1e-5)
Example #14
def test_trim_multiple_events():
    datapath = os.path.join('data', 'testdata', 'multiple_events')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)
    sc = StreamCollection.from_directory(os.path.join(datadir, 'ci38457511'))
    origin = get_event_object('ci38457511')
    df, catalog = create_travel_time_dataframe(
        sc, os.path.join(datadir, 'catalog.csv'), 5, 0.1, 'iasp91')
    for st in sc:
        st.detrend('demean')
        remove_response(st, None, None)
        st = corner_frequencies.get_constant(st)
        lowpass_filter(st)
        highpass_filter(st)
        signal_split(st, origin)
        signal_end(st,
                   origin.time,
                   origin.longitude,
                   origin.latitude,
                   origin.magnitude,
                   method='model',
                   model='AS16')
        cut(st, 2)
        trim_multiple_events(st, origin, catalog, df, 0.2, 0.7, 'B14',
                             {'vs30': 760}, {'rake': 0})

    num_failures = sum([1 if not st.passed else 0 for st in sc])
    assert num_failures == 1

    failure = sc.select(station='WRV2')[0][0].getParameter('failure')
    assert failure['module'] == 'trim_multiple_events'
    assert failure['reason'] == ('A significant arrival from another event '
                                 'occurs within the first 70.0 percent of the '
                                 'signal window')

    for tr in sc.select(station='JRC2')[0]:
        np.testing.assert_almost_equal(
            tr.stats.endtime, UTCDateTime('2019-07-06T03:20:38.7983Z'))
Example #15
def get_events(eventids, textfile, eventinfo, directory, outdir=None):
    """Find the list of events.

    Args:
        eventids (list or None):
            List of ComCat event IDs.
        textfile (str or None):
            Path to text file containing event IDs or info.
        eventinfo (list or None):
            List containing:
                - id: Any string, no spaces.
                - time: Any ISO-compatible date/time string.
                - latitude: Latitude in decimal degrees.
                - longitude: Longitude in decimal degrees.
                - depth: Depth in kilometers.
                - magnitude: Earthquake magnitude.
                - magnitude_type: Earthquake magnitude type.
        directory (str):
            Path to a directory containing event subdirectories, each
            containing an event.json file, where the ID in the json file
            matches the subdirectory containing it.
        outdir (str):
            Output directory.

    Returns:
        list: ScalarEvent objects.

    """
    events = []
    if eventids is not None:
        # Get list of events from directory if it has been provided
        tevents = []
        if directory is not None:
            tevents = events_from_directory(directory)
        elif outdir is not None:
            tevents = events_from_directory(outdir)
        eventidlist = [event.id for event in tevents]
        for eventid in eventids:
            try:
                idx = eventidlist.index(eventid)
                event = tevents[idx]
            except ValueError:
                # This connects to comcat to get event, does not check for a
                # local json file
                event = get_event_object(eventid)
            events.append(event)
    elif textfile is not None:
        events = parse_event_file(textfile)
    elif eventinfo is not None:
        eid = eventinfo[0]
        time = eventinfo[1]
        lat = float(eventinfo[2])
        lon = float(eventinfo[3])
        dep = float(eventinfo[4])
        mag = float(eventinfo[5])
        mag_type = str(eventinfo[6])
        event = ScalarEvent()
        event.fromParams(eid, time, lat, lon, dep, mag, mag_type)
        events = [event]
    elif directory is not None:
        events = events_from_directory(directory)
    elif outdir is not None:
        events = events_from_directory(outdir)

    # "events" elements are None if an error occurred, e.g., bad event id is specified.
    events = [e for e in events if e is not None]

    return events
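
A sketch of the mutually exclusive ways to call get_events(); the IDs and paths are invented:

# 1) By ComCat ID, checking a local directory for cached events first:
events = get_events(eventids=["us1000j96d"], textfile=None,
                    eventinfo=None, directory="./data")

# 2) From a text file of event IDs or seven-column rows:
events = get_events(eventids=None, textfile="events.csv",
                    eventinfo=None, directory=None)

# 3) From explicit event parameters (lat/lon/depth/mag are coerced to float):
info = ["eq001", "2019-03-01T08:50:42", "-14.7007", "-70.1516",
        "267.0", "7.0", "mww"]
events = get_events(eventids=None, textfile=None, eventinfo=info,
                    directory=None)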
Example #16
def main():
    desc = """Convert CWB data from web site into form ingestible by gmprocess.

    To obtain CWB strong motion data, create an account on the CWB GDMS-2020 website:

    https://gdmsn.cwb.gov.tw/index.php

    To retrieve strong motion response files:

    Click on the "Data" icon, then click on "Instrument Response". 

     - For "Output Format", choose "RESP file".
     - For "Network", choose "CWBSN".
     - For "Station", check "All Stations".
     - For "Location", choose "*" for all locations.
     - For "Channel", choose "HN?" for all strong motion stations.
     - For "Start Time (UTC)", enter a date before the event of interest.
     - For "End Time (UTC)", enter a date after the event of interest.
     - For "Label", enter any string that is descriptive to you.

     Click the "Submit" button, and you should see a "Success!" pop up.
     Next will be a screen showing a list of all of your downloads. Response data
     links should appear fairly quickly - click on the name of the generated gzipped
     "tarball" file to download it.

    To retrieve a strong motion data file:

    Click on the "Data" icon, then click on "Multi-Station Waveform Data". 

     - For "Output Format", choose "MiniSEED".
     - For "Network", choose "CWBSN".
     - For "Station", check "All Stations".
     - For "Location", choose "*" for all locations.
     - For "Channel", choose "HN?" for all strong motion stations.
     - For "Start Time (UTC)", enter a time 30 seconds before the origin time of 
       interest.
     - For "End Time (UTC)", enter a time 7 minutes after the origin time of interest.
     - For "Label", enter any string that is descriptive to you.

     Click the "Submit" button, and you should see a "Success!" pop up.
     Next will be a screen showing a list of all of your downloads. Data
     links will take a few minutes to process - click on the name of the generated 
     miniseed file to download it.

     Pass these files along with the ComCat ID as described below.
    """
    parser = argparse.ArgumentParser(
        description=desc,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        "event",
        help="ComCat Event ID",
    )
    parser.add_argument(
        "seedfile",
        help="Miniseed file containing all relevant data",
    )
    parser.add_argument(
        "tarball",
        help="Tarball containing response files",
    )
    if len(sys.argv) == 1:
        parser.print_help(sys.stderr)
        sys.exit(1)
    args = parser.parse_args()

    stations = read_stations()
    CONF_PATH = pathlib.Path.home() / ".gmprocess"
    PROJECTS_FILE = CONF_PATH / "projects.conf"
    projects_conf = configobj.ConfigObj(str(PROJECTS_FILE), encoding="utf-8")
    project = projects_conf["project"]
    current_project = projects_conf["projects"][project]
    data_parts = pathlib.PurePath(current_project["data_path"]).parts
    data_path = CONF_PATH.joinpath(*data_parts).resolve()
    event_path = data_path / args.event
    raw_path = event_path / "raw"
    if not event_path.exists():
        event_path.mkdir()
        raw_path.mkdir()
    raw_stream = read(args.seedfile)
    # Some of these files are returned from the CWB web site in time chunks;
    # this merge call joins up all of the traces that share the same
    # NSCL (network/station/channel/location) code. Thanks ObsPy!
    stream = raw_stream.merge(fill_value="interpolate",
                              interpolation_samples=-1)
    for trace in stream:
        network = trace.stats.network
        station = trace.stats.station
        channel = trace.stats.channel
        location = trace.stats.location
        starttime_str = trace.stats.starttime.strftime("%Y%m%dT%H%M%SZ")
        endtime_str = trace.stats.endtime.strftime("%Y%m%dT%H%M%SZ")
        fname = (f"{network}.{station}.{location}.{channel}__"
                 f"{starttime_str}__{endtime_str}.mseed")
        filename = raw_path / fname
        trace.write(str(filename), format="MSEED")
    print(f"{len(stream)} channels written to {raw_path}.")
    responses = {}
    with tarfile.open(args.tarball, "r") as tarball:
        for member in tarball.getmembers():
            if member.isdir():
                continue
            with tarball.extractfile(member) as fh:
                inventory = read_inventory(fh)
                network = inventory.networks[0]
                netcode = network.code
                station = network.stations[0]
                stacode = station.code
                resp_name = f"{netcode}.{stacode}"
                if resp_name not in stations:
                    print(
                        f"No station coordinates available for station {resp_name}. Skipping."
                    )
                    continue
                latitude = stations[resp_name]["latitude"]
                longitude = stations[resp_name]["longitude"]
                elevation = stations[resp_name]["elevation"]
                if resp_name in responses:
                    old_inventory = responses[resp_name]
                    old_station = old_inventory.networks[0].stations[0]
                    new_station = inventory.networks[0].stations[0]
                    new_channel = new_station.channels[0]
                    new_channel.latitude = latitude
                    new_channel.longitude = longitude
                    new_channel.elevation = elevation
                    old_station.channels.append(new_channel)
                else:
                    for station in inventory.networks[0].stations:
                        station.latitude = latitude
                        station.longitude = longitude
                        station.elevation = elevation
                        for channel in station.channels:
                            channel.latitude = latitude
                            channel.longitude = longitude
                            channel.elevation = elevation
                    responses[resp_name] = inventory
    for resp_name, response in responses.items():
        fname = resp_name + ".xml"
        filename = raw_path / fname
        response.write(str(filename), format="stationxml")
    print(f"{len(responses)} station responses written to {raw_path}.")
    scalar_event = get_event_object(args.event)
    create_event_file(scalar_event, str(event_path))
    event_file = event_path / "event.json"
    msg = f"Created event file at {event_file}."
    if not event_file.exists():
        msg = f"Error: Failed to create {event_file}."
    print(msg)
    download_rupture_file(args.event, str(event_path))
    rupture_file = event_path / "rupture.json"
    msg = f"Created rupture file at {rupture_file}."
    if not rupture_file.exists():
        msg = f"Error: Failed to create {rupture_file}."
    print(msg)
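
Assuming the script above is saved as, say, convert_cwb.py (the file name is invented), a typical invocation would be:

python convert_cwb.py us7000abcd waveforms.mseed responses.tgz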
Example #17
def test_lowpass_max():
    datapath = os.path.join('data', 'testdata', 'lowpass_max')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)
    sc = StreamCollection.from_directory(datadir)
    sc.describe()

    conf = get_config()
    update = {
        'processing': [
            {
                'detrend': {
                    'detrending_method': 'demean'
                }
            },
            {
                'remove_response': {
                    'f1': 0.001,
                    'f2': 0.005,
                    'f3': None,
                    'f4': None,
                    'output': 'ACC',
                    'water_level': 60
                }
            },
            #            {'detrend': {'detrending_method': 'linear'}},
            #            {'detrend': {'detrending_method': 'demean'}},
            {
                'get_corner_frequencies': {
                    'constant': {
                        'highpass': 0.08,
                        'lowpass': 20.0
                    },
                    'method': 'constant',
                    'snr': {
                        'same_horiz': True
                    }
                }
            },
            {
                'lowpass_max_frequency': {
                    'fn_fac': 0.9
                }
            }
        ]
    }
    update_dict(conf, update)
    update = {
        'windows': {
            'signal_end': {
                'method': 'model',
                'vmin': 1.0,
                'floor': 120,
                'model': 'AS16',
                'epsilon': 2.0
            },
            'window_checks': {
                'do_check': False,
                'min_noise_duration': 1.0,
                'min_signal_duration': 1.0
            }
        }
    }
    update_dict(conf, update)
    edict = {
        'id': 'ci38038071',
        'time': UTCDateTime('2018-08-30 02:35:36'),
        'lat': 34.136,
        'lon': -117.775,
        'depth': 5.5,
        'magnitude': 4.4
    }
    event = get_event_object(edict)
    test = process_streams(sc, event, conf)
    for st in test:
        for tr in st:
            freq_dict = tr.getParameter('corner_frequencies')
            np.testing.assert_allclose(freq_dict['lowpass'], 18.0)
Example #18
def test_lowpass_max():
    datapath = os.path.join("data", "testdata", "lowpass_max")
    datadir = pkg_resources.resource_filename("gmprocess", datapath)
    sc = StreamCollection.from_directory(datadir)
    sc.describe()

    conf = get_config()
    update = {
        "processing": [
            {
                "detrend": {
                    "detrending_method": "demean"
                }
            },
            {
                "remove_response": {
                    "f1": 0.001,
                    "f2": 0.005,
                    "f3": None,
                    "f4": None,
                    "water_level": 60,
                }
            },
            #            {'detrend': {'detrending_method': 'linear'}},
            #            {'detrend': {'detrending_method': 'demean'}},
            {
                "get_corner_frequencies": {
                    "constant": {
                        "highpass": 0.08,
                        "lowpass": 20.0
                    },
                    "method": "constant",
                    "snr": {
                        "same_horiz": True
                    },
                }
            },
            {
                "lowpass_max_frequency": {
                    "fn_fac": 0.9
                }
            },
        ]
    }
    update_dict(conf, update)
    update = {
        "windows": {
            "signal_end": {
                "method": "model",
                "vmin": 1.0,
                "floor": 120,
                "model": "AS16",
                "epsilon": 2.0,
            },
            "window_checks": {
                "do_check": False,
                "min_noise_duration": 1.0,
                "min_signal_duration": 1.0,
            },
        }
    }
    update_dict(conf, update)
    edict = {
        "id": "ci38038071",
        "time": UTCDateTime("2018-08-30 02:35:36"),
        "lat": 34.136,
        "lon": -117.775,
        "depth": 5.5,
        "magnitude": 4.4,
    }
    event = get_event_object(edict)
    test = process_streams(sc, event, conf)
    for st in test:
        for tr in st:
            freq_dict = tr.getParameter("corner_frequencies")
            np.testing.assert_allclose(freq_dict["lowpass"], 18.0)