Example #1
    def test_channel_str(self):
        """
        Tests the __str__ method of the channel object.
        """
        c = Channel(code="BHE",
                    location_code="10",
                    latitude=1,
                    longitude=2,
                    elevation=3,
                    depth=4,
                    azimuth=5,
                    dip=6)
        assert str(c) == (
            "Channel 'BHE', Location '10' \n"
            "\tTime range: -- - --\n"
            "\tLatitude: 1.0000, Longitude: 2.0000, Elevation: 3.0 m, "
            "Local Depth: 4.0 m\n"
            "\tAzimuth: 5.00 degrees from north, clockwise\n"
            "\tDip: 6.00 degrees down from horizontal\n")

        # Adding channel types.
        c.types = ["A", "B"]
        assert str(c) == (
            "Channel 'BHE', Location '10' \n"
            "\tTime range: -- - --\n"
            "\tLatitude: 1.0000, Longitude: 2.0000, Elevation: 3.0 m, "
            "Local Depth: 4.0 m\n"
            "\tAzimuth: 5.00 degrees from north, clockwise\n"
            "\tDip: 6.00 degrees down from horizontal\n"
            "\tChannel types: A, B\n")

        # Adding a sample rate.
        c.sample_rate = 10.0
        assert str(c) == (
            "Channel 'BHE', Location '10' \n"
            "\tTime range: -- - --\n"
            "\tLatitude: 1.0000, Longitude: 2.0000, Elevation: 3.0 m, "
            "Local Depth: 4.0 m\n"
            "\tAzimuth: 5.00 degrees from north, clockwise\n"
            "\tDip: 6.00 degrees down from horizontal\n"
            "\tChannel types: A, B\n"
            "\tSampling Rate: 10.00 Hz\n")

        # "Adding" response
        c.response = True
        assert str(c) == (
            "Channel 'BHE', Location '10' \n"
            "\tTime range: -- - --\n"
            "\tLatitude: 1.0000, Longitude: 2.0000, Elevation: 3.0 m, "
            "Local Depth: 4.0 m\n"
            "\tAzimuth: 5.00 degrees from north, clockwise\n"
            "\tDip: 6.00 degrees down from horizontal\n"
            "\tChannel types: A, B\n"
            "\tSampling Rate: 10.00 Hz\n"
            "\tResponse information available")

        # Adding an empty sensor.
        c.sensor = Equipment(type=None)
        assert str(c) == (
            "Channel 'BHE', Location '10' \n"
            "\tTime range: -- - --\n"
            "\tLatitude: 1.0000, Longitude: 2.0000, Elevation: 3.0 m, "
            "Local Depth: 4.0 m\n"
            "\tAzimuth: 5.00 degrees from north, clockwise\n"
            "\tDip: 6.00 degrees down from horizontal\n"
            "\tChannel types: A, B\n"
            "\tSampling Rate: 10.00 Hz\n"
            "\tSensor (Description): None (None)\n"
            "\tResponse information available")

        # Adding a sensor with only a type.
        c.sensor = Equipment(type="random")
        assert str(c) == (
            "Channel 'BHE', Location '10' \n"
            "\tTime range: -- - --\n"
            "\tLatitude: 1.0000, Longitude: 2.0000, Elevation: 3.0 m, "
            "Local Depth: 4.0 m\n"
            "\tAzimuth: 5.00 degrees from north, clockwise\n"
            "\tDip: 6.00 degrees down from horizontal\n"
            "\tChannel types: A, B\n"
            "\tSampling Rate: 10.00 Hz\n"
            "\tSensor (Description): random (None)\n"
            "\tResponse information available")

        # Adding a sensor with only a description
        c.sensor = Equipment(description="some description")
        assert str(c) == (
            "Channel 'BHE', Location '10' \n"
            "\tTime range: -- - --\n"
            "\tLatitude: 1.0000, Longitude: 2.0000, Elevation: 3.0 m, "
            "Local Depth: 4.0 m\n"
            "\tAzimuth: 5.00 degrees from north, clockwise\n"
            "\tDip: 6.00 degrees down from horizontal\n"
            "\tChannel types: A, B\n"
            "\tSampling Rate: 10.00 Hz\n"
            "\tSensor (Description): None (some description)\n"
            "\tResponse information available")

        # Adding a sensor with type and description
        c.sensor = Equipment(type="random", description="some description")
        assert str(c) == (
            "Channel 'BHE', Location '10' \n"
            "\tTime range: -- - --\n"
            "\tLatitude: 1.0000, Longitude: 2.0000, Elevation: 3.0 m, "
            "Local Depth: 4.0 m\n"
            "\tAzimuth: 5.00 degrees from north, clockwise\n"
            "\tDip: 6.00 degrees down from horizontal\n"
            "\tChannel types: A, B\n"
            "\tSampling Rate: 10.00 Hz\n"
            "\tSensor (Description): random (some description)\n"
            "\tResponse information available")
Example #2
        latitude=station_df["lat"],
        longitude=station_df["lon"],
        elevation=station_df["nm_elev"],
        creation_date=obspy.UTCDateTime(2016, 1, 2),
        site=Site(name=station_df["siteID"]),
    )

    for comp in ["ex", "ey", "hx", "hy", "hz"]:
        if station_df["{0}_azm".format(comp)] is not None:
            if "h" in comp:
                cha = Channel(
                    code=comp.upper(),
                    location_code="",
                    latitude=station_df["lat"],
                    longitude=station_df["lon"],
                    elevation=station_df["nm_elev"],
                    depth=0,
                    azimuth=station_df["{0}_azm".format(comp)],
                    dip=0,
                    sample_rate=station_df["sampling_rate"],
                )
                cha.channel_number = station_df["{0}_num".format(comp)]
                cha.sensor = Equipment(
                    serial_number=station_df["{0}_id".format(comp)])
            elif "e" in comp:
                cha = Channel(
                    code=comp.upper(),
                    location_code="",
                    latitude=station_df["lat"],
                    longitude=station_df["lon"],
                    elevation=station_df["nm_elev"],
Example #3
def df_to_inventory(df) -> obspy.Inventory:
    """
    Create a station inventory from a dataframe.

    Parameters
    ----------
    df
        A dataframe which must have the same columns as the ones produced by
        :func:`obsplus.stations_to_df`.

    Notes
    -----
    The dataframe can also contain columns named "sensor_keys" and
    "datalogger_keys" which will indicate the response information should
    be fetched suing obspy's ability to interact with the nominal response
    library. Each of these columns should either contain tuples or strings
    where the keys are separated by double underscores (__).
    """
    def _make_key_mappings(cls):
        """ Create a mapping from columns in df to kwargs for cls. """
        base_params = set(inspect.signature(cls).parameters)
        new_map = mapping_keys[cls]
        base_map = {x: x for x in base_params - set(new_map)}
        base_map.update(new_map)
        return base_map

    def _groupby_if_exists(df, columns):
        """ Groupby columns if they exist on dataframe, else return empty. """
        cols = list(obsplus.utils.iterate(columns))
        if not set(cols).issubset(df.columns):
            return

        # copy df and set missing start/end times to reasonable values
        # this is needed so they get included in a groupby
        df = df.copy()
        isnan = df.isna()
        default_start = pd.Timestamp(SMALLDT64)
        default_end = pd.Timestamp(LARGEDT64)

        if "start_date" in columns:
            df["start_date"] = df["start_date"].fillna(default_start)
        if "end_date" in columns:
            df["end_date"] = df["end_date"].fillna(default_end)

        for ind, df_sub in df.groupby(cols):
            # replace NaN values
            if isnan.any().any():
                df_sub[isnan.loc[df_sub.index]] = np.nan
            yield ind, df_sub

    def _get_kwargs(series, key_mapping):
        """ create the kwargs from a series and key mapping. """
        out = {}
        for k, v in key_mapping.items():
            # skip if requested kwarg is not in the series
            if v not in series:
                continue
            value = series[v]
            value = value if not pd.isnull(value) else None
            # if the type needs to be cast to something else
            if k in type_mappings and value is not None:
                value = type_mappings[k](value)
            out[k] = value

        return out

    @lru_cache()
    def get_nrl():
        """ Initiate a nominal response library object. """
        from obspy.clients.nrl import NRL

        return NRL()

    @lru_cache()
    def get_response(datalogger_keys, sensor_keys):
        nrl = get_nrl()
        kwargs = dict(datalogger_keys=datalogger_keys, sensor_keys=sensor_keys)
        return nrl.get_response(**kwargs)

    def _get_resp_key(key):
        """ Get response keys from various types. """
        if isinstance(key, str) or key is None:
            return tuple((key or "").split("__"))
        else:
            return tuple(key)

    def _maybe_add_response(series, channel_kwargs):
        """ Maybe add the response information if required columns exist. """
        # bail out if the required columns do not exist
        if not {"sensor_keys", "datalogger_keys"}.issubset(set(series.index)):
            return
        # determine if both required columns are populated, else bail out
        sensor_keys = _get_resp_key(series["sensor_keys"])
        datalogger_keys = _get_resp_key(series["datalogger_keys"])
        if not (sensor_keys and datalogger_keys):
            return
        # at this point all the required info for resp lookup should be there
        channel_kwargs["response"] = get_response(datalogger_keys, sensor_keys)

    # Deal with pandas dtype weirdness
    # TODO remove this when custom column functions are supported by DataFrame
    #  Extractor (part of the big refactor in #131)
    for col in NSLC:
        df[col] = df[col].astype(str).str.replace(".0", "")

    # first get key_mappings
    net_map = _make_key_mappings(Network)
    sta_map = _make_key_mappings(Station)
    cha_map = _make_key_mappings(Channel)
    # next define columns groupbys should be performed on
    net_columns = ["network"]
    sta_columns = ["station", "start_date", "end_date"]
    cha_columns = ["channel", "location", "start_date", "end_date"]
    # Ensure input is a dataframe
    df = obsplus.stations_to_df(df)
    # Iterate networks and create stations
    networks = []
    for net_code, net_df in _groupby_if_exists(df, net_columns):
        stations = []
        for st_code, sta_df in _groupby_if_exists(net_df, sta_columns):
            channels = []
            for ch_code, ch_df in _groupby_if_exists(sta_df, cha_columns):
                chan_series = ch_df.iloc[0]
                kwargs = _get_kwargs(chan_series, cha_map)
                # try to add the inventory
                _maybe_add_response(chan_series, kwargs)
                channels.append(Channel(**kwargs))
            kwargs = _get_kwargs(sta_df.iloc[0], sta_map)
            stations.append(Station(channels=channels, **kwargs))
        kwargs = _get_kwargs(net_df.iloc[0], net_map)
        networks.append(Network(stations=stations, **kwargs))

    return obspy.Inventory(networks=networks,
                           source=f"ObsPlus_v{obsplus.__version__}")
Example #4
def create_simple_inventory(network,
                            station,
                            latitude=None,
                            longitude=None,
                            elevation=None,
                            depth=None,
                            start_date=None,
                            end_date=None,
                            location_code="S3",
                            channel_code="MX"):
    """
    Create a simple inventory with only location information for the ZNE
    components; especially useful for synthetic data.
    """
    azi_dict = {"MXZ": 0.0, "MXN": 0.0, "MXE": 90.0}
    dip_dict = {"MXZ": 90.0, "MXN": 0.0, "MXE": 0.0}
    channel_list = []

    if start_date is None:
        start_date = UTCDateTime(0)

    # specfem default channel code is MX
    for _comp in ["Z", "E", "N"]:
        _chan_code = "%s%s" % (channel_code, _comp)
        chan = Channel(_chan_code,
                       location_code,
                       latitude=latitude,
                       longitude=longitude,
                       elevation=elevation,
                       depth=depth,
                       azimuth=azi_dict[_chan_code],
                       dip=dip_dict[_chan_code],
                       start_date=start_date,
                       end_date=end_date)
        channel_list.append(chan)

    site = Site("N/A")
    sta = Station(station,
                  latitude=latitude,
                  longitude=longitude,
                  elevation=elevation,
                  channels=channel_list,
                  site=site,
                  creation_date=start_date,
                  total_number_of_channels=3,
                  selected_number_of_channels=3)

    nw = Network(network,
                 stations=[
                     sta,
                 ],
                 total_number_of_stations=1,
                 selected_number_of_stations=1)

    inv = Inventory([
        nw,
    ],
                    source="SPECFEM3D_GLOBE",
                    sender="Princeton",
                    created=UTCDateTime.now())

    return inv
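
A minimal usage sketch of create_simple_inventory; the network/station codes, coordinates, and output filename below are made up:

inv = create_simple_inventory("XX", "SYN01",
                              latitude=10.0, longitude=20.0,
                              elevation=0.0, depth=0.0)
# One station with MXZ/MXN/MXE channels; serialize it as StationXML.
inv.write("SYN01_station.xml", format="stationxml")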
Example #5
def get_inventory():
    # We'll first create all the various objects. These strongly follow the
    # hierarchy of StationXML files.
    inv = Inventory(
        # We'll add networks later.
        networks=[],
        # The source should be the ID of whoever created the file.
        source="ObsPy-Tutorial")

    net = Network(
        # This is the network code according to the SEED standard.
        code="US",
        # A list of stations. We'll add one later.
        stations=[],
        description="A test stations.",
        # Start and end dates are optional.
        start_date=UTCDateTime(2016, 1, 2))

    sta = Station(
        # This is the station code according to the SEED standard.
        code="ABCD",
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        creation_date=UTCDateTime(2016, 1, 2),
        site=Site(name="First station"))

    cha1 = Channel(
        # This is the channel code according to the SEED standard.
        code="HN1",
        # This is the location code according to the SEED standard.
        location_code="11",
        # Note that these coordinates can differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=0.0,
        dip=-90.0,
        sample_rate=1)
    cha2 = Channel(
        # This is the channel code according to the SEED standard.
        code="HN2",
        # This is the location code according to the SEED standard.
        location_code="11",
        # Note that these coordinates can differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=90.0,
        dip=-90.0,
        sample_rate=1)
    cha3 = Channel(
        # This is the channel code according to the SEED standard.
        code="HNZ",
        # This is the location code according to the SEED standard.
        location_code="11",
        # Note that these coordinates can differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=0.0,
        dip=-90.0,
        sample_rate=1)

    # Now tie it all together.
    sta.channels.append(cha1)
    sta.channels.append(cha2)
    sta.channels.append(cha3)
    net.stations.append(sta)
    inv.networks.append(net)

    return inv
Example #6
    def test_reading_channel_file(self):
        """
        Test reading a file at the channel level.
        """

        resp_1 = Response(
            instrument_sensitivity=InstrumentSensitivity(frequency=0.02,
                                                         input_units="M/S",
                                                         output_units=None,
                                                         value=4.88233E8))
        resp_2 = Response(
            instrument_sensitivity=InstrumentSensitivity(frequency=0.03,
                                                         input_units="M/S",
                                                         output_units=None,
                                                         value=4.98112E8))
        resp_3 = Response(
            instrument_sensitivity=InstrumentSensitivity(frequency=0.03,
                                                         input_units="M/S",
                                                         output_units=None,
                                                         value=6.27252E8))

        # Manually create an expected Inventory object.
        expected_inv = Inventory(
            source=None,
            networks=[
                Network(
                    code="AK",
                    stations=[
                        Station(
                            code="BAGL",
                            latitude=60.4896,
                            longitude=-142.0915,
                            elevation=1470,
                            channels=[
                                Channel(
                                    code="LHZ",
                                    location_code="",
                                    latitude=60.4896,
                                    longitude=-142.0915,
                                    elevation=1470,
                                    depth=0.0,
                                    azimuth=0.0,
                                    dip=-90.0,
                                    sample_rate=1.0,
                                    sensor=Equipment(
                                        type="Nanometrics Trillium 240 Sec "
                                        "Response sn 400 and a"),
                                    start_date=obspy.UTCDateTime(
                                        "2013-01-01T00:00:00"),
                                    end_date=obspy.UTCDateTime(
                                        "2599-12-31T23:59:59"),
                                    response=resp_1)
                            ]),
                        Station(
                            code="BWN",
                            latitude=64.1732,
                            longitude=-149.2991,
                            elevation=356.0,
                            channels=[
                                Channel(
                                    code="LHZ",
                                    location_code="",
                                    latitude=64.1732,
                                    longitude=-149.2991,
                                    elevation=356.0,
                                    depth=0.0,
                                    azimuth=0.0,
                                    dip=-90.0,
                                    sample_rate=1.0,
                                    sensor=Equipment(
                                        type="Nanometrics Trillium 240 Sec "
                                        "Response sn 400 and a"),
                                    start_date=obspy.UTCDateTime(
                                        "2010-07-23T00:00:00"),
                                    end_date=obspy.UTCDateTime(
                                        "2014-05-28T23:59:59"),
                                    response=resp_1),
                                Channel(
                                    code="LHZ",
                                    location_code="",
                                    latitude=64.1732,
                                    longitude=-149.2991,
                                    elevation=356.0,
                                    depth=1.5,
                                    azimuth=0.0,
                                    dip=-90.0,
                                    sample_rate=1.0,
                                    sensor=Equipment(
                                        type="Nanometrics Trillium 120 Sec "
                                        "Response/Quanterra 33"),
                                    start_date=obspy.UTCDateTime(
                                        "2014-08-01T00:00:00"),
                                    end_date=obspy.UTCDateTime(
                                        "2599-12-31T23:59:59"),
                                    response=resp_2)
                            ])
                    ]),
                Network(
                    code="AZ",
                    stations=[
                        Station(
                            code="BZN",
                            latitude=33.4915,
                            longitude=-116.667,
                            elevation=1301.0,
                            channels=[
                                Channel(
                                    code="LHZ",
                                    location_code="",
                                    latitude=33.4915,
                                    longitude=-116.667,
                                    elevation=1301.0,
                                    depth=0.0,
                                    azimuth=0.0,
                                    dip=-90.0,
                                    sample_rate=1.0,
                                    sensor=Equipment(
                                        type=
                                        "Streckeisen STS-2 G1/Quanterra 330 "
                                        "Linear Phase Be"),
                                    start_date=obspy.UTCDateTime(
                                        "2010-07-26T17:22:00"),
                                    end_date=obspy.UTCDateTime(
                                        "2013-07-15T21:22:23"),
                                    response=resp_3),
                                Channel(
                                    code="LHZ",
                                    location_code="",
                                    latitude=33.4915,
                                    longitude=-116.667,
                                    elevation=1301.0,
                                    depth=0.0,
                                    azimuth=0.0,
                                    dip=-90.0,
                                    sample_rate=1.0,
                                    sensor=Equipment(
                                        type=
                                        "Streckeisen STS-2 G1/Quanterra 330 "
                                        "Linear Phase Be"),
                                    start_date=obspy.UTCDateTime(
                                        "2013-07-15T21:22:23"),
                                    end_date=obspy.UTCDateTime(
                                        "2013-10-22T19:30:00"),
                                    response=resp_3),
                                Channel(
                                    code="LHZ",
                                    location_code="",
                                    latitude=33.4915,
                                    longitude=-116.667,
                                    elevation=1301.0,
                                    depth=0.0,
                                    azimuth=0.0,
                                    dip=-90.0,
                                    sample_rate=1.0,
                                    sensor=Equipment(
                                        type=
                                        "Streckeisen STS-2 G1/Quanterra 330 "
                                        "Linear Phase Be"),
                                    start_date=obspy.UTCDateTime(
                                        "2013-10-22T19:30:00"),
                                    end_date=obspy.UTCDateTime(
                                        "2599-12-31T23:59:59"),
                                    response=resp_3)
                            ])
                    ])
            ])

        # Read from a filename.
        filename = os.path.join(self.data_dir, "channel_level_fdsn.txt")
        inv = read_fdsn_station_text_file(filename)
        inv_obs = obspy.read_inventory(filename)

        # Copy creation date as it will be slightly different otherwise.
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

        # Read from open file in text mode.
        with open(filename, "rt", encoding="utf8") as fh:
            inv = read_fdsn_station_text_file(fh)
            fh.seek(0, 0)
            inv_obs = obspy.read_inventory(fh)
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

        # Read from open file in binary mode.
        with open(filename, "rb") as fh:
            inv = read_fdsn_station_text_file(fh)
            fh.seek(0, 0)
            inv_obs = obspy.read_inventory(fh)
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

        # Read from StringIO.
        with open(filename, "rt", encoding="utf8") as fh:
            with io.StringIO(fh.read()) as buf:
                buf.seek(0, 0)
                inv = read_fdsn_station_text_file(buf)
                buf.seek(0, 0)
                inv_obs = obspy.read_inventory(buf)
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

        # Read from BytesIO.
        with open(filename, "rb") as fh:
            with io.BytesIO(fh.read()) as buf:
                buf.seek(0, 0)
                inv = read_fdsn_station_text_file(buf)
                buf.seek(0, 0)
                inv_obs = obspy.read_inventory(buf)
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)
Example #7
def main(argv):
    with open("IRIS-ALL.xml", 'r', buffering=1024 * 1024) as f:
        inv = read_inventory(f)
    # if os.path.exists("IRIS-ALL.pkl"): # doesn't work on CentOS for some reason
    #     with open('IRIS-ALL.pkl', 'rb') as f:
    #         import cPickle as pkl
    #         inv = pkl.load(f)
    # else:
    #     inv = read_inventory("IRIS-ALL.xml")
    #     with open('IRIS-ALL.pkl', 'wb') as f:
    #         import pickle as pkl
    #         pkl.dump(inv, f, pkl.HIGHEST_PROTOCOL)
    sensorDict, responseDict = extract_unique_sensors_responses(inv)
    print('\nFound {0} response objects with keys: {1}'.format(
        len(responseDict.keys()), responseDict.keys()))

    # unknown stations in Indonesia are usually installed by Potsdam and we assume they have network name GE
    default_net = 'GE'
    ehb1 = read_eng('BMG.STN')
    ehb2 = read_eng('ISC.STN')
    ehb = np.unique(np.vstack((ehb1, ehb2)), axis=0)

    isc1 = read_isc('ehb.stn')
    isc2 = read_isc('iscehb.stn')
    isc = np.unique(np.vstack((isc1, isc2)), axis=0)

    catalogue = []
    for i in xrange(ehb.shape[0]):
        filed = False
        xml = False
        stn_found = isc[isc[:, 0] == ehb[i, 0], :]
        min_dist = 10e10
        if stn_found.shape[0] > 0:
            if stn_found.shape[0] > 1:
                for j in xrange(stn_found.shape[0]):
                    dist = locations2degrees(np.float(stn_found[j, 2]),
                                             np.float(stn_found[j, 3]),
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                    if dist < min_dist:
                        min_dist = dist
                        record = stn_found[j, :]
            else:
                min_dist = locations2degrees(np.float(stn_found[0, 2]),
                                             np.float(stn_found[0, 3]),
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                record = stn_found[0, :]

            #                Now we try to find the same station in XML file
            #                if min_dist > 1. or stn_found.shape[0]==0:

        xstn_found = inv.select(station=ehb[i, 0], channel="*HZ")

        if len(stn_found) == 0 and len(xstn_found) == 0:
            # we failed to find the station anywhere, so assign dummy values
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'SHZ',
                '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            min_dist = 0.
            filed = True
        else:
            # if the station is found somewhere, we iterate and see whether the
            # XML has data, giving it preference by adding an extra value to
            # the min_dist found in ISC
            if len(xstn_found) > 0:
                #                        print "----------",len(xstn_found)
                #                        print xstn_found[0][0].latitude
                min_dist = min_dist + 0.1
                for j in xrange(len(xstn_found)):
                    dist = locations2degrees(xstn_found[j][0].latitude,
                                             xstn_found[j][0].longitude,
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                    if min_dist > dist:
                        min_dist = dist
                        record = xstn_found[j]
                        #                                print record
                        xml = True

        # last defence: if the station has been matched but the distance
        # between the declared and found locations is more than 1 degree
        if min_dist > 1:
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'SHZ',
                '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            filed = True
        if xml:
            xml = False

        else:
            if filed:

                if len(record[7]) < 5:
                    record[7] = '2599-12-31 23:59:59'
                catalogue.append(
                    record)  # Alexei: should be extend, not append

            else:

                stn_found = isc[(isc[:, 0] == record[0]) &
                                (isc[:, 1] == record[1]), :]

                for k in xrange(stn_found.shape[0]):
                    net = Network(code=stn_found[k, 1],
                                  stations=[],
                                  description=' ')
                    if len(stn_found[k, 7]) < 5:
                        stn_found[k, 7] = '2599-12-31 23:59:59'
                    catalogue.append(stn_found[
                        k, :])  # Alexei: should be extend, not append

    stn_found = np.unique(np.array(catalogue), axis=0)
    if len(stn_found[stn_found == '']) > 0 or len(
            stn_found[stn_found == ' ']) > 0:
        print("Some elements are empty, check the list")

    # We have composed our inventory. However, some stations from the ISC list
    # may have been left behind, so check whether any ISC stations were missed.
    lost = []
    for j in xrange(isc.shape[0]):
        # is there any common station name?
        common_st = stn_found[isc[j, 0] == stn_found[:, 0]]
        if common_st.shape[0] > 0:
            # is network code the same?
            common_net = common_st[common_st[:, 1] == isc[j, 1]]
            if common_net.shape[0] < 1:
                # ok we found forgotten one, check the XML
                if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                    # Bingo...
                    lost.append(isc[j, :])
        else:
            if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                # Bingo...
                lost.append(isc[j, :])

    stn_found = np.vstack((stn_found, np.array(lost)))

    netDict = defaultdict(list)
    for k in xrange(stn_found.shape[0]):
        result = inv.select(network=stn_found[k, 1])
        if (len(result.networks)):
            net = result.networks[0]
            net.stations = []
        else:
            net = Network(code=stn_found[k, 1], stations=[], description=' ')

        # print stn_found[k, 1]

        if len(stn_found[k, 7]) < 5:
            stn_found[k, 7] = '2599-12-31 23:59:59'
        catalogue.append(stn_found[k, :])
        sta = Station(code=stn_found[k, 0], creation_date=utcdatetime.UTCDateTime(stn_found[k, 6]), \
                      termination_date=utcdatetime.UTCDateTime(stn_found[k, 7]), \
                      site=Site(name=' '), \
                      latitude=np.float(stn_found[k, 2]), \
                      longitude=np.float(stn_found[k, 3]), \
                      elevation=np.float(stn_found[k, 4]))

        if (stn_found[k, 5] in responseDict.keys()):
            r = responseDict[stn_found[k, 5]]

            cha = Channel(code=stn_found[k, 5], \
                          depth=0., \
                          azimuth=0., \
                          dip=-90., \
                          location_code='', \
                          latitude=np.float(stn_found[k, 2]), \
                          longitude=np.float(stn_found[k, 3]), \
                          elevation=np.float(stn_found[k, 4]), \
                          # sensor=sensorDict[stn_found[k,5]], \
                          response=r)

            sta.channels.append(cha)

            if (type(netDict[stn_found[k, 1]]) == Network):
                netDict[stn_found[k, 1]].stations.append(sta)
            else:
                net.stations.append(sta)
                netDict[stn_found[k, 1]] = net

            #                 print 'np',stn_found[k,:]
            # end if

    our_xml = Inventory(networks=netDict.values(), source='EHB')

    print('Writing output files..')
    output_folder = "output_old"
    pathlib.Path(output_folder).mkdir(exist_ok=True)
    for net in our_xml.networks:
        currInv = Inventory(networks=[net], source='EHB')
        fname = "network_{0}.xml".format(net.code)
        try:
            currInv.write(os.path.join(output_folder, fname),
                          format="stationxml",
                          validate=True)
        except Exception as e:
            print("FAILED writing file {0} for network {1}, continuing".format(
                fname, net.code))
            continue

    # our_xml.write("station.xml",format="stationxml", validate=True)
    our_xml.write("station.txt", format="stationtxt")
Example #8
 _channel = Channel(
     code=_channel_code,
     location_code=Channels[channel]['_location_code'],
     latitude=lat,
     longitude=lon,
     elevation=elev,
     depth=Channels[channel]["_depth"],
     start_date=obspy.UTCDateTime(epoch.split("_")[0]),
     end_date=_channel_end_date,
     azimuth=Channels[channel]["_azimuth"],
     dip=Channels[channel]["_dip"],
     types=Channels[channel]["_types"],
     sample_rate=Channels[channel]["_sample_rate"],
     equipment=obspy.core.inventory.Equipment(
         description=Channels[channel]
         ["_equipment_description"],
         serial_number=Channels[channel]
         ["_equipment_serial"]),
     sensor=obspy.core.inventory.Equipment(
         description=Channels[channel]
         ["_sensor_description"],
         serial_number=Channels[channel]
         ["_equipment_serial"]),
     response=_response,
     external_references=[
         ExternalReference(
             Channels[channel]["_dataSearchURL"],
             'Data Search URL.'),
         ExternalReference(
             Channels[channel]["_deviceURL"],
             'Device URL.')
     ])
Example #9
def main(argv):
    '''@package isc2stnxml
       It gathers station information from all STN files provided in the ISC
       and Engdahl catalogues, assigning the correct network code.
       When a proper network code cannot be identified, the program just
       guesses it, sorry...
    '''
    inv = read_inventory("IRIS-ALL.xml")

    # unknown stations in Indonesia are usually installed by Potsdam and we assume they have network name GE
    default_net = 'GE'
    ehb1 = read_eng('BMG.STN')
    ehb2 = read_eng('ISC.STN')
    ehb = np.unique(np.vstack((ehb1, ehb2)), axis=0)

    isc1 = read_isc('ehb.stn')
    isc2 = read_isc('iscehb.stn')
    isc = np.unique(np.vstack((isc1, isc2)), axis=0)

    catalogue = []
    our_xml = Inventory(networks=[], source='EHB')

    for i in xrange(ehb.shape[0]):
        filed = False
        xml = False
        stn_found = isc[isc[:, 0] == ehb[i, 0], :]
        min_dist = 10e10
        if stn_found.shape[0] > 0:
            if stn_found.shape[0] > 1:
                for j in xrange(stn_found.shape[0]):
                    dist = locations2degrees(np.float(stn_found[j, 2]),
                                             np.float(stn_found[j, 3]),
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                    if dist < min_dist:
                        min_dist = dist
                        record = stn_found[j, :]
            else:
                min_dist = locations2degrees(np.float(stn_found[0, 2]),
                                             np.float(stn_found[0, 3]),
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                record = stn_found[0, :]

#                Now we try to find the same station in XML file
#                if min_dist > 1. or stn_found.shape[0]==0:

        xstn_found = inv.select(station=ehb[i, 0], channel="*HZ")

        if len(stn_found) == 0 and len(xstn_found) == 0:
            # we failed to find the station anywhere, so assign dummy values
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'Z',
                '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            min_dist = 0.
            filed = True
        else:
            # if the station is found somewhere, we iterate and see whether the
            # XML has data, giving it preference by adding an extra value to
            # the min_dist found in ISC
            if len(xstn_found) > 0:
                #                        print "----------",len(xstn_found)
                #                        print xstn_found[0][0].latitude
                min_dist = min_dist + 0.1
                for j in xrange(len(xstn_found)):
                    dist = locations2degrees(xstn_found[j][0].latitude,
                                             xstn_found[j][0].longitude,
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                    if min_dist > dist:
                        min_dist = dist
                        record = xstn_found[j]
                        #                                print record
                        xml = True

        # last defence: if the station has been matched but the distance
        # between the declared and found locations is more than 1 degree
        if min_dist > 1:
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'Z',
                '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            filed = True
        if xml:
            #our_xml.networks.append(record)
            xml = False

        else:
            if filed:

                if len(record[7]) < 5:
                    record[7] = '2599-12-31 23:59:59'
                catalogue.append(record)

            else:

                stn_found = isc[(isc[:, 0] == record[0]) &
                                (isc[:, 1] == record[1]), :]

                for k in xrange(stn_found.shape[0]):
                    net = Network(code=stn_found[k, 1],
                                  stations=[],
                                  description=' ')
                    if len(stn_found[k, 7]) < 5:
                        stn_found[k, 7] = '2599-12-31 23:59:59'
                    catalogue.append(stn_found[k, :])

    stn_found = np.unique(np.array(catalogue), axis=0)
    if len(stn_found[stn_found == '']) > 0 or len(
            stn_found[stn_found == ' ']) > 0:
        print("Some elements are empty, check the list")

    # We have composed our inventory. However, some stations from the ISC list
    # may have been left behind, so check whether any ISC stations were missed.
    lost = []
    for j in xrange(isc.shape[0]):
        # is there any common station name?
        common_st = stn_found[isc[j, 0] == stn_found[:, 0]]
        if common_st.shape[0] > 0:
            # is network code the same?
            common_net = common_st[common_st[:, 1] == isc[j, 1]]
            if common_net.shape[0] < 1:
                # ok we found forgotten one, check the XML
                if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                    # Bingo...
                    lost.append(isc[j, :])
        else:
            if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                # Bingo...
                lost.append(isc[j, :])

    stn_found = np.vstack((stn_found, np.array(lost)))

    for k in xrange(stn_found.shape[0]):

        net = Network(code=stn_found[k, 1], stations=[], description=' ')
        if len(stn_found[k, 7]) < 5:
            stn_found[k, 7] = '2599-12-31 23:59:59'
        catalogue.append(stn_found[k, :])
        sta = Station(code=stn_found[k, 0],
                      creation_date=utcdatetime.UTCDateTime(stn_found[k, 6]),
                      termination_date=utcdatetime.UTCDateTime(stn_found[k, 7]),
                      site=Site(name=' '),
                      latitude=np.float(stn_found[k, 2]),
                      longitude=np.float(stn_found[k, 3]),
                      elevation=np.float(stn_found[k, 4]))

        cha = Channel(code=stn_found[k, 5],
                      depth=0.,
                      azimuth=0.,
                      dip=-90.,
                      location_code='',
                      latitude=np.float(stn_found[k, 2]),
                      longitude=np.float(stn_found[k, 3]),
                      elevation=np.float(stn_found[k, 4]))

        sta.channels.append(cha)
        net.stations.append(sta)
        our_xml.networks.append(net)


#             print 'np',stn_found[k,:]

    our_xml.write("station.xml", format="stationxml", validate=True)
    our_xml.write("station.txt", format="stationtxt")
Example #10
    def trace_to_inventory(self, trace):
        # if sac files are opened, it's useful to extract inventory from their streams so that we can populate the
        # stations tabs and the location widget
        new_inventory = None

        # The next bit is modified from the obspy webpage on building a stationxml file from scratch
        # https://docs.obspy.org/tutorial/code_snippets/stationxml_file_from_scratch.html
        #
        # We'll first create all the various objects. These strongly follow the
        # hierarchy of StationXML files.
        # initialize the lat/lon/ele
        lat = 0.0
        lon = 0.0
        ele = -1.0

        _network = trace.stats['network']
        _station = trace.stats['station']
        _channel = trace.stats['channel']
        _location = trace.stats['location']

        # if the trace is from a sac file, the sac header might have some inventory information
        if trace.stats['_format'] == 'SAC':

            if 'stla' in trace.stats['sac']:
                lat = trace.stats['sac']['stla']

            if 'stlo' in trace.stats['sac']:
                lon = trace.stats['sac']['stlo']

            if 'stel' in trace.stats['sac']:
                ele = trace.stats['sac']['stel']
            else:
                ele = 0.333

        if _network == 'LARSA' and _station == '121':
            if _channel == 'ai0':
                lat = 35.8492497
                lon = -106.2705465
            elif _channel == 'ai1':
                lat = 35.84924682
                lon = -106.2705505
            elif _channel == 'ai2':
                lat = 35.84925165
                lon = -106.2705516

        if lat == 0.0 or lon == 0.0 or ele < 0:
            if self.fill_sta_info_dialog.exec_(_network, _station, _location,
                                               _channel, lat, lon, ele):

                edited_values = self.fill_sta_info_dialog.get_values()

                lat = edited_values['lat']
                lon = edited_values['lon']
                ele = edited_values['ele']

                _network = edited_values['net']
                _station = edited_values['sta']
                _location = edited_values['loc']
                _channel = edited_values['cha']

                # (re)populate sac headers where possible
                if trace.stats['_format'] == 'SAC':
                    trace.stats['sac']['stla'] = lat
                    trace.stats['sac']['stlo'] = lon
                    trace.stats['sac']['stel'] = ele
                    trace.stats['sac']['knetwk'] = _network
                    trace.stats['sac']['kstnm'] = _station
                # (re)populate trace stats where possible
                trace.stats['network'] = _network
                trace.stats['station'] = _station
                trace.stats['location'] = _location
                trace.stats['channel'] = _channel
        try:
            new_inventory = Inventory(
                # We'll add networks later.
                networks=[],
                # The source should be the ID of whoever created the file.
                source="InfraView")

            net = Network(
                # This is the network code according to the SEED standard.
                code=_network,
                # A list of stations. We'll add one later.
                stations=[],
                # Description isn't something that's in the trace stats or SAC
                # header, so let's set it to the network code.
                description=_network,
                # Start and end dates are optional. They are not stored in the
                # SAC header, so let's set the start date to 1/1/1900.
                start_date=UTCDateTime(1900, 1, 1))

            sta = Station(
                # This is the station code according to the SEED standard.
                code=_station,
                latitude=lat,
                longitude=lon,
                elevation=ele,
                # Creation_date is not saved in the trace stats or sac header
                creation_date=UTCDateTime(1900, 1, 1),
                # Site name is not in the trace stats or SAC header, so set it to the station code
                site=Site(name=_station))

            # This is the channel code according to the SEED standard.
            cha = Channel(
                code=_channel,
                # This is the location code according to the SEED standard.
                location_code=_location,
                # Note that these coordinates can differ from the station coordinates.
                latitude=lat,
                longitude=lon,
                elevation=ele,
                depth=0.0)

            # Now tie it all together.
            # cha.response = response
            sta.channels.append(cha)
            net.stations.append(sta)
            new_inventory.networks.append(net)

            return new_inventory

        except ValueError:
            bad_values = ""
            if lon < -180 or lon > 180:
                bad_values = bad_values + "\tlon = " + str(lon) + "\n"
            if lat < -90 or lat > 90:
                bad_values = bad_values + "\tlat = " + str(lat)
            self.errorPopup("There seems to be a value error in " + _network +
                            "." + _station + "." + _channel +
                            "\nPossible bad value(s) are:\n" + bad_values)
Example #11
def create_new_skeleton_inventory_file(path2xmlfile):
    """
    write a NEW skeleton inventory xml file
    :param path2xmlfile: path to a new xml file.
    :return:
    """
    # We'll first create all the various objects. These strongly follow the
    # hierarchy of StationXML files.
    inv = Inventory(
        # We'll add networks later.
        networks=[],
        # The source should be the ID of whoever created the file.
        source="ObsPy-Tutorial")

    net = Network(
        # This is the network code according to the SEED standard.
        code="XX",
        # A list of stations. We'll add one later.
        stations=[],
        description="A test stations.",
        # Start and end dates are optional.
        start_date=obspy.UTCDateTime(2016, 1, 2))

    sta = Station(
        # This is the station code according to the SEED standard.
        code="ABC",
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        creation_date=obspy.UTCDateTime(2016, 1, 2),
        site=Site(name="First station"))

    cha = Channel(
        # This is the channel code according to the SEED standard.
        code="HHZ",
        # This is the location code according to the SEED standard.
        location_code="",
        # Note that these coordinates can differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=0.0,
        dip=-90.0,
        sample_rate=200)

    # By default this accesses the NRL online. Offline copies of the NRL can
    # also be used instead
    nrl = NRL()
    # The contents of the NRL can be explored interactively in a Python prompt,
    # see API documentation of NRL submodule:
    # http://docs.obspy.org/packages/obspy.clients.nrl.html
    # Here we assume that the end point of data logger and sensor are already
    # known:
    response = nrl.get_response(  # doctest: +SKIP
        sensor_keys=['Streckeisen', 'STS-1', '360 seconds'],
        datalogger_keys=['REF TEK', 'RT 130 & 130-SMA', '1', '200'])

    # Now tie it all together.
    cha.response = response
    sta.channels.append(cha)
    net.stations.append(sta)
    inv.networks.append(net)

    # And finally write it to a StationXML file. We also force a validation against
    # the StationXML schema to ensure it produces a valid StationXML file.
    #
    # Note that it is also possible to serialize to any of the other inventory
    # output formats ObsPy supports.
    inv.write(path2xmlfile, format="stationxml", validate=True)
Example #12
sta = Station(
    # This is the station code according to the SEED standard.
    code="ABC",
    latitude=1.0,
    longitude=2.0,
    elevation=345.0,
    creation_date=obspy.UTCDateTime(2016, 1, 2),
    site=Site(name="First station"))

cha = Channel(
    # This is the channel code according to the SEED standard.
    code="HHZ",
    # This is the location code according to the SEED standard.
    location_code="",
    # Note that these coordinates can differ from the station coordinates.
    latitude=1.0,
    longitude=2.0,
    elevation=345.0,
    depth=10.0,
    azimuth=0.0,
    dip=-90.0,
    sample_rate=200)

# By default this accesses the always up-to-date NRL online.
# Offline copies of the NRL can also be used instead.
nrl = NRL()
# The contents of the NRL can be explored interactively in a Python prompt,
# see API documentation of NRL submodule:
# http://docs.obspy.org/packages/obspy.clients.nrl.html
# Here we assume that the end point of data logger and sensor are already
# known:
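# (The snippet is truncated here; judging from the identical setup in
#  Example #11 above, the lookup presumably continues along these lines.)
response = nrl.get_response(  # doctest: +SKIP
    sensor_keys=['Streckeisen', 'STS-1', '360 seconds'],
    datalogger_keys=['REF TEK', 'RT 130 & 130-SMA', '1', '200'])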
Example #13
    def test_write_stationtxt(self):
        """
        Test writing stationtxt at channel level
        """
        # Manually create a test Inventory object.
        resp_1 = Response(
            instrument_sensitivity=InstrumentSensitivity(frequency=0.02,
                                                         input_units="M/S",
                                                         output_units=None,
                                                         value=8.48507E8))
        resp_2 = Response(
            instrument_sensitivity=InstrumentSensitivity(frequency=1.0,
                                                         input_units="M/S**2",
                                                         output_units=None,
                                                         value=53435.4))
        resp_3 = Response(
            instrument_sensitivity=InstrumentSensitivity(frequency=0.03,
                                                         input_units="M/S",
                                                         output_units=None,
                                                         value=6.27252E8))
        test_inv = Inventory(
            source=None,
            networks=[
                Network(
                    code="IU",
                    start_date=obspy.UTCDateTime("1988-01-01T00:00:00"),
                    end_date=obspy.UTCDateTime("2500-12-31T23:59:59"),
                    total_number_of_stations=1,
                    description="Global Seismograph Network (GSN - IRIS/USGS)",
                    stations=[
                        Station(code="ANMO",
                                latitude=34.9459,
                                longitude=-106.4572,
                                elevation=1850.0,
                                channels=[
                                    Channel(
                                        code="BCI",
                                        location_code="",
                                        latitude=34.9459,
                                        longitude=-106.4572,
                                        elevation=1850.0,
                                        depth=100.0,
                                        azimuth=0.0,
                                        dip=0.0,
                                        sample_rate=0.0,
                                        sensor=Equipment(
                                            type="Geotech KS-36000-I Borehole "
                                            "Seismometer"),
                                        start_date=obspy.UTCDateTime(
                                            "1989-08-29T00:00:00"),
                                        end_date=obspy.UTCDateTime(
                                            "1995-02-01T00:00:00"),
                                        response=resp_1),
                                    Channel(code="LNZ",
                                            location_code="20",
                                            latitude=34.9459,
                                            longitude=-106.4572,
                                            elevation=1820.7,
                                            depth=0.0,
                                            azimuth=0.0,
                                            dip=-90.0,
                                            sample_rate=0.0,
                                            sensor=Equipment(
                                                type="Titan Accelerometer"),
                                            start_date=obspy.UTCDateTime(
                                                "2013-06-20T16:30:00"),
                                            response=resp_2),
                                ]),
                    ]),
                Network(code="6E",
                        start_date=obspy.UTCDateTime("2013-01-01T00:00:00"),
                        end_date=obspy.UTCDateTime("2016-12-31T23:59:59"),
                        total_number_of_stations=1,
                        description="Wabash Valley Seismic Zone",
                        stations=[
                            Station(code="SH01",
                                    latitude=37.7457,
                                    longitude=-88.1368,
                                    elevation=126.0,
                                    channels=[
                                        Channel(
                                            code="LOG",
                                            location_code="",
                                            latitude=37.7457,
                                            longitude=-88.1368,
                                            elevation=126.0,
                                            depth=0.0,
                                            azimuth=0.0,
                                            dip=0.0,
                                            sample_rate=0.0,
                                            sensor=Equipment(
                                                type="Reftek 130 Datalogger"),
                                            start_date=obspy.UTCDateTime(
                                                "2013-11-23T00:00:00"),
                                            end_date=obspy.UTCDateTime(
                                                "2016-12-31T23:59:59"),
                                            response=resp_3)
                                    ]),
                        ])
            ])

        # CHANNEL level test
        stio = io.StringIO()
        test_inv.write(stio, format="STATIONTXT", level="CHANNEL")
        # check contents
        content = stio.getvalue()
        expected = [
            ("Network|Station|Location|Channel|Latitude|Longitude|"
             "Elevation|Depth|Azimuth|Dip|SensorDescription|Scale|"
             "ScaleFreq|ScaleUnits|SampleRate|StartTime|EndTime"),
            ("IU|ANMO||BCI|34.9459|-106.4572|1850.0|100.0|0.0|"
             "0.0|Geotech KS-36000-I Borehole Seismometer|"
             "848507000.0|0.02|M/S|0.0|1989-08-29T00:00:00|"
             "1995-02-01T00:00:00"),
            ("IU|ANMO|20|LNZ|34.9459|-106.4572|1820.7|0.0|0.0|"
             "-90.0|Titan Accelerometer|53435.4|1.0|M/S**2|0.0|"
             "2013-06-20T16:30:00|"),
            ("6E|SH01||LOG|37.7457|-88.1368|126.0|0.0|0.0|0.0|"
             "Reftek 130 Datalogger|627252000.0|0.03|M/S|0.0|"
             "2013-11-23T00:00:00|2016-12-31T23:59:59"),
        ]
        num_lines_written = 0
        for line in expected:
            self.assertIn(line, content)
            num_lines_written = num_lines_written + 1
        # assert that the number of lines written equals
        # the number of lines expected
        self.assertEqual(num_lines_written, len(expected))

        # STATION level test
        stio = io.StringIO()
        test_inv.write(stio, format="STATIONTXT", level="STATION")
        # check contents
        content = stio.getvalue()
        expected = [
            ("Network|Station|Latitude|Longitude|"
             "Elevation|SiteName|StartTime|EndTime"),
            ("IU|ANMO|34.9459|-106.4572|1850.0||"),
            ("6E|SH01|37.7457|-88.1368|126.0||"),
        ]
        num_lines_written = 0
        for line in expected:
            self.assertIn(line, content)
            num_lines_written = num_lines_written + 1
        # assert that the number of lines written equals
        # the number of lines expected
        self.assertEqual(num_lines_written, len(expected))

        # NETWORK level test
        stio = io.StringIO()
        test_inv.write(stio, format="STATIONTXT", level="NETWORK")
        # check contents
        content = stio.getvalue()
        expected = [
            ("Network|Description|StartTime|EndTime|TotalStations"),
            ("IU|Global Seismograph Network (GSN - IRIS/USGS)|"
             "1988-01-01T00:00:00|2500-12-31T23:59:59|1"),
            ("6E|Wabash Valley Seismic Zone|"
             "2013-01-01T00:00:00|2016-12-31T23:59:59|1"),
        ]
        num_lines_written = 0
        for line in expected:
            self.assertIn(line, content)
            num_lines_written = num_lines_written + 1
        # assert that the number of lines written equals
        # the number of lines expected
        self.assertEqual(num_lines_written, len(expected))
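
The write() call used above also accepts a plain file path instead of an io.StringIO buffer. A minimal sketch assuming the test_inv object constructed above (the output file names are hypothetical):

        # level selects which columns are written, matching the expected
        # header lines asserted above for each of the three levels.
        test_inv.write("inv_channel.txt", format="STATIONTXT", level="CHANNEL")
        test_inv.write("inv_station.txt", format="STATIONTXT", level="STATION")
        test_inv.write("inv_network.txt", format="STATIONTXT", level="NETWORK")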
Example #14
0
    def test_reading_channel_without_response_info(self):
        """
        Test reading a file at the channel level with missing scale,
        scale frequency and units. This is common for the log channel of
        instruments.
        """
        # Manually create an expected Inventory object.
        expected_inv = Inventory(
            source=None,
            networks=[
                Network(code="6E",
                        stations=[
                            Station(code="SH01",
                                    latitude=37.7457,
                                    longitude=-88.1368,
                                    elevation=126.0,
                                    channels=[
                                        Channel(
                                            code="LOG",
                                            location_code="",
                                            latitude=37.7457,
                                            longitude=-88.1368,
                                            elevation=126.0,
                                            depth=0.0,
                                            azimuth=0.0,
                                            dip=0.0,
                                            sample_rate=0.0,
                                            sensor=Equipment(
                                                type="Reftek 130 Datalogger"),
                                            start_date=obspy.UTCDateTime(
                                                "2013-11-23T00:00:00"),
                                            end_date=obspy.UTCDateTime(
                                                "2016-12-31T23:59:59"))
                                    ]),
                        ])
            ])

        # Read from a filename.
        filename = os.path.join(self.data_dir, "log_channel_fdsn.txt")
        inv = read_fdsn_station_text_file(filename)
        inv_obs = obspy.read_inventory(filename)

        # Copy creation date as it will be slightly different otherwise.
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

        # Read from open file in text mode.
        with open(filename, "rt", encoding="utf8") as fh:
            inv = read_fdsn_station_text_file(fh)
            fh.seek(0, 0)
            inv_obs = obspy.read_inventory(fh)
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

        # Read from open file in binary mode.
        with open(filename, "rb") as fh:
            inv = read_fdsn_station_text_file(fh)
            fh.seek(0, 0)
            inv_obs = obspy.read_inventory(fh)
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

        # Read from StringIO.
        with open(filename, "rt", encoding="utf8") as fh:
            with io.StringIO(fh.read()) as buf:
                buf.seek(0, 0)
                inv = read_fdsn_station_text_file(buf)
                buf.seek(0, 0)
                inv_obs = obspy.read_inventory(buf)
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

        # Read from BytesIO.
        with open(filename, "rb") as fh:
            with io.BytesIO(fh.read()) as buf:
                buf.seek(0, 0)
                inv = read_fdsn_station_text_file(buf)
                buf.seek(0, 0)
                inv_obs = obspy.read_inventory(buf)
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)
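
For reference, a channel-level FDSN station text file with empty Scale, ScaleFreq and ScaleUnits columns (the situation this test exercises) could look like the two lines below. The actual contents of the log_channel_fdsn.txt fixture are not shown in this excerpt; this is an assumed reconstruction from the expected inventory:

#Network|Station|Location|Channel|Latitude|Longitude|Elevation|Depth|Azimuth|Dip|SensorDescription|Scale|ScaleFreq|ScaleUnits|SampleRate|StartTime|EndTime
6E|SH01||LOG|37.7457|-88.1368|126.0|0.0|0.0|0.0|Reftek 130 Datalogger||||0.0|2013-11-23T00:00:00|2016-12-31T23:59:59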
Example #15
0
    def test_get_response(self):
        response_n1_s1 = Response('RESPN1S1')
        response_n1_s2 = Response('RESPN1S2')
        response_n2_s1 = Response('RESPN2S1')
        channels_n1_s1 = [
            Channel(code='BHZ',
                    location_code='',
                    latitude=0.0,
                    longitude=0.0,
                    elevation=0.0,
                    depth=0.0,
                    response=response_n1_s1)
        ]
        channels_n1_s2 = [
            Channel(code='BHZ',
                    location_code='',
                    latitude=0.0,
                    longitude=0.0,
                    elevation=0.0,
                    depth=0.0,
                    response=response_n1_s2)
        ]
        channels_n2_s1 = [
            Channel(code='BHZ',
                    location_code='',
                    latitude=0.0,
                    longitude=0.0,
                    elevation=0.0,
                    depth=0.0,
                    response=response_n2_s1)
        ]
        stations_1 = [
            Station(code='N1S1',
                    latitude=0.0,
                    longitude=0.0,
                    elevation=0.0,
                    channels=channels_n1_s1),
            Station(code='N1S2',
                    latitude=0.0,
                    longitude=0.0,
                    elevation=0.0,
                    channels=channels_n1_s2)
        ]
        stations_2 = [
            Station(code='N2S1',
                    latitude=0.0,
                    longitude=0.0,
                    elevation=0.0,
                    channels=channels_n2_s1)
        ]
        networks = [
            Network('N1', stations=stations_1),
            Network('N2', stations=stations_2)
        ]
        inv = Inventory(networks=networks, source='TEST')

        response = inv.get_response('N1.N1S1..BHZ',
                                    UTCDateTime('2010-01-01T12:00'))
        self.assertEqual(response, response_n1_s1)
        response = inv.get_response('N1.N1S2..BHZ',
                                    UTCDateTime('2010-01-01T12:00'))
        self.assertEqual(response, response_n1_s2)
        response = inv.get_response('N2.N2S1..BHZ',
                                    UTCDateTime('2010-01-01T12:00'))
        self.assertEqual(response, response_n2_s1)
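
As a usage note: get_response() resolves a SEED id of the form "NET.STA.LOC.CHAN" that is active at the given time and returns the attached Response object; an id matching no channel presumably raises an Exception rather than returning None. A minimal sketch assuming the inv object built above:

        from obspy import UTCDateTime

        # Sketch only; `inv` is the Inventory constructed in the test above.
        resp = inv.get_response('N1.N1S1..BHZ', UTCDateTime('2010-01-01T12:00'))
        print(resp)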
Example #16
0
File: core.py Project: wbm06/obspy
def read_fdsn_station_text_file(path_or_file_object):
    """
    Function reading an FDSN station text file into an inventory object.

    :param path_or_file_object: File name or file-like object.
    """
    def _read(obj):
        r = unicode_csv_reader(obj, delimiter="|")
        header = next(r)
        header[0] = header[0].lstrip("#")
        header = [_i.strip().lower() for _i in header]
        # IRIS currently has a wrong header name. Just map it.
        header = [_i.replace("instrument", "sensordescription") for _i in
                  header]

        all_lines = []
        for line in r:
            # Skip comment lines.
            if line[0].startswith("#"):
                continue
            all_lines.append([_i.strip() for _i in line])
        return {"header": tuple(header), "content": all_lines}

    # Enable reading from files and buffers opened in binary mode.
    if (hasattr(path_or_file_object, "mode") and
            "b" in path_or_file_object.mode) or \
            isinstance(path_or_file_object, io.BytesIO):
        buf = io.StringIO(path_or_file_object.read().decode("utf-8"))
        buf.seek(0, 0)
        path_or_file_object = buf

    if hasattr(path_or_file_object, "read"):
        content = _read(path_or_file_object)
    else:
        with open(path_or_file_object, "rt", newline="",
                  encoding="utf8") as fh:
            content = _read(fh)

    # Figure out the type.
    if content["header"] == network_components:
        level = "network"
        filetypes = network_types
    elif content["header"] == station_components:
        level = "station"
        filetypes = station_types
    elif content["header"] == channel_components:
        level = "channel"
        filetypes = channel_types
    else:
        raise ValueError("Unknown type of header.")

    content = content["content"]
    converted_content = []
    # Convert all types.
    for line in content:
        converted_content.append([
            v_type(value) for value, v_type in zip(line, filetypes)])

    # Now convert to an inventory object.
    inv = Inventory(networks=[], source=None)

    if level == "network":
        for net in converted_content:
            network = Network(
                code=net[0],
                description=net[1],
                start_date=net[2],
                end_date=net[3],
                total_number_of_stations=net[4])
            inv.networks.append(network)
    elif level == "station":
        networks = collections.OrderedDict()
        for sta in converted_content:
            site = Site(name=sta[5])
            station = Station(
                code=sta[1], latitude=sta[2], longitude=sta[3],
                elevation=sta[4], site=site, start_date=sta[6],
                end_date=sta[7])
            if sta[0] not in networks:
                networks[sta[0]] = []
            networks[sta[0]].append(station)
        for network_code, stations in networks.items():
            net = Network(code=network_code, stations=stations)
            inv.networks.append(net)
    elif level == "channel":
        networks = collections.OrderedDict()
        stations = collections.OrderedDict()

        for channel in converted_content:
            net, sta, loc, chan, lat, lng, ele, dep, azi, dip, inst, scale, \
                scale_freq, scale_units, s_r, st, et = channel

            if net not in networks:
                networks[net] = Network(code=net)

            if (net, sta) not in stations:
                station = Station(code=sta, latitude=lat,
                                  longitude=lng, elevation=ele)
                networks[net].stations.append(station)
                stations[(net, sta)] = station

            sensor = Equipment(type=inst)
            if scale is not None and scale_freq is not None:
                resp = Response(
                    instrument_sensitivity=InstrumentSensitivity(
                        value=scale, frequency=scale_freq,
                        input_units=scale_units, output_units=None))
            else:
                resp = None
            try:
                channel = Channel(
                    code=chan, location_code=loc, latitude=lat, longitude=lng,
                    elevation=ele, depth=dep, azimuth=azi, dip=dip,
                    sensor=sensor, sample_rate=s_r, start_date=st,
                    end_date=et, response=resp)
            except Exception as e:
                warnings.warn(
                    "Failed to parse channel %s.%s.%s.%s due to: %s" % (
                        net, sta, loc, chan, str(e)),
                    UserWarning)
                continue
            stations[(net, sta)].channels.append(channel)
        inv.networks.extend(list(networks.values()))
    else:
        # Cannot really happen - just a safety measure.
        raise NotImplementedError("Unknown level: %s" % str(level))
    return inv
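
A short usage sketch (the file name is hypothetical): FDSN station text files can be read with this function directly, or through obspy.read_inventory, which recognizes the format.

import obspy
from obspy.io.stationtxt.core import read_fdsn_station_text_file

# "stations.txt" is a hypothetical FDSN station text file at network,
# station or channel level; the header row determines the level.
inv = read_fdsn_station_text_file("stations.txt")

# Equivalent high-level call; the format argument can usually be omitted
# because read_inventory auto-detects it.
inv = obspy.read_inventory("stations.txt", format="STATIONTXT")
print(inv)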
Example #17
0
File: test_channel.py Project: Brtle/obspy
    def test_channel_str(self):
        """
        Tests the __str__ method of the channel object.
        """
        c = Channel(code="BHE", location_code="10", latitude=1, longitude=2,
                    elevation=3, depth=4, azimuth=5, dip=6)
        assert str(c) == (
            "Channel 'BHE', Location '10' \n"
            "\tTime range: -- - --\n"
            "\tLatitude: 1.00, Longitude: 2.00, Elevation: 3.0 m, "
            "Local Depth: 4.0 m\n"
            "\tAzimuth: 5.00 degrees from north, clockwise\n"
            "\tDip: 6.00 degrees down from horizontal\n")

        # Adding channel types.
        c.types = ["A", "B"]
        assert str(c) == (
            "Channel 'BHE', Location '10' \n"
            "\tTime range: -- - --\n"
            "\tLatitude: 1.00, Longitude: 2.00, Elevation: 3.0 m, "
            "Local Depth: 4.0 m\n"
            "\tAzimuth: 5.00 degrees from north, clockwise\n"
            "\tDip: 6.00 degrees down from horizontal\n"
            "\tChannel types: A, B\n")

        # Adding channel types.
        c.sample_rate = 10.0
        assert str(c) == (
            "Channel 'BHE', Location '10' \n"
            "\tTime range: -- - --\n"
            "\tLatitude: 1.00, Longitude: 2.00, Elevation: 3.0 m, "
            "Local Depth: 4.0 m\n"
            "\tAzimuth: 5.00 degrees from north, clockwise\n"
            "\tDip: 6.00 degrees down from horizontal\n"
            "\tChannel types: A, B\n"
            "\tSampling Rate: 10.00 Hz\n")

        # "Adding" response
        c.response = True
        assert str(c) == (
            "Channel 'BHE', Location '10' \n"
            "\tTime range: -- - --\n"
            "\tLatitude: 1.00, Longitude: 2.00, Elevation: 3.0 m, "
            "Local Depth: 4.0 m\n"
            "\tAzimuth: 5.00 degrees from north, clockwise\n"
            "\tDip: 6.00 degrees down from horizontal\n"
            "\tChannel types: A, B\n"
            "\tSampling Rate: 10.00 Hz\n"
            "\tResponse information available"
        )

        # Adding an empty sensor.
        c.sensor = Equipment(type=None)
        assert str(c) == (
            "Channel 'BHE', Location '10' \n"
            "\tTime range: -- - --\n"
            "\tLatitude: 1.00, Longitude: 2.00, Elevation: 3.0 m, "
            "Local Depth: 4.0 m\n"
            "\tAzimuth: 5.00 degrees from north, clockwise\n"
            "\tDip: 6.00 degrees down from horizontal\n"
            "\tChannel types: A, B\n"
            "\tSampling Rate: 10.00 Hz\n"
            "\tSensor (Description): None (None)\n"
            "\tResponse information available"
        )

        # Adding a sensor with only a type.
        c.sensor = Equipment(type="random")
        assert str(c) == (
            "Channel 'BHE', Location '10' \n"
            "\tTime range: -- - --\n"
            "\tLatitude: 1.00, Longitude: 2.00, Elevation: 3.0 m, "
            "Local Depth: 4.0 m\n"
            "\tAzimuth: 5.00 degrees from north, clockwise\n"
            "\tDip: 6.00 degrees down from horizontal\n"
            "\tChannel types: A, B\n"
            "\tSampling Rate: 10.00 Hz\n"
            "\tSensor (Description): random (None)\n"
            "\tResponse information available"
        )

        # Adding a sensor with only a description
        c.sensor = Equipment(description="some description")
        assert str(c) == (
            "Channel 'BHE', Location '10' \n"
            "\tTime range: -- - --\n"
            "\tLatitude: 1.00, Longitude: 2.00, Elevation: 3.0 m, "
            "Local Depth: 4.0 m\n"
            "\tAzimuth: 5.00 degrees from north, clockwise\n"
            "\tDip: 6.00 degrees down from horizontal\n"
            "\tChannel types: A, B\n"
            "\tSampling Rate: 10.00 Hz\n"
            "\tSensor (Description): None (some description)\n"
            "\tResponse information available"
        )

        # Adding a sensor with type and description
        c.sensor = Equipment(type="random", description="some description")
        assert str(c) == (
            "Channel 'BHE', Location '10' \n"
            "\tTime range: -- - --\n"
            "\tLatitude: 1.00, Longitude: 2.00, Elevation: 3.0 m, "
            "Local Depth: 4.0 m\n"
            "\tAzimuth: 5.00 degrees from north, clockwise\n"
            "\tDip: 6.00 degrees down from horizontal\n"
            "\tChannel types: A, B\n"
            "\tSampling Rate: 10.00 Hz\n"
            "\tSensor (Description): random (some description)\n"
            "\tResponse information available"
        )
Example #18
0
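This interactive builder excerpt relies on imports and module-level defaults (netc, scode, geolat, geolon, geoelev, date, longname, sps, azims, dips, locs) that are defined elsewhere in the original script. A minimal, assumed sketch of what they might look like:

import os

from obspy import UTCDateTime, read_inventory
from obspy.core.inventory import (Inventory, Network, Station, Channel,
                                  Site, Equipment)
from obspy.clients.nrl import NRL

# Hypothetical defaults; the original script defines its own values.
netc = "XX"                       # network code
scode = "STA01"                   # station code
geolat, geolon, geoelev = 42.0, -71.0, 100.0
date = "2020-01-01"
longname = "Example Site"
sps = 100.0                       # sample rate in Hz
azims = [0.0, 0.0, 90.0]          # one entry per channel (Z, N, E)
dips = [-90.0, 0.0, 0.0]
locs = ["00", "00", "00"]
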
def main():
    chans = "EHZ,EHN,EHE"
    # Get StationXML file
    print(f"Interactive StaXML builder")
    print(f"Work in progress...some things hardwired\n\n")
    inv_name = input(f"Enter StationXML file name: ")
    if (os.path.isfile(inv_name)):
        inv = read_inventory(inv_name)
    else:
        print(f"Making new inventory: {inv_name}\n")
        inv = Inventory(networks=[], source="Weston")

    # Net code
    ques = f"Enter Network Code ({str(netc)}) :"
    net_code = str(input(ques) or netc)
    net = Network(code=net_code, stations=[])
    print(f"\n")

    # connect to NRL
    nrl = NRL()

    # Datalogger info
    ret = 0
    digi = f"REF TEK|RT 130S & 130-SMHR|1|200"
    print(f"Input NRL Digi info ( | separated, careful with spaces)....")
    print(f"E.g manufacturer| model| gain| sps\n")
    while ret == 0:
        ques = f"Enter DIGI info ({digi}) :"
        digi = str(input(ques) or digi)
        print(f"\n")
        try:
            nrl.get_datalogger_response(digi.split('|'))
            ret = 1
            print("!!!!! DATA LOGGER SUCCESS!!!\n")
        except Exception as e:
            print(f"Try again ... {e}")

    # Sensor info
    ret = 0
    sensor = f"Streckeisen,STS-1,360 seconds"
    print(f"Input NRL Sensor info ....\n")
    print(f"E.g Manufact|model|Sensitivy\n")
    print(f"Guralp|CMG-40T,30s - 100Hz|800")
    print(f"Sercel/Mark Products|L-22D|5470 Ohms|20000 Ohms")
    print(f"Streckeisen|STS-1|360 seconds")
    print(f"Nanometrics|Trillium Compact 120 (Vault, Posthole, OBS)|754 V/m/s")
    while ret == 0:
        ques = f"Enter sensor info {str(sensor)} :"
        sensor = str(input(ques) or sensor)
        try:
            nrl.get_sensor_response(sensor.split('|'))
            ret = 1
            inst_info = f"{sensor.split('|')[0]} {sensor.split('|')[1]}"
            print("Sensor success!!!!")
        except Exception as e:
            print(f"Try again ... {e}")

    print("Getting full response...")
    try:
        response = nrl.get_response(sensor_keys=sensor.split('|'),
                                    datalogger_keys=digi.split('|'))
        print("Full response success \n\n")
    except Exception as e:
        print(f"Oops .. {e}")
    #
    nstas = int(
        input(
            "Enter number of stations to add with same sensor/digitizer (default 1):"
        ) or 1)
    for i in range(0, nstas):
        ques = "Station code (" + str(scode) + ") :"
        sta_code = str(input(ques) or scode)

        ques = "Station latitude (" + str(geolat) + ") :"
        sta_lat = float(input(ques) or geolat)

        ques = "Station longitude (" + str(geolon) + ") :"
        sta_lon = float(input(ques) or geolon)

        ques = "Station elev(" + str(geoelev) + ") :"
        sta_elev = float(input(ques) or geoelev)

        ques = "Station ondate (" + str(date) + ") :"
        sta_ondate = str(input(ques) or date)

        ques = "Station offdate (" + str(date) + ") :"
        sta_offdate = str(input(ques) or date)

        ques = "Station long name (" + str(longname) + ") :"
        sta_sitename = str(input(ques) or longname)

        sta = Station(code=sta_code,
                      latitude=sta_lat,
                      longitude=sta_lon,
                      elevation=sta_elev,
                      creation_date=UTCDateTime(sta_ondate),
                      site=Site(name=sta_sitename))
        # add station to network
        net.stations.append(sta)

        # Default chan info
        coords = {
            'latitude': sta_lat,
            'longitude': sta_lon,
            'elevation': sta_elev,
            'depth': 0.0,
            'sample_rate': sps
        }

        n = -1
        ques = f"Enter channel names, comma separated ({chans}) :"
        chans = str(input(ques) or chans)
        for j in chans.split(','):
            n += 1
            chantmp = j
            print("Doing channel ", chantmp)
            aztmp = azims[n]
            diptmp = dips[n]
            loc = locs[n]
            for k in coords.keys():
                ques = str(chantmp) + " enter " + k + "(" + str(
                    coords[k]) + "):"
                coords[k] = float(input(ques) or coords[k])

            chan = Channel(code=chantmp,
                           location_code=loc,
                           latitude=coords['latitude'],
                           longitude=coords['longitude'],
                           elevation=coords['elevation'],
                           depth=coords['depth'],
                           azimuth=aztmp,
                           dip=diptmp,
                           sample_rate=coords['sample_rate'],
                           sensor=Equipment(description=inst_info))
            chan.response = response
            sta.channels.append(chan)

    inv.networks.append(net)
    inv.write(inv_name, format="STATIONXML")