Example #1
    def test_read_nlloc_with_pick_seed_id_lookup(self):
        # create some bogus metadata for lookup
        cha = Channel('HHZ', '00', 0, 0, 0, 0)
        sta = Station('HM02', 0, 0, 0, channels=[cha])
        cha = Channel('HHZ', '10', 0, 0, 0, 0)
        sta2 = Station('YYYY', 0, 0, 0, channels=[cha])
        net = Network('XX', stations=[sta, sta2])
        # second network with non matching data
        cha = Channel('HHZ', '00', 0, 0, 0, 0)
        sta = Station('ABCD', 0, 0, 0, channels=[cha])
        cha = Channel('HHZ', '10', 0, 0, 0, 0)
        sta2 = Station('EFGH', 0, 0, 0, channels=[cha])
        net2 = Network('YY', stations=[sta, sta2])

        inv = Inventory(networks=[net, net2], source='')

        filename = get_example_file("nlloc_custom.hyp")
        # we get some warnings since we only provide sufficient metadata for
        # one pick
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            cat = read_events(filename, format="NLLOC_HYP", inventory=inv)
        self.assertEqual(len(cat), 1)
        for pick in cat[0].picks:
            wid = pick.waveform_id
            if wid.station_code == 'HM02':
                self.assertEqual(wid.network_code, 'XX')
                self.assertEqual(wid.location_code, '')
            else:
                self.assertEqual(wid.network_code, '')
                self.assertEqual(wid.location_code, None)
Example #2
    def _stations_channels2elements(self):
        """
        Extract channel metadata and populate the `elements` attribute
        accounting for different metadata structures.

        Some structures (usually from IMS) already contain a separate
        `Station` object for each element. Others regard the array as
        one station with channels as elements.
        """
        if len(self[0].stations) == 1:
            station = self[0].stations[0]
            stations = []
            site = deepcopy(station.site)
            site_name = site.name
            for cha in station:
                site.name = 'Site {}, {}'.format(cha.location_code, site_name)
                station_ = Station(station.code + cha.location_code,
                                   cha.latitude,
                                   cha.longitude,
                                   cha.elevation,
                                   channels=[deepcopy(cha)],
                                   site=deepcopy(site))
                station_._original_code = station.code
                stations += [station_]
            self[0].stations = stations
            self.elements = self[0].stations
        else:
            self.elements = self[0].stations
            for ele in self.elements:
                ele._original_code = ele.code
Example #3
    def test_get_response(self):
        response_n1_s1 = Response('RESPN1S1')
        response_n1_s2 = Response('RESPN1S2')
        response_n2_s1 = Response('RESPN2S1')
        channels_n1_s1 = [
            Channel(code='BHZ',
                    location_code='',
                    latitude=0.0,
                    longitude=0.0,
                    elevation=0.0,
                    depth=0.0,
                    response=response_n1_s1)
        ]
        channels_n1_s2 = [
            Channel(code='BHZ',
                    location_code='',
                    latitude=0.0,
                    longitude=0.0,
                    elevation=0.0,
                    depth=0.0,
                    response=response_n1_s2)
        ]
        channels_n2_s1 = [
            Channel(code='BHZ',
                    location_code='',
                    latitude=0.0,
                    longitude=0.0,
                    elevation=0.0,
                    depth=0.0,
                    response=response_n2_s1)
        ]
        stations_1 = [
            Station(code='N1S1',
                    latitude=0.0,
                    longitude=0.0,
                    elevation=0.0,
                    channels=channels_n1_s1),
            Station(code='N1S2',
                    latitude=0.0,
                    longitude=0.0,
                    elevation=0.0,
                    channels=channels_n1_s2),
            Station(code='N2S1',
                    latitude=0.0,
                    longitude=0.0,
                    elevation=0.0,
                    channels=channels_n2_s1)
        ]
        network = Network('N1', stations=stations_1)

        response = network.get_response('N1.N1S1..BHZ',
                                        UTCDateTime('2010-01-01T12:00'))
        self.assertEqual(response, response_n1_s1)
        response = network.get_response('N1.N1S2..BHZ',
                                        UTCDateTime('2010-01-01T12:00'))
        self.assertEqual(response, response_n1_s2)
        response = network.get_response('N1.N2S1..BHZ',
                                        UTCDateTime('2010-01-01T12:00'))
        self.assertEqual(response, response_n2_s1)
Example #4
 def test_warn_identifier_invalid_uri_syntax(self):
     """
     Tests the warning on Identifiers getting set with an invalid URI (not
     having scheme-colon-path)
     """
     sta = Station(code='A', latitude=1, longitude=1, elevation=1)
     invalid_uri = "this-has-no-URI-scheme-and-no-colon"
     msg = f"Given string seems to not be a valid URI: '{invalid_uri}'"
     with CatchAndAssertWarnings(expected=[(UserWarning, msg)]):
         sta.identifiers = [invalid_uri]
Example #5
    def test_get_response(self):
        response_n1_s1 = Response('RESPN1S1')
        response_n1_s2 = Response('RESPN1S2')
        response_n2_s1 = Response('RESPN2S1')
        channels_n1_s1 = [Channel(code='BHZ',
                                  location_code='',
                                  latitude=0.0,
                                  longitude=0.0,
                                  elevation=0.0,
                                  depth=0.0,
                                  response=response_n1_s1)]
        channels_n1_s2 = [Channel(code='BHZ',
                                  location_code='',
                                  latitude=0.0,
                                  longitude=0.0,
                                  elevation=0.0,
                                  depth=0.0,
                                  response=response_n1_s2)]
        channels_n2_s1 = [Channel(code='BHZ',
                                  location_code='',
                                  latitude=0.0,
                                  longitude=0.0,
                                  elevation=0.0,
                                  depth=0.0,
                                  response=response_n2_s1)]
        stations_1 = [Station(code='N1S1',
                              latitude=0.0,
                              longitude=0.0,
                              elevation=0.0,
                              channels=channels_n1_s1),
                      Station(code='N1S2',
                              latitude=0.0,
                              longitude=0.0,
                              elevation=0.0,
                              channels=channels_n1_s2)]
        stations_2 = [Station(code='N2S1',
                              latitude=0.0,
                              longitude=0.0,
                              elevation=0.0,
                              channels=channels_n2_s1)]
        networks = [Network('N1', stations=stations_1),
                    Network('N2', stations=stations_2)]
        inv = Inventory(networks=networks, source='TEST')

        response = inv.get_response('N1.N1S1..BHZ',
                                    UTCDateTime('2010-01-01T12:00'))
        assert response == response_n1_s1
        response = inv.get_response('N1.N1S2..BHZ',
                                    UTCDateTime('2010-01-01T12:00'))
        assert response == response_n1_s2
        response = inv.get_response('N2.N2S1..BHZ',
                                    UTCDateTime('2010-01-01T12:00'))
        assert response == response_n2_s1
Example #6
def do_xml():
    nrl = NRL('http://ds.iris.edu/NRL/')
    datalogger_keys = ['REF TEK', 'RT 130 & 130-SMA', '1', '40']
    sensor_keys = ['Streckeisen', 'STS-2', '1500', '3 - installed 04/97 to present']

    response = nrl.get_response(sensor_keys=sensor_keys, datalogger_keys=datalogger_keys)

    channel = Channel(code='BHZ',
                      location_code='10',      # required
                      latitude=0,      # required
                      longitude=0,   # required
                      elevation=0.0,        # required
                      depth=0.,                # required
                      )

    channel.response = response
    station = Station(code='ABCD',
                      latitude=0,
                      longitude=0,
                      elevation=0.0,
                      creation_date=UTCDateTime(1970, 1, 1),          # required
                      site=Site(name='Fake Site'),  # required
                      channels=[channel],
                      )

    network = Network(code='XX',
                      stations=[station])
    inventory = Inventory(networks=[network], source="demo")

    inventory.write("Test.xml", format="stationxml", validate=True)
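A quick, optional sanity check for do_xml() above is to read the written file back with ObsPy. This is only a sketch: it assumes the NRL at that URL was reachable, the response lookup succeeded, and Test.xml was written to the current directory.

import obspy

do_xml()
inv = obspy.read_inventory("Test.xml", format="STATIONXML")
print(inv)  # expect one XX network with station ABCD and channel 10.BHZ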
Example #7
 def test_get_coordinates(self):
     """
     Test extracting coordinates
     """
     expected = {u'latitude': 47.737166999999999,
                 u'longitude': 12.795714,
                 u'elevation': 860.0,
                 u'local_depth': 0.0}
     channels = [Channel(code='EHZ',
                         location_code='',
                         start_date=UTCDateTime('2007-01-01'),
                         latitude=47.737166999999999,
                         longitude=12.795714,
                         elevation=860.0,
                         depth=0.0)]
     stations = [Station(code='RJOB',
                         latitude=0.0,
                         longitude=0.0,
                         elevation=0.0,
                         channels=channels)]
     network = Network('BW', stations=stations)
     # 1
     coordinates = network.get_coordinates('BW.RJOB..EHZ',
                                           UTCDateTime('2010-01-01T12:00'))
     assert sorted(coordinates.items()) == sorted(expected.items())
     # 2 - without datetime
     coordinates = network.get_coordinates('BW.RJOB..EHZ')
     assert sorted(coordinates.items()) == sorted(expected.items())
     # 3 - unknown SEED ID should raise exception
     with pytest.raises(Exception):
         network.get_coordinates('BW.RJOB..XXX')
Example #8
def create_inv(network_code, station_code, location_code, channel_code, isr,
               sf, u):
    writethisinv = Inventory(
        networks=[
            Network(code=network_code,
                    start_date=obspy.UTCDateTime('2007-01-01'),
                    stations=[
                        Station(
                            code=station_code,
                            latitude=1,
                            longitude=2,
                            elevation=3,
                            creation_date=obspy.UTCDateTime('2007-01-01'),
                            site=Site(name='site'),
                            channels=[
                                Channel(
                                    code=channel_code,
                                    location_code=location_code,
                                    start_date=obspy.UTCDateTime('2007-01-01'),
                                    latitude=1,
                                    longitude=2,
                                    elevation=3,
                                    depth=4,
                                    response=create_response(
                                        inputsamplerate=isr,
                                        scaling_factor=sf,
                                        units=u))
                            ])
                    ])
        ],
        # The source should be the ID of whoever created the file.
        source='Joseph Farrugia, Ocean Networks Canada',
        created=obspy.UTCDateTime(datetime.today()))
    return writethisinv
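For illustration only, a hypothetical call to create_inv() above could look like the sketch below. The codes and values are made up, and create_response() is a project-specific helper (building a Response from input sample rate, scaling factor and units) that is not shown in this example.

inv = create_inv(network_code='XX', station_code='STA01', location_code='00',
                 channel_code='HHZ', isr=100.0, sf=1.0, u='M/S')
print(inv)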
Example #9
 def test_get_orientation(self):
     """
     Test extracting orientation
     """
     expected = {u'azimuth': 90.0, u'dip': 0.0}
     channels = [
         Channel(code='EHZ',
                 location_code='',
                 start_date=UTCDateTime('2007-01-01'),
                 latitude=47.737166999999999,
                 longitude=12.795714,
                 elevation=860.0,
                 depth=0.0,
                 azimuth=90.0,
                 dip=0.0)
     ]
     stations = [
         Station(code='RJOB',
                 latitude=0.0,
                 longitude=0.0,
                 elevation=0.0,
                 channels=channels)
     ]
     networks = [Network('BW', stations=stations)]
     inv = Inventory(networks=networks, source='TEST')
     # 1
     orientation = inv.get_orientation('BW.RJOB..EHZ',
                                       UTCDateTime('2010-01-01T12:00'))
     self.assertEqual(sorted(orientation.items()), sorted(expected.items()))
     # 2 - without datetime
     orientation = inv.get_orientation('BW.RJOB..EHZ')
     self.assertEqual(sorted(orientation.items()), sorted(expected.items()))
     # 3 - unknown SEED ID should raise exception
     self.assertRaises(Exception, inv.get_orientation, 'BW.RJOB..XXX')
Example #10
def get_inventory(stations,
                  depths,
                  lat=50.45031,
                  long=-112.12087,
                  elevation=779.0,
                  dip1=0,
                  azi1=0,
                  dip2=0,
                  azi2=90,
                  dip3=90,
                  azi3=0):
    inv = Inventory(networks=[], source="Genevieve")
    net = Network(code="BH",
                  stations=[],
                  description=" ",
                  start_date=UTCDateTime(2019, 1, 1))
    for i, station in enumerate(stations):
        dep = depths[i]
        sta = Station(code=station,
                      latitude=lat,
                      longitude=long,
                      elevation=elevation,
                      creation_date=UTCDateTime(2019, 1, 1),
                      site=Site(name="borehole"))
        chaz = Channel(code="DPZ",
                       location_code="",
                       latitude=lat,
                       longitude=long,
                       elevation=elevation,
                       azimuth=azi3,
                       dip=dip3,
                       depth=dep,
                       sample_rate=500)
        cha1 = Channel(code="DPN",
                       location_code="",
                       latitude=lat,
                       longitude=long,
                       elevation=elevation,
                       azimuth=azi1,
                       dip=dip1,
                       depth=dep,
                       sample_rate=500)
        cha2 = Channel(code="DPE",
                       location_code="",
                       latitude=lat,
                       longitude=long,
                       elevation=elevation,
                       azimuth=azi2,
                       dip=dip2,
                       depth=dep,
                       sample_rate=500)
        sta.channels.append(chaz)
        sta.channels.append(cha1)
        sta.channels.append(cha2)
        net.stations.append(sta)
    inv.networks.append(net)
    return inv
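A hedged usage sketch for get_inventory() above: the borehole station codes and depths are invented, the default coordinates and orientations from the signature are kept, and the output file name is arbitrary.

inv = get_inventory(stations=["BH001", "BH002"], depths=[10.0, 25.0])
print(inv)
inv.write("borehole_inventory.xml", format="stationxml")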
Example #11
 def set_station(self):
     self.station_raw = self.get_station()
     self.station = Station(code=self.station_raw["name"],
                            latitude=self.station_raw["latitude"],
                            longitude=self.station_raw["longitude"],
                            elevation=self.station_raw["elevation"],
                            creation_date=obspy.UTCDateTime(
                                self.station_raw["creation_date"]),
                            site=Site(name=self.station_raw["site_name"]))
Example #12
def sac2asdf_hinet(sac_directory, cmt_path, output_path):
    with pyasdf.ASDFDataSet(output_path, mode="w", compression=None,
                            mpi=False) as ds:
        # read in eventxml
        event_xml = obspy.read_events(cmt_path)
        # add eventxml to ds
        ds.add_quakeml(event_xml)
        event = ds.events[0]
        # read in waves
        files = sorted(glob(join(sac_directory, "*")))
        inv = Inventory()  # pylint: disable=no-value-for-parameter
        net_inv = Network(code="N", stations=[])
        # * we should sort files based on the station names
        sta_collection = {}
        # * here we add the waveforms along with the process of building the inventory
        for each_file in files:
            tr = obspy.read(each_file)[0]
            # here we need to modify some stats' values
            net_sta = tr.stats.station
            net, sta = net_sta.split(".")
            tr.stats.network = net
            tr.stats.station = sta
            # we change the channel names U->HHZ N->HHN E->HHE
            channel_mapper = {"U": "HHZ", "N": "HHN", "E": "HHE"}
            try:
                tr.stats.channel = channel_mapper[tr.stats.channel]
            except KeyError:
                continue
            # we have to consider the time difference in Japan
            tr.stats.starttime = tr.stats.starttime - 9 * 60 * 60
            # * add the waveforms
            tr.data = np.require(tr.data, dtype="float32")
            ds.add_waveforms(tr, tag="raw", event_id=event)
            # * handle the stationxml
            cha = Channel(code=tr.stats.channel,
                          location_code="",
                          latitude=tr.stats.sac.stla,
                          longitude=tr.stats.sac.stlo,
                          elevation=tr.stats.sac.stel,
                          depth=0.0,
                          sample_rate=tr.stats.sampling_rate)
            if (sta in sta_collection):
                sta_collection[sta].channels.append(cha)
            else:
                sta_collection[sta] = Station(code=sta,
                                              latitude=tr.stats.sac.stla,
                                              longitude=tr.stats.sac.stlo,
                                              elevation=tr.stats.sac.stel)
                sta_collection[sta].channels.append(cha)
        # * now we can add all the sta to net
        for sta in sta_collection:
            if (len(sta_collection[sta].channels) == 3):
                net_inv.stations.append(sta_collection[sta])
        # * we can add net to station_xml
        inv.networks.append(net_inv)
        # * now we can add inv to asdf
        ds.add_stationxml(inv)
Example #13
def read_hyp_inventory(hyp, network, kml_output_dir=None):
    inventory = Inventory(networks=[], source="")
    net = Network(code=network, stations=[], description="")

    with open(hyp, 'r') as file:
        blank_line = 0
        while True:
            line = file.readline()
            if not line:
                # stop at end of file to avoid an endless loop
                break
            line = line.rstrip()

            if not len(line):
                blank_line += 1
                continue

            if blank_line > 1:
                break

            elif blank_line == 1:
                lat = line[6:14]
                lon = line[14:23]
                elev = float(line[23:])
                station = line[1:6]

                if lat[-1] == 'S':
                    NS = -1
                else:
                    NS = 1

                if lon[-1] == 'W':
                    EW = -1
                else:
                    EW = 1

                lat = (int(lat[0:2]) + float(lat[2:-1]) / 60) * NS
                lat = Latitude(lat)

                lon = (int(lon[0:3]) + float(lon[3:-1]) / 60) * EW
                lon = Longitude(lon)

                sta = Station(code=station,
                              latitude=lat,
                              longitude=lon,
                              elevation=elev)

                net.stations.append(sta)

    inventory.networks.append(net)

    if kml_output_dir:
        os.makedirs(kml_output_dir, exist_ok=True)
        inventory.write(kml_output_dir + "/" + network + ".kml", format="KML")

    return inventory
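A possible call of read_hyp_inventory() above, assuming a SEISAN-style STATION0.HYP file whose station block uses the fixed columns parsed in the loop; the file name, network code and KML output directory are placeholders.

inv = read_hyp_inventory("STATION0.HYP", "XX", kml_output_dir="kml_out")
print(inv)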
Example #14
def _dataframe_to_station(statcode, station_df, instrument_register=None):
    """
    Convert Pandas dataframe with unique station code to obspy Station object.

    :param statcode: Station code
    :type statcode: str
    :param station_df: Dataframe containing records for a single station code.
    :type station_df: pandas.DataFrame conforming to table_format.TABLE_SCHEMA
    :param instrument_register: Dictionary of nominal instrument responses indexed by channel code, defaults to None
    :type instrument_register: dict of {str: Instrument(obspy.core.inventory.util.Equipment,
        obspy.core.inventory.response.Response)}, optional
    :return: Station object containing the station information from the dataframe
    :rtype: obspy.core.inventory.station.Station
    """
    station_data = station_df.iloc[0]
    st_start = station_data['StationStart']
    assert pd.notnull(st_start)
    st_start = utcdatetime.UTCDateTime(st_start)
    st_end = station_data['StationEnd']
    assert pd.notnull(st_end)
    st_end = utcdatetime.UTCDateTime(st_end)
    station = Station(statcode,
                      station_data['Latitude'],
                      station_data['Longitude'],
                      station_data['Elevation'],
                      start_date=st_start, creation_date=st_start,
                      end_date=st_end, termination_date=st_end,
                      site=Site(name=' '))
    for _, d in station_df.iterrows():
        ch_start = d['ChannelStart']
        ch_start = utcdatetime.UTCDateTime(ch_start) if not pd.isnull(ch_start) else None
        ch_end = d['ChannelEnd']
        ch_end = utcdatetime.UTCDateTime(ch_end) if not pd.isnull(ch_end) else None
        ch_code = d['ChannelCode']
        # use .get() so a missing channel code can fall back to the 'LAST_RESORT' entry below
        instrument = instrument_register.get(ch_code)
        if instrument is not None:
            sensor = instrument.sensor
            response = instrument.response
        elif 'LAST_RESORT' in instrument_register:
            last_resort = instrument_register['LAST_RESORT']
            sensor = last_resort.sensor
            response = last_resort.response
        else:
            sensor = None
            response = None
        cha = Channel(ch_code, '', float(d['Latitude']), float(d['Longitude']), float(d['Elevation']),
                      depth=0.0, azimuth=0.0, dip=-90.0, sample_rate=0.0, clock_drift_in_seconds_per_sample=0.0,
                      start_date=ch_start, end_date=ch_end, sensor=sensor, response=response)
        station.channels.append(cha)
    return station
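A rough sketch of the kind of dataframe _dataframe_to_station() above expects, inferred from the columns it indexes. The values are invented, and the instrument register maps the channel code to None so that the sensor and response fields stay empty.

import pandas as pd

df = pd.DataFrame([{
    'StationStart': '2010-01-01', 'StationEnd': '2020-01-01',
    'Latitude': -20.0, 'Longitude': 135.0, 'Elevation': 300.0,
    'ChannelStart': '2010-01-01', 'ChannelEnd': '2020-01-01',
    'ChannelCode': 'BHZ',
}])
station = _dataframe_to_station('STA01', df, instrument_register={'BHZ': None})
print(station)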
Example #15
def get_station_list(stream):
    """ Creates station list from sac metadata
    """
    stations = []
    for trace in stream:
        station_name = trace.stats.station
        try:
            latitude = trace.stats.sac.stla
            longitude = trace.stats.sac.stlo
            elevation = 0.
        except AttributeError:
            warnings.warn("Could not determine station location from sac headers.")
            continue
        stations += [Station(
           station_name,
           latitude,
           longitude,
           elevation)]
    return stations
Example #16
def read_station(station_file):
    """
    Read a station file and return a list of ObsPy Station objects.
    """
    if not os.path.isfile(station_file):
        raise IOError('No such file')

    station_list = []
    with open(station_file, 'r') as fic:
        for line in fic:
            elements = line.split()
            lon, lat, depth = float(elements[0]), float(elements[1]), float(elements[2])
            sta_name = elements[3]
            station_list.append(Station(sta_name, lat, lon, depth * 1000))

    return station_list
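read_station() above expects whitespace-separated lines of longitude, latitude, an elevation-like value in kilometres and the station name. A made-up input file and call:

with open("stations.txt", "w") as f:
    f.write("135.0 -20.0 0.3 STA01\n")
    f.write("136.0 -21.0 0.5 STA02\n")

stations = read_station("stations.txt")
print(stations[0].code, stations[0].latitude, stations[0].elevation)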
Example #17
def clone_inv(inv, net_name, sta_name):

    net = Network(
        # This is the network code according to the SEED standard.
        code=net_name,
        # A list of stations. We'll add one later.
        stations=[],
        #        description="A test stations.",
        # Start-and end dates are optional.
        #        start_date=obspy.UTCDateTime(2016, 1, 2))
    )

    sta = Station(
        # This is the station code according to the SEED standard.
        code=sta_name,
        latitude=inv[0][0].latitude,
        longitude=inv[0][0].longitude,
        elevation=inv[0][0].elevation,
        creation_date=obspy.UTCDateTime(2016, 1, 2),
        site=Site(name="station with cloned inv"))

    cha = Channel(
        # This is the channel code according to the SEED standard.
        code="HHZ",
        # This is the location code according to the SEED standard.
        location_code="",
        # Note that these coordinates can differ from the station coordinates.
        start_date=inv[0][0][0].start_date,
        latitude=inv[0][0][0].latitude,
        longitude=inv[0][0][0].longitude,
        elevation=inv[0][0][0].elevation,
        depth=inv[0][0][0].depth,
        #        azimuth=0.0,
        #        dip=-90.0,
        sample_rate=inv[0][0][0].sample_rate)

    # Now tie it all together.
    cha.response = inv[0][0][0].response  #response
    sta.channels.append(cha)
    net.stations.append(sta)
    inv.networks.append(net)

    return inv
Example #18
def staCsv2Xml(staCsvPath, staXmlPath, source='Lazylyst'):
    # Load the csv file
    info = np.genfromtxt(staCsvPath, delimiter=',', dtype=str)
    # For each network...
    networks = []
    unqNets = np.unique(info[:, 5])
    for net in unqNets:
        netInfo = info[np.where(info[:, 5] == net)]
        # ...gather its stations
        stations = []
        for entry in netInfo:
            stations.append(
                Station(entry[0],
                        entry[1],
                        entry[2],
                        entry[3],
                        site=Site(''),
                        creation_date=UTCDateTime(1970, 1, 1)))
        networks.append(Network(net, stations=stations))
    # Generate the inventory object, and save it as a station XML
    inv = Inventory(networks=networks, source=source)
    inv.write(staXmlPath, format='stationxml', validate=True)
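Judging from the indexing above, each CSV row is expected to hold the station code, latitude, longitude and elevation in columns 0-3 and the network code in column 5 (column 4 is not used here). A hypothetical call, with placeholder file names:

staCsv2Xml('stations.csv', 'stations.xml', source='Lazylyst')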
Example #19
 def set_station(self,
                 p_name="",
                 p_latitude=0,
                 p_longitude=0,
                 p_elevation=0,
                 p_creation_date="2020,2,1,0,0,0.00",
                 p_site_name=""):
     self.station_raw = {
         "name": p_name,
         "latitude": p_latitude,
         "longitude": p_longitude,
         "elevation": p_elevation,
         "creation_date": p_creation_date,
         "site_name": p_site_name
     }
     self.station = Station(code=self.station_raw["name"],
                            latitude=self.station_raw["latitude"],
                            longitude=self.station_raw["longitude"],
                            elevation=self.station_raw["elevation"],
                            creation_date=obspy.UTCDateTime(
                                self.station_raw["creation_date"]),
                            site=Site(name=self.station_raw["site_name"]))
Example #20
def create_simple_inventory(network, station, latitude=None, longitude=None,
                            elevation=None, depth=None, start_date=None,
                            end_date=None, location_code="S3",
                            channel_code="MX"):
    """
    Create simple inventory with only location information,
    for ZNE components, especially useful for synthetic data
    """
    azi_dict = {"MXZ": 0.0,  "MXN": 0.0, "MXE": 90.0}
    dip_dict = {"MXZ": 90.0, "MXN": 0.0, "MXE": 0.0}
    channel_list = []

    if start_date is None:
        start_date = UTCDateTime(0)

    # specfem default channel code is MX
    for _comp in ["Z", "E", "N"]:
        _chan_code = "%s%s" % (channel_code, _comp)
        chan = Channel(_chan_code, location_code, latitude=latitude,
                       longitude=longitude, elevation=elevation,
                       depth=depth, azimuth=azi_dict[_chan_code],
                       dip=dip_dict[_chan_code], start_date=start_date,
                       end_date=end_date)
        channel_list.append(chan)

    site = Site("N/A")
    sta = Station(station, latitude=latitude, longitude=longitude,
                  elevation=elevation, channels=channel_list, site=site,
                  creation_date=start_date, total_number_of_channels=3,
                  selected_number_of_channels=3)

    nw = Network(network, stations=[sta, ], total_number_of_stations=1,
                 selected_number_of_stations=1)

    inv = Inventory([nw, ], source="SPECFEM3D_GLOBE", sender="Princeton",
                    created=UTCDateTime.now())

    return inv
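A minimal, hypothetical call to create_simple_inventory() above for a synthetic ZNE station; the codes and coordinates are made up.

inv = create_simple_inventory("XX", "STA01", latitude=10.0, longitude=20.0,
                              elevation=100.0, depth=0.0)
print(inv)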
Example #21
    def _make_inventory(self, df: pd.DataFrame):
        """
        Loopy logic for creating the inventory from a dataframe.
        """
        # get dataframe with correct columns/conditioning from input
        df = obsplus.stations_to_df(df).copy()
        # add responses (if requested) and drop response cols
        df["response"] = self._get_responses(df)
        df = df.drop(columns=self._drop_cols, errors="ignore")
        # warn if any unexpected columns are found in df
        self._maybe_warn_on_unexpected_columns(df)
        # Iterate networks and create stations
        networks = []
        for net_code, net_df in self._groupby_if_exists(df, "network"):
            stations = []
            for st_code, sta_df in self._groupby_if_exists(net_df, "station"):
                if not st_code[0]:
                    continue
                channels = []
                for ch_code, ch_df in self._groupby_if_exists(sta_df, "channel"):
                    if not ch_code[0]:  # skip empty channel lines
                        continue
                    chan_series = ch_df.iloc[0]
                    kwargs = self._get_kwargs(chan_series, self.cha_map)
                    # try to add the inventory
                    channels.append(Channel(**kwargs))
                kwargs = self._get_kwargs(sta_df.iloc[0], self.sta_map)
                self._add_dates(kwargs, channels)
                stations.append(Station(channels=channels, **kwargs))
            kwargs = self._get_kwargs(net_df.iloc[0], self.net_map)
            self._add_dates(kwargs, stations)
            networks.append(Network(stations=stations, **kwargs))

        return obspy.Inventory(
            networks=networks, source=f"ObsPlus_v{obsplus.__version__}"
        )
Example #22
def read_fdsn_station_text_file(path_or_file_object):
    """
    Function reading an FDSN station text file into an inventory object.

    :param path_or_file_object: File name or file like object.
    """
    def _read(obj):
        r = unicode_csv_reader(obj, delimiter=native_str("|"))
        header = next(r)
        header[0] = header[0].lstrip("#")
        header = [_i.strip().lower() for _i in header]
        # IRIS currently has a wrong header name. Just map it.
        header = [
            _i.replace("instrument", "sensordescription") for _i in header
        ]

        all_lines = []
        for line in r:
            # Skip comment lines.
            if line[0].startswith("#"):
                continue
            all_lines.append([_i.strip() for _i in line])
        return {"header": tuple(header), "content": all_lines}

    # Enable reading from files and buffers opened in binary mode.
    if (hasattr(path_or_file_object, "mode") and
            "b" in path_or_file_object.mode) or \
            isinstance(path_or_file_object, io.BytesIO):
        buf = io.StringIO(path_or_file_object.read().decode("utf-8"))
        buf.seek(0, 0)
        path_or_file_object = buf

    if hasattr(path_or_file_object, "read"):
        content = _read(path_or_file_object)
    else:
        with open(path_or_file_object, "rt", newline="",
                  encoding="utf8") as fh:
            content = _read(fh)

    # Figure out the type.
    if content["header"] == network_components:
        level = "network"
        filetypes = network_types
    elif content["header"] == station_components:
        level = "station"
        filetypes = station_types
    elif content["header"] == channel_components:
        level = "channel"
        filetypes = channel_types
    else:
        raise ValueError("Unknown type of header.")

    content = content["content"]
    converted_content = []
    # Convert all types.
    for line in content:
        converted_content.append(
            [v_type(value) for value, v_type in zip(line, filetypes)])

    # Now convert to an inventory object.
    inv = Inventory(networks=[], source=None)

    if level == "network":
        for net in converted_content:
            network = Network(code=net[0],
                              description=net[1],
                              start_date=net[2],
                              end_date=net[3],
                              total_number_of_stations=net[4])
            inv.networks.append(network)
    elif level == "station":
        networks = collections.OrderedDict()
        for sta in converted_content:
            site = Site(name=sta[5])
            station = Station(code=sta[1],
                              latitude=sta[2],
                              longitude=sta[3],
                              elevation=sta[4],
                              site=site,
                              start_date=sta[6],
                              end_date=sta[7])
            if sta[0] not in networks:
                networks[sta[0]] = []
            networks[sta[0]].append(station)
        for network_code, stations in networks.items():
            net = Network(code=network_code, stations=stations)
            inv.networks.append(net)
    elif level == "channel":
        networks = collections.OrderedDict()
        stations = collections.OrderedDict()

        for channel in converted_content:
            net, sta, loc, chan, lat, lng, ele, dep, azi, dip, inst, scale, \
                scale_freq, scale_units, s_r, st, et = channel

            if net not in networks:
                networks[net] = Network(code=net)

            if (net, sta) not in stations:
                station = Station(code=sta,
                                  latitude=lat,
                                  longitude=lng,
                                  elevation=ele)
                networks[net].stations.append(station)
                stations[(net, sta)] = station

            sensor = Equipment(type=inst)
            if scale is not None and scale_freq is not None:
                resp = Response(instrument_sensitivity=InstrumentSensitivity(
                    value=scale,
                    frequency=scale_freq,
                    input_units=scale_units,
                    output_units=None))
            else:
                resp = None
            try:
                channel = Channel(code=chan,
                                  location_code=loc,
                                  latitude=lat,
                                  longitude=lng,
                                  elevation=ele,
                                  depth=dep,
                                  azimuth=azi,
                                  dip=dip,
                                  sensor=sensor,
                                  sample_rate=s_r,
                                  start_date=st,
                                  end_date=et,
                                  response=resp)
            except Exception as e:
                warnings.warn(
                    "Failed to parse channel %s.%s.%s.%s due to: %s" %
                    (net, sta, loc, chan, str(e)), UserWarning)
                continue
            stations[(net, sta)].channels.append(channel)
        inv.networks.extend(list(networks.values()))
    else:
        # Cannot really happen - just a safety measure.
        raise NotImplementedError("Unknown level: %s" % str(level))
    return inv
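Since the reader above is essentially ObsPy's FDSN station-text parser, it can be exercised with a small pipe-delimited, station-level block. This sketch assumes the module-level names it references (unicode_csv_reader, native_str, station_components, station_types, ...) are in scope, as they are when the function lives in obspy.io.stationtxt.core; the network, station and coordinates are invented.

import io

text = (
    "#Network|Station|Latitude|Longitude|Elevation|SiteName|StartTime|EndTime\n"
    "XX|STA01|-20.0|135.0|300.0|Some site|2010-01-01T00:00:00|2020-01-01T00:00:00\n"
)
inv = read_fdsn_station_text_file(io.StringIO(text))
print(inv)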
Example #23
    def getInventory(self):
        """
        Extract an ObsPy inventory object from a Stream read in by gmprocess
        tools.
        """
        networks = [trace.stats.network for trace in self]
        if len(set(networks)) > 1:
            raise Exception(
                "Input stream has stations from multiple networks.")

        # We'll first create all the various objects. These strongly follow the
        # hierarchy of StationXML files.
        source = ''
        if 'standard' in self[0].stats and 'source' in self[0].stats.standard:
            source = self[0].stats.standard.source
        inv = Inventory(
            # We'll add networks later.
            networks=[],
            # The source should be the ID of whoever created the file.
            source=source)

        net = Network(
            # This is the network code according to the SEED standard.
            code=networks[0],
            # A list of stations. We'll add one later.
            stations=[],
            description="source",
            # Start-and end dates are optional.
        )
        channels = []
        for trace in self:
            logging.debug('trace: %s' % trace)
            channel = _channel_from_stats(trace.stats)
            channels.append(channel)

        subdict = {}
        for k in UNUSED_STANDARD_PARAMS:
            if k in self[0].stats.standard:
                subdict[k] = self[0].stats.standard[k]

        format_specific = {}
        if 'format_specific' in self[0].stats:
            format_specific = dict(self[0].stats.format_specific)

        big_dict = {'standard': subdict,
                    'format_specific': format_specific}
        try:
            jsonstr = json.dumps(big_dict)
        except Exception as e:
            raise GMProcessException('Exception in json.dumps: %s' % e)
        sta = Station(
            # This is the station code according to the SEED standard.
            code=self[0].stats.station,
            latitude=self[0].stats.coordinates.latitude,
            elevation=self[0].stats.coordinates.elevation,
            longitude=self[0].stats.coordinates.longitude,
            channels=channels,
            site=Site(name=self[0].stats.standard.station_name),
            description=jsonstr,
            creation_date=UTCDateTime(1970, 1, 1),  # this is bogus
            total_number_of_channels=len(self))

        net.stations.append(sta)
        inv.networks.append(net)

        return inv
Example #24
def main(argv):
    '''@package isc2stnxml
       It gathers station information from all STN files provided in the ISC and Engdahl catalogues, assigning the correct network code.
       When a proper network code cannot be identified, the program just guesses it, sorry...
    '''
    inv = read_inventory("IRIS-ALL.xml")

    # unknown stations in Indonesia are usually installed by Potsdam and we assume they have network name GE
    default_net = 'GE'
    ehb1 = read_eng('BMG.STN')
    ehb2 = read_eng('ISC.STN')
    ehb = np.unique(np.vstack((ehb1, ehb2)), axis=0)

    isc1 = read_isc('ehb.stn')
    isc2 = read_isc('iscehb.stn')
    isc = np.unique(np.vstack((isc1, isc2)), axis=0)

    catalogue = []
    our_xml = Inventory(networks=[], source='EHB')

    for i in range(ehb.shape[0]):
        filed = False
        xml = False
        stn_found = isc[isc[:, 0] == ehb[i, 0], :]
        min_dist = 10e10
        if stn_found.shape[0] > 0:
            if stn_found.shape[0] > 1:
                for j in range(stn_found.shape[0]):
                    dist = locations2degrees(float(stn_found[j, 2]),
                                             float(stn_found[j, 3]),
                                             float(ehb[i, 1]),
                                             float(ehb[i, 2]))
                    if dist < min_dist:
                        min_dist = dist
                        record = stn_found[j, :]
            else:
                min_dist = locations2degrees(float(stn_found[0, 2]),
                                             float(stn_found[0, 3]),
                                             float(ehb[i, 1]),
                                             float(ehb[i, 2]))
                record = stn_found[0, :]

#                Now we try to find the same station in XML file
#                if min_dist > 1. or stn_found.shape[0]==0:

        xstn_found = inv.select(station=ehb[i, 0], channel="*HZ")

        if len(stn_found) == 0 and len(xstn_found) == 0:
            # we failed to find the station anywhere, so assign dummy values
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'Z',
                '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            min_dist = 0.
            filed = True
        else:
            # if the station is found somewhere, iterate and see whether the XML has data for it, giving the XML preference by adding an extra value to the min_dist found in ISC
            if len(xstn_found) > 0:
                #                        print "----------",len(xstn_found)
                #                        print xstn_found[0][0].latitude
                min_dist = min_dist + 0.1
                for j in range(len(xstn_found)):
                    dist = locations2degrees(xstn_found[j][0].latitude,
                                             xstn_found[j][0].longitude,
                                             float(ehb[i, 1]),
                                             float(ehb[i, 2]))
                    if min_dist > dist:
                        min_dist = dist
                        record = xstn_found[j]
                        #                                print record
                        xml = True

        # last defence: if the station has been matched but the distance between the declared and found locations is more than 1 degree
        if min_dist > 1:
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'Z',
                '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            filed = True
        if xml:
            #our_xml.networks.append(record)
            xml = False

        else:
            if filed:

                if len(record[7]) < 5:
                    record[7] = '2599-12-31 23:59:59'
                catalogue.append(record)

            else:

                stn_found = isc[(isc[:, 0] == record[0]) &
                                (isc[:, 1] == record[1]), :]

                for k in range(stn_found.shape[0]):
                    net = Network(code=stn_found[k, 1],
                                  stations=[],
                                  description=' ')
                    if len(stn_found[k, 7]) < 5:
                        stn_found[k, 7] = '2599-12-31 23:59:59'
                    catalogue.append(stn_found[k, :])

    stn_found = np.unique(np.array(catalogue), axis=0)
    if len(stn_found[stn_found == '']) > 0 or len(
            stn_found[stn_found == ' ']) > 0:
        print("Some elements are empty, check the list")

    # we have composed our inventory; however, some stations from the ISC list may have been left behind, so we check whether any were forgotten
    lost = []
    for j in range(isc.shape[0]):
        # is there any common station name?
        common_st = stn_found[isc[j, 0] == stn_found[:, 0]]
        if common_st.shape[0] > 0:
            # is network code the same?
            common_net = common_st[common_st[:, 1] == isc[j, 1]]
            if common_net.shape[0] < 1:
                # ok we found forgotten one, check the XML
                if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                    # Bingo...
                    lost.append(isc[j, :])
        else:
            if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                # Bingo...
                lost.append(isc[j, :])

    stn_found = np.vstack((stn_found, np.array(lost)))

    for k in range(stn_found.shape[0]):

        net = Network(code=stn_found[k, 1], stations=[], description=' ')
        if len(stn_found[k, 7]) < 5:
            stn_found[k, 7] = '2599-12-31 23:59:59'
        catalogue.append(stn_found[k, :])
        sta = Station(code=stn_found[k, 0],
                      creation_date=utcdatetime.UTCDateTime(stn_found[k, 6]),
                      termination_date=utcdatetime.UTCDateTime(stn_found[k, 7]),
                      site=Site(name=' '),
                      latitude=float(stn_found[k, 2]),
                      longitude=float(stn_found[k, 3]),
                      elevation=float(stn_found[k, 4]))

        cha = Channel(code=stn_found[k, 5],
                      depth=0.,
                      azimuth=0.,
                      dip=-90.,
                      location_code='',
                      latitude=float(stn_found[k, 2]),
                      longitude=float(stn_found[k, 3]),
                      elevation=float(stn_found[k, 4]))

        sta.channels.append(cha)
        net.stations.append(sta)
        our_xml.networks.append(net)


#             print 'np',stn_found[k,:]

    our_xml.write("station.xml", format="stationxml", validate=True)
    our_xml.write("station.txt", format="stationtxt")
Example #25
def surf_4100_to_inv(location_file, response_inv, plot=False):
    """
    Combine the xyz Homestake locations and MMF calibration responses into
    an Inventory object for the 4100L
    """
    converter = SURF_converter()
    sta_df = pd.read_csv(location_file)
    inv = Inventory()
    serial_map = {'GMF1': '21010', 'GMF2': '21015', 'GMF3': '21027'}
    inv.networks = [Network(code='CB')]
    for _, row in sta_df.iterrows():
        print(row)
        sta_code = row['Sensor name']
        # Station location
        # Convert from SURF coords to lat lon, but keep local for actual use
        lon, lat, elev = converter.to_lonlat(
            (row['x_ft'] * 0.3048, row['y_ft'] * 0.3048, row['z_ft'] * 0.3048))
        print(lon, lat, elev)
        # Just leave as zero here and convert HMC feet elevation to m
        depth = 0.0
        # Save HMC coords to custom attributes of Station and Channel
        extra = AttribDict({
            'hmc_east': {
                'value': row['x_ft'],
                'namespace': 'smi:local/hmc'
            },
            'hmc_north': {
                'value': row['y_ft'],
                'namespace': 'smi:local/hmc'
            },
            'hmc_elev': {
                'value': row['z_ft'] * 0.3048,
                'namespace': 'smi:local/hmc'
            }
        })
        if sta_code.startswith('TS'):
            # Hydrophone or CASSM, wet well
            if 'SS' in sta_code:
                # Cassm (Y for unspecified instrument)
                chan_code = 'XY1'
                chans = [
                    Channel(code=chan_code,
                            location_code='',
                            latitude=lat,
                            longitude=lon,
                            elevation=elev,
                            depth=depth,
                            response=Response())
                ]
            else:
                # Hydrophone (D), Downhole (H) per SEED manual
                chan_code = 'XDH'
                chans = [
                    Channel(code=chan_code,
                            location_code='',
                            latitude=lat,
                            longitude=lon,
                            elevation=elev,
                            depth=depth,
                            response=Response())
                ]
        elif 'S' in sta_code:
            # Grouted CASSM
            chan_code = 'XY1'
            chans = [
                Channel(code=chan_code,
                        location_code='',
                        latitude=lat,
                        longitude=lon,
                        elevation=elev,
                        depth=depth,
                        response=Response())
            ]
        else:
            # Grouted accelerometer
            chans = []
            try:
                serial = serial_map[sta_code]
            except KeyError:
                serial = '9999'
            for chan_code in ['XNX', 'XNY', 'XNZ']:
                # Set samp_rate to 40 kHz so that Nyquist is below max shake f
                chan = Channel(code=chan_code,
                               location_code='',
                               latitude=lat,
                               longitude=lon,
                               elevation=elev,
                               depth=0.,
                               sample_rate=40000.,
                               sensor=Equipment(
                                   type='IEPE Accelerometer',
                                   description='Piezoelectric accelerometer',
                                   manufacturer='MMF',
                                   model='KS943B.100',
                                   serial_number=serial))
                # Use the average response measured for the tested sensors
                # for every accelerometer channel
                avg_resp = response_inv.select(
                    station='AVG', channel=chan_code)[0][0][0].response
                chan.response = avg_resp
                chans.append(chan)
        sta = Station(code=sta_code,
                      latitude=chans[0].latitude,
                      longitude=chans[0].longitude,
                      elevation=chans[0].elevation,
                      channels=chans)
        sta.extra = extra
        inv[0].stations.append(sta)
    return inv
Example #26
def get_inventory():
    # We'll first create all the various objects. These strongly follow the
    # hierarchy of StationXML files.
    inv = Inventory(
        # We'll add networks later.
        networks=[],
        # The source should be the ID of whoever created the file.
        source="ObsPy-Tutorial")

    net = Network(
        # This is the network code according to the SEED standard.
        code="US",
        # A list of stations. We'll add one later.
        stations=[],
        description="A test stations.",
        # Start-and end dates are optional.
        start_date=UTCDateTime(2016, 1, 2))

    sta = Station(
        # This is the station code according to the SEED standard.
        code="ABCD",
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        creation_date=UTCDateTime(2016, 1, 2),
        site=Site(name="First station"))

    cha1 = Channel(
        # This is the channel code according to the SEED standard.
        code="HN1",
        # This is the location code according to the SEED standard.
        location_code="11",
        # Note that these coordinates can differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=0.0,
        dip=-90.0,
        sample_rate=1)
    cha2 = Channel(
        # This is the channel code according to the SEED standard.
        code="HN2",
        # This is the location code according to the SEED standard.
        location_code="11",
        # Note that these coordinates can differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=90.0,
        dip=-90.0,
        sample_rate=1)
    cha3 = Channel(
        # This is the channel code according to the SEED standard.
        code="HNZ",
        # This is the location code according to the SEED standard.
        location_code="11",
        # Note that these coordinates can differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=0.0,
        dip=-90.0,
        sample_rate=1)

    # Now tie it all together.
    sta.channels.append(cha1)
    sta.channels.append(cha2)
    sta.channels.append(cha3)
    net.stations.append(sta)
    inv.networks.append(net)

    return inv
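As a follow-up, the inventory built above can be serialized to StationXML, mirroring the usual ObsPy tutorial step; the file name is arbitrary.

inv = get_inventory()
inv.write("station_from_scratch.xml", format="stationxml")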
Example #27
def stats2inv(stats, resp=None, filexml=None, locs=None):

    # We'll first create all the various objects. These strongly follow the
    # hierarchy of StationXML files.
    inv = Inventory(networks=[], source="japan_from_resp")

    if locs is None:
        net = Network(
            # This is the network code according to the SEED standard.
            code=stats.network,
            # A list of stations. We'll add one later.
            stations=[],
            description="Marine created from SAC and resp files",
            # Start-and end dates are optional.
            start_date=stats.starttime)

        sta = Station(
            # This is the station code according to the SEED standard.
            code=stats.station,
            latitude=stats.sac["stla"],
            longitude=stats.sac["stlo"],
            elevation=stats.sac["stel"],
            creation_date=stats.starttime,
            site=Site(name="First station"))

        cha = Channel(
            # This is the channel code according to the SEED standard.
            code=stats.channel,
            # This is the location code according to the SEED standard.
            location_code=stats.location,
            # Note that these coordinates can differ from the station coordinates.
            latitude=stats.sac["stla"],
            longitude=stats.sac["stlo"],
            elevation=stats.sac["stel"],
            depth=-stats.sac["stel"],
            azimuth=stats.sac["cmpaz"],
            dip=stats.sac["cmpinc"],
            sample_rate=stats.sampling_rate)

    else:
        ista = locs[locs['station'] == stats.station].index.values.astype(
            'int64')[0]

        net = Network(
            # This is the network code according to the SEED standard.
            code=locs.iloc[ista]["network"],
            # A list of stations. We'll add one later.
            stations=[],
            description="Marine created from SAC and resp files",
            # Start-and end dates are optional.
            start_date=stats.starttime)

        sta = Station(
            # This is the station code according to the SEED standard.
            code=locs.iloc[ista]["station"],
            latitude=locs.iloc[ista]["latitude"],
            longitude=locs.iloc[ista]["longitude"],
            elevation=locs.iloc[ista]["elevation"],
            creation_date=stats.starttime,
            site=Site(name="First station"))
        cha = Channel(
            # This is the channel code according to the SEED standard.
            code=stats.channel,
            # This is the location code according to the SEED standard.
            location_code=stats.location,
            # Note that these coordinates can differ from the station coordinates.
            latitude=locs.iloc[ista]["latitude"],
            longitude=locs.iloc[ista]["longitude"],
            elevation=locs.iloc[ista]["elevation"],
            depth=-locs.iloc[ista]["elevation"],
            azimuth=0,
            dip=0,
            sample_rate=stats.sampling_rate)

    response = obspy.core.inventory.response.Response()
    if resp is not None:
        print('i dont have the response')
    # By default this accesses the NRL online. Offline copies of the NRL can
    # also be used instead
    # nrl = NRL()
    # The contents of the NRL can be explored interactively in a Python prompt,
    # see API documentation of NRL submodule:
    # http://docs.obspy.org/packages/obspy.clients.nrl.html
    # Here we assume that the end point of data logger and sensor are already
    # known:
    #response = nrl.get_response( # doctest: +SKIP
    #    sensor_keys=['Streckeisen', 'STS-1', '360 seconds'],
    #    datalogger_keys=['REF TEK', 'RT 130 & 130-SMA', '1', '200'])

    # Now tie it all together.
    cha.response = response
    sta.channels.append(cha)
    net.stations.append(sta)
    inv.networks.append(net)

    # And finally write it to a StationXML file. We also force a validation against
    # the StationXML schema to ensure it produces a valid StationXML file.
    #
    # Note that it is also possible to serialize to any of the other inventory
    # output formats ObsPy supports.
    if filexml is not None:
        inv.write(filexml, format="stationxml", validate=True)

    return inv
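A hedged sketch of feeding stats2inv() above with hand-built SAC-style stats (the locs=None path). The header values are invented; a negative stel mimics the marine data mentioned in the function's comments, so the computed channel depth comes out positive.

import obspy
from obspy.core.trace import Stats
from obspy.core.util import AttribDict

stats = Stats()
stats.network = "XX"
stats.station = "STA01"
stats.channel = "BHZ"
stats.location = ""
stats.sampling_rate = 20.0
stats.starttime = obspy.UTCDateTime("2020-01-01")
stats.sac = AttribDict(stla=-20.0, stlo=135.0, stel=-1500.0, cmpaz=0.0, cmpinc=0.0)

inv = stats2inv(stats)
print(inv)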
Example #28
def getStation(stationBlock, units, transFuncs):
    ## Should probably do a check up here to see that the order given in block is consistent ##
    for entry in stationBlock:
        if entry.name == 'Station Identifier':
            #            print 'NewStation!',entry.station_call_letters
            staDict = {
                'code': entry.station_call_letters,
                'latitude': entry.latitude,
                'longitude': entry.longitude,
                'elevation': entry.elevation,
                'channels': [],
                'site': Site(entry.site_name),
                'creation_date':
                UTCDateTime(entry.start_effective_date),  # Allows for save
                'start_date': UTCDateTime(entry.start_effective_date),
                'end_date': UTCDateTime(entry.end_effective_date)
            }
            staNetCode = entry.network_code
        # If a new channel is found, reset the stages
        elif entry.name == 'Channel Identifier':
            #            print 'NewChannel!',entry.channel_identifier
            stages = []
            chaDict = {
                'code': entry.channel_identifier,
                'location_code': entry.location_identifier,
                'latitude': entry.latitude,
                'longitude': entry.longitude,
                'elevation': entry.elevation,
                'depth': entry.local_depth,
                'sample_rate': entry.sample_rate,
                'start_date': UTCDateTime(entry.start_date),
                'end_date': UTCDateTime(entry.end_date),
                'azimuth': entry.azimuth,
                'dip': entry.dip
            }
        #code, location_code, latitude, longitude, elevation, depth
        # If on a new stage, set up the dictionary again
        # ...paz stage
        elif entry.name == 'Response Poles and Zeros':
            # Get units
            stageReqs = {}
            #            print entry.name,entry.stage_sequence_number
            #            print entry
            #            quit()
            stageReqs['input_units'] = units[entry.stage_signal_input_units]
            stageReqs['output_units'] = units[entry.stage_signal_output_units]
            # Collect the poles and zeros
            lastType = 'paz'
            if entry.number_of_complex_zeros == 0:
                zeros = np.array([], dtype=float)
            else:
                zeros = np.array(entry.real_zero, dtype=float) + np.array(
                    entry.imaginary_zero, dtype=float) * 1j
            if entry.number_of_complex_poles == 0:
                poles = np.array([], dtype=float)
            else:
                poles = np.array(entry.real_pole, dtype=float) + np.array(
                    entry.imaginary_pole, dtype=float) * 1j
            # Form the paz response dictionary (also ensure arrays are 1D)
            pazDict = {
                'pz_transfer_function_type':
                transFuncs[entry.transfer_function_types],
                'normalization_factor':
                entry.A0_normalization_factor,
                'normalization_frequency':
                entry.normalization_frequency,
                'zeros':
                setArrDim(zeros),
                'poles':
                setArrDim(poles)
            }
        # ...coeff stage
        elif entry.name == 'Response Coefficients':
            # Get units
            stageReqs = {}
            #            print entry.name,entry.stage_sequence_number
            stageReqs['input_units'] = units[entry.signal_input_units]
            stageReqs['output_units'] = units[entry.signal_output_units]
            # Collect the coefficients
            lastType = 'coef'
            if entry.number_of_denominators == 0:
                denom = np.array([], dtype=float)
                denomErr = np.array([], dtype=float)
            else:
                denom = np.array(entry.denominator_coefficient, dtype=float)
                denomErr = np.array(entry.denominator_error, dtype=float)
            if entry.number_of_numerators == 0:
                numer = np.array([], dtype=float)
                numerErr = np.array([], dtype=float)
            else:
                numer = np.array(entry.numerator_coefficient, dtype=float)
                numerErr = np.array(entry.numerator_error, dtype=float)
            # Convert these arrays into lists of numbers which have uncertainty (also ensure arrays are 1D)
            denomArr = genArrWithUncertainty(setArrDim(denom),
                                             setArrDim(denomErr))
            numerArr = genArrWithUncertainty(setArrDim(numer),
                                             setArrDim(numerErr))
            # Form the coefficient response dictionary
            coefDict = {
                'cf_transfer_function_type': transFuncs[entry.response_type],
                'numerator': numerArr,
                'denominator': denomArr
            }
        # Get the decimation sampling info
        elif entry.name == 'Decimation':
            #            print entry.name,entry.stage_sequence_number
            stageReqs['decimation_input_sample_rate'] = Frequency(
                entry.input_sample_rate)
            stageReqs['decimation_factor'] = entry.decimation_factor
            stageReqs['decimation_offset'] = entry.decimation_offset
            stageReqs['decimation_delay'] = FloatWithUncertaintiesAndUnit(
                entry.estimated_delay)
            stageReqs['decimation_correction'] = FloatWithUncertaintiesAndUnit(
                entry.correction_applied)
        # Get the stage sensitivity
        elif entry.name == 'Channel Sensitivity Gain':
            #            print entry.name,entry.stage_sequence_number
            if entry.stage_sequence_number != 0:
                stageReqs[
                    'stage_sequence_number'] = entry.stage_sequence_number
                stageReqs['stage_gain'] = entry.sensitivity_gain
                stageReqs['stage_gain_frequency'] = entry.frequency
                # See what type of stage this was
                if lastType == 'paz':
                    pazDict.update(stageReqs)
                    stages.append(PolesZerosResponseStage(**pazDict))
                else:
                    coefDict.update(stageReqs)
                    stages.append(CoefficientsTypeResponseStage(**coefDict))
            # Stage sequence number 0 is the overall sensitivity; send off the collected stage info
            else:
                if len(stages) > 0:
                    instrSens = InstrumentSensitivity(entry.sensitivity_gain,
                                                      entry.frequency,
                                                      stages[0].input_units,
                                                      stages[-1].output_units)
                    # Finalize the channel dictionary, and append this channel to the station dictionary
                    chaResp = Response(response_stages=stages,
                                       instrument_sensitivity=instrSens)
                    chaDict['response'] = chaResp
                staDict['channels'].append(Channel(**chaDict))
    # Return the station to the caller (along with its network code)
    return Station(**staDict), staNetCode
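For reference, a minimal standalone sketch of the same ObsPy response-building pattern used in getStation above: one PolesZerosResponseStage plus an overall InstrumentSensitivity wrapped into a Response. All numeric values (gains, frequencies, poles, zeros) are made up for illustration only:

import numpy as np
from obspy.core.inventory.response import (
    InstrumentSensitivity, PolesZerosResponseStage, Response)

# Illustrative, made-up poles and zeros for a velocity sensor.
zeros = np.array([0 + 0j, 0 + 0j])
poles = np.array([-0.037 + 0.037j, -0.037 - 0.037j])

paz_stage = PolesZerosResponseStage(
    stage_sequence_number=1,
    stage_gain=1500.0,
    stage_gain_frequency=1.0,
    input_units='M/S',
    output_units='COUNTS',
    pz_transfer_function_type='LAPLACE (RADIANS/SECOND)',
    normalization_frequency=1.0,
    zeros=zeros,
    poles=poles,
    normalization_factor=1.0)

# Overall sensitivity of the whole chain (made-up value).
overall = InstrumentSensitivity(
    value=6.0e8, frequency=1.0,
    input_units='M/S', output_units='COUNTS')

resp = Response(response_stages=[paz_stage],
                instrument_sensitivity=overall)
print(resp)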
Example #29
0
def surf_stations_to_inv(excel_file, debug=0):
    """
    Take Petr's orientation excel file for the hydrophones/accelerometers
    and build an inventory for later use.

    A short usage sketch follows the function definition.
    :param excel_file: path to Petr's excel file (formatting hard-coded)
    :return: obspy.core.inventory.Inventory
    """
    # Call coordinate converter
    converter = SURF_converter()
    sta_df = pd.read_excel(excel_file,
                           skiprows=[0, 1, 2, 3],
                           header=1,
                           nrows=90)
    # Assemble dictionary of {station: {channel: infoz}}
    # Create dict before, then build inventory from channel level upwards
    sta_dict = {}
    extra_dict = {}
    for i, row in sta_df.iterrows():
        # Station location
        # Convert from SURF coords to lat lon, but keep local for actual use
        lon, lat, elev = converter.to_lonlat(
            (row['Easting(m)'], row['Northing(m)'], row['Elev(m)']))
        # Correct for arbitrary zero 'depth' of 130m
        elev -= 130
        # Already accounted for in the elevation, but included here as it's
        # a required argument for Channel()
        depth = row['Depth (m)']
        # Save HMC coords to custom attributes of Station and Channel
        extra = AttribDict({
            'hmc_east': {
                'value': row['Easting(m)'],
                'namespace': 'smi:local/hmc'
            },
            'hmc_north': {
                'value': row['Northing(m)'],
                'namespace': 'smi:local/hmc'
            },
            'hmc_elev': {
                'value': row['Elev(m)'],  # extra will preserve absolute elev
                'namespace': 'smi:local/hmc'
            }
        })
        # Sort out azimuth and dip for this channel (if it exists)
        if not np.isnan(row['Sx']):
            # TODO: the sign/quadrant handling below is convoluted, but the resulting answers are correct.
            dip_rad = np.arcsin(-row['Sz'])
            az_rad = np.arcsin(row['Sx'] / np.cos(dip_rad))
            dip = np.rad2deg(dip_rad)
            az = np.rad2deg(az_rad)
            # Force positive
            if az < 0:
                az += 360.
            # Correct
            if row['Sx'] < 0 and row['Sy'] < 0:
                az -= 270.
                az = 270. - az
            elif row['Sy'] < 0:
                az = 180 - az
            if debug > 0:
                print(np.array((row['Sx'], row['Sy'], row['Sz'])))
                print(az, dip)
        if row['Sensor'].endswith(('Z', 'X', 'Y')):
            chan = 'XN{}'.format(row['Sensor'][-1])
            # Geophones
            if row['Sensor'].startswith('G'):
                continue
            # Accelerometers
            else:
                no = row['Sensor'].split('_')[1]
            sta_name = '{}{}'.format(row['Desc'], no)
            if sta_name in ['OB14', 'OT17', 'PDT2', 'PDT5', 'PSB8', 'PST11']:
                # These are geode stations only, skip
                continue
            channel = Channel(code=chan,
                              location_code='',
                              latitude=lat,
                              longitude=lon,
                              elevation=elev,
                              depth=depth,
                              azimuth=az,
                              dip=dip,
                              response=Response())
            # channel.extra = extra
        elif row['Sensor'].startswith('Hydro'):
            chan = 'XN1'
            sta_name = '{}{}'.format(row['Desc'],
                                     row['Sensor'].split('-')[-1].zfill(2))
            channel = Channel(code=chan,
                              location_code='',
                              latitude=lat,
                              longitude=lon,
                              elevation=elev,
                              depth=depth,
                              response=Response())
        extra_dict[sta_name] = extra
        # channel.extra = extra
        if sta_name in sta_dict.keys():
            sta_dict[sta_name].append(channel)
        else:
            sta_dict[sta_name] = [channel]
    # Now loop station dict to create inventory
    stas = []
    for nm, chans in sta_dict.items():
        station = Station(code=nm,
                          latitude=chans[0].latitude,
                          longitude=chans[0].longitude,
                          elevation=chans[0].elevation,
                          channels=chans)
        station.extra = extra_dict[nm]
        stas.append(station)
    # Build inventory
    inventory = Inventory(networks=[Network(code='SV', stations=stas)],
                          source='SURF')
    return inventory
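A short, hedged usage sketch for the function above; the spreadsheet and output file names are placeholders. The returned Inventory behaves like any other ObsPy inventory and can be written to StationXML:

inv = surf_stations_to_inv('SURF_sensor_orientations.xlsx')
print(inv)
inv.write('SURF_array.xml', format='STATIONXML')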
Example #30
0
# We'll first create all the various objects. These strongly follow the
# hierarchy of StationXML files.
inv = Inventory(networks=[], source="MT Test")

net = Network(
    code="MT",
    # A list of stations. We'll add one later.
    stations=[],
    description="Test stations.",
    # Start-and end dates are optional.
    start_date=obspy.UTCDateTime(2016, 1, 2))
inv.networks.append(net)

for row, station_df in survey_df.iterrows():
    sta = Station(code=station_df['siteID'],
                  latitude=station_df['lat'],
                  longitude=station_df['lon'],
                  elevation=station_df['nm_elev'],
                  creation_date=obspy.UTCDateTime(2016, 1, 2),
                  site=Site(name=station_df['siteID']))

    for comp in ['ex', 'ey', 'hx', 'hy', 'hz']:
        if station_df['{0}_azm'.format(comp)] is not None:
            if 'h' in comp:
                cha = Channel(code=comp.upper(),
                              location_code="",
                              latitude=station_df['lat'],
                              longitude=station_df['lon'],
                              elevation=station_df['nm_elev'],
                              depth=0,
                              azimuth=station_df['{0}_azm'.format(comp)],
                              dip=0,
                              sample_rate=station_df['sampling_rate'])