Code Example #1
File: create_staxml.py  Project: crotwell/StationXML
from obspy import UTCDateTime
from obspy.clients.nrl import NRL
from obspy.core.inventory import Channel, Inventory, Network, Site, Station


def do_xml():
    nrl = NRL('http://ds.iris.edu/NRL/')
    datalogger_keys = ['REF TEK', 'RT 130 & 130-SMA', '1', '40']
    sensor_keys = ['Streckeisen', 'STS-2', '1500', '3 - installed 04/97 to present']

    response = nrl.get_response(sensor_keys=sensor_keys, datalogger_keys=datalogger_keys)

    channel = Channel(code='BHZ',
                      location_code='10',      # required
                      latitude=0,      # required
                      longitude=0,   # required
                      elevation=0.0,        # required
                      depth=0.,                # required
                      )

    channel.response = response
    station = Station(code='ABCD',
                      latitude=0,
                      longitude=0,
                      elevation=0.0,
                      creation_date=UTCDateTime(1970, 1, 1),          # required
                      site=Site(name='Fake Site'),  # required
                      channels=[channel],
                      )

    network = Network(code='XX',
                     stations=[station])
    inventory = Inventory(networks=[network], source="demo")

    inventory.write("Test.xml", format="stationxml", validate=True)
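
A quick way to sanity-check the file written above is to read it back and inspect the attached response. A minimal sketch, assuming the Test.xml produced by do_xml() (codes match the example: network XX, station ABCD, channel BHZ):

from obspy import read_inventory

inv = read_inventory("Test.xml")
print(inv)                    # summary of networks, stations and channels
print(inv[0][0][0].response)  # the NRL-built response attached to BHZ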
Code Example #2
def create_inv(network_code, station_code, location_code, channel_code, isr,
               sf, u):
    writethisinv = Inventory(
        networks=[
            Network(code=network_code,
                    start_date=obspy.UTCDateTime('2007-01-01'),
                    stations=[
                        Station(
                            code=station_code,
                            latitude=1,
                            longitude=2,
                            elevation=3,
                            creation_date=obspy.UTCDateTime('2007-01-01'),
                            site=Site(name='site'),
                            channels=[
                                Channel(
                                    code=channel_code,
                                    location_code=location_code,
                                    start_date=obspy.UTCDateTime('2007-01-01'),
                                    latitude=1,
                                    longitude=2,
                                    elevation=3,
                                    depth=4,
                                    response=create_response(
                                        inputsamplerate=isr,
                                        scaling_factor=sf,
                                        units=u))
                            ])
                    ])
        ],
        source='Joseph Farrugia, Ocean Networks Canada',  # The source should be the ID of whoever created the file.
        created=obspy.UTCDateTime(datetime.today()))
    return writethisinv
Code Example #3
 def set_station(self):
     self.station_raw = self.get_station()
     self.station = Station(code=self.station_raw["name"],
                            latitude=self.station_raw["latitude"],
                            longitude=self.station_raw["longitude"],
                            elevation=self.station_raw["elevation"],
                            creation_date=obspy.UTCDateTime(
                                self.station_raw["creation_date"]),
                            site=Site(name=self.station_raw["site_name"]))
Code Example #4
def get_inventory(stations,
                  depths,
                  lat=50.45031,
                  long=-112.12087,
                  elevation=779.0,
                  dip1=0,
                  azi1=0,
                  dip2=0,
                  azi2=90,
                  dip3=90,
                  azi3=0):
    inv = Inventory(networks=[], source="Genevieve")
    net = Network(code="BH",
                  stations=[],
                  description=" ",
                  start_date=UTCDateTime(2019, 1, 1))
    for i, station in enumerate(stations):
        dep = depths[i]
        sta = Station(code=station,
                      latitude=lat,
                      longitude=long,
                      elevation=elevation,
                      creation_date=UTCDateTime(2019, 1, 1),
                      site=Site(name="borehole"))
        chaz = Channel(code="DPZ",
                       location_code="",
                       latitude=lat,
                       longitude=long,
                       elevation=elevation,
                       azimuth=azi3,
                       dip=dip3,
                       depth=dep,
                       sample_rate=500)
        cha1 = Channel(code="DPN",
                       location_code="",
                       latitude=lat,
                       longitude=long,
                       elevation=elevation,
                       azimuth=azi1,
                       dip=dip1,
                       depth=dep,
                       sample_rate=500)
        cha2 = Channel(code="DPE",
                       location_code="",
                       latitude=lat,
                       longitude=long,
                       elevation=elevation,
                       azimuth=azi2,
                       dip=dip2,
                       depth=dep,
                       sample_rate=500)
        sta.channels.append(chaz)
        sta.channels.append(cha1)
        sta.channels.append(cha2)
        net.stations.append(sta)
    inv.networks.append(net)
    return inv
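
A usage sketch for get_inventory() above; the station codes and depths are made up. Each station receives DPZ/DPN/DPE channels whose depth is taken from the matching entry of depths:

inv = get_inventory(stations=["BH01", "BH02"], depths=[10.0, 25.0])
inv.write("borehole_array.xml", format="stationxml")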
Code Example #5
def _dataframe_to_station(statcode, station_df, instrument_register=None):
    """
    Convert Pandas dataframe with unique station code to obspy Station object.

    :param statcode: Station code
    :type statcode: str
    :param station_df: Dataframe containing records for a single station code.
    :type station_df: pandas.DataFrame conforming to table_format.TABLE_SCHEMA
    :param instrument_register: Dictionary of nominal instrument responses indexed by channel code, defaults to None
    :type instrument_register: dict of {str, Instrument(obspy.core.inventory.util.Equipment,
        obspy.core.inventory.response.Response)}, optional
    :return: Station object containing the station information from the dataframe
    :rtype: obspy.core.inventory.station.Station
    """
    station_data = station_df.iloc[0]
    st_start = station_data['StationStart']
    assert pd.notnull(st_start)
    st_start = utcdatetime.UTCDateTime(st_start)
    st_end = station_data['StationEnd']
    assert pd.notnull(st_end)
    st_end = utcdatetime.UTCDateTime(st_end)
    station = Station(statcode,
                      station_data['Latitude'],
                      station_data['Longitude'],
                      station_data['Elevation'],
                      start_date=st_start, creation_date=st_start,
                      end_date=st_end, termination_date=st_end,
                      site=Site(name=' '))
    for _, d in station_df.iterrows():
        ch_start = d['ChannelStart']
        ch_start = utcdatetime.UTCDateTime(ch_start) if not pd.isnull(ch_start) else None
        ch_end = d['ChannelEnd']
        ch_end = utcdatetime.UTCDateTime(ch_end) if not pd.isnull(ch_end) else None
        ch_code = d['ChannelCode']
        instrument = instrument_register[ch_code]
        if instrument is not None:
            sensor = instrument.sensor
            response = instrument.response
        elif 'LAST_RESORT' in instrument_register:
            last_resort = instrument_register['LAST_RESORT']
            sensor = last_resort.sensor
            response = last_resort.response
        else:
            sensor = None
            response = None
        cha = Channel(ch_code, '', float(d['Latitude']), float(d['Longitude']), float(d['Elevation']),
                      depth=0.0, azimuth=0.0, dip=-90.0, sample_rate=0.0, clock_drift_in_seconds_per_sample=0.0,
                      start_date=ch_start, end_date=ch_end, sensor=sensor, response=response)
        station.channels.append(cha)
    return station
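
A minimal usage sketch for _dataframe_to_station(); the column names are read off the code above and the values are invented. Note that the channel-code lookup is a plain dict index, so the register passed in must contain an entry for every channel code in the table (here mapped to None, i.e. no sensor or response):

import pandas as pd

df = pd.DataFrame([{
    'StationStart': '2010-01-01', 'StationEnd': '2599-12-31',
    'Latitude': -20.5, 'Longitude': 133.1, 'Elevation': 380.0,
    'ChannelStart': '2010-01-01', 'ChannelEnd': '2599-12-31',
    'ChannelCode': 'BHZ',
}])
sta = _dataframe_to_station('TEST', df, instrument_register={'BHZ': None})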
Code Example #6
File: clone_inv.py  Project: tieganh/med_spec
def clone_inv(inv, net_name, sta_name):

    net = Network(
        # This is the network code according to the SEED standard.
        code=net_name,
        # A list of stations. We'll add one later.
        stations=[],
        #        description="A test stations.",
        # Start- and end dates are optional.
        #        start_date=obspy.UTCDateTime(2016, 1, 2))
    )

    sta = Station(
        # This is the station code according to the SEED standard.
        code=sta_name,
        latitude=inv[0][0].latitude,
        longitude=inv[0][0].longitude,
        elevation=inv[0][0].elevation,
        creation_date=obspy.UTCDateTime(2016, 1, 2),
        site=Site(name="station with cloned inv"))

    cha = Channel(
        # This is the channel code according to the SEED standard.
        code="HHZ",
        # This is the location code according to the SEED standard.
        location_code="",
        # Note that these coordinates can differ from the station coordinates.
        start_date=inv[0][0][0].start_date,
        latitude=inv[0][0][0].latitude,
        longitude=inv[0][0][0].longitude,
        elevation=inv[0][0][0].elevation,
        depth=inv[0][0][0].depth,
        #        azimuth=0.0,
        #        dip=-90.0,
        sample_rate=inv[0][0][0].sample_rate)

    # Now tie it all together.
    cha.response = inv[0][0][0].response  #response
    sta.channels.append(cha)
    net.stations.append(sta)
    inv.networks.append(net)

    return inv
Code Example #7
 def set_station(self,
                 p_name="",
                 p_latitude=0,
                 p_longitude=0,
                 p_elevation=0,
                 p_creation_date="2020,2,1,0,0,0.00",
                 p_site_name=""):
     self.station_raw = {
         "name": p_name,
         "latitude": p_latitude,
         "longitude": p_longitude,
         "elevation": p_elevation,
         "creation_date": p_creation_date,
         "site_name": p_site_name
     }
     self.station = Station(code=self.station_raw["name"],
                            latitude=self.station_raw["latitude"],
                            longitude=self.station_raw["longitude"],
                            elevation=self.station_raw["elevation"],
                            creation_date=obspy.UTCDateTime(
                                self.station_raw["creation_date"]),
                            site=Site(name=self.station_raw["site_name"]))
Code Example #8
def staCsv2Xml(staCsvPath, staXmlPath, source='Lazylyst'):
    # Load the csv file
    info = np.genfromtxt(staCsvPath, delimiter=',', dtype=str)
    # For each network...
    networks = []
    unqNets = np.unique(info[:, 5])
    for net in unqNets:
        netInfo = info[np.where(info[:, 5] == net)]
        # ...gather its stations
        stations = []
        for entry in netInfo:
            stations.append(
                Station(entry[0],
                        entry[1],
                        entry[2],
                        entry[3],
                        site=Site(''),
                        creation_date=UTCDateTime(1970, 1, 1)))
        networks.append(Network(net, stations=stations))
    # Generate the inventory object, and save it as a station XML
    inv = Inventory(networks=networks, source=source)
    inv.write(staXmlPath, format='stationxml', validate=True)
Code Example #9
File: utils.py  Project: wjlei1990/pytomo3d
def create_simple_inventory(network, station, latitude=None, longitude=None,
                            elevation=None, depth=None, start_date=None,
                            end_date=None, location_code="S3",
                            channel_code="MX"):
    """
    Create simple inventory with only location information,
    for ZNE component, especially usefull for synthetic data
    """
    azi_dict = {"MXZ": 0.0,  "MXN": 0.0, "MXE": 90.0}
    dip_dict = {"MXZ": 90.0, "MXN": 0.0, "MXE": 0.0}
    channel_list = []

    if start_date is None:
        start_date = UTCDateTime(0)

    # specfem default channel code is MX
    for _comp in ["Z", "E", "N"]:
        _chan_code = "%s%s" % (channel_code, _comp)
        chan = Channel(_chan_code, location_code, latitude=latitude,
                       longitude=longitude, elevation=elevation,
                       depth=depth, azimuth=azi_dict[_chan_code],
                       dip=dip_dict[_chan_code], start_date=start_date,
                       end_date=end_date)
        channel_list.append(chan)

    site = Site("N/A")
    sta = Station(station, latitude=latitude, longitude=longitude,
                  elevation=elevation, channels=channel_list, site=site,
                  creation_date=start_date, total_number_of_channels=3,
                  selected_number_of_channels=3)

    nw = Network(network, stations=[sta, ], total_number_of_stations=1,
                 selected_number_of_stations=1)

    inv = Inventory([nw, ], source="SPECFEM3D_GLOBE", sender="Princeton",
                    created=UTCDateTime.now())

    return inv
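
A usage sketch for create_simple_inventory() with made-up codes and coordinates; it yields MXZ/MXN/MXE channels at location code "S3" with the nominal azimuths and dips defined above, which is enough to attach coordinates to synthetic traces:

inv = create_simple_inventory("XX", "SYN01",
                              latitude=10.0, longitude=20.0,
                              elevation=0.0, depth=0.0)
inv.write("SYN01_synthetic.xml", format="stationxml")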
Code Example #10
def main(argv):
    inv = read_inventory("IRIS-ALL.xml")
    # if os.path.exists("IRIS-ALL.pkl"): # doesn't work on CentOS for some reason
    #     with open('IRIS-ALL.pkl', 'rb') as f:
    #         import cPickle as pkl
    #         inv = pkl.load(f)
    # else:
    #     inv = read_inventory("IRIS-ALL.xml")
    #     with open('IRIS-ALL.pkl', 'wb') as f:
    #         import pickle as pkl
    #         pkl.dump(inv, f, pkl.HIGHEST_PROTOCOL)
    sensorDict, responseDict = extract_unique_sensors_responses(inv)
    print('\nFound {0} response objects with keys: {1}'.format(len(responseDict.keys()), responseDict.keys()))

    # unknown stations in Indonesia are usually installed by Potsdam, so we assume they have network code GE
    default_net = 'GE'
    ehb1 = read_eng('BMG.STN')
    ehb2 = read_eng('ISC.STN')
    ehb = np.unique(np.vstack((ehb1, ehb2)), axis=0)

    isc1 = read_isc('ehb.stn')
    isc2 = read_isc('iscehb.stn')
    isc = np.unique(np.vstack((isc1, isc2)), axis=0)

    catalogue = []
    for i in xrange(ehb.shape[0]):
        filed = False
        xml = False
        stn_found = isc[isc[:, 0] == ehb[i, 0], :]
        min_dist = 10e10
        if stn_found.shape[0] > 0:
            if stn_found.shape[0] > 1:
                for j in xrange(stn_found.shape[0]):
                    dist = locations2degrees(np.float(stn_found[j, 2]), np.float(stn_found[j, 3]), np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                    if dist < min_dist:
                        min_dist = dist
                        record = stn_found[j, :]
            else:
                min_dist = locations2degrees(np.float(stn_found[0, 2]), np.float(stn_found[0, 3]), np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                record = stn_found[0, :]

            #                Now we try to find the same station in XML file
            #                if min_dist > 1. or stn_found.shape[0]==0:

        xstn_found = inv.select(station=ehb[i, 0], channel="*HZ")

        if len(stn_found) == 0 and len(xstn_found) == 0:
            # we failed to find the station anywhere, so assign dummy values
            record = [ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'SHZ', '1964-1-1 00:00:00',
                      '2599-12-31 23:59:59']
            min_dist = 0.
            filed = True
        else:
            # if the station is found somewhere, we iterate and see if the XML has data, giving it preference by adding an extra value to the min_dist found in ISC
            if len(xstn_found) > 0:
                #                        print "----------",len(xstn_found)
                #                        print xstn_found[0][0].latitude
                min_dist = min_dist + 0.1
                for j in xrange(len(xstn_found)):
                    dist = locations2degrees(xstn_found[j][0].latitude, xstn_found[j][0].longitude, np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                    if min_dist > dist:
                        min_dist = dist
                        record = xstn_found[j]
                        #                                print record
                        xml = True

                    # last defence: if the station has been found but the distance between the declared and found locations is more than 1 degree
        if min_dist > 1:
            record = [ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'SHZ', '1964-1-1 00:00:00',
                      '2599-12-31 23:59:59']
            filed = True
        if xml:
            xml = False

        else:
            if filed:

                if len(record[7]) < 5:
                    record[7] = '2599-12-31 23:59:59'
                catalogue.append(record)

            else:

                stn_found = isc[(isc[:, 0] == record[0]) & (isc[:, 1] == record[1]), :]

                for k in xrange(stn_found.shape[0]):
                    net = Network(code=stn_found[k, 1], stations=[], description=' ')
                    if len(stn_found[k, 7]) < 5:
                        stn_found[k, 7] = '2599-12-31 23:59:59'
                    catalogue.append(stn_found[k, :])

    stn_found = np.unique(np.array(catalogue), axis=0)
    if len(stn_found[stn_found == '']) > 0 or len(stn_found[stn_found == ' ']) > 0:
        print "Some elements are empty, check the list"

    # we have composed our inventory; however, some stations from the ISC list may have been left behind, so we check whether any ISC stations were forgotten
    lost = []
    for j in xrange(isc.shape[0]):
        # is there any common station name?
        common_st = stn_found[isc[j, 0] == stn_found[:, 0]]
        if common_st.shape[0] > 0:
            # is network code the same?
            common_net = common_st[common_st[:, 1] == isc[j, 1]]
            if common_net.shape[0] < 1:
                # ok we found forgotten one, check the XML
                if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                    # Bingo...
                    lost.append(isc[j, :])
        else:
            if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                # Bingo...
                lost.append(isc[j, :])

    stn_found = np.vstack((stn_found, np.array(lost)))

    netDict = defaultdict(list)
    for k in xrange(stn_found.shape[0]):
        result = inv.select(network=stn_found[k, 1])
        if (len(result.networks)):
            net = result.networks[0]
            net.stations = []
        else:
            net = Network(code=stn_found[k, 1], stations=[], description=' ')

        # print stn_found[k, 1]

        if len(stn_found[k, 7]) < 5:
            stn_found[k, 7] = '2599-12-31 23:59:59'
        catalogue.append(stn_found[k, :])
        sta = Station(code=stn_found[k, 0], creation_date=utcdatetime.UTCDateTime(stn_found[k, 6]), \
                      termination_date=utcdatetime.UTCDateTime(stn_found[k, 7]), \
                      site=Site(name=' '), \
                      latitude=np.float(stn_found[k, 2]), \
                      longitude=np.float(stn_found[k, 3]), \
                      elevation=np.float(stn_found[k, 4]))

        if (stn_found[k, 5] in responseDict.keys()):
            r = responseDict[stn_found[k, 5]]

            cha = Channel(code=stn_found[k, 5], \
                          depth=0., \
                          azimuth=0., \
                          dip=-90., \
                          location_code='', \
                          latitude=np.float(stn_found[k, 2]), \
                          longitude=np.float(stn_found[k, 3]), \
                          elevation=np.float(stn_found[k, 4]), \
                          # sensor=sensorDict[stn_found[k,5]], \
                          response=r)

            sta.channels.append(cha)

            if (type(netDict[stn_found[k, 1]]) == Network):
                netDict[stn_found[k, 1]].stations.append(sta)
            else:
                net.stations.append(sta)
                netDict[stn_found[k, 1]] = net

            #                 print 'np',stn_found[k,:]
            # end if

    our_xml = Inventory(networks=netDict.values(), source='EHB')

    print 'Writing output files..'
    for inet, net in enumerate(our_xml.networks):
        currInv = Inventory(networks=[net], source='EHB')
        currInv.write("output/station.%d.xml" % (inet), format="stationxml", validate=True)

    # our_xml.write("station.xml",format="stationxml", validate=True)
    our_xml.write("station.txt", format="stationtxt")
Code Example #11
                # see if station is already in the station inv dictionary
                if new_station in station_inventory_dict.keys():
                    # the station inventory is already in the dict; get it and append the channel info
                    sta_inv = station_inventory_dict[new_station]
                    # add channel inventory
                    sta_inv.channels.append(select_inv[0][0][0])
                else:
                    # create the station inventory
                    sta_inv = Station(code=new_station,
                                      creation_date=select_inv[0][0].creation_date,
                                      start_date=select_inv[0][0].start_date,
                                      end_date=select_inv[0][0].end_date,
                                      latitude=select_inv[0][0].latitude,
                                      longitude=select_inv[0][0].longitude,
                                      elevation=select_inv[0][0].elevation,
                                      site=Site(new_station))


                    sta_inv.channels.append(select_inv[0][0][0])

                    # append it to the station inventory dict
                    station_inventory_dict[new_station] = sta_inv



                #
                # # lookup table for channel info:
                # channel_lookup_dict = {"HZ": (),
                #                        "HN":,}

Code Example #12
def get_inventory():
    # We'll first create all the various objects. These strongly follow the
    # hierarchy of StationXML files.
    inv = Inventory(
        # We'll add networks later.
        networks=[],
        # The source should be the ID of whoever created the file.
        source="ObsPy-Tutorial")

    net = Network(
        # This is the network code according to the SEED standard.
        code="US",
        # A list of stations. We'll add one later.
        stations=[],
        description="A test stations.",
        # Start- and end dates are optional.
        start_date=UTCDateTime(2016, 1, 2))

    sta = Station(
        # This is the station code according to the SEED standard.
        code="ABCD",
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        creation_date=UTCDateTime(2016, 1, 2),
        site=Site(name="First station"))

    cha1 = Channel(
        # This is the channel code according to the SEED standard.
        code="HN1",
        # This is the location code according to the SEED standard.
        location_code="11",
        # Note that these coordinates can differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=0.0,
        dip=-90.0,
        sample_rate=1)
    cha2 = Channel(
        # This is the channel code according to the SEED standard.
        code="HN2",
        # This is the location code according to the SEED standard.
        location_code="11",
        # Note that these coordinates can differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=90.0,
        dip=-90.0,
        sample_rate=1)
    cha3 = Channel(
        # This is the channel code according to the SEED standard.
        code="HNZ",
        # This is the location code according to the SEED standard.
        location_code="11",
        # Note that these coordinates can differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=0.0,
        dip=-90.0,
        sample_rate=1)

    # Now tie it all together.
    sta.channels.append(cha1)
    sta.channels.append(cha2)
    sta.channels.append(cha3)
    net.stations.append(sta)
    inv.networks.append(net)

    return inv
Code Example #13
net = Network(
    code="MT",
    # A list of stations. We'll add one later.
    stations=[],
    description="Test stations.",
    # Start- and end dates are optional.
    start_date=obspy.UTCDateTime(2016, 1, 2))
inv.networks.append(net)

for row, station_df in survey_df.iterrows():
    sta = Station(code=station_df['siteID'],
                  latitude=station_df['lat'],
                  longitude=station_df['lon'],
                  elevation=station_df['nm_elev'],
                  creation_date=obspy.UTCDateTime(2016, 1, 2),
                  site=Site(name=station_df['siteID']))

    for comp in ['ex', 'ey', 'hx', 'hy', 'hz']:
        if station_df['{0}_azm'.format(comp)] is not None:
            if 'h' in comp:
                cha = Channel(code=comp.upper(),
                              location_code="",
                              latitude=station_df['lat'],
                              longitude=station_df['lon'],
                              elevation=station_df['nm_elev'],
                              depth=0,
                              azimuth=station_df['{0}_azm'.format(comp)],
                              dip=0,
                              sample_rate=station_df['sampling_rate'])
                cha.channel_number = station_df['{0}_num'.format(comp)]
                cha.sensor = Equipment(
Code Example #14
def main():
    chans = "EHZ,EHN,EHE"
    # Get StationXML file
    print(f"Interactive StaXML builder")
    print(f"Work in progress...some things hardwired\n\n")
    inv_name = input(f"Enter StationXML file name: ")
    if (os.path.isfile(inv_name)):
        inv = read_inventory(inv_name)
    else:
        print(f"Making new inventory: {inv_name}\n")
        inv = Inventory(networks=[], source="Weston")

    # Net code
    ques = f"Enter Network Code ({str(netc)}) :"
    net_code = str(input(ques) or netc)
    net = Network(code=net_code, stations=[])
    print(f"\n")

    # connect to NRL
    nrl = NRL()

    # Datalogger info
    ret = 0
    digi = f"REF TEK|RT 130S & 130-SMHR|1|200"
    print(f"Input NRL Digi info ( | separated, careful with spaces)....")
    print(f"E.g manufacturer| model| gain| sps\n")
    while ret == 0:
        ques = f"Enter DIGI info ({digi}) :"
        digi = str(input(ques) or digi)
        print(f"\n")
        try:
            nrl.get_datalogger_response(digi.split('|'))
            ret = 1
            print("!!!!! DATA LOGGER SUCCESS!!!\n")
        except Exception as e:
            print(f"Try again ... {e}")

    # Sensor info
    ret = 0
    sensor = f"Streckeisen,STS-1,360 seconds"
    print(f"Input NRL Sensor info ....\n")
    print(f"E.g Manufact|model|Sensitivy\n")
    print(f"Guralp|CMG-40T,30s - 100Hz|800")
    print(f"Sercel/Mark Products|L-22D|5470 Ohms|20000 Ohms")
    print(f"Streckeisen|STS-1|360 seconds")
    print(f"Nanometrics|Trillium Compact 120 (Vault, Posthole, OBS)|754 V/m/s")
    while ret == 0:
        ques = f"Enter sensor info {str(sensor)} :"
        sensor = str(input(ques) or sensor)
        try:
            nrl.get_sensor_response(sensor.split('|'))
            ret = 1
            inst_info = f"{sensor.split('|')[0]} {sensor.split('|')[1]}"
            print("Sensor success!!!!")
        except Exception as e:
            print(f"Try again ... {e}")

    print("Getting full response...")
    try:
        response = nrl.get_response(sensor_keys=sensor.split('|'),
                                    datalogger_keys=digi.split('|'))
        print("Full response success \n\n")
    except Exception as e:
        print(f"Oops .. {e}")
    #
    nstas = int(
        input(
            "Enter number of stations to add with same sensor/digitizer (default 1):"
        ) or 1)
    for i in range(0, nstas):
        ques = "Station code (" + str(scode) + ") :"
        sta_code = str(input(ques) or scode)

        ques = "Station latitude (" + str(geolat) + ") :"
        sta_lat = float(input(ques) or geolat)

        ques = "Station longitude (" + str(geolon) + ") :"
        sta_lon = float(input(ques) or geolon)

        ques = "Station elev(" + str(geoelev) + ") :"
        sta_elev = float(input(ques) or geoelev)

        ques = "Station ondate (" + str(date) + ") :"
        sta_ondate = str(input(ques) or date)

        ques = "Station offdate (" + str(date) + ") :"
        sta_offdate = str(input(ques) or date)

        ques = "Station long name (" + str(longname) + ") :"
        sta_sitename = str(input(ques) or longname)

        sta = Station(code=sta_code,
                      latitude=sta_lat,
                      longitude=sta_lon,
                      elevation=sta_elev,
                      creation_date=UTCDateTime(sta_ondate),
                      site=Site(name=sta_sitename))
        # add station to network
        net.stations.append(sta)

        # Default chan info
        coords = {
            'latitude': sta_lat,
            'longitude': sta_lon,
            'elevation': sta_elev,
            'depth': 0.0,
            'sample_rate': sps
        }

        n = -1
        ques = f"Enter channel names, comma separated ({chans}) :"
        chans = str(input(ques) or chans)
        for j in chans.split(','):
            n += 1
            chantmp = j
            print("Doing channel ", chantmp)
            aztmp = azims[n]
            diptmp = dips[n]
            loc = locs[n]
            for k in coords.keys():
                ques = str(chantmp) + " enter " + k + "(" + str(
                    coords[k]) + "):"
                coords[k] = float(input(ques) or coords[k])

            chan = Channel(code=chantmp,
                           location_code=loc,
                           latitude=coords['latitude'],
                           longitude=coords['longitude'],
                           elevation=coords['elevation'],
                           depth=coords['depth'],
                           azimuth=aztmp,
                           dip=diptmp,
                           sample_rate=coords['sample_rate'],
                           sensor=Equipment(description=inst_info))
            chan.response = response
            sta.channels.append(chan)

    inv.networks.append(net)
    inv.write(inv_name, format="STATIONXML")
Code Example #15
    # A list of stations. We'll add one later.
    stations=[],
    description="Test stations.",
    # Start- and end dates are optional.
    start_date=obspy.UTCDateTime(2016, 1, 2),
)
inv.networks.append(net)

for row, station_df in survey_df.iterrows():
    sta = Station(
        code=station_df["siteID"],
        latitude=station_df["lat"],
        longitude=station_df["lon"],
        elevation=station_df["nm_elev"],
        creation_date=obspy.UTCDateTime(2016, 1, 2),
        site=Site(name=station_df["siteID"]),
    )

    for comp in ["ex", "ey", "hx", "hy", "hz"]:
        if station_df["{0}_azm".format(comp)] is not None:
            if "h" in comp:
                cha = Channel(
                    code=comp.upper(),
                    location_code="",
                    latitude=station_df["lat"],
                    longitude=station_df["lon"],
                    elevation=station_df["nm_elev"],
                    depth=0,
                    azimuth=station_df["{0}_azm".format(comp)],
                    dip=0,
                    sample_rate=station_df["sampling_rate"],
Code Example #16
def create_new_skeleton_inventory_file(path2xmlfile):
    """
    write a NEW skeleton inventory xml file
    :param path2xmlfile: path to a new xml file.
    :return:
    """
    # We'll first create all the various objects. These strongly follow the
    # hierarchy of StationXML files.
    inv = Inventory(
        # We'll add networks later.
        networks=[],
        # The source should be the ID of whoever created the file.
        source="ObsPy-Tutorial")

    net = Network(
        # This is the network code according to the SEED standard.
        code="XX",
        # A list of stations. We'll add one later.
        stations=[],
        description="A test stations.",
        # Start- and end dates are optional.
        start_date=obspy.UTCDateTime(2016, 1, 2))

    sta = Station(
        # This is the station code according to the SEED standard.
        code="ABC",
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        creation_date=obspy.UTCDateTime(2016, 1, 2),
        site=Site(name="First station"))

    cha = Channel(
        # This is the channel code according to the SEED standard.
        code="HHZ",
        # This is the location code according to the SEED standard.
        location_code="",
        # Note that these coordinates can differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=0.0,
        dip=-90.0,
        sample_rate=200)

    # By default this accesses the NRL online. Offline copies of the NRL can
    # also be used instead
    nrl = NRL()
    # The contents of the NRL can be explored interactively in a Python prompt,
    # see API documentation of NRL submodule:
    # http://docs.obspy.org/packages/obspy.clients.nrl.html
    # Here we assume that the end point of data logger and sensor are already
    # known:
    response = nrl.get_response(  # doctest: +SKIP
        sensor_keys=['Streckeisen', 'STS-1', '360 seconds'],
        datalogger_keys=['REF TEK', 'RT 130 & 130-SMA', '1', '200'])

    # Now tie it all together.
    cha.response = response
    sta.channels.append(cha)
    net.stations.append(sta)
    inv.networks.append(net)

    # And finally write it to a StationXML file. We also force a validation against
    # the StationXML schema to ensure it produces a valid StationXML file.
    #
    # Note that it is also possible to serialize to any of the other inventory
    # output formats ObsPy supports.
    inv.write(path2xmlfile, format="stationxml", validate=True)
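
The comment above notes that offline copies of the NRL can be used instead of the online service. A sketch of that variant, assuming a local copy has been downloaded to a hypothetical directory (the sensor and datalogger keys are the ones used in the example):

from obspy.clients.nrl import NRL

nrl = NRL("/path/to/local/NRL/")  # root of a local NRL copy instead of the default URL
response = nrl.get_response(
    sensor_keys=['Streckeisen', 'STS-1', '360 seconds'],
    datalogger_keys=['REF TEK', 'RT 130 & 130-SMA', '1', '200'])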
Code Example #17
    def trace_to_inventory(self, trace):
        # if sac files are opened, it's useful to extract inventory from their streams so that we can populate the
        # stations tabs and the location widget
        new_inventory = None

        # The next bit is modified from the obspy web page on building a StationXML file from scratch
        # https://docs.obspy.org/tutorial/code_snippets/stationxml_file_from_scratch.html
        #
        # We'll first create all the various objects. These strongly follow the
        # hierarchy of StationXML files.
        # initialize the lat/lon/ele
        lat = 0.0
        lon = 0.0
        ele = -1.0

        _network = trace.stats['network']
        _station = trace.stats['station']
        _channel = trace.stats['channel']
        _location = trace.stats['location']

        # if the trace is from a sac file, the sac header might have some inventory information
        if trace.stats['_format'] == 'SAC':

            if 'stla' in trace.stats['sac']:
                lat = trace.stats['sac']['stla']

            if 'stlo' in trace.stats['sac']:
                lon = trace.stats['sac']['stlo']

            if 'stel' in trace.stats['sac']:
                ele = trace.stats['sac']['stel']
            else:
                ele = 0.333

        if _network == 'LARSA' and _station == '121':
            if _channel == 'ai0':
                lat = 35.8492497
                lon = -106.2705465
            elif _channel == 'ai1':
                lat = 35.84924682
                lon = -106.2705505
            elif _channel == 'ai2':
                lat = 35.84925165
                lon = -106.2705516

        if lat == 0.0 or lon == 0.0 or ele < 0:
            if self.fill_sta_info_dialog.exec_(_network, _station, _location,
                                               _channel, lat, lon, ele):

                edited_values = self.fill_sta_info_dialog.get_values()

                lat = edited_values['lat']
                lon = edited_values['lon']
                ele = edited_values['ele']

                _network = edited_values['net']
                _station = edited_values['sta']
                _location = edited_values['loc']
                _channel = edited_values['cha']

                # (re)populate sac headers where possible
                if trace.stats['_format'] == 'SAC':
                    trace.stats['sac']['stla'] = lat
                    trace.stats['sac']['stlo'] = lon
                    trace.stats['sac']['stel'] = ele
                    trace.stats['sac']['knetwk'] = _network
                    trace.stats['sac']['kstnm'] = _station
                # (re)populate trace stats where possible
                trace.stats['network'] = _network
                trace.stats['station'] = _station
                trace.stats['location'] = _location
                trace.stats['channel'] = _channel
        try:
            new_inventory = Inventory(
                # We'll add networks later.
                networks=[],
                # The source should be the ID of whoever created the file.
                source="InfraView")

            net = Network(
                # This is the network code according to the SEED standard.
                code=_network,
                # A list of stations. We'll add one later.
                stations=[],
                # Description isn't something that's in the trace stats or SAC header, so let's set it to the network code
                description=_network,
                # Start- and end dates are optional.
                # Start and end dates for the network are not stored in the SAC header, so let's set them to 1/1/1900.
                start_date=UTCDateTime(1900, 1, 1))

            sta = Station(
                # This is the station code according to the SEED standard.
                code=_station,
                latitude=lat,
                longitude=lon,
                elevation=ele,
                # Creation_date is not saved in the trace stats or sac header
                creation_date=UTCDateTime(1900, 1, 1),
                # Site name is not in the trace stats or SAC header, so set it to the station code
                site=Site(name=_station))

            # This is the channel code according to the SEED standard.
            cha = Channel(
                code=_channel,
                # This is the location code according to the SEED standard.
                location_code=_location,
                # Note that these coordinates can differ from the station coordinates.
                latitude=lat,
                longitude=lon,
                elevation=ele,
                depth=0.0)

            # Now tie it all together.
            # cha.response = response
            sta.channels.append(cha)
            net.stations.append(sta)
            new_inventory.networks.append(net)

            return new_inventory

        except ValueError:
            bad_values = ""
            if lon < -180 or lon > 180:
                bad_values = bad_values + "\tlon = " + str(lon) + "\n"
            if lat < -90 or lat > 90:
                bad_values = bad_values + "\tlat = " + str(lat)
            self.errorPopup("There seems to be a value error in " + _network +
                            "." + _station + "." + _channel +
                            "\nPossible bad value(s) are:\n" + bad_values)
Code Example #18
    def getInventory(self):
        """
        Extract an ObsPy inventory object from a Stream read in by gmprocess
        tools.
        """
        networks = [trace.stats.network for trace in self]
        if len(set(networks)) > 1:
            raise Exception(
                "Input stream has stations from multiple networks.")

        # We'll first create all the various objects. These strongly follow the
        # hierarchy of StationXML files.
        source = ''
        if 'standard' in self[0].stats and 'source' in self[0].stats.standard:
            source = self[0].stats.standard.source
        inv = Inventory(
            # We'll add networks later.
            networks=[],
            # The source should be the ID of whoever created the file.
            source=source)

        net = Network(
            # This is the network code according to the SEED standard.
            code=networks[0],
            # A list of stations. We'll add one later.
            stations=[],
            description="source",
            # Start- and end dates are optional.
        )
        channels = []
        for trace in self:
            logging.debug('trace: %s' % trace)
            channel = _channel_from_stats(trace.stats)
            channels.append(channel)

        subdict = {}
        for k in UNUSED_STANDARD_PARAMS:
            if k in self[0].stats.standard:
                subdict[k] = self[0].stats.standard[k]

        format_specific = {}
        if 'format_specific' in self[0].stats:
            format_specific = dict(self[0].stats.format_specific)

        big_dict = {'standard': subdict,
                    'format_specific': format_specific}
        try:
            jsonstr = json.dumps(big_dict)
        except Exception as e:
            raise GMProcessException('Exception in json.dumps: %s' % e)
        sta = Station(
            # This is the station code according to the SEED standard.
            code=self[0].stats.station,
            latitude=self[0].stats.coordinates.latitude,
            elevation=self[0].stats.coordinates.elevation,
            longitude=self[0].stats.coordinates.longitude,
            channels=channels,
            site=Site(name=self[0].stats.standard.station_name),
            description=jsonstr,
            creation_date=UTCDateTime(1970, 1, 1),  # this is bogus
            total_number_of_channels=len(self))

        net.stations.append(sta)
        inv.networks.append(net)

        return inv
Code Example #19
def getStation(stationBlock, units, transFuncs):
    ## Should probably do a check up here to see that the order given in block is consistent ##
    for entry in stationBlock:
        if entry.name == 'Station Identifier':
            #            print 'NewStation!',entry.station_call_letters
            staDict = {
                'code': entry.station_call_letters,
                'latitude': entry.latitude,
                'longitude': entry.longitude,
                'elevation': entry.elevation,
                'channels': [],
                'site': Site(entry.site_name),
                'creation_date':
                UTCDateTime(entry.start_effective_date),  # Allows for save
                'start_date': UTCDateTime(entry.start_effective_date),
                'end_date': UTCDateTime(entry.end_effective_date)
            }
            staNetCode = entry.network_code
        # If found a new channel, reset the stages
        elif entry.name == 'Channel Identifier':
            #            print 'NewChannel!',entry.channel_identifier
            stages = []
            chaDict = {
                'code': entry.channel_identifier,
                'location_code': entry.location_identifier,
                'latitude': entry.latitude,
                'longitude': entry.longitude,
                'elevation': entry.elevation,
                'depth': entry.local_depth,
                'sample_rate': entry.sample_rate,
                'start_date': UTCDateTime(entry.start_date),
                'end_date': UTCDateTime(entry.end_date),
                'azimuth': entry.azimuth,
                'dip': entry.dip
            }
        #code, location_code, latitude, longitude, elevation, depth
        # If on a new stage, set up the dictionary again
        # ...paz stage
        elif entry.name == 'Response Poles and Zeros':
            # Get units
            stageReqs = {}
            #            print entry.name,entry.stage_sequence_number
            #            print entry
            #            quit()
            stageReqs['input_units'] = units[entry.stage_signal_input_units]
            stageReqs['output_units'] = units[entry.stage_signal_output_units]
            # Collect the poles and zeros
            lastType = 'paz'
            if entry.number_of_complex_zeros == 0:
                zeros = np.array([], dtype=float)
            else:
                zeros = np.array(entry.real_zero, dtype=float) + np.array(
                    entry.imaginary_zero, dtype=float) * 1j
            if entry.number_of_complex_poles == 0:
                poles = np.array([], dtype=float)
            else:
                poles = np.array(entry.real_pole, dtype=float) + np.array(
                    entry.imaginary_pole, dtype=float) * 1j
            # Form the paz response dictionary (also ensure arrays are 1D)
            pazDict = {
                'pz_transfer_function_type':
                transFuncs[entry.transfer_function_types],
                'normalization_factor':
                entry.A0_normalization_factor,
                'normalization_frequency':
                entry.normalization_frequency,
                'zeros':
                setArrDim(zeros),
                'poles':
                setArrDim(poles)
            }
        # ...coeff stage
        elif entry.name == 'Response Coefficients':
            # Get units
            stageReqs = {}
            #            print entry.name,entry.stage_sequence_number
            stageReqs['input_units'] = units[entry.signal_input_units]
            stageReqs['output_units'] = units[entry.signal_output_units]
            # Collect the coefficients
            lastType = 'coef'
            if entry.number_of_denominators == 0:
                denom = np.array([], dtype=float)
                denomErr = np.array([], dtype=float)
            else:
                denom = np.array(entry.denominator_coefficient, dtype=float)
                denomErr = np.array(entry.denominator_error, dtype=float)
            if entry.number_of_numerators == 0:
                numer = np.array([], dtype=float)
                numerErr = np.array([], dtype=float)
            else:
                numer = np.array(entry.numerator_coefficient, dtype=float)
                numerErr = np.array(entry.numerator_error, dtype=float)
            # Convert these arrays into lists of numbers which have uncertainty (also ensure arrays are 1D)
            denomArr = genArrWithUncertainty(setArrDim(denom),
                                             setArrDim(denomErr))
            numerArr = genArrWithUncertainty(setArrDim(numer),
                                             setArrDim(numerErr))
            # Form the coefficient response dictionary
            coefDict = {
                'cf_transfer_function_type': transFuncs[entry.response_type],
                'numerator': numerArr,
                'denominator': denomArr
            }
        # Get the decimation sampling info
        elif entry.name == 'Decimation':
            #            print entry.name,entry.stage_sequence_number
            stageReqs['decimation_input_sample_rate'] = Frequency(
                entry.input_sample_rate)
            stageReqs['decimation_factor'] = entry.decimation_factor
            stageReqs['decimation_offset'] = entry.decimation_offset
            stageReqs['decimation_delay'] = FloatWithUncertaintiesAndUnit(
                entry.estimated_delay)
            stageReqs['decimation_correction'] = FloatWithUncertaintiesAndUnit(
                entry.correction_applied)
        # Get the stage sensitivity
        elif entry.name == 'Channel Sensitivity Gain':
            #            print entry.name,entry.stage_sequence_number
            if entry.stage_sequence_number != 0:
                stageReqs[
                    'stage_sequence_number'] = entry.stage_sequence_number
                stageReqs['stage_gain'] = entry.sensitivity_gain
                stageReqs['stage_gain_frequency'] = entry.frequency
                # See what type of stage this was
                if lastType == 'paz':
                    pazDict.update(stageReqs)
                    stages.append(PolesZerosResponseStage(**pazDict))
                else:
                    coefDict.update(stageReqs)
                    stages.append(CoefficientsTypeResponseStage(**coefDict))
            # If on the last stage, send off the collected stage info
            else:
                if len(stages) > 0:
                    instrSens = InstrumentSensitivity(entry.sensitivity_gain,
                                                      entry.frequency,
                                                      stages[0].input_units,
                                                      stages[-1].output_units)
                    # Finalize the channel dictionary, and append this channel to the station dictionary
                    chaResp = Response(response_stages=stages,
                                       instrument_sensitivity=instrSens)
                    chaDict['response'] = chaResp
                staDict['channels'].append(Channel(**chaDict))
    # Return the station (also tracking the network code)
    return Station(**staDict), staNetCode
Code Example #20
    # This is the network code according to the SEED standard.
    code="XX",
    # A list of stations. We'll add one later.
    stations=[],
    description="A test stations.",
    # Start- and end dates are optional.
    start_date=obspy.UTCDateTime(2016, 1, 2))

sta = Station(
    # This is the station code according to the SEED standard.
    code="ABC",
    latitude=1.0,
    longitude=2.0,
    elevation=345.0,
    creation_date=obspy.UTCDateTime(2016, 1, 2),
    site=Site(name="First station"))

cha = Channel(
    # This is the channel code according to the SEED standard.
    code="HHZ",
    # This is the location code according to the SEED standard.
    location_code="",
    # Note that these coordinates can differ from the station coordinates.
    latitude=1.0,
    longitude=2.0,
    elevation=345.0,
    depth=10.0,
    azimuth=0.0,
    dip=-90.0,
    sample_rate=200)
Code Example #21
def main(argv):
    '''@package isc2stnxml
       Gathers station information from all STN files provided in the ISC and Engdahl catalogues, assigning the correct network code.
       When a proper network code cannot be identified, the program just guesses one, sorry...
    '''
    inv = read_inventory("IRIS-ALL.xml")

    # unknown stations in Indonesia are usually installed by Potsdam, so we assume they have network code GE
    default_net = 'GE'
    ehb1 = read_eng('BMG.STN')
    ehb2 = read_eng('ISC.STN')
    ehb = np.unique(np.vstack((ehb1, ehb2)), axis=0)

    isc1 = read_isc('ehb.stn')
    isc2 = read_isc('iscehb.stn')
    isc = np.unique(np.vstack((isc1, isc2)), axis=0)

    catalogue = []
    our_xml = Inventory(networks=[], source='EHB')

    for i in xrange(ehb.shape[0]):
        filed = False
        xml = False
        stn_found = isc[isc[:, 0] == ehb[i, 0], :]
        min_dist = 10e10
        if stn_found.shape[0] > 0:
            if stn_found.shape[0] > 1:
                for j in xrange(stn_found.shape[0]):
                    dist = locations2degrees(np.float(stn_found[j, 2]),
                                             np.float(stn_found[j, 3]),
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                    if dist < min_dist:
                        min_dist = dist
                        record = stn_found[j, :]
            else:
                min_dist = locations2degrees(np.float(stn_found[0, 2]),
                                             np.float(stn_found[0, 3]),
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                record = stn_found[0, :]

#                Now we try to find the same station in XML file
#                if min_dist > 1. or stn_found.shape[0]==0:

        xstn_found = inv.select(station=ehb[i, 0], channel="*HZ")

        if len(stn_found) == 0 and len(xstn_found) == 0:
            # we failed to find the station anywhere, so assign dummy values
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'Z',
                '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            min_dist = 0.
            filed = True
        else:
            # if the station is found somewhere, we iterate and see if the XML has data, giving it preference by adding an extra value to the min_dist found in ISC
            if len(xstn_found) > 0:
                #                        print "----------",len(xstn_found)
                #                        print xstn_found[0][0].latitude
                min_dist = min_dist + 0.1
                for j in xrange(len(xstn_found)):
                    dist = locations2degrees(xstn_found[j][0].latitude,
                                             xstn_found[j][0].longitude,
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                    if min_dist > dist:
                        min_dist = dist
                        record = xstn_found[j]
                        #                                print record
                        xml = True

# last defence: if the station has been found but the distance between the declared and found locations is more than 1 degree
        if min_dist > 1:
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'Z',
                '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            filed = True
        if xml:
            #our_xml.networks.append(record)
            xml = False

        else:
            if filed:

                if len(record[7]) < 5:
                    record[7] = '2599-12-31 23:59:59'
                catalogue.append(record)

            else:

                stn_found = isc[(isc[:, 0] == record[0]) &
                                (isc[:, 1] == record[1]), :]

                for k in xrange(stn_found.shape[0]):
                    net = Network(code=stn_found[k, 1],
                                  stations=[],
                                  description=' ')
                    if len(stn_found[k, 7]) < 5:
                        stn_found[k, 7] = '2599-12-31 23:59:59'
                    catalogue.append(stn_found[k, :])

    stn_found = np.unique(np.array(catalogue), axis=0)
    if len(stn_found[stn_found == '']) > 0 or len(
            stn_found[stn_found == ' ']) > 0:
        print "Some elements are empty, check the list"

    # we have composed our inventory; however, some stations from the ISC list may have been left behind, so we check whether any ISC stations were forgotten
    lost = []
    for j in xrange(isc.shape[0]):
        # is there any common station name?
        common_st = stn_found[isc[j, 0] == stn_found[:, 0]]
        if common_st.shape[0] > 0:
            # is network code the same?
            common_net = common_st[common_st[:, 1] == isc[j, 1]]
            if common_net.shape[0] < 1:
                # ok we found forgotten one, check the XML
                if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                    # Bingo...
                    lost.append(isc[j, :])
        else:
            if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                # Bingo...
                lost.append(isc[j, :])

    stn_found = np.vstack((stn_found, np.array(lost)))

    for k in xrange(stn_found.shape[0]):

        net = Network(code=stn_found[k, 1], stations=[], description=' ')
        if len(stn_found[k, 7]) < 5:
            stn_found[k, 7] = '2599-12-31 23:59:59'
        catalogue.append(stn_found[k, :])
        sta = Station(code=stn_found[k, 0],
                      creation_date=utcdatetime.UTCDateTime(stn_found[k, 6]),
                      termination_date=utcdatetime.UTCDateTime(stn_found[k, 7]),
                      site=Site(name=' '),
                      latitude=np.float(stn_found[k, 2]),
                      longitude=np.float(stn_found[k, 3]),
                      elevation=np.float(stn_found[k, 4]))

        cha = Channel(code=stn_found[k, 5],
                      depth=0.,
                      azimuth=0.,
                      dip=-90.,
                      location_code='',
                      latitude=np.float(stn_found[k, 2]),
                      longitude=np.float(stn_found[k, 3]),
                      elevation=np.float(stn_found[k, 4]))

        sta.channels.append(cha)
        net.stations.append(sta)
        our_xml.networks.append(net)


    our_xml.write("station.xml", format="stationxml", validate=True)
    our_xml.write("station.txt", format="stationtxt")
コード例 #22
0
    def test_reading_station_file(self):
        """
        Test reading a file at the station level.
        """
        # Manually create an expected Inventory object.
        expected_inv = Inventory(
            source=None,
            networks=[
                Network(
                    code="TA",
                    stations=[
                        Station(
                            code="A04A",
                            latitude=48.7197,
                            longitude=-122.707,
                            elevation=23.0,
                            site=Site(name="Legoe Bay, Lummi Island, WA, USA"),
                            start_date=obspy.UTCDateTime(
                                "2004-09-19T00:00:00"),
                            end_date=obspy.UTCDateTime("2008-02-19T23:59:59")),
                        Station(
                            code="A04D",
                            latitude=48.7201,
                            longitude=-122.7063,
                            elevation=13.0,
                            site=Site(name="Lummi Island, WA, USA"),
                            start_date=obspy.UTCDateTime(
                                "2010-08-18T00:00:00"),
                            end_date=obspy.UTCDateTime("2599-12-31T23:59:59"))
                    ]),
                Network(
                    code="TR",
                    stations=[
                        Station(
                            code="ALNG",
                            latitude=10.1814,
                            longitude=-61.6883,
                            elevation=10.0,
                            site=Site(name="Trinidad, Point Fortin"),
                            start_date=obspy.UTCDateTime(
                                "2000-01-01T00:00:00"),
                            end_date=obspy.UTCDateTime("2599-12-31T23:59:59"))
                    ])
            ])

        # Read from a filename.
        filename = os.path.join(self.data_dir, "station_level_fdsn.txt")
        inv = read_fdsn_station_text_file(filename)
        inv_obs = obspy.read_inventory(filename)

        # Copy creation date as it will be slightly different otherwise.
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

        # Read from open file in text mode.
        with open(filename, "rt", encoding="utf8") as fh:
            inv = read_fdsn_station_text_file(fh)
            fh.seek(0, 0)
            inv_obs = obspy.read_inventory(fh)
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

        # Read from open file in binary mode.
        with open(filename, "rb") as fh:
            inv = read_fdsn_station_text_file(fh)
            fh.seek(0, 0)
            inv_obs = obspy.read_inventory(fh)
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

        # Read from StringIO.
        with open(filename, "rt", encoding="utf8") as fh:
            with io.StringIO(fh.read()) as buf:
                buf.seek(0, 0)
                inv = read_fdsn_station_text_file(buf)
                buf.seek(0, 0)
                inv_obs = obspy.read_inventory(buf)
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

        # Read from BytesIO.
        with open(filename, "rb") as fh:
            with io.BytesIO(fh.read()) as buf:
                buf.seek(0, 0)
                inv = read_fdsn_station_text_file(buf)
                buf.seek(0, 0)
                inv_obs = obspy.read_inventory(buf)
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)
コード例 #23
0
def read_fdsn_station_text_file(path_or_file_object):
    """
    Function reading an FDSN station text file into an Inventory object.

    :param path_or_file_object: File name or file-like object.
    """
    def _read(obj):
        r = unicode_csv_reader(obj, delimiter=native_str("|"))
        header = next(r)
        header[0] = header[0].lstrip("#")
        header = [_i.strip().lower() for _i in header]
        # IRIS currently has a wrong header name. Just map it.
        header = [
            _i.replace("instrument", "sensordescription") for _i in header
        ]

        all_lines = []
        for line in r:
            # Skip comment lines.
            if line[0].startswith("#"):
                continue
            all_lines.append([_i.strip() for _i in line])
        return {"header": tuple(header), "content": all_lines}

    # Enable reading from files and buffers opened in binary mode.
    if (hasattr(path_or_file_object, "mode") and
            "b" in path_or_file_object.mode) or \
            isinstance(path_or_file_object, io.BytesIO):
        buf = io.StringIO(path_or_file_object.read().decode("utf-8"))
        buf.seek(0, 0)
        path_or_file_object = buf

    if hasattr(path_or_file_object, "read"):
        content = _read(path_or_file_object)
    else:
        with open(path_or_file_object, "rt", newline="",
                  encoding="utf8") as fh:
            content = _read(fh)

    # Figure out the type.
    if content["header"] == network_components:
        level = "network"
        filetypes = network_types
    elif content["header"] == station_components:
        level = "station"
        filetypes = station_types
    elif content["header"] == channel_components:
        level = "channel"
        filetypes = channel_types
    else:
        raise ValueError("Unknown type of header.")

    content = content["content"]
    converted_content = []
    # Convert all types.
    for line in content:
        converted_content.append(
            [v_type(value) for value, v_type in zip(line, filetypes)])

    # Now convert to an inventory object.
    inv = Inventory(networks=[], source=None)

    if level == "network":
        for net in converted_content:
            network = Network(code=net[0],
                              description=net[1],
                              start_date=net[2],
                              end_date=net[3],
                              total_number_of_stations=net[4])
            inv.networks.append(network)
    elif level == "station":
        networks = collections.OrderedDict()
        for sta in converted_content:
            site = Site(name=sta[5])
            station = Station(code=sta[1],
                              latitude=sta[2],
                              longitude=sta[3],
                              elevation=sta[4],
                              site=site,
                              start_date=sta[6],
                              end_date=sta[7])
            if sta[0] not in networks:
                networks[sta[0]] = []
            networks[sta[0]].append(station)
        for network_code, stations in networks.items():
            net = Network(code=network_code, stations=stations)
            inv.networks.append(net)
    elif level == "channel":
        networks = collections.OrderedDict()
        stations = collections.OrderedDict()

        for channel in converted_content:
            net, sta, loc, chan, lat, lng, ele, dep, azi, dip, inst, scale, \
                scale_freq, scale_units, s_r, st, et = channel

            if net not in networks:
                networks[net] = Network(code=net)

            if (net, sta) not in stations:
                station = Station(code=sta,
                                  latitude=lat,
                                  longitude=lng,
                                  elevation=ele)
                networks[net].stations.append(station)
                stations[(net, sta)] = station

            sensor = Equipment(type=inst)
            if scale is not None and scale_freq is not None:
                resp = Response(instrument_sensitivity=InstrumentSensitivity(
                    value=scale,
                    frequency=scale_freq,
                    input_units=scale_units,
                    output_units=None))
            else:
                resp = None
            try:
                channel = Channel(code=chan,
                                  location_code=loc,
                                  latitude=lat,
                                  longitude=lng,
                                  elevation=ele,
                                  depth=dep,
                                  azimuth=azi,
                                  dip=dip,
                                  sensor=sensor,
                                  sample_rate=s_r,
                                  start_date=st,
                                  end_date=et,
                                  response=resp)
            except Exception as e:
                warnings.warn(
                    "Failed to parse channel %s.%s.%s.%s due to: %s" %
                    (net, sta, loc, chan, str(e)), UserWarning)
                continue
            stations[(net, sta)].channels.append(channel)
        inv.networks.extend(list(networks.values()))
    else:
        # Cannot really happen - just a safety measure.
        raise NotImplementedError("Unknown level: %s" % str(level))
    return inv
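
The tests shown earlier compare this function's output against obspy.read_inventory() on the same files, so either entry point can be used for FDSN '|'-delimited station text files. A minimal usage sketch (the filename stations_fdsn.txt is hypothetical):

import obspy

# Read a '|'-delimited FDSN station text file (network, station, or channel level).
inv = obspy.read_inventory("stations_fdsn.txt", format="STATIONTXT")
print(inv)

# The reader can also be called directly with a filename or an open file object.
inv_direct = read_fdsn_station_text_file("stations_fdsn.txt")
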
コード例 #24
0
    def test_reading_unicode_file(self):
        """
        Tests reading a file with non ASCII characters.
        """
        # Manually create an expected Inventory object.
        expected_inv = Inventory(
            source=None,
            networks=[
                Network(
                    code="PR",
                    stations=[
                        Station(
                            code="CTN1",
                            latitude=18.43718,
                            longitude=-67.1303,
                            elevation=10.0,
                            site=Site(name="CATA¿O DEFENSA CIVIL"),
                            start_date=obspy.UTCDateTime(
                                "2004-01-27T00:00:00"),
                            end_date=obspy.UTCDateTime("2599-12-31T23:59:59"))
                    ])
            ])

        # Read from a filename.
        filename = os.path.join(self.data_dir, "unicode_example_fdsn.txt")
        inv = read_fdsn_station_text_file(filename)
        inv_obs = obspy.read_inventory(filename)

        # Copy creation date as it will be slightly different otherwise.
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

        # Read from open file in text mode.
        with open(filename, "rt", encoding="utf8") as fh:
            inv = read_fdsn_station_text_file(fh)
            fh.seek(0, 0)
            inv_obs = obspy.read_inventory(fh)
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

        # Read from open file in binary mode.
        with open(filename, "rb") as fh:
            inv = read_fdsn_station_text_file(fh)
            fh.seek(0, 0)
            inv_obs = obspy.read_inventory(fh)
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

        # Read from StringIO.
        with open(filename, "rt", encoding="utf8") as fh:
            with io.StringIO(fh.read()) as buf:
                buf.seek(0, 0)
                inv = read_fdsn_station_text_file(buf)
                buf.seek(0, 0)
                inv_obs = obspy.read_inventory(buf)
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

        # Read from BytesIO.
        with open(filename, "rb") as fh:
            with io.BytesIO(fh.read()) as buf:
                buf.seek(0, 0)
                inv = read_fdsn_station_text_file(buf)
                buf.seek(0, 0)
                inv_obs = obspy.read_inventory(buf)
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)
コード例 #25
0
def stats2inv(stats, resp=None, filexml=None, locs=None):

    # We'll first create all the various objects. These strongly follow the
    # hierarchy of StationXML files.
    inv = Inventory(networks=[], source="japan_from_resp")

    if locs is None:
        net = Network(
            # This is the network code according to the SEED standard.
            code=stats.network,
            # A list of stations. We'll add one later.
            stations=[],
            description="Marine created from SAC and resp files",
            # Start-and end dates are optional.
            start_date=stats.starttime)

        sta = Station(
            # This is the station code according to the SEED standard.
            code=stats.station,
            latitude=stats.sac["stla"],
            longitude=stats.sac["stlo"],
            elevation=stats.sac["stel"],
            creation_date=stats.starttime,
            site=Site(name="First station"))

        cha = Channel(
            # This is the channel code according to the SEED standard.
            code=stats.channel,
            # This is the location code according to the SEED standard.
            location_code=stats.location,
            # Note that these coordinates can differ from the station coordinates.
            latitude=stats.sac["stla"],
            longitude=stats.sac["stlo"],
            elevation=stats.sac["stel"],
            depth=-stats.sac["stel"],
            azimuth=stats.sac["cmpaz"],
            dip=stats.sac["cmpinc"],
            sample_rate=stats.sampling_rate)

    else:
        ista = locs[locs['station'] == stats.station].index.values.astype(
            'int64')[0]

        net = Network(
            # This is the network code according to the SEED standard.
            code=locs.iloc[ista]["network"],
            # A list of stations. We'll add one later.
            stations=[],
            description="Marine created from SAC and resp files",
            # Start-and end dates are optional.
            start_date=stats.starttime)

        sta = Station(
            # This is the station code according to the SEED standard.
            code=locs.iloc[ista]["station"],
            latitude=locs.iloc[ista]["latitude"],
            longitude=locs.iloc[ista]["longitude"],
            elevation=locs.iloc[ista]["elevation"],
            creation_date=stats.starttime,
            site=Site(name="First station"))
        cha = Channel(
            # This is the channel code according to the SEED standard.
            code=stats.channel,
            # This is the location code according to the SEED standard.
            location_code=stats.location,
            # Note that these coordinates can differ from the station coordinates.
            latitude=locs.iloc[ista]["latitude"],
            longitude=locs.iloc[ista]["longitude"],
            elevation=locs.iloc[ista]["elevation"],
            depth=-locs.iloc[ista]["elevation"],
            azimuth=0,
            dip=0,
            sample_rate=stats.sampling_rate)

    response = obspy.core.inventory.response.Response()
    if resp is not None:
        # A response file was supplied, but parsing it is not implemented here,
        # so the empty Response created above is used instead.
        print("I don't have the response")
    # By default this accesses the NRL online. Offline copies of the NRL can
    # also be used instead
    # nrl = NRL()
    # The contents of the NRL can be explored interactively in a Python prompt,
    # see API documentation of NRL submodule:
    # http://docs.obspy.org/packages/obspy.clients.nrl.html
    # Here we assume that the end point of data logger and sensor are already
    # known:
    #response = nrl.get_response( # doctest: +SKIP
    #    sensor_keys=['Streckeisen', 'STS-1', '360 seconds'],
    #    datalogger_keys=['REF TEK', 'RT 130 & 130-SMA', '1', '200'])

    # Now tie it all together.
    cha.response = response
    sta.channels.append(cha)
    net.stations.append(sta)
    inv.networks.append(net)

    # And finally write it to a StationXML file. We also force a validation against
    # the StationXML schema to ensure it produces a valid StationXML file.
    #
    # Note that it is also possible to serialize to any of the other inventory
    # output formats ObsPy supports.
    if filexml is not None:
        inv.write(filexml, format="stationxml", validate=True)

    return inv
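
A usage sketch for stats2inv, assuming a SAC file whose headers provide the stla, stlo, stel, cmpaz and cmpinc fields the function reads (the filename example.sac is hypothetical):

import obspy

# Read a SAC file; each trace's stats.sac dictionary carries the SAC header fields.
st = obspy.read("example.sac")
tr = st[0]

# Build an Inventory from the trace metadata and also write it out as StationXML.
inv = stats2inv(tr.stats, filexml="example_station.xml")
print(inv)
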
コード例 #26
0
            # Write the station to the Inventory
            _end_date = Stations[station]["To"]
            if _end_date == "None":
                _end_date = literal_eval(_end_date)
            else:
                _end_date = obspy.UTCDateTime(_end_date)

            _station = Station(
                code=station,
                latitude=Stations[station]["_latitude"],
                longitude=Stations[station]["_longitude"],
                elevation=Stations[station]["_elevation"],
                start_date=obspy.UTCDateTime(Stations[station]["From"]),
                creation_date=obspy.UTCDateTime(Stations[station]["From"]),
                end_date=_end_date,
                site=Site(name=Stations[station]["_site"]),
                geology=Stations[station]["_geology"],
                #                    equipments = obspy.core.inventory.Equipment(description = Stations[station]["_equipments"],
                description=Stations[station]["_description"])
            _network.stations.append(_station)

        except TypeError:
            print('That metadata has not been assigned yet.')

        try:
            Epochs = bank['Networks'][network]['Stations'][station]['Epoch']
            for epoch in Epochs.keys():
                try:
                    Channels = bank['Networks'][network]['Stations'][station][
                        'Epoch'][epoch]['Channels']
                    for channel in Channels.keys():