def test_read_nlloc_with_pick_seed_id_lookup(self):
    """Picks should get network/location codes from an inventory lookup."""
    def _station(code, location):
        # one HHZ channel with dummy coordinates is enough for the lookup
        cha = Channel('HHZ', location, 0, 0, 0, 0)
        return Station(code, 0, 0, 0, channels=[cha])

    # First network holds the metadata that matches a pick; second one holds
    # non-matching data only.
    net = Network('XX', stations=[_station('HM02', '00'),
                                  _station('YYYY', '10')])
    net2 = Network('YY', stations=[_station('ABCD', '00'),
                                   _station('EFGH', '10')])
    inv = Inventory(networks=[net, net2], source='')
    filename = get_example_file("nlloc_custom.hyp")
    # We only provide sufficient metadata for one pick, so reading emits
    # warnings which we silence here.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        cat = read_events(filename, format="NLLOC_HYP", inventory=inv)
    self.assertEqual(len(cat), 1)
    for pick in cat[0].picks:
        wid = pick.waveform_id
        if wid.station_code == 'HM02':
            self.assertEqual(wid.network_code, 'XX')
            self.assertEqual(wid.location_code, '')
        else:
            self.assertEqual(wid.network_code, '')
            self.assertEqual(wid.location_code, None)
def test_add(self):
    """
    Test shallow copies for inventory addition
    """
    inv1 = read_inventory()
    inv2 = read_inventory()

    def _ids(networks):
        # identity set of the contained Network objects
        return {id(net) for net in networks}

    # __add__ creates two shallow copies
    inv_sum = inv1 + inv2
    assert _ids(inv_sum) == _ids(inv1) | _ids(inv2)
    # __iadd__ creates a shallow copy of other and keeps self
    before = _ids(inv1)
    inv1 += inv2
    assert _ids(inv1) == before | _ids(inv2)
    # __add__ with a network appends the network to a shallow copy of
    # the inventory
    net1 = Network('N1')
    inv_sum = inv1 + net1
    assert _ids(inv_sum) == _ids(inv1) | {id(net1)}
    # __iadd__ with a network appends the network to the inventory
    net1 = Network('N1')
    before = _ids(inv1)
    inv1 += net1
    assert _ids(inv1) == before | {id(net1)}
def test_get_response(self):
    """Inventory.get_response must resolve each SEED id to its response."""
    responses = {code: Response('RESP%s' % code)
                 for code in ('N1S1', 'N1S2', 'N2S1')}

    def _station(code):
        cha = Channel(code='BHZ', location_code='', latitude=0.0,
                      longitude=0.0, elevation=0.0, depth=0.0,
                      response=responses[code])
        return Station(code=code, latitude=0.0, longitude=0.0,
                       elevation=0.0, channels=[cha])

    networks = [Network('N1', stations=[_station('N1S1'),
                                        _station('N1S2')]),
                Network('N2', stations=[_station('N2S1')])]
    inv = Inventory(networks=networks, source='TEST')
    when = UTCDateTime('2010-01-01T12:00')
    for net_code, sta_code in (('N1', 'N1S1'), ('N1', 'N1S2'),
                               ('N2', 'N2S1')):
        seed_id = '%s.%s..BHZ' % (net_code, sta_code)
        self.assertEqual(inv.get_response(seed_id, when),
                         responses[sta_code])
def test_get_orientation(self):
    """
    Test extracting orientation
    """
    expected = {u'azimuth': 90.0, u'dip': 0.0}
    cha = Channel(code='EHZ', location_code='',
                  start_date=UTCDateTime('2007-01-01'),
                  latitude=47.737166999999999, longitude=12.795714,
                  elevation=860.0, depth=0.0, azimuth=90.0, dip=0.0)
    sta = Station(code='RJOB', latitude=0.0, longitude=0.0, elevation=0.0,
                  channels=[cha])
    inv = Inventory(networks=[Network('BW', stations=[sta])],
                    source='TEST')
    # 1 - with an explicit datetime
    orientation = inv.get_orientation('BW.RJOB..EHZ',
                                      UTCDateTime('2010-01-01T12:00'))
    self.assertEqual(sorted(orientation.items()), sorted(expected.items()))
    # 2 - without datetime
    orientation = inv.get_orientation('BW.RJOB..EHZ')
    self.assertEqual(sorted(orientation.items()), sorted(expected.items()))
    # 3 - unknown SEED ID should raise exception
    self.assertRaises(Exception, inv.get_orientation, 'BW.RJOB..XXX')
def set_network(self):
    """Build self.network from the raw dict returned by get_network()."""
    raw = self.get_network()
    self.network_raw = raw
    self.network = Network(
        code=raw["code"],
        stations=[],
        description=raw["description"],
        start_date=obspy.UTCDateTime(raw["start_date"]))
def dataframe_to_network(netcode, network_df, instrument_register, progressor=None):
    """
    Convert Pandas dataframe with unique network code to obspy Network object.

    :param netcode: Network code
    :type netcode: str
    :param network_df: Dataframe containing records for a single network code.
    :type network_df: pandas.DataFrame conforming to table_format.TABLE_SCHEMA
    :param instrument_register: Dictionary of nominal instrument responses
        indexed by channel code, defaults to None
    :type instrument_register: dict of {str,
        Instrument(obspy.core.inventory.util.Equipment,
        obspy.core.inventory.response.Response)}, optional
    :param progressor: Progress bar functor to receive progress updates,
        defaults to None
    :type progressor: Callable object receiving incremental update on
        progress, optional
    :return: Network object containing the network information from the
        dataframe
    :rtype: obspy.core.inventory.network.Network
    """
    unique_codes = network_df['NetworkCode'].unique()
    # The dataframe must describe exactly the requested network.
    assert len(unique_codes) == 1, \
        "Non-unique network codes in network_df: {}".format(unique_codes)
    assert unique_codes[0] == netcode, \
        "Network code mismatch, check netcode {} != {}".format(
            netcode, unique_codes[0])
    net = Network(netcode, stations=[], description=' ')
    for statcode, ch_data in network_df.groupby('StationCode'):
        net.stations.append(
            _dataframe_to_station(statcode, ch_data, instrument_register))
        if progressor:
            progressor(len(ch_data))
    return net
def create_inv(network_code, station_code, location_code, channel_code,
               isr, sf, u):
    """Assemble a one-channel Inventory around a synthetic response."""
    response = create_response(inputsamplerate=isr, scaling_factor=sf,
                               units=u)
    channel = Channel(code=channel_code,
                      location_code=location_code,
                      start_date=obspy.UTCDateTime('2007-01-01'),
                      latitude=1, longitude=2, elevation=3, depth=4,
                      response=response)
    station = Station(code=station_code,
                      latitude=1, longitude=2, elevation=3,
                      creation_date=obspy.UTCDateTime('2007-01-01'),
                      site=Site(name='site'),
                      channels=[channel])
    network = Network(code=network_code,
                      start_date=obspy.UTCDateTime('2007-01-01'),
                      stations=[station])
    # The source should be the id of whoever created the file.
    return Inventory(networks=[network],
                     source='Joseph Farrugia, Ocean Networks Canada',
                     created=obspy.UTCDateTime(datetime.today()))
def test_get_coordinates(self):
    """
    Test extracting coordinates
    """
    expected = {u'latitude': 47.737166999999999, u'longitude': 12.795714,
                u'elevation': 860.0, u'local_depth': 0.0}
    cha = Channel(code='EHZ', location_code='',
                  start_date=UTCDateTime('2007-01-01'),
                  latitude=47.737166999999999, longitude=12.795714,
                  elevation=860.0, depth=0.0)
    sta = Station(code='RJOB', latitude=0.0, longitude=0.0, elevation=0.0,
                  channels=[cha])
    network = Network('BW', stations=[sta])
    # 1 - with an explicit datetime
    coordinates = network.get_coordinates('BW.RJOB..EHZ',
                                          UTCDateTime('2010-01-01T12:00'))
    assert sorted(coordinates.items()) == sorted(expected.items())
    # 2 - without datetime
    coordinates = network.get_coordinates('BW.RJOB..EHZ')
    assert sorted(coordinates.items()) == sorted(expected.items())
    # 3 - unknown SEED ID should raise exception
    with pytest.raises(Exception):
        network.get_coordinates('BW.RJOB..XXX')
def do_xml():
    """Fetch a nominal NRL response and write a demo StationXML file."""
    nrl = NRL('http://ds.iris.edu/NRL/')
    response = nrl.get_response(
        sensor_keys=['Streckeisen', 'STS-2', '1500',
                     '3 - installed 04/97 to present'],
        datalogger_keys=['REF TEK', 'RT 130 & 130-SMA', '1', '40'])
    # latitude/longitude/elevation/depth are required channel fields
    channel = Channel(code='BHZ', location_code='10', latitude=0,
                      longitude=0, elevation=0.0, depth=0.)
    channel.response = response
    # creation_date and site are required station fields
    station = Station(code='ABCD', latitude=0, longitude=0, elevation=0.0,
                      creation_date=UTCDateTime(1970, 1, 1),
                      site=Site(name='Fake Site'),
                      channels=[channel])
    network = Network(code='XX', stations=[station])
    inventory = Inventory(networks=[network], source="demo")
    inventory.write("Test.xml", format="stationxml", validate=True)
def test_get_response(self):
    """Network.get_response must resolve each station's channel response."""
    station_codes = ('N1S1', 'N1S2', 'N2S1')
    responses = {code: Response('RESP%s' % code) for code in station_codes}
    stations = [
        Station(code=code, latitude=0.0, longitude=0.0, elevation=0.0,
                channels=[Channel(code='BHZ', location_code='',
                                  latitude=0.0, longitude=0.0,
                                  elevation=0.0, depth=0.0,
                                  response=responses[code])])
        for code in station_codes
    ]
    network = Network('N1', stations=stations)
    when = UTCDateTime('2010-01-01T12:00')
    for code in station_codes:
        response = network.get_response('N1.%s..BHZ' % code, when)
        self.assertEqual(response, responses[code])
def sac2asdf_hinet(sac_directory, cmt_path, output_path):
    """
    Bundle Hi-net SAC waveforms and a CMT event file into one ASDF dataset.

    :param sac_directory: directory containing the SAC files to convert
    :param cmt_path: path of the event (QuakeML) file to embed
    :param output_path: path of the ASDF file to create
    """
    with pyasdf.ASDFDataSet(output_path, mode="w", compression=None,
                            mpi=False) as ds:
        # read in eventxml
        event_xml = obspy.read_events(cmt_path)
        # add eventxml to ds
        ds.add_quakeml(event_xml)
        event = ds.events[0]
        # read in waves
        files = sorted(glob(join(sac_directory, "*")))
        inv = Inventory()  # pylint: disable=no-value-for-parameter
        net_inv = Network(code="N", stations=[])
        # * we should sort files based on the station names
        sta_collection = {}
        # * here we add the waveforms along with the process of building
        #   the inventory
        for each_file in files:
            tr = obspy.read(each_file)[0]
            # here we need to modify some stats' values; the SAC station
            # field holds "net.sta" which we split apart
            net_sta = tr.stats.station
            net, sta = net_sta.split(".")
            tr.stats.network = net
            tr.stats.station = sta
            # we change the channel names U->HHZ N->HHN E->HHE
            channel_mapper = {"U": "HHZ", "N": "HHN", "E": "HHE"}
            try:
                tr.stats.channel = channel_mapper[tr.stats.channel]
            except KeyError:
                # skip traces with unexpected component codes
                continue
            # we have to consider the time difference in Japan (UTC+9)
            tr.stats.starttime = tr.stats.starttime - 9 * 60 * 60
            # * add the waveforms
            tr.data = np.require(tr.data, dtype="float32")
            ds.add_waveforms(tr, tag="raw", event_id=event)
            # * handle the stationxml: one Channel per trace, grouped
            #   into one Station per station code
            cha = Channel(code=tr.stats.channel, location_code="",
                          latitude=tr.stats.sac.stla,
                          longitude=tr.stats.sac.stlo,
                          elevation=tr.stats.sac.stel, depth=0.0,
                          sample_rate=tr.stats.sampling_rate)
            if (sta in sta_collection):
                sta_collection[sta].channels.append(cha)
            else:
                sta_collection[sta] = Station(
                    code=sta,
                    latitude=tr.stats.sac.stla,
                    longitude=tr.stats.sac.stlo,
                    elevation=tr.stats.sac.stel)
                sta_collection[sta].channels.append(cha)
        # * now we can add all the sta to net; only stations with a full
        #   3-component set are kept
        for sta in sta_collection:
            if (len(sta_collection[sta].channels) == 3):
                net_inv.stations.append(sta_collection[sta])
        # * we can add net to station_xml
        inv.networks.append(net_inv)
        # * now we can add inv to asdf
        ds.add_stationxml(inv)
def get_inventory(stations, depths, lat=50.45031, long=-112.12087,
                  elevation=779.0, dip1=0, azi1=0, dip2=0, azi2=90,
                  dip3=90, azi3=0):
    """Build a borehole Inventory with DPZ/DPN/DPE channels per station."""
    inv = Inventory(networks=[], source="Genevieve")
    net = Network(code="BH", stations=[], description=" ",
                  start_date=UTCDateTime(2019, 1, 1))
    # (code, azimuth, dip) for the three borehole components
    channel_specs = (("DPZ", azi3, dip3),
                     ("DPN", azi1, dip1),
                     ("DPE", azi2, dip2))
    for i, station in enumerate(stations):
        dep = depths[i]
        sta = Station(code=station, latitude=lat, longitude=long,
                      elevation=elevation,
                      creation_date=UTCDateTime(2019, 1, 1),
                      site=Site(name="borehole"))
        for code, azimuth, dip in channel_specs:
            sta.channels.append(
                Channel(code=code, location_code="", latitude=lat,
                        longitude=long, elevation=elevation,
                        azimuth=azimuth, dip=dip, depth=dep,
                        sample_rate=500))
        net.stations.append(sta)
    inv.networks.append(net)
    return inv
def test_writing_network_before_1990(self):
    """A network whose start date predates 1990 must round-trip."""
    net = Network(code="XX", start_date=obspy.UTCDateTime(1880, 1, 1))
    inv = obspy.Inventory(networks=[net], source="")
    with io.BytesIO() as buf:
        inv.write(buf, format="stationxml")
        buf.seek(0, 0)
        inv2 = read_inventory(buf)
    assert inv.networks[0] == inv2.networks[0]
def test_empty_network_code(self):
    """
    Tests that an empty string is acceptable as a network code.
    """
    # An empty string is allowed.
    net = Network(code="")
    assert net.code == ""
    # But None is not allowed.
    with pytest.raises(ValueError, match='A code is required'):
        Network(code=None)
    # Should still serialize to something.
    inv = Inventory(networks=[net])
    with io.BytesIO() as buf:
        inv.write(buf, format="stationxml", validate=True)
        buf.seek(0, 0)
        inv2 = read_inventory(buf)
    assert inv == inv2
def test_empty_network_code(self):
    """
    Tests that an empty string is acceptable as a network code.
    """
    # An empty string is allowed.
    net = Network(code="")
    self.assertEqual(net.code, "")
    # But None is not allowed.
    with self.assertRaises(ValueError) as e:
        Network(code=None)
    self.assertEqual(e.exception.args[0], "A code is required")
    # Should still serialize to something.
    inv = Inventory(networks=[net])
    with io.BytesIO() as buf:
        inv.write(buf, format="stationxml", validate=True)
        buf.seek(0, 0)
        inv2 = read_inventory(buf)
    self.assertEqual(inv, inv2)
def read_hyp_inventory(hyp, network, kml_output_dir=None):
    """
    Parse station lines from a .hyp file into an obspy Inventory.

    :param hyp: path to the .hyp file
    :param network: network code assigned to all parsed stations
    :param kml_output_dir: if given, the inventory is also written as
        ``<kml_output_dir>/<network>.kml``
    :return: the assembled Inventory
    """
    inventory = Inventory(networks=[], source="")
    net = Network(code=network, stations=[], description="")
    with open(hyp, 'r') as file:
        # Blank lines delimit the sections of the file; the station lines
        # are those in the section after the first blank line.
        blank_line = 0
        while True:
            line = file.readline().rstrip()
            if not len(line):
                blank_line += 1
                continue
            if blank_line > 1:
                # past the station section -> done
                break
            elif blank_line == 1:
                # fixed-column station line: cols 1-5 code, 6-13 latitude,
                # 14-22 longitude, rest elevation
                lat = line[6:14]
                lon = line[14:23]
                elev = float(line[23:])
                station = line[1:6]
                # hemisphere letters give the sign of the coordinate
                if lat[-1] == 'S':
                    NS = -1
                else:
                    NS = 1
                if lon[-1] == 'W':
                    EW = -1
                else:
                    EW = 1
                # convert degrees + decimal minutes to signed decimal degrees
                lat = (int(lat[0:2]) + float(lat[2:-1]) / 60) * NS
                lat = Latitude(lat)
                lon = (int(lon[0:3]) + float(lon[3:-1]) / 60) * EW
                lon = Longitude(lon)
                sta = Station(code=station, latitude=lat, longitude=lon,
                              elevation=elev)
                net.stations.append(sta)
    inventory.networks.append(net)
    if kml_output_dir:
        os.makedirs(kml_output_dir, exist_ok=True)
        inventory.write(kml_output_dir + "/" + network + ".kml",
                        format="KML")
    return inventory
def set_network(self, code="", description="", start_date="2020,2,1,0,0,0.00"):
    """Store the raw network fields and build self.network from them."""
    raw = {
        "code": code,
        "description": description,
        "start_date": start_date,
    }
    self.network_raw = raw
    self.network = Network(
        code=raw["code"],
        stations=[],
        description=raw["description"],
        start_date=obspy.UTCDateTime(raw["start_date"]))
def array_csvtoinventory(fh):
    """
    Takes a ph5 array csv file and converts it to an obspy inventory object.

    :param fh: file handle of the array csv file (not read here)
    :return: :class:`obspy.core.inventory.Inventory`
    """
    network = Network('XX')
    network.extra = AttribDict({"channel_num": 1})
    return Inventory(networks=[network], source="", sender="",
                     created=UTCDateTime.now(), module="", module_uri="")
def test_writing_module_tags(self):
    """
    Tests the writing of ObsPy related tags.
    """
    inv = Inventory(networks=[Network(code="UL")], source="BLU")
    file_buffer = io.BytesIO()
    inv.write(file_buffer, format="StationXML", validate=True)
    file_buffer.seek(0, 0)
    lines = [line.strip()
             for line in file_buffer.read().decode().splitlines()]
    # The <Module> tag must identify ObsPy (version suffix may vary).
    module_line = [line for line in lines
                   if line.startswith("<Module>")][0]
    self.assertTrue(fnmatch.fnmatch(module_line,
                                    "<Module>ObsPy *</Module>"))
    # The <ModuleURI> tag must point at the project homepage.
    module_uri_line = [line for line in lines
                       if line.startswith("<ModuleURI>")][0]
    self.assertEqual(module_uri_line,
                     "<ModuleURI>https://www.obspy.org</ModuleURI>")
def clone_inv(inv, net_name, sta_name):
    """Append to *inv* a new network/station with an HHZ channel cloned
    from inv[0][0][0] (coordinates, dates, sample rate and response)."""
    src_sta = inv[0][0]
    src_cha = inv[0][0][0]
    cha = Channel(
        # Channel and location codes follow the SEED standard.
        code="HHZ",
        location_code="",
        # Coordinates are copied from the source channel; note they can
        # differ from the station coordinates.
        start_date=src_cha.start_date,
        latitude=src_cha.latitude,
        longitude=src_cha.longitude,
        elevation=src_cha.elevation,
        depth=src_cha.depth,
        sample_rate=src_cha.sample_rate)
    # Reuse the source channel's response verbatim.
    cha.response = src_cha.response
    sta = Station(
        # Station code according to the SEED standard.
        code=sta_name,
        latitude=src_sta.latitude,
        longitude=src_sta.longitude,
        elevation=src_sta.elevation,
        creation_date=obspy.UTCDateTime(2016, 1, 2),
        site=Site(name="station with cloned inv"))
    sta.channels.append(cha)
    net = Network(code=net_name, stations=[])
    net.stations.append(sta)
    inv.networks.append(net)
    return inv
def dataless2stationXml(datalessFileName, xmlFileName):
    """Convert a dataless SEED file into a validated StationXML file."""
    # Read the dataless seed file
    sp = Parser(datalessFileName)
    # Collect all potential unit abbreviations
    units = {}
    for entry in sp.abbreviations:
        if entry.name == 'Units Abbreviations':
            units[entry.unit_lookup_code] = entry.unit_name
    # Look-up dictionary for the transfer functions
    transFuncs = {
        'A': 'LAPLACE (RADIANS/SECOND)',
        'B': 'ANALOG (HERTZ)',
        'C': 'COMPOSITE',
        'D': 'DIGITAL (Z-TRANSFORM)',
    }
    # Collect each of the station objects together with its network code
    stations = []
    staNetCodes = []
    for stationBlock in sp.stations:
        station, staNetCode = getStation(stationBlock, units, transFuncs)
        stations.append(station)
        staNetCodes.append(staNetCode)
    # For each unique network code (np.unique keeps them sorted), gather
    # the stations which relate to it
    staNetCodes = np.array(staNetCodes)
    networks = [
        Network(aNet,
                stations=[stations[arg]
                          for arg in np.where(staNetCodes == aNet)[0]])
        for aNet in np.unique(staNetCodes)
    ]
    # Finally turn this into an inventory and save
    inv = Inventory(networks, 'Lazylyst')
    inv.write(xmlFileName, format='stationxml', validate=True)
def staCsv2Xml(staCsvPath, staXmlPath, source='Lazylyst'):
    """Convert a station csv file into a validated StationXML file."""
    # Load the csv file
    info = np.genfromtxt(staCsvPath, delimiter=',', dtype=str)
    # One Network per unique code in column 5, with its stations attached
    networks = []
    for net in np.unique(info[:, 5]):
        rows = info[np.where(info[:, 5] == net)]
        stations = [
            Station(row[0], row[1], row[2], row[3], site=Site(''),
                    creation_date=UTCDateTime(1970, 1, 1))
            for row in rows
        ]
        networks.append(Network(net, stations=stations))
    # Generate the inventory object, and save it as a station XML
    inv = Inventory(networks=networks, source=source)
    inv.write(staXmlPath, format='stationxml', validate=True)
def create_simple_inventory(network, station, latitude=None, longitude=None,
                            elevation=None, depth=None, start_date=None,
                            end_date=None, location_code="S3",
                            channel_code="MX"):
    """
    Create simple inventory with only location information,
    for ZNE components, especially useful for synthetic data.

    :param network: network code
    :param station: station code
    :param latitude: station/channel latitude
    :param longitude: station/channel longitude
    :param elevation: station/channel elevation
    :param depth: channel depth
    :param start_date: channel start date (defaults to epoch 0)
    :param end_date: channel end date
    :param location_code: SEED location code
    :param channel_code: two-letter channel code prefix
        (specfem default is "MX")
    :return: the assembled Inventory
    """
    # Orientation (azimuth, dip) keyed by component letter.  The previous
    # version keyed these dicts on the hard-coded names "MXZ"/"MXN"/"MXE",
    # which raised KeyError for any non-default ``channel_code`` prefix.
    azi_dict = {"Z": 0.0, "N": 0.0, "E": 90.0}
    dip_dict = {"Z": 90.0, "N": 0.0, "E": 0.0}
    if start_date is None:
        start_date = UTCDateTime(0)
    channel_list = []
    for _comp in ["Z", "E", "N"]:
        _chan_code = "%s%s" % (channel_code, _comp)
        chan = Channel(_chan_code, location_code, latitude=latitude,
                       longitude=longitude, elevation=elevation,
                       depth=depth, azimuth=azi_dict[_comp],
                       dip=dip_dict[_comp], start_date=start_date,
                       end_date=end_date)
        channel_list.append(chan)
    site = Site("N/A")
    sta = Station(station, latitude=latitude, longitude=longitude,
                  elevation=elevation, channels=channel_list, site=site,
                  creation_date=start_date, total_number_of_channels=3,
                  selected_number_of_channels=3)
    nw = Network(network, stations=[sta, ], total_number_of_stations=1,
                 selected_number_of_stations=1)
    inv = Inventory([nw, ], source="SPECFEM3D_GLOBE", sender="Princeton",
                    created=UTCDateTime.now())
    return inv
def select_stations(inventory, station_list):
    """
    Select stations within an Inventory according to a list.

    Parameters
    ----------
    inventory : obspy.Inventory
        The inventory (expected to contain exactly one network).
    station_list : TYPE
        The station list; ``None`` keeps the inventory unchanged.

    Returns
    -------
    obspy.Inventory
        An inventory only containing stations in station_list.
    """
    if station_list is None:
        return inventory
    # single-element unpacking raises if there is more than one network
    network, = inventory
    kept = [sta for sta in network if sta.code in station_list]
    # NOTE(review): the output network code is hard-coded to 'YV' rather
    # than taken from the input network -- confirm this is intended.
    rebuilt = Network(code='YV', stations=kept)
    return Inventory(networks=[rebuilt], source='')
def _make_inventory(self, df: pd.DataFrame): """ Loopy logic for creating the inventory form a dataframe. """ # get dataframe with correct columns/conditioning from input df = obsplus.stations_to_df(df).copy() # add responses (if requested) and drop response cols df["response"] = self._get_responses(df) df = df.drop(columns=self._drop_cols, errors="ignore") # warn if any unexpected columns are found in df self._maybe_warn_on_unexpected_columns(df) # Iterate networks and create stations networks = [] for net_code, net_df in self._groupby_if_exists(df, "network"): stations = [] for st_code, sta_df in self._groupby_if_exists(net_df, "station"): if not st_code[0]: continue channels = [] for ch_code, ch_df in self._groupby_if_exists(sta_df, "channel"): if not ch_code[0]: # skip empty channel lines continue chan_series = ch_df.iloc[0] kwargs = self._get_kwargs(chan_series, self.cha_map) # try to add the inventory channels.append(Channel(**kwargs)) kwargs = self._get_kwargs(sta_df.iloc[0], self.sta_map) self._add_dates(kwargs, channels) stations.append(Station(channels=channels, **kwargs)) kwargs = self._get_kwargs(net_df.iloc[0], self.net_map) self._add_dates(kwargs, stations) networks.append(Network(stations=stations, **kwargs)) return obspy.Inventory( networks=networks, source=f"ObsPlus_v{obsplus.__version__}" )
def stats2inv(stats, resp=None, filexml=None, locs=None):
    """
    Build an obspy Inventory (network -> station -> channel) from a trace's
    stats, optionally writing it out as StationXML.

    :param stats: obspy trace ``stats`` with network/station/channel codes,
        sampling rate and (when ``locs`` is None) SAC coordinate headers
    :param resp: response information; currently only triggers a notice --
        an empty Response object is always attached
    :param filexml: if given, the inventory is also written (validated) to
        this StationXML path
    :param locs: optional table of station locations; when given,
        coordinates come from the row whose 'station' matches
        ``stats.station`` instead of the SAC headers
    :return: the assembled Inventory
    """
    # We'll first create all the various objects. These strongly follow the
    # hierarchy of StationXML files.
    inv = Inventory(networks=[], source="japan_from_resp")
    if locs is None:
        net = Network(
            # This is the network code according to the SEED standard.
            code=stats.network,
            # A list of stations. We'll add one later.
            stations=[],
            description="Marine created from SAC and resp files",
            # Start-and end dates are optional.
            start_date=stats.starttime)
        sta = Station(
            # This is the station code according to the SEED standard.
            code=stats.station,
            latitude=stats.sac["stla"],
            longitude=stats.sac["stlo"],
            elevation=stats.sac["stel"],
            creation_date=stats.starttime,
            site=Site(name="First station"))
        cha = Channel(
            # This is the channel code according to the SEED standard.
            code=stats.channel,
            # This is the location code according to the SEED standard.
            location_code=stats.location,
            # Note that these coordinates can differ from the station
            # coordinates.
            latitude=stats.sac["stla"],
            longitude=stats.sac["stlo"],
            elevation=stats.sac["stel"],
            depth=-stats.sac["stel"],
            azimuth=stats.sac["cmpaz"],
            dip=stats.sac["cmpinc"],
            sample_rate=stats.sampling_rate)
    else:
        # index of the row in ``locs`` matching this station
        ista = locs[locs['station'] == stats.station].index.values.astype(
            'int64')[0]
        net = Network(
            # This is the network code according to the SEED standard.
            code=locs.iloc[ista]["network"],
            # A list of stations. We'll add one later.
            stations=[],
            description="Marine created from SAC and resp files",
            # Start-and end dates are optional.
            start_date=stats.starttime)
        sta = Station(
            # This is the station code according to the SEED standard.
            code=locs.iloc[ista]["station"],
            latitude=locs.iloc[ista]["latitude"],
            longitude=locs.iloc[ista]["longitude"],
            elevation=locs.iloc[ista]["elevation"],
            creation_date=stats.starttime,
            site=Site(name="First station"))
        cha = Channel(
            # This is the channel code according to the SEED standard.
            code=stats.channel,
            # This is the location code according to the SEED standard.
            location_code=stats.location,
            # Note that these coordinates can differ from the station
            # coordinates.  NOTE: in this branch azimuth/dip are fixed at 0
            # rather than taken from SAC headers.
            latitude=locs.iloc[ista]["latitude"],
            longitude=locs.iloc[ista]["longitude"],
            elevation=locs.iloc[ista]["elevation"],
            depth=-locs.iloc[ista]["elevation"],
            azimuth=0,
            dip=0,
            sample_rate=stats.sampling_rate)
    # an (empty) default response is always attached
    response = obspy.core.inventory.response.Response()
    if resp is not None:
        print('i dont have the response')
        # By default this accesses the NRL online. Offline copies of the NRL
        # can also be used instead
        # nrl = NRL()
        # The contents of the NRL can be explored interactively in a Python
        # prompt, see API documentation of NRL submodule:
        # http://docs.obspy.org/packages/obspy.clients.nrl.html
        # Here we assume that the end point of data logger and sensor are
        # already known:
        # response = nrl.get_response( # doctest: +SKIP
        #     sensor_keys=['Streckeisen', 'STS-1', '360 seconds'],
        #     datalogger_keys=['REF TEK', 'RT 130 & 130-SMA', '1', '200'])
    # Now tie it all together.
    cha.response = response
    sta.channels.append(cha)
    net.stations.append(sta)
    inv.networks.append(net)
    # And finally write it to a StationXML file. We also force a validation
    # against the StationXML schema to ensure it produces a valid StationXML
    # file.
    #
    # Note that it is also possible to serialize to any of the other
    # inventory output formats ObsPy supports.
    if filexml is not None:
        inv.write(filexml, format="stationxml", validate=True)
    return inv
def get_inventory():
    """Return a demo Inventory: network US, station ABCD, three HN channels."""
    def _channel(code, azimuth):
        # Channel coordinates may differ from the station coordinates;
        # all three demo channels share location code "11".
        return Channel(code=code, location_code="11", latitude=1.0,
                       longitude=2.0, elevation=345.0, depth=10.0,
                       azimuth=azimuth, dip=-90.0, sample_rate=1)

    sta = Station(
        # This is the station code according to the SEED standard.
        code="ABCD",
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        creation_date=UTCDateTime(2016, 1, 2),
        site=Site(name="First station"))
    for code, azimuth in (("HN1", 0.0), ("HN2", 90.0), ("HNZ", 0.0)):
        sta.channels.append(_channel(code, azimuth))
    net = Network(
        # This is the network code according to the SEED standard.
        code="US",
        stations=[],
        description="A test stations.",
        # Start-and end dates are optional.
        start_date=UTCDateTime(2016, 1, 2))
    net.stations.append(sta)
    inv = Inventory(
        networks=[],
        # The source should be the id whoever create the file.
        source="ObsPy-Tutorial")
    inv.networks.append(net)
    return inv
def read_fdsn_station_text_file(path_or_file_object):
    """
    Function reading a FDSN station text file to an inventory object.

    The file's header row determines the level (network, station or
    channel) at which the returned Inventory is populated.

    :param path_or_file_object: File name or file like object.
    :return: the parsed :class:`Inventory`
    :raises ValueError: if the header does not match any known level.
    """
    def _read(obj):
        # Parse the pipe-separated text into a header tuple plus the
        # stripped, non-comment content rows.
        r = unicode_csv_reader(obj, delimiter=native_str("|"))
        header = next(r)
        header[0] = header[0].lstrip("#")
        header = [_i.strip().lower() for _i in header]
        # IRIS currently has a wrong header name. Just map it.
        header = [
            _i.replace("instrument", "sensordescription") for _i in header
        ]
        all_lines = []
        for line in r:
            # Skip comment lines.
            if line[0].startswith("#"):
                continue
            all_lines.append([_i.strip() for _i in line])
        return {"header": tuple(header), "content": all_lines}

    # Enable reading from files and buffers opened in binary mode.
    if (hasattr(path_or_file_object, "mode") and
            "b" in path_or_file_object.mode) or \
            isinstance(path_or_file_object, io.BytesIO):
        buf = io.StringIO(path_or_file_object.read().decode("utf-8"))
        buf.seek(0, 0)
        path_or_file_object = buf
    if hasattr(path_or_file_object, "read"):
        content = _read(path_or_file_object)
    else:
        with open(path_or_file_object, "rt", newline="",
                  encoding="utf8") as fh:
            content = _read(fh)
    # Figure out the type from the header.
    if content["header"] == network_components:
        level = "network"
        filetypes = network_types
    elif content["header"] == station_components:
        level = "station"
        filetypes = station_types
    elif content["header"] == channel_components:
        level = "channel"
        filetypes = channel_types
    else:
        raise ValueError("Unknown type of header.")
    content = content["content"]
    converted_content = []
    # Convert all types.
    for line in content:
        converted_content.append(
            [v_type(value) for value, v_type in zip(line, filetypes)])
    # Now convert to an inventory object.
    inv = Inventory(networks=[], source=None)
    if level == "network":
        for net in converted_content:
            network = Network(
                code=net[0], description=net[1], start_date=net[2],
                end_date=net[3], total_number_of_stations=net[4])
            inv.networks.append(network)
    elif level == "station":
        # group the stations by network code, preserving file order
        networks = collections.OrderedDict()
        for sta in converted_content:
            site = Site(name=sta[5])
            station = Station(
                code=sta[1], latitude=sta[2], longitude=sta[3],
                elevation=sta[4], site=site, start_date=sta[6],
                end_date=sta[7])
            if sta[0] not in networks:
                networks[sta[0]] = []
            networks[sta[0]].append(station)
        for network_code, stations in networks.items():
            net = Network(code=network_code, stations=stations)
            inv.networks.append(net)
    elif level == "channel":
        networks = collections.OrderedDict()
        stations = collections.OrderedDict()
        for channel in converted_content:
            net, sta, loc, chan, lat, lng, ele, dep, azi, dip, inst, \
                scale, scale_freq, scale_units, s_r, st, et = channel
            if net not in networks:
                networks[net] = Network(code=net)
            # create the parent station once per (network, station) pair
            if (net, sta) not in stations:
                station = Station(code=sta, latitude=lat, longitude=lng,
                                  elevation=ele)
                networks[net].stations.append(station)
                stations[(net, sta)] = station
            sensor = Equipment(type=inst)
            # only build a sensitivity when both value and frequency exist
            if scale is not None and scale_freq is not None:
                resp = Response(
                    instrument_sensitivity=InstrumentSensitivity(
                        value=scale, frequency=scale_freq,
                        input_units=scale_units, output_units=None))
            else:
                resp = None
            try:
                channel = Channel(
                    code=chan, location_code=loc, latitude=lat,
                    longitude=lng, elevation=ele, depth=dep, azimuth=azi,
                    dip=dip, sensor=sensor, sample_rate=s_r,
                    start_date=st, end_date=et, response=resp)
            except Exception as e:
                # a malformed channel row is skipped with a warning rather
                # than aborting the whole parse
                warnings.warn(
                    "Failed to parse channel %s.%s.%s.%s due to: %s" % (
                        net, sta, loc, chan, str(e)),
                    UserWarning)
                continue
            stations[(net, sta)].channels.append(channel)
        inv.networks.extend(list(networks.values()))
    else:
        # Cannot really happen - just a safety measure.
        raise NotImplementedError("Unknown level: %s" % str(level))
    return inv
def getInventory(self):
    """
    Extract an ObsPy inventory object from a Stream read in by gmprocess
    tools.

    :raises Exception: if the stream mixes traces from multiple networks.
    :return: Inventory with one network, one station and one channel per
        trace; unused standard/format-specific metadata is serialized as
        JSON into the station description.
    """
    networks = [trace.stats.network for trace in self]
    if len(set(networks)) > 1:
        raise Exception(
            "Input stream has stations from multiple networks.")
    # We'll first create all the various objects. These strongly follow the
    # hierarchy of StationXML files.
    source = ''
    if 'standard' in self[0].stats and 'source' in self[0].stats.standard:
        source = self[0].stats.standard.source
    inv = Inventory(
        # We'll add networks later.
        networks=[],
        # The source should be the id whoever create the file.
        source=source)
    net = Network(
        # This is the network code according to the SEED standard.
        code=networks[0],
        # A list of stations. We'll add one later.
        stations=[],
        description="source",
        # Start-and end dates are optional.
    )
    # one Channel per trace in the stream
    channels = []
    for trace in self:
        logging.debug('trace: %s' % trace)
        channel = _channel_from_stats(trace.stats)
        channels.append(channel)
    # carry along metadata that has no StationXML slot as JSON in the
    # station description
    subdict = {}
    for k in UNUSED_STANDARD_PARAMS:
        if k in self[0].stats.standard:
            subdict[k] = self[0].stats.standard[k]
    format_specific = {}
    if 'format_specific' in self[0].stats:
        format_specific = dict(self[0].stats.format_specific)
    big_dict = {'standard': subdict,
                'format_specific': format_specific}
    try:
        jsonstr = json.dumps(big_dict)
    except Exception as e:
        raise GMProcessException('Exception in json.dumps: %s' % e)
    sta = Station(
        # This is the station code according to the SEED standard.
        code=self[0].stats.station,
        latitude=self[0].stats.coordinates.latitude,
        elevation=self[0].stats.coordinates.elevation,
        longitude=self[0].stats.coordinates.longitude,
        channels=channels,
        site=Site(name=self[0].stats.standard.station_name),
        description=jsonstr,
        creation_date=UTCDateTime(1970, 1, 1),  # this is bogus
        total_number_of_channels=len(self))
    net.stations.append(sta)
    inv.networks.append(net)
    return inv
def main(argv):
    '''@package isc2stnxml
    Gather station information from the STN files provided in the ISC and
    Engdahl (EHB) catalogues, assigning the correct network code to each
    station. When a proper network code cannot be identified, a default
    code is guessed (see ``default_net`` below).

    Writes the composed inventory to ``station.xml`` (StationXML) and
    ``station.txt`` (stationtxt).

    NOTE(review): this is Python 2 code (``print`` statement, ``xrange``)
    and uses ``np.float``, which was removed in NumPy 1.24 -- it will not
    run under Python 3 / modern NumPy without porting. Indentation below
    was reconstructed from a collapsed listing; spots marked
    "NOTE(review)" should be verified against the original file.
    '''
    # Reference inventory used to cross-check station locations.
    inv = read_inventory("IRIS-ALL.xml")
    # Unknown stations in Indonesia are usually installed by Potsdam, so we
    # assume they belong to network 'GE' when nothing better is known.
    default_net = 'GE'
    # Engdahl (EHB) station lists, stacked and de-duplicated row-wise.
    ehb1 = read_eng('BMG.STN')
    ehb2 = read_eng('ISC.STN')
    ehb = np.unique(np.vstack((ehb1, ehb2)), axis=0)
    # ISC station lists, stacked and de-duplicated row-wise.
    # Row layout inferred from the indexing below:
    # [station, network, lat, lon, elev, channel, start, end] -- TODO confirm
    isc1 = read_isc('ehb.stn')
    isc2 = read_isc('iscehb.stn')
    isc = np.unique(np.vstack((isc1, isc2)), axis=0)
    catalogue = []
    our_xml = Inventory(networks=[], source='EHB')
    for i in xrange(ehb.shape[0]):
        filed = False  # True when we fell back to dummy/default values
        xml = False    # True when the best match came from the XML inventory
        # All ISC rows sharing this EHB station's code.
        stn_found = isc[isc[:, 0] == ehb[i, 0], :]
        min_dist = 10e10
        if stn_found.shape[0] > 0:
            if stn_found.shape[0] > 1:
                # Several ISC candidates: keep the geographically closest.
                for j in xrange(stn_found.shape[0]):
                    dist = locations2degrees(np.float(stn_found[j, 2]),
                                             np.float(stn_found[j, 3]),
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                    if dist < min_dist:
                        min_dist = dist
                        record = stn_found[j, :]
            else:
                min_dist = locations2degrees(np.float(stn_found[0, 2]),
                                             np.float(stn_found[0, 3]),
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                record = stn_found[0, :]
        # Now we try to find the same station in the XML inventory.
        # if min_dist > 1. or stn_found.shape[0]==0:
        xstn_found = inv.select(station=ehb[i, 0], channel="*HZ")
        if len(stn_found) == 0 and len(xstn_found) == 0:
            # We failed to find the station anywhere; assign dummy values.
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'Z',
                '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            min_dist = 0.
            filed = True
        else:
            # If the station is found somewhere, iterate over the XML hits
            # and give the XML inventory preference by adding a 0.1 degree
            # penalty to the minimum distance found in ISC.
            if len(xstn_found) > 0:
                # print "----------",len(xstn_found)
                # print xstn_found[0][0].latitude
                min_dist = min_dist + 0.1
                for j in xrange(len(xstn_found)):
                    dist = locations2degrees(xstn_found[j][0].latitude,
                                             xstn_found[j][0].longitude,
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                    if min_dist > dist:
                        min_dist = dist
                        # NOTE(review): here record becomes an ObsPy
                        # selection object, not a catalogue row as elsewhere;
                        # the 'xml' branch below discards it, so it is never
                        # written out.
                        record = xstn_found[j]
                        # print record
                        xml = True
        # Last defence: if a match was made but the declared and found
        # locations differ by more than one degree, fall back to defaults.
        if min_dist > 1:
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'Z',
                '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            filed = True
        if xml:
            # Station already present in the XML inventory: nothing to add.
            #our_xml.networks.append(record)
            xml = False
        else:
            if filed:
                # Guessed/dummy record: patch an empty end date, then keep it.
                if len(record[7]) < 5:
                    record[7] = '2599-12-31 23:59:59'
                catalogue.append(record)
            else:
                # Genuine ISC match: keep every ISC row for this
                # station/network pair.
                stn_found = isc[(isc[:, 0] == record[0]) &
                                (isc[:, 1] == record[1]), :]
                for k in xrange(stn_found.shape[0]):
                    # NOTE(review): this Network object is never used here --
                    # presumably leftover from an earlier revision.
                    net = Network(code=stn_found[k, 1], stations=[],
                                  description=' ')
                    if len(stn_found[k, 7]) < 5:
                        stn_found[k, 7] = '2599-12-31 23:59:59'
                    # NOTE(review): append presumed to happen for every row,
                    # not only patched ones -- verify nesting.
                    catalogue.append(stn_found[k, :])
    # De-duplicate everything collected so far.
    stn_found = np.unique(np.array(catalogue), axis=0)
    if len(stn_found[stn_found == '']) > 0 or len(
            stn_found[stn_found == ' ']) > 0:
        print "Some elements are empty, check the list"
    # The inventory is composed, but some stations from the ISC list can be
    # left behind; check whether any ISC stations were forgotten.
    lost = []
    for j in xrange(isc.shape[0]):
        # Is there any common station name?
        common_st = stn_found[isc[j, 0] == stn_found[:, 0]]
        if common_st.shape[0] > 0:
            # Is the network code the same?
            common_net = common_st[common_st[:, 1] == isc[j, 1]]
            if common_net.shape[0] < 1:
                # Found a forgotten one -- check the XML inventory too.
                if len(inv.select(station=isc[j, 0],
                                  network=isc[j, 1])) <= 0:
                    # Bingo: truly missing, keep it.
                    lost.append(isc[j, :])
        else:
            if len(inv.select(station=isc[j, 0],
                              network=isc[j, 1])) <= 0:
                # Bingo: truly missing, keep it.
                lost.append(isc[j, :])
    stn_found = np.vstack((stn_found, np.array(lost)))
    # Build one Network/Station/Channel triple per remaining row and attach
    # it to the output inventory.
    for k in xrange(stn_found.shape[0]):
        net = Network(code=stn_found[k, 1], stations=[], description=' ')
        if len(stn_found[k, 7]) < 5:
            stn_found[k, 7] = '2599-12-31 23:59:59'
        # NOTE(review): catalogue is no longer consumed after this point;
        # this append is presumably vestigial -- verify nesting too.
        catalogue.append(stn_found[k, :])
        sta = Station(code=stn_found[k, 0],
                      creation_date=utcdatetime.UTCDateTime(stn_found[k, 6]),
                      termination_date=utcdatetime.UTCDateTime(stn_found[k, 7]),
                      site=Site(name=' '),
                      latitude=np.float(stn_found[k, 2]),
                      longitude=np.float(stn_found[k, 3]),
                      elevation=np.float(stn_found[k, 4]))
        cha = Channel(code=stn_found[k, 5],
                      depth=0.,
                      azimuth=0.,
                      dip=-90.,
                      location_code='',
                      latitude=np.float(stn_found[k, 2]),
                      longitude=np.float(stn_found[k, 3]),
                      elevation=np.float(stn_found[k, 4]))
        sta.channels.append(cha)
        net.stations.append(sta)
        our_xml.networks.append(net)
        # print 'np',stn_found[k,:]
    our_xml.write("station.xml", format="stationxml", validate=True)
    our_xml.write("station.txt", format="stationtxt")