def test_get_coordinates(self):
    """
    Test extracting coordinates
    """
    # Build a minimal one-channel inventory tree to query against.
    chan = Channel(code='EHZ', location_code='',
                   start_date=UTCDateTime('2007-01-01'),
                   latitude=47.737166999999999,
                   longitude=12.795714,
                   elevation=860.0, depth=0.0)
    sta = Station(code='RJOB', latitude=0.0, longitude=0.0,
                  elevation=0.0, channels=[chan])
    net = Network('BW', stations=[sta])
    # Coordinates should come from the channel, not the station.
    expected = {u'latitude': 47.737166999999999,
                u'longitude': 12.795714,
                u'elevation': 860.0,
                u'local_depth': 0.0}
    # 1 - lookup with an explicit datetime
    got = net.get_coordinates('BW.RJOB..EHZ',
                              UTCDateTime('2010-01-01T12:00'))
    self.assertEqual(sorted(got.items()), sorted(expected.items()))
    # 2 - without datetime
    got = net.get_coordinates('BW.RJOB..EHZ')
    self.assertEqual(sorted(got.items()), sorted(expected.items()))
    # 3 - unknown SEED ID should raise exception
    self.assertRaises(Exception, net.get_coordinates, 'BW.RJOB..XXX')
def test_get_response(self):
    """
    Test that get_response() returns the response attached to the
    channel matching the given SEED id.
    """
    # One distinct Response per station so lookups are distinguishable.
    responses = {'N1S1': Response('RESPN1S1'),
                 'N1S2': Response('RESPN1S2'),
                 'N2S1': Response('RESPN2S1')}
    stations = []
    for sta_code in ('N1S1', 'N1S2', 'N2S1'):
        cha = Channel(code='BHZ', location_code='', latitude=0.0,
                      longitude=0.0, elevation=0.0, depth=0.0,
                      response=responses[sta_code])
        stations.append(Station(code=sta_code, latitude=0.0,
                                longitude=0.0, elevation=0.0,
                                channels=[cha]))
    network = Network('N1', stations=stations)
    t = UTCDateTime('2010-01-01T12:00')
    for sta_code, expected in responses.items():
        seed_id = 'N1.%s..BHZ' % sta_code
        self.assertEqual(network.get_response(seed_id, t), expected)
def df_to_inventory(df) -> obspy.Inventory:
    """
    Create a simple inventory from a dataframe.

    The dataframe must have the same columns as the ones produced by
    :func:`obsplus.stations_to_df`.

    :param df: Anything :func:`obsplus.stations_to_df` accepts; it is
        coerced to a station dataframe first.
    :return: An obspy Inventory assembled from the dataframe rows.
    """
    def _make_key_mappings(cls):
        """ Create a mapping from columns in df to kwargs for cls. """
        # Start from the constructor's own parameter names, then overlay
        # any project-specific renames declared in mapping_keys.
        base_params = set(inspect.signature(cls).parameters)
        new_map = mapping_keys[cls]
        base_map = {x: x for x in base_params - set(new_map)}
        base_map.update(new_map)
        return base_map

    def _groupby_if_exists(df, columns):
        """ Groupby columns if they exist on dataframe, else return empty. """
        cols = list(obsplus.utils.iterate(columns))
        # Bail out (empty generator) when any requested column is missing.
        if not set(cols).issubset(df.columns):
            return
        # copy df and set missing start/end times to reasonable values
        # this is needed so they get included in a groupby
        # (pandas groupby silently drops rows whose key is NaN)
        df = df.copy()
        isnan = df.isna()
        if "start_date" in columns:
            df["start_date"] = df["start_date"].fillna(0)
        if "end_date" in columns:
            df["end_date"] = df["end_date"].fillna(LARGE_NUMBER)

        for ind, df_sub in df.groupby(cols):
            # restore NaN values that were filled above so downstream
            # kwarg extraction sees them as missing again
            if isnan.any().any():
                df_sub[isnan.loc[df_sub.index]] = np.nan
            yield ind, df_sub

    def _get_kwargs(series, key_mapping):
        """ Create the kwargs from a series and key mapping. """
        out = {}
        for k, v in key_mapping.items():
            # skip if requested kwarg is not in the series
            if v not in series:
                continue
            value = series[v]
            # NaN is translated to None so obspy constructors accept it
            out[k] = value if not pd.isnull(value) else None
        return out

    # first get key_mappings
    net_map = _make_key_mappings(Network)
    sta_map = _make_key_mappings(Station)
    cha_map = _make_key_mappings(Channel)
    # next define columns groupbys should be performed on
    net_columns = ["network"]
    sta_columns = ["station", "start_date", "end_date"]
    cha_columns = ["channel", "location", "start_date", "end_date"]
    # Ensure input is a dataframe
    df = obsplus.stations_to_df(df)
    # Iterate network/station/channel groups in nested fashion, building
    # the obspy objects bottom-up from the first row of each group.
    networks = []
    for net_code, net_df in _groupby_if_exists(df, net_columns):
        stations = []
        for st_code, sta_df in _groupby_if_exists(net_df, sta_columns):
            channels = []
            for ch_code, ch_df in _groupby_if_exists(sta_df, cha_columns):
                kwargs = _get_kwargs(ch_df.iloc[0], cha_map)
                channels.append(Channel(**kwargs))
            kwargs = _get_kwargs(sta_df.iloc[0], sta_map)
            stations.append(Station(channels=channels, **kwargs))
        kwargs = _get_kwargs(net_df.iloc[0], net_map)
        networks.append(Network(stations=stations, **kwargs))

    return obspy.Inventory(networks=networks,
                           source=f"ObsPlus_v{obsplus.__version__}")
def main():
    """
    Interactively build or extend a StationXML inventory.

    Prompts on stdin for network, station and channel metadata, fetches
    the instrument response from the NRL, and writes the resulting
    inventory back to the chosen StationXML file.

    NOTE(review): relies on module-level defaults (``netc``, ``scode``,
    ``geolat``, ``geolon``, ``geoelev``, ``date``, ``longname``, ``sps``,
    ``azims``, ``dips``, ``locs``) defined elsewhere in this file —
    confirm they exist before running.
    """
    chans = "EHZ,EHN,EHE"

    # Get StationXML file: reuse an existing one if present, else start new.
    print("Interactive StaXML builder")
    print("Work in progress...some things hardwired\n\n")
    inv_name = input("Enter StationXML file name: ")
    if os.path.isfile(inv_name):
        inv = read_inventory(inv_name)
    else:
        print(f"Making new inventory: {inv_name}\n")
        inv = Inventory(networks=[], source="Weston")

    # Network code
    ques = f"Enter Network Code ({str(netc)}) :"
    net_code = str(input(ques) or netc)
    net = Network(code=net_code, stations=[])
    print("\n")

    # Connect to NRL (online lookup of instrument responses).
    nrl = NRL()

    # Datalogger info: keep prompting until the NRL accepts the keys.
    digi = "REF TEK|RT 130S & 130-SMHR|1|200"
    print("Input NRL Digi info ( | separated, careful with spaces)....")
    print("E.g manufacturer| model| gain| sps\n")
    while True:
        ques = f"Enter DIGI info ({digi}) :"
        digi = str(input(ques) or digi)
        print("\n")
        try:
            nrl.get_datalogger_response(digi.split('|'))
            print("!!!!! DATA LOGGER SUCCESS!!!\n")
            break
        except Exception as e:
            print(f"Try again ... {e}")

    # Sensor info: same retry pattern as the datalogger above.
    # BUG FIX: the previous default used ',' separators but the code splits
    # on '|', so accepting the default could never succeed.
    sensor = "Streckeisen|STS-1|360 seconds"
    print("Input NRL Sensor info ....\n")
    print("E.g Manufact|model|Sensitivy\n")
    print("Guralp|CMG-40T,30s - 100Hz|800")
    print("Sercel/Mark Products|L-22D|5470 Ohms|20000 Ohms")
    print("Streckeisen|STS-1|360 seconds")
    print("Nanometrics|Trillium Compact 120 (Vault, Posthole, OBS)|754 V/m/s")
    while True:
        ques = f"Enter sensor info {str(sensor)} :"
        sensor = str(input(ques) or sensor)
        try:
            nrl.get_sensor_response(sensor.split('|'))
            inst_info = f"{sensor.split('|')[0]} {sensor.split('|')[1]}"
            print("Sensor success!!!!")
            break
        except Exception as e:
            print(f"Try again ... {e}")

    # Combine sensor + datalogger into one full channel response.
    print("Getting full response...")
    try:
        response = nrl.get_response(sensor_keys=sensor.split('|'),
                                    datalogger_keys=digi.split('|'))
        print("Full response success \n\n")
    except Exception as e:
        print(f"Oops .. {e}")
        # BUG FIX: without a response there is nothing useful to build;
        # previously execution continued and crashed later with a
        # NameError on 'response'.
        return

    nstas = int(input("Enter number of stations to add with same "
                      "sensor/digitizer (default 1):") or 1)
    for _ in range(nstas):
        sta_code = str(input("Station code (" + str(scode) + ") :") or scode)
        sta_lat = float(
            input("Station latitude (" + str(geolat) + ") :") or geolat)
        # BUG FIX: the fallback default was geolat (the latitude), not geolon.
        sta_lon = float(
            input("Station longitude (" + str(geolon) + ") :") or geolon)
        sta_elev = float(
            input("Station elev(" + str(geoelev) + ") :") or geoelev)
        sta_ondate = str(
            input("Station ondate (" + str(date) + ") :") or date)
        sta_offdate = str(
            input("Station offdate (" + str(date) + ") :") or date)
        sta_sitename = str(
            input("Station long name (" + str(longname) + ") :") or longname)
        # NOTE(review): sta_offdate is collected but never used; no
        # termination/end date is set on the Station.
        sta = Station(code=sta_code,
                      latitude=sta_lat,
                      longitude=sta_lon,
                      elevation=sta_elev,
                      creation_date=UTCDateTime(sta_ondate),
                      site=Site(name=sta_sitename))
        # add station to network
        net.stations.append(sta)

        # Default channel info (editable per channel below).
        coords = {'latitude': sta_lat,
                  'longitude': sta_lon,
                  'elevation': sta_elev,
                  'depth': 0.0,
                  'sample_rate': sps}

        ques = f"Enter channel names, comma separated ({chans}) :"
        chans = str(input(ques) or chans)
        for n, chantmp in enumerate(chans.split(',')):
            print("Doing channel ", chantmp)
            aztmp = azims[n]
            diptmp = dips[n]
            loc = locs[n]
            for k in coords:
                ques = (str(chantmp) + " enter " + k + "(" +
                        str(coords[k]) + "):")
                coords[k] = float(input(ques) or coords[k])
            chan = Channel(code=chantmp,
                           location_code=loc,
                           latitude=coords['latitude'],
                           longitude=coords['longitude'],
                           elevation=coords['elevation'],
                           depth=coords['depth'],
                           azimuth=aztmp,
                           dip=diptmp,
                           sample_rate=coords['sample_rate'],
                           sensor=Equipment(description=inst_info))
            chan.response = response
            sta.channels.append(chan)

    inv.networks.append(net)
    inv.write(inv_name, format="STATIONXML")
def getInventory(self):
    """
    Extract an ObsPy inventory object from a Stream read in by gmprocess
    tools.
    """
    # All traces must belong to a single network.
    codes = [trace.stats.network for trace in self]
    if len(set(codes)) > 1:
        raise Exception(
            "Input stream has stations from multiple networks.")

    first = self[0].stats

    # The source should be the id of whoever created the file.
    source = ''
    if 'standard' in first and 'source' in first.standard:
        source = first.standard.source

    # Build the StationXML-style hierarchy: Inventory > Network > Station
    # > Channel.  Networks are appended at the end.
    inv = Inventory(networks=[], source=source)
    net = Network(code=codes[0], stations=[], description="source")

    # One Channel per trace.
    channels = []
    for trace in self:
        logging.debug('trace: %s' % trace)
        channels.append(_channel_from_stats(trace.stats))

    # Stash any non-StationXML metadata as JSON in the station description.
    subdict = {k: first.standard[k]
               for k in UNUSED_STANDARD_PARAMS if k in first.standard}
    format_specific = {}
    if 'format_specific' in first:
        format_specific = dict(first.format_specific)
    big_dict = {'standard': subdict,
                'format_specific': format_specific}
    try:
        jsonstr = json.dumps(big_dict)
    except Exception as e:
        raise GMProcessException('Exception in json.dumps: %s' % e)

    sta = Station(
        code=first.station,
        latitude=first.coordinates.latitude,
        elevation=first.coordinates.elevation,
        longitude=first.coordinates.longitude,
        channels=channels,
        site=Site(name=first.standard.station_name),
        description=jsonstr,
        creation_date=UTCDateTime(1970, 1, 1),  # this is bogus
        total_number_of_channels=len(self))

    net.stations.append(sta)
    inv.networks.append(net)
    return inv
def test_reading_channel_without_response_info(self):
    """
    Test reading a file at the channel level with missing scale, scale
    frequency and units. This is common for the log channel of
    instruments.
    """
    # Manually assemble the Inventory object we expect to read back.
    expected_inv = Inventory(source=None, networks=[
        Network(code="6E", stations=[
            Station(code="SH01",
                    latitude=37.7457,
                    longitude=-88.1368,
                    elevation=126.0,
                    channels=[
                        Channel(
                            code="LOG", location_code="",
                            latitude=37.7457, longitude=-88.1368,
                            elevation=126.0, depth=0.0, azimuth=0.0,
                            dip=0.0, sample_rate=0.0,
                            sensor=Equipment(
                                type="Reftek 130 Datalogger"),
                            start_date=obspy.UTCDateTime(
                                "2013-11-23T00:00:00"),
                            end_date=obspy.UTCDateTime(
                                "2016-12-31T23:59:59"))])])])

    filename = os.path.join(self.data_dir, "log_channel_fdsn.txt")

    def _check(src):
        """Read src with both readers and compare against expected_inv."""
        inv = read_fdsn_station_text_file(src)
        if hasattr(src, "seek"):
            src.seek(0, 0)
        inv_obs = obspy.read_inventory(src)
        # Copy creation date as it will be slightly different otherwise.
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

    # Read from a filename.
    _check(filename)
    # Read from open file in text mode.
    with open(filename, "rt", encoding="utf8") as fh:
        _check(fh)
    # Read from open file in binary mode.
    with open(filename, "rb") as fh:
        _check(fh)
    # Read from StringIO.
    with open(filename, "rt", encoding="utf8") as fh:
        with io.StringIO(fh.read()) as buf:
            buf.seek(0, 0)
            _check(buf)
    # Read from BytesIO.
    with open(filename, "rb") as fh:
        with io.BytesIO(fh.read()) as buf:
            buf.seek(0, 0)
            _check(buf)
def test_reading_channel_file(self):
    """
    Test reading a file at the channel level.
    """
    resp_1 = Response(instrument_sensitivity=InstrumentSensitivity(
        frequency=0.02, input_units="M/S", output_units=None,
        value=4.88233E8))
    resp_2 = Response(instrument_sensitivity=InstrumentSensitivity(
        frequency=0.03, input_units="M/S", output_units=None,
        value=4.98112E8))
    resp_3 = Response(instrument_sensitivity=InstrumentSensitivity(
        frequency=0.03, input_units="M/S", output_units=None,
        value=6.27252E8))

    def _lhz(lat, lon, elev, depth, sensor_type, start, end, resp):
        """Build one vertical LHZ channel of the expected inventory."""
        return Channel(
            code="LHZ", location_code="", latitude=lat, longitude=lon,
            elevation=elev, depth=depth, azimuth=0.0, dip=-90.0,
            sample_rate=1.0, sensor=Equipment(type=sensor_type),
            start_date=obspy.UTCDateTime(start),
            end_date=obspy.UTCDateTime(end),
            response=resp)

    t240 = "Nanometrics Trillium 240 Sec Response sn 400 and a"
    t120 = "Nanometrics Trillium 120 Sec Response/Quanterra 33"
    sts2 = "Streckeisen STS-2 G1/Quanterra 330 Linear Phase Be"

    # The BZN station has three consecutive epochs of the same channel.
    bzn_epochs = [("2010-07-26T17:22:00", "2013-07-15T21:22:23"),
                  ("2013-07-15T21:22:23", "2013-10-22T19:30:00"),
                  ("2013-10-22T19:30:00", "2599-12-31T23:59:59")]

    # Manually assemble the Inventory object we expect to read back.
    expected_inv = Inventory(source=None, networks=[
        Network(code="AK", stations=[
            Station(code="BAGL", latitude=60.4896, longitude=-142.0915,
                    elevation=1470,
                    channels=[_lhz(60.4896, -142.0915, 1470, 0.0, t240,
                                   "2013-01-01T00:00:00",
                                   "2599-12-31T23:59:59", resp_1)]),
            Station(code="BWN", latitude=64.1732, longitude=-149.2991,
                    elevation=356.0,
                    channels=[_lhz(64.1732, -149.2991, 356.0, 0.0, t240,
                                   "2010-07-23T00:00:00",
                                   "2014-05-28T23:59:59", resp_1),
                              _lhz(64.1732, -149.2991, 356.0, 1.5, t120,
                                   "2014-08-01T00:00:00",
                                   "2599-12-31T23:59:59", resp_2)])]),
        Network(code="AZ", stations=[
            Station(code="BZN", latitude=33.4915, longitude=-116.667,
                    elevation=1301.0,
                    channels=[_lhz(33.4915, -116.667, 1301.0, 0.0, sts2,
                                   start, end, resp_3)
                              for start, end in bzn_epochs])])])

    filename = os.path.join(self.data_dir, "channel_level_fdsn.txt")

    def _check(src):
        """Read src with both readers and compare against expected_inv."""
        inv = read_fdsn_station_text_file(src)
        if hasattr(src, "seek"):
            src.seek(0, 0)
        inv_obs = obspy.read_inventory(src)
        # Copy creation date as it will be slightly different otherwise.
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

    # Read from a filename.
    _check(filename)
    # Read from open file in text mode.
    with open(filename, "rt", encoding="utf8") as fh:
        _check(fh)
    # Read from open file in binary mode.
    with open(filename, "rb") as fh:
        _check(fh)
    # Read from StringIO.
    with open(filename, "rt", encoding="utf8") as fh:
        with io.StringIO(fh.read()) as buf:
            buf.seek(0, 0)
            _check(buf)
    # Read from BytesIO.
    with open(filename, "rb") as fh:
        with io.BytesIO(fh.read()) as buf:
            buf.seek(0, 0)
            _check(buf)
def test_reading_network_file(self):
    """
    Test reading a file at the network level.
    """
    # Manually assemble the Inventory object we expect to read back.
    expected_inv = Inventory(source=None, networks=[
        Network(code="TA",
                total_number_of_stations=1700,
                start_date=obspy.UTCDateTime("2003-01-01T00:00:00"),
                end_date=obspy.UTCDateTime("2500-12-31T23:59:59"),
                description="USArray Transportable Array (NSF EarthScope "
                            "Project)"),
        Network(code="TC",
                total_number_of_stations=0,
                start_date=obspy.UTCDateTime("2011-01-01T00:00:00"),
                end_date=obspy.UTCDateTime("2500-12-31T23:59:59"),
                description="Red Sismologica Nacional")])

    filename = os.path.join(self.data_dir, "network_level_fdsn.txt")

    def _check(src):
        """Read src with both readers and compare against expected_inv."""
        inv = read_fdsn_station_text_file(src)
        if hasattr(src, "seek"):
            src.seek(0, 0)
        inv_obs = obspy.read_inventory(src)
        # Copy creation date as it will be slightly different otherwise.
        inv.created = expected_inv.created
        inv_obs.created = expected_inv.created
        self.assertEqual(inv, expected_inv)
        self.assertEqual(inv_obs, expected_inv)

    # Read from a filename.
    _check(filename)
    # Read from open file in text mode.
    with open(filename, "rt", encoding="utf8") as fh:
        _check(fh)
    # Read from open file in binary mode.
    with open(filename, "rb") as fh:
        _check(fh)
    # Read from StringIO.
    with open(filename, "rt", encoding="utf8") as fh:
        with io.StringIO(fh.read()) as buf:
            buf.seek(0, 0)
            _check(buf)
    # Read from BytesIO.
    with open(filename, "rb") as fh:
        with io.BytesIO(fh.read()) as buf:
            buf.seek(0, 0)
            _check(buf)
def inventory_merge(iris_inv, custom_inv, output_file, test_mode=False):
    """Merge an IRIS inventory with a custom inventory, filtering out any
    custom inventory records that duplicate IRIS records.

    :param iris_inv: Station XML file from which to load IRIS inventory
    :type iris_inv: str or Path to file
    :param custom_inv: Station XML file with custom records to merge with IRIS inventory
    :type custom_inv: str or Path to file
    :param output_file: File name of output file which will contain merged IRIS and custom inventory.
    :type output_file: str or Path to file
    :param test_mode: When True, bypass the on-disk pickle cache (used by tests).
    :type test_mode: bool
    :return: The merged inventory
    :rtype: obspy.core.inventory.inventory.Inventory
    """
    # Load IRIS inventory, preferring a pickle cache next to the XML file
    # (the cache holds the inventory followed by its dataframe form).
    print("Loading {}...".format(iris_inv))
    if not test_mode and os.path.exists(
            os.path.splitext(iris_inv)[0] + ".pkl"):  # pragma: no cover
        with open(os.path.splitext(iris_inv)[0] + ".pkl", 'rb') as f:
            inv_iris = pkl.load(f)
            db_iris = pkl.load(f)
    else:
        inv_iris = load_station_xml(iris_inv)
        # and convert to Pandas dataframe
        db_iris = inventory_to_dataframe(inv_iris)
        if not test_mode:  # pragma: no cover
            with open(os.path.splitext(iris_inv)[0] + ".pkl", 'wb') as f:
                pkl.dump(inv_iris, f, pkl.HIGHEST_PROTOCOL)
                pkl.dump(db_iris, f, pkl.HIGHEST_PROTOCOL)
    # end if

    # Load custom inventory using the same pickle-caching scheme.
    print("Loading {}...".format(custom_inv))
    if not test_mode and os.path.exists(
            os.path.splitext(custom_inv)[0] + ".pkl"):  # pragma: no cover
        with open(os.path.splitext(custom_inv)[0] + ".pkl", 'rb') as f:
            inv_other = pkl.load(f)
            db_other = pkl.load(f)
    else:
        inv_other = load_station_xml(custom_inv)
        # and convert to Pandas dataframe
        db_other = inventory_to_dataframe(inv_other)
        if not test_mode:  # pragma: no cover
            with open(os.path.splitext(custom_inv)[0] + ".pkl", 'wb') as f:
                pkl.dump(inv_other, f, pkl.HIGHEST_PROTOCOL)
                pkl.dump(db_other, f, pkl.HIGHEST_PROTOCOL)
    # end if

    print("Merging {} IRIS records with {} custom records...".format(
        len(db_iris), len(db_other)))
    num_before = len(db_other)
    # Drop custom records that duplicate IRIS records.
    db_other = prune_iris_duplicates(db_other, db_iris)
    db_other.sort_values(SORT_ORDERING, inplace=True)
    db_other.reset_index(drop=True, inplace=True)
    if len(db_other) < num_before:
        print("Removed {0}/{1} stations because they exist in IRIS".format(
            num_before - len(db_other), num_before))
    print("{} custom records remaining".format(len(db_other)))

    # Merge inv_other into inv_iris, only keeping the records of inv_other
    # that are present in db_other
    inv_merged = inv_iris  # Note: this aliases inv_iris, it does not make a copy.
    print("Filtering {} records and merging with IRIS...".format(custom_inv))
    num_added = 0
    # NOTE(review): show_progress appears to be a module-level flag defined
    # elsewhere in this file — confirm.
    if show_progress:
        num_entries = sum(len(station.channels)
                          for network in inv_other.networks
                          for station in network.stations)
        pbar = tqdm.tqdm(total=num_entries, ascii=True)
        pbar.set_description("Matched {}/{}".format(num_added, len(db_other)))
    # end if

    # Filter inv_other records according to what records remain in db_other.
    # When a matching record(s) from inv_other is found, we add it to the
    # nearest IRIS network of the same code (based on centroid distance).
    for network in inv_other.networks:
        # Duplicate network data, but keep stations empty
        net = Network(network.code, stations=[],
                      description=network.description,
                      comments=network.comments,
                      start_date=network.start_date,
                      end_date=network.end_date)
        add_network = False
        for station in network.stations:
            if show_progress:
                pbar.update(len(station.channels))
            # end if
            # Duplicate station data, but keep channels empty
            sta = Station(station.code, station.latitude,
                          station.longitude, station.elevation,
                          channels=[],
                          site=station.site,
                          creation_date=station.creation_date,
                          termination_date=station.termination_date,
                          description=station.description,
                          comments=station.comments,
                          start_date=station.start_date,
                          end_date=station.end_date)
            add_station = False
            for channel in station.channels:
                # See if the record is in db_other. If so, flag it as
                # needing to be added.
                # Fall back to station coordinates when the channel has
                # no (i.e. falsy) coordinates of its own.
                lat = channel.latitude if channel.latitude \
                    else station.latitude
                lon = channel.longitude if channel.longitude \
                    else station.longitude
                ele = channel.elevation if channel.elevation \
                    else station.elevation
                sta_start = np.datetime64(station.start_date)
                cha_start = np.datetime64(channel.start_date)
                # Exact match on all identifying fields of the record.
                mask = ((db_other['NetworkCode'] == network.code) &
                        (db_other['StationCode'] == station.code) &
                        (db_other['ChannelCode'] == channel.code) &
                        (db_other['Latitude'] == lat) &
                        (db_other['Longitude'] == lon) &
                        (db_other['Elevation'] == ele) &
                        (db_other['StationStart'] == sta_start) &
                        (db_other['ChannelStart'] == cha_start))
                if np.any(mask):
                    # The record from loaded inventory survived pruning of
                    # db_other, so it is not an IRIS duplicate and should
                    # be added to the merged inventory.
                    db_match = db_other[mask]
                    assert len(db_match) == 1, \
                        'Found multiple matches, expected only one for {}'.format(db_match)
                    add_station = True
                    num_added += 1
                    if show_progress:
                        pbar.set_description("Matched {}/{}".format(
                            num_added, len(db_other)))
                    sta.channels.append(channel)
                # end if
            # end for
            if add_station:
                add_network = True
                net.stations.append(sta)
            # end if
        # end for
        if add_network:
            # If the network code is new, add it directly to the inventory.
            # Otherwise, add it to the nearest network of the same network
            # code. This rigmarole of adding to nearest network of the same
            # network code is required because network codes are not
            # necessarily unique.
            existing_networks = inv_merged.select(network=net.code)
            if existing_networks:
                # Add to nearest existing network (great-circle distance
                # between network centroids).
                net_mean_latlong = mean_lat_long(net)
                nearest_distance = 1.0e+20
                for existing_net in existing_networks:
                    temp_mean_latlong = mean_lat_long(existing_net)
                    dist_apart = np.deg2rad(
                        locations2degrees(net_mean_latlong[0],
                                          net_mean_latlong[1],
                                          temp_mean_latlong[0],
                                          temp_mean_latlong[1])) \
                        * NOMINAL_EARTH_RADIUS_KM
                    if dist_apart < nearest_distance:
                        nearest_distance = dist_apart
                        nearest_existing = existing_net
                    # end if
                # end for
                # Unfortunately existing_net here is NOT a reference to the
                # original object within inv_merged, so we still need to
                # search for the same network in inv_merged
                same_source_net = get_matching_net(inv_merged,
                                                   nearest_existing)
                if same_source_net is not None:
                    same_source_net.stations.extend(net.stations)
                    same_source_net.total_number_of_stations = len(
                        same_source_net.stations)
                # end if
            else:
                # Network code is new in the inventory.
                inv_merged += net
            # end if
        # end if
    # end for

    if show_progress:
        pbar.close()
    print("Added {} custom records to IRIS inventory".format(num_added))

    # Write merged inventory text file in FDSN stationxml inventory format.
    print("Writing merged inventory to {}".format(output_file))
    inv_merged.write(output_file, format="stationxml")
    inv_merged.write(os.path.splitext(output_file)[0] + ".txt",
                     format="stationtxt")

    return inv_merged
def load_from_excel(file_name) -> Inventory:
    """
    Read in a multi-sheet excel file with network metadata sheets:
        Sites, Networks, Hubs, Stations, Components, Sites, Cables,
        Boreholes
    Organize these into a uquake Inventory object

    :param file_name: path to excel file
    :type file_name: str
    :return: inventory
    :rtype: uquake.core.data.inventory.Inventory
    """
    # sheet_name=None -> dict of {sheet name: DataFrame}
    df_dict = pd.read_excel(file_name, sheet_name=None)

    source = df_dict['Sites'].iloc[0]['code']
    # sender (str, optional) Name of the institution sending this message.
    sender = df_dict['Sites'].iloc[0]['operator']
    net_code = df_dict['Networks'].iloc[0]['code']
    net_descriptions = df_dict['Networks'].iloc[0]['name']
    contact_name = df_dict['Networks'].iloc[0]['contact_name']
    contact_email = df_dict['Networks'].iloc[0]['contact_email']
    contact_phone = df_dict['Networks'].iloc[0]['contact_phone']
    site_operator = df_dict['Sites'].iloc[0]['operator']
    site_country = df_dict['Sites'].iloc[0]['country']
    site_name = df_dict['Sites'].iloc[0]['name']
    site_code = df_dict['Sites'].iloc[0]['code']

    print("source=%s" % source)
    print("sender=%s" % sender)
    print("net_code=%s" % net_code)

    network = Network(net_code)
    inventory = Inventory([network], source)

    # obspy requirements for PhoneNumber are super specific:
    # So likely this will raise an error if/when someone changes the value in
    # Networks.contact_phone
    """
    PhoneNumber(self, area_code, phone_number, country_code=None, description=None):
        :type area_code: int
        :param area_code: The area code.
        :type phone_number: str
        :param phone_number: The phone number minus the country and area code. Must be in the form "[0-9]+-[0-9]+", e.g. 1234-5678.
        :type country_code: int, optional
        :param country_code: The country code.
    """
    import re
    # Split the phone string into digit groups: [area, prefix, number].
    phone = re.findall(r"[\d']+", contact_phone)
    area_code = int(phone[0])
    number = "%s-%s" % (phone[1], phone[2])
    phone_number = PhoneNumber(area_code=area_code, phone_number=number)

    person = Person(names=[contact_name], agencies=[site_operator],
                    emails=[contact_email], phones=[phone_number])
    operator = Operator(site_operator, contacts=[person])
    site = Site(name=site_name, description=site_name,
                country=site_country)

    # Merge Stations+Components+Sites+Cables info into sorted stations +
    # channels dicts:
    df_dict['Stations']['station_code'] = df_dict['Stations']['code']
    df_dict['Sites']['sensor_code'] = df_dict['Sites']['code']
    df_dict['Components']['code_channel'] = df_dict['Components']['code']
    df_dict['Components']['sensor'] = df_dict['Components']['sensor__code']
    df_merge = pd.merge(df_dict['Stations'], df_dict['Sites'],
                        left_on='code', right_on='station__code',
                        how='inner', suffixes=('', '_channel'))

    df_merge2 = pd.merge(df_merge, df_dict['Components'],
                         left_on='sensor_code', right_on='sensor__code',
                         how='inner', suffixes=('', '_sensor'))

    df_merge3 = pd.merge(df_merge2, df_dict['Cable types'],
                         left_on='cable__code', right_on='code',
                         how='inner', suffixes=('', '_cable'))

    df_merge4 = pd.merge(df_merge3, df_dict['Site types'],
                         left_on='sensor_type__model', right_on='model',
                         how='inner', suffixes=('', '_sensor_type'))

    df = df_merge4.sort_values(['sensor_code', 'location_code']).fillna(0)

    # Need to sort by unique station codes, then look through 1-3 channels
    # to add
    stn_codes = set(df['sensor_code'])
    stations = []
    for code in stn_codes:
        chan_rows = df.loc[df['sensor_code'] == code]
        row = chan_rows.iloc[0]
        station = {}
        # Set some keys explicitly
        # from ipdb import set_trace; set_trace()
        station['code'] = '{}'.format(row['sensor_code'])
        station['x'] = row['location_x_channel']
        station['y'] = row['location_y_channel']
        station['z'] = row['location_z_channel']
        station['loc'] = np.array(
            [station['x'], station['y'], station['z']])
        station['long_name'] = "{}.{}.{:02d}".format(
            row['network__code'], row['station_code'],
            row['location_code'])
        # MTH: 2019/07 Seem to have moved from pF to F on Cables sheet:
        station['cable_capacitance_pF_per_meter'] = row['c'] * 1e12

        # Set the rest (minus empty fields) directly from spreadsheet names:
        renamed_keys = {
            'sensor_code', 'location_x', 'location_y', 'location_z',
            'name'
        }

        # These keys are either redundant or specific to channel, not station:
        remove_keys = {
            'code', 'id_channel', 'orientation_x', 'orientation_y',
            'orientation_z', 'id_sensor', 'enabled_channel',
            'station_id', 'id_cable'
        }
        keys = row.keys()
        empty_keys = keys[pd.isna(row)]
        keys = set(keys) - set(empty_keys) - renamed_keys - remove_keys

        for key in keys:
            station[key] = row[key]

        # Added keys:
        station['motion'] = 'VELOCITY'
        if row['sensor_type'].upper() == 'ACCELEROMETER':
            station['motion'] = 'ACCELERATION'

        # Attach channels:
        station['channels'] = []
        for index, rr in chan_rows.iterrows():
            chan = {}
            chan['cmp'] = rr['code_channel_sensor'].upper()
            chan['orientation'] = np.array([
                rr['orientation_x'],
                rr['orientation_y'],
                rr['orientation_z']
            ])
            # NOTE(review): channel coordinates use `row` (first row of the
            # station group), not the per-channel `rr` — looks intentional
            # (channels colocated with the sensor) but confirm.
            chan['x'] = row['location_x_channel']
            chan['y'] = row['location_y_channel']
            chan['z'] = row['location_z_channel']
            chan['enabled'] = rr['enabled']
            station['channels'].append(chan)

        stations.append(station)

    # from ipdb import set_trace; set_trace()

    # Convert these station dicts to inventory.Station objects and attach to
    # inventory.network:
    station_list = []
    for station in stations:
        # This is where namespace is first employed:
        station = Station.from_station_dict(station, site_name)
        station.site = site
        station.operators = [operator]
        station_list.append(station)

    network.stations = station_list

    return inventory
def create_new_skeleton_inventory_file(path2xmlfile):
    """
    write a NEW skeleton inventory xml file

    :param path2xmlfile: path to a new xml file.
    :return:
    """
    # Build the container objects top-down, mirroring the StationXML
    # hierarchy: Inventory -> Network -> Station -> Channel.
    inventory = Inventory(
        # Networks get attached below.
        networks=[],
        # The source should be the id whoever create the file.
        source="ObsPy-Tutorial")
    network = Network(
        # This is the network code according to the SEED standard.
        code="XX",
        stations=[],
        description="A test stations.",
        # Start-and end dates are optional.
        start_date=obspy.UTCDateTime(2016, 1, 2))
    station = Station(
        # This is the station code according to the SEED standard.
        code="ABC",
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        creation_date=obspy.UTCDateTime(2016, 1, 2),
        site=Site(name="First station"))
    channel = Channel(
        # This is the channel code according to the SEED standard.
        code="HHZ",
        # This is the location code according to the SEED standard.
        location_code="",
        # Note that these coordinates can differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=0.0,
        dip=-90.0,
        sample_rate=200)
    # By default this accesses the NRL online; offline copies of the NRL can
    # also be used instead. See the NRL submodule API documentation:
    # http://docs.obspy.org/packages/obspy.clients.nrl.html
    nrl = NRL()
    # End points of data logger and sensor are assumed to be known already.
    channel.response = nrl.get_response(  # doctest: +SKIP
        sensor_keys=['Streckeisen', 'STS-1', '360 seconds'],
        datalogger_keys=['REF TEK', 'RT 130 & 130-SMA', '1', '200'])
    # Wire everything together.
    station.channels.append(channel)
    network.stations.append(station)
    inventory.networks.append(network)
    # Serialize to StationXML, forcing a validation against the StationXML
    # schema to ensure the output file is valid. Note that serialization to
    # any other inventory output format ObsPy supports is possible as well.
    inventory.write(path2xmlfile, format="stationxml", validate=True)
def trace_to_inventory(self, trace):
    """
    Build a minimal obspy Inventory from a single trace's stats.

    If sac files are opened, it's useful to extract inventory from their
    streams so that we can populate the stations tabs and the location
    widget. Missing/implausible coordinates are filled in interactively via
    ``self.fill_sta_info_dialog``.

    :param trace: obspy Trace; its stats (and SAC header, if present) may be
        updated in place with user-edited values.
    :return: the new Inventory, or ``None`` if construction failed.
    """
    # The next bit is modified from the obspy webpage on building a
    # stationxml site from scratch:
    # https://docs.obspy.org/tutorial/code_snippets/stationxml_file_from_scratch.html
    #
    # We'll first create all the various objects. These strongly follow the
    # hierarchy of StationXML files.
    new_inventory = None
    # initialize the lat/lon/ele; ele = -1.0 marks "unknown elevation" so the
    # dialog below is triggered when the header carries no value
    lat = 0.0
    lon = 0.0
    ele = -1.0
    _network = trace.stats['network']
    _station = trace.stats['station']
    _channel = trace.stats['channel']
    _location = trace.stats['location']
    # if the trace is from a sac file, the sac header might have some
    # inventory information
    if trace.stats['_format'] == 'SAC':
        if 'stla' in trace.stats['sac']:
            lat = trace.stats['sac']['stla']
        if 'stlo' in trace.stats['sac']:
            lon = trace.stats['sac']['stlo']
        if 'stel' in trace.stats['sac']:
            ele = trace.stats['sac']['stel']
        else:
            # sentinel elevation when 'stel' is absent — presumably chosen so
            # the value is recognizably artificial; TODO confirm intent
            ele = 0.333
    # hard-coded coordinates for the known LARSA/121 test deployment
    if _network == 'LARSA' and _station == '121':
        if _channel == 'ai0':
            lat = 35.8492497
            lon = -106.2705465
        elif _channel == 'ai1':
            lat = 35.84924682
            lon = -106.2705505
        elif _channel == 'ai2':
            lat = 35.84925165
            lon = -106.2705516
    # coordinates still missing or implausible -> ask the user to fill them in
    if lat == 0.0 or lon == 0.0 or ele < 0:
        if self.fill_sta_info_dialog.exec_(_network, _station, _location,
                                           _channel, lat, lon, ele):
            edited_values = self.fill_sta_info_dialog.get_values()
            lat = edited_values['lat']
            lon = edited_values['lon']
            ele = edited_values['ele']
            _network = edited_values['net']
            _station = edited_values['sta']
            _location = edited_values['loc']
            _channel = edited_values['cha']
            # (re)populate sac headers where possible
            if trace.stats['_format'] == 'SAC':
                trace.stats['sac']['stla'] = lat
                trace.stats['sac']['stlo'] = lon
                trace.stats['sac']['stel'] = ele
                trace.stats['sac']['knetwk'] = _network
                trace.stats['sac']['kstnm'] = _station
            # (re)populate trace stats where possible
            trace.stats['network'] = _network
            trace.stats['station'] = _station
            trace.stats['location'] = _location
            trace.stats['channel'] = _channel
    try:
        new_inventory = Inventory(
            # We'll add networks later.
            networks=[],
            # The source should be the id whoever create the file.
            source="InfraView")
        net = Network(
            # This is the network code according to the SEED standard.
            code=_network,
            # A list of stations. We'll add one later.
            stations=[],
            # Description isn't something that's in the trace stats or SAC
            # header, so lets set it to the network code
            description=_network,
            # Start and end dates for the network are not stored in the sac
            # header so lets set it to 1/1/1900
            start_date=UTCDateTime(1900, 1, 1))
        sta = Station(
            # This is the station code according to the SEED standard.
            code=_station,
            latitude=lat,
            longitude=lon,
            elevation=ele,
            # Creation_date is not saved in the trace stats or sac header
            creation_date=UTCDateTime(1900, 1, 1),
            # Site name is not in the trace stats or sac header, so set it
            # to the site code
            site=Site(name=_station))
        # This is the channel code according to the SEED standard.
        cha = Channel(
            code=_channel,
            # This is the location code according to the SEED standard.
            location_code=_location,
            # Note that these coordinates can differ from the station
            # coordinates.
            latitude=lat,
            longitude=lon,
            elevation=ele,
            depth=0.0)
        # Now tie it all together. (No response information is available
        # from the trace, hence no cha.response is set.)
        # cha.response = response
        sta.channels.append(cha)
        net.stations.append(sta)
        new_inventory.networks.append(net)
        return new_inventory
    except ValueError:
        # Inventory construction validates coordinate ranges; report which
        # of lat/lon looks out of range to the user.
        bad_values = ""
        if lon < -180 or lon > 180:
            bad_values = bad_values + "\tlon = " + str(lon) + "\n"
        if lat < -90 or lat > 90:
            bad_values = bad_values + "\tlat = " + str(lat)
        self.errorPopup("There seems to be a value error in " + _network +
                        "." + _station + "." + _channel +
                        "\nPossible bad value(s) are:\n" + bad_values)
from obspy.core.inventory import Inventory, Network, Station, Channel, Site from obspy.clients.nrl.client import NRL # We'll first create all the various objects. These strongly follow the # hierarchy of StationXML files. inv = Inventory( # We'll add networks later. networks=[], # The source should be the id whoever create the file. source="ObsPy-Tutorial") net = Network( # This is the network code according to the SEED standard. code="XX", # A list of stations. We'll add one later. stations=[], description="A test stations.", # Start-and end dates are optional. start_date=obspy.UTCDateTime(2016, 1, 2)) sta = Station( # This is the station code according to the SEED standard. code="ABC", latitude=1.0, longitude=2.0, elevation=345.0, creation_date=obspy.UTCDateTime(2016, 1, 2), site=Site(name="First station")) cha = Channel( # This is the channel code according to the SEED standard.
# removing existing ASDF remove(ASDF_out) elif delete_queary == 'no': sys.exit(0) # create the log file ASDF_log_file = open(ASDF_log_out, 'w') # Create/open the ASDF file ds = pyasdf.ASDFDataSet(ASDF_out, compression="gzip-3") # create empty inventory to add all inventories together new_inv = Inventory(networks=[], source="Geoscience Australia AusArray") # create the inventory object for the network net_inv = Network(code=FDSNnetwork[:2]) # dictionary to keep end date/start date for each station station_start_end_dict ={} # dictionary to keep inventory for all stations (default dict) station_inventory_dict = {} # function to create the ASDF waveform ID tag def make_ASDF_tag(tr, tag): # def make_ASDF_tag(ri, tag): data_name = "{net}.{sta}.{loc}.{cha}__{start}__{end}__{tag}".format( net=tr.stats.network, sta=tr.stats.station,
def create_simple_inventory(network, station, latitude=None, longitude=None,
                            elevation=None, depth=None, start_date=None,
                            end_date=None, location_code="S3",
                            channel_code="MX"):
    """
    Create simple inventory with only location information,
    for ZNE component, especially usefull for synthetic data

    :param network: network code
    :param station: station code
    :param latitude: station/channel latitude (shared by all components)
    :param longitude: station/channel longitude
    :param elevation: station/channel elevation
    :param depth: channel depth
    :param start_date: channel start date; defaults to ``UTCDateTime(0)``
    :param end_date: channel end date
    :param location_code: SEED location code, default "S3"
    :param channel_code: two-letter band/instrument code prefix; specfem
        default channel code is MX
    :return: obspy Inventory with one network, one station, three channels
    """
    # Orientation per component letter (azimuth clockwise from north, dip
    # from horizontal, following the convention used by the original code).
    # Keyed by component rather than full channel code so that any
    # ``channel_code`` prefix works — the previous dicts were hard-coded to
    # "MXZ"/"MXN"/"MXE" and raised KeyError for any other prefix.
    azi_dict = {"Z": 0.0, "N": 0.0, "E": 90.0}
    dip_dict = {"Z": 90.0, "N": 0.0, "E": 0.0}

    channel_list = []

    if start_date is None:
        start_date = UTCDateTime(0)

    for _comp in ["Z", "E", "N"]:
        _chan_code = "%s%s" % (channel_code, _comp)
        chan = Channel(_chan_code, location_code, latitude=latitude,
                       longitude=longitude, elevation=elevation,
                       depth=depth, azimuth=azi_dict[_comp],
                       dip=dip_dict[_comp], start_date=start_date,
                       end_date=end_date)
        channel_list.append(chan)

    site = Site("N/A")
    sta = Station(station, latitude=latitude, longitude=longitude,
                  elevation=elevation, channels=channel_list, site=site,
                  creation_date=start_date, total_number_of_channels=3,
                  selected_number_of_channels=3)
    nw = Network(network, stations=[sta, ], total_number_of_stations=1,
                 selected_number_of_stations=1)
    inv = Inventory([nw, ], source="SPECFEM3D_GLOBE", sender="Princeton",
                    created=UTCDateTime.now())
    return inv
def read_fdsn_station_text_file(path_or_file_object):
    """
    Function reading a FDSN station text file to an inventory object.

    The level (network/station/channel) is auto-detected by comparing the
    parsed header against the module-level ``*_components`` tuples; the
    matching ``*_types`` tuple supplies per-column converter callables.

    :param path_or_file_object: File name or file like object.
    :return: :class:`Inventory` populated to the detected level.
    :raises ValueError: if the header matches no known level.
    """
    def _read(obj):
        # Parse pipe-separated rows; header row starts with '#'.
        r = unicode_csv_reader(obj, delimiter=native_str("|"))
        header = next(r)
        header[0] = header[0].lstrip("#")
        header = [_i.strip().lower() for _i in header]
        # IRIS currently has a wrong header name. Just map it.
        header = [_i.replace("instrument", "sensordescription")
                  for _i in header]
        all_lines = []
        for line in r:
            # Skip comment lines.
            if line[0].startswith("#"):
                continue
            all_lines.append([_i.strip() for _i in line])
        return {"header": tuple(header), "content": all_lines}

    # Enable reading from files and buffers opened in binary mode.
    if (hasattr(path_or_file_object, "mode") and
            "b" in path_or_file_object.mode) or \
            isinstance(path_or_file_object, io.BytesIO):
        buf = io.StringIO(path_or_file_object.read().decode("utf-8"))
        buf.seek(0, 0)
        path_or_file_object = buf

    if hasattr(path_or_file_object, "read"):
        content = _read(path_or_file_object)
    else:
        with open(path_or_file_object, "rt", newline="",
                  encoding="utf8") as fh:
            content = _read(fh)

    # Figure out the type (level) from the header.
    if content["header"] == network_components:
        level = "network"
        filetypes = network_types
    elif content["header"] == station_components:
        level = "station"
        filetypes = station_types
    elif content["header"] == channel_components:
        level = "channel"
        filetypes = channel_types
    else:
        raise ValueError("Unknown type of header.")

    content = content["content"]
    converted_content = []
    # Convert all types (each column through its converter callable).
    for line in content:
        converted_content.append([
            v_type(value) for value, v_type in zip(line, filetypes)])

    # Now convert to an inventory object.
    inv = Inventory(networks=[], source=None)

    if level == "network":
        for net in converted_content:
            network = Network(
                code=net[0],
                description=net[1],
                start_date=net[2],
                end_date=net[3],
                total_number_of_stations=net[4])
            inv.networks.append(network)
    elif level == "station":
        # Group stations by network code, preserving first-seen order.
        networks = collections.OrderedDict()
        for sta in converted_content:
            site = Site(name=sta[5])
            station = Station(
                code=sta[1],
                latitude=sta[2],
                longitude=sta[3],
                elevation=sta[4],
                site=site,
                start_date=sta[6],
                end_date=sta[7])
            if sta[0] not in networks:
                networks[sta[0]] = []
            networks[sta[0]].append(station)
        for network_code, stations in networks.items():
            net = Network(code=network_code, stations=stations)
            inv.networks.append(net)
    elif level == "channel":
        # Group channels into (net, sta) stations, stations into networks.
        networks = collections.OrderedDict()
        stations = collections.OrderedDict()
        for channel in converted_content:
            net, sta, loc, chan, lat, lng, ele, dep, azi, dip, inst, scale, \
                scale_freq, scale_units, s_r, st, et = channel
            if net not in networks:
                networks[net] = Network(code=net)
            if (net, sta) not in stations:
                station = Station(code=sta, latitude=lat,
                                  longitude=lng, elevation=ele)
                networks[net].stations.append(station)
                stations[(net, sta)] = station
            sensor = Equipment(type=inst)
            # Only build a Response when both scale and its frequency exist.
            if scale is not None and scale_freq is not None:
                resp = Response(
                    instrument_sensitivity=InstrumentSensitivity(
                        value=scale, frequency=scale_freq,
                        input_units=scale_units,
                        output_units=None))
            else:
                resp = None
            try:
                channel = Channel(
                    code=chan, location_code=loc, latitude=lat,
                    longitude=lng, elevation=ele, depth=dep, azimuth=azi,
                    dip=dip, sensor=sensor, sample_rate=s_r, start_date=st,
                    end_date=et, response=resp)
            except Exception as e:
                # A malformed row skips only that channel, not the file.
                warnings.warn(
                    "Failed to parse channel %s.%s.%s.%s due to: %s" % (
                        net, sta, loc, chan, str(e)),
                    UserWarning)
                continue
            stations[(net, sta)].channels.append(channel)
        inv.networks.extend(list(networks.values()))
    else:
        # Cannot really happen - just a safety measure.
        raise NotImplementedError("Unknown level: %s" % str(level))
    return inv
def main(argv):
    '''@package isc2stnxml
    It gathers station information from all STN files provided in ISC and
    Engdahl catalogues assigning correct network code.
    When proper network code can not be identified the program just guess
    it, sorry...

    NOTE(review): Python 2 only (print statements, xrange, np.float).
    Matches each EHB station against the ISC list and the IRIS StationXML
    inventory by distance, then writes the merged result to station.xml /
    station.txt.
    '''
    inv = read_inventory("IRIS-ALL.xml")
    # unknown stations in Indonesia are usually installed by Potsdam and we
    # assume they have network name GE
    default_net = 'GE'
    ehb1 = read_eng('BMG.STN')
    ehb2 = read_eng('ISC.STN')
    ehb = np.unique(np.vstack((ehb1, ehb2)), axis=0)

    isc1 = read_isc('ehb.stn')
    isc2 = read_isc('iscehb.stn')
    isc = np.unique(np.vstack((isc1, isc2)), axis=0)

    catalogue = []
    our_xml = Inventory(networks=[], source='EHB')

    for i in xrange(ehb.shape[0]):
        filed = False
        xml = False
        # ISC rows whose station code matches this EHB station
        stn_found = isc[isc[:, 0] == ehb[i, 0], :]
        min_dist = 10e10
        if stn_found.shape[0] > 0:
            if stn_found.shape[0] > 1:
                # several candidates: keep the closest one
                for j in xrange(stn_found.shape[0]):
                    dist = locations2degrees(np.float(stn_found[j, 2]),
                                             np.float(stn_found[j, 3]),
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                    if dist < min_dist:
                        min_dist = dist
                        record = stn_found[j, :]
            else:
                min_dist = locations2degrees(np.float(stn_found[0, 2]),
                                             np.float(stn_found[0, 3]),
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                record = stn_found[0, :]

        # Now we try to find the same station in XML file
        # if min_dist > 1. or stn_found.shape[0]==0:
        xstn_found = inv.select(station=ehb[i, 0], channel="*HZ")
        if len(stn_found) == 0 and len(xstn_found) == 0:
            # we filed to find station anywhere and assign dummy values
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3],
                'Z', '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            min_dist = 0.
            filed = True
        else:
            # if station is found somehwere we try to iterate and see if XML
            # has data giving it preference through adding extra value to
            # min_dist found in ISC
            if len(xstn_found) > 0:
                # print "----------",len(xstn_found)
                # print xstn_found[0][0].latitude
                min_dist = min_dist + 0.1
                for j in xrange(len(xstn_found)):
                    dist = locations2degrees(xstn_found[j][0].latitude,
                                             xstn_found[j][0].longitude,
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                    if min_dist > dist:
                        min_dist = dist
                        record = xstn_found[j]
                        # print record
                        xml = True

        # last defence if stations have been done but distance between
        # declared and found locations are more than 1 degree
        if min_dist > 1:
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3],
                'Z', '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            filed = True

        if xml:
            #our_xml.networks.append(record)
            xml = False
        else:
            if filed:
                # patch empty/short end dates with the sentinel far-future one
                if len(record[7]) < 5:
                    record[7] = '2599-12-31 23:59:59'
                catalogue.append(record)
            else:
                # keep every ISC row matching the chosen (code, network) pair
                stn_found = isc[(isc[:, 0] == record[0]) &
                                (isc[:, 1] == record[1]), :]
                for k in xrange(stn_found.shape[0]):
                    net = Network(code=stn_found[k, 1], stations=[],
                                  description=' ')
                    if len(stn_found[k, 7]) < 5:
                        stn_found[k, 7] = '2599-12-31 23:59:59'
                    catalogue.append(stn_found[k, :])

    stn_found = np.unique(np.array(catalogue), axis=0)
    if len(stn_found[stn_found == '']) > 0 or len(
            stn_found[stn_found == ' ']) > 0:
        print "Some elements are empty, check the list"

    # we composed our inventory. However some stations from ISC list can be
    # left behind. We check if some stations in ISC are forgotten
    lost = []
    for j in xrange(isc.shape[0]):
        # is there any common station name?
        common_st = stn_found[isc[j, 0] == stn_found[:, 0]]
        if common_st.shape[0] > 0:
            # is network code the same?
            common_net = common_st[common_st[:, 1] == isc[j, 1]]
            if common_net.shape[0] < 1:
                # ok we found forgotten one, check the XML
                if len(inv.select(station=isc[j, 0],
                                  network=isc[j, 1])) <= 0:
                    # Bingo...
                    lost.append(isc[j, :])
        else:
            if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                # Bingo...
                lost.append(isc[j, :])

    stn_found = np.vstack((stn_found, np.array(lost)))

    # Build one Network/Station/Channel triple per catalogue row and attach
    # it to the output inventory.
    for k in xrange(stn_found.shape[0]):
        net = Network(code=stn_found[k, 1], stations=[], description=' ')
        if len(stn_found[k, 7]) < 5:
            stn_found[k, 7] = '2599-12-31 23:59:59'
        catalogue.append(stn_found[k, :])
        sta = Station(code=stn_found[k, 0],
                      creation_date=utcdatetime.UTCDateTime(stn_found[k, 6]),
                      termination_date=utcdatetime.UTCDateTime(
                          stn_found[k, 7]),
                      site=Site(name=' '),
                      latitude=np.float(stn_found[k, 2]),
                      longitude=np.float(stn_found[k, 3]),
                      elevation=np.float(stn_found[k, 4]))
        cha = Channel(code=stn_found[k, 5],
                      depth=0.,
                      azimuth=0.,
                      dip=-90.,
                      location_code='',
                      latitude=np.float(stn_found[k, 2]),
                      longitude=np.float(stn_found[k, 3]),
                      elevation=np.float(stn_found[k, 4]))
        sta.channels.append(cha)
        net.stations.append(sta)
        our_xml.networks.append(net)
        # print 'np',stn_found[k,:]

    our_xml.write("station.xml", format="stationxml", validate=True)
    our_xml.write("station.txt", format="stationtxt")
def test_reading_station_file(self):
    """
    Test reading a file at the station level.
    """
    # The inventory the reader is expected to produce, built by hand.
    expected_inv = Inventory(
        source=None,
        networks=[
            Network(code="TA", stations=[
                Station(
                    code="A04A", latitude=48.7197, longitude=-122.707,
                    elevation=23.0,
                    site=Site(name="Legoe Bay, Lummi Island, WA, USA"),
                    start_date=obspy.UTCDateTime("2004-09-19T00:00:00"),
                    end_date=obspy.UTCDateTime("2008-02-19T23:59:59")),
                Station(
                    code="A04D", latitude=48.7201, longitude=-122.7063,
                    elevation=13.0,
                    site=Site(name="Lummi Island, WA, USA"),
                    start_date=obspy.UTCDateTime("2010-08-18T00:00:00"),
                    end_date=obspy.UTCDateTime("2599-12-31T23:59:59")),
            ]),
            Network(code="TR", stations=[
                Station(
                    code="ALNG", latitude=10.1814, longitude=-61.6883,
                    elevation=10.0,
                    site=Site(name="Trinidad, Point Fortin"),
                    start_date=obspy.UTCDateTime("2000-01-01T00:00:00"),
                    end_date=obspy.UTCDateTime("2599-12-31T23:59:59")),
            ]),
        ])

    filename = os.path.join(self.data_dir, "station_level_fdsn.txt")

    def _check(inv):
        # Creation dates differ slightly per run, so align before comparing.
        inv.created = expected_inv.created
        self.assertEqual(inv, expected_inv)

    # Read from a filename.
    _check(read_fdsn_station_text_file(filename))
    _check(obspy.read_inventory(filename))

    # Read from open file in text mode.
    with open(filename, "rt", encoding="utf8") as fh:
        _check(read_fdsn_station_text_file(fh))
        fh.seek(0, 0)
        _check(obspy.read_inventory(fh))

    # Read from open file in binary mode.
    with open(filename, "rb") as fh:
        _check(read_fdsn_station_text_file(fh))
        fh.seek(0, 0)
        _check(obspy.read_inventory(fh))

    # Read from StringIO.
    with open(filename, "rt", encoding="utf8") as fh:
        with io.StringIO(fh.read()) as buf:
            buf.seek(0, 0)
            _check(read_fdsn_station_text_file(buf))
            buf.seek(0, 0)
            _check(obspy.read_inventory(buf))

    # Read from BytesIO.
    with open(filename, "rb") as fh:
        with io.BytesIO(fh.read()) as buf:
            buf.seek(0, 0)
            _check(read_fdsn_station_text_file(buf))
            buf.seek(0, 0)
            _check(obspy.read_inventory(buf))
def main(argv):
    """
    Merge EHB/ISC station lists with the IRIS StationXML inventory and write
    one StationXML file per network plus a station.txt summary.

    NOTE(review): mixes Python 2 print statements with print() calls —
    effectively Python 2 only. Matching logic mirrors the older isc2stnxml
    main(), but reuses responses extracted from the IRIS inventory.
    """
    with open("IRIS-ALL.xml", 'r', buffering=1024 * 1024) as f:
        inv = read_inventory(f)
    # if os.path.exists("IRIS-ALL.pkl"): # doesn't work on CentOS for some
    # reason
    #     with open('IRIS-ALL.pkl', 'rb') as f:
    #         import cPickle as pkl
    #         inv = pkl.load(f)
    # else:
    #     inv = read_inventory("IRIS-ALL.xml")
    #     with open('IRIS-ALL.pkl', 'wb') as f:
    #         import pickle as pkl
    #         pkl.dump(inv, f, pkl.HIGHEST_PROTOCOL)
    sensorDict, responseDict = extract_unique_sensors_responses(inv)
    print('\nFound {0} response objects with keys: {1}'.format(
        len(responseDict.keys()), responseDict.keys()))

    # unknown stations in Indonesia are usually installed by Potsdam and we
    # assume they have network name GE
    default_net = 'GE'
    ehb1 = read_eng('BMG.STN')
    ehb2 = read_eng('ISC.STN')
    ehb = np.unique(np.vstack((ehb1, ehb2)), axis=0)

    isc1 = read_isc('ehb.stn')
    isc2 = read_isc('iscehb.stn')
    isc = np.unique(np.vstack((isc1, isc2)), axis=0)

    catalogue = []
    for i in xrange(ehb.shape[0]):
        filed = False
        xml = False
        # ISC rows whose station code matches this EHB station
        stn_found = isc[isc[:, 0] == ehb[i, 0], :]
        min_dist = 10e10
        if stn_found.shape[0] > 0:
            if stn_found.shape[0] > 1:
                # several candidates: keep the closest one
                for j in xrange(stn_found.shape[0]):
                    dist = locations2degrees(np.float(stn_found[j, 2]),
                                             np.float(stn_found[j, 3]),
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                    if dist < min_dist:
                        min_dist = dist
                        record = stn_found[j, :]
            else:
                min_dist = locations2degrees(np.float(stn_found[0, 2]),
                                             np.float(stn_found[0, 3]),
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                record = stn_found[0, :]

        # Now we try to find the same station in XML file
        # if min_dist > 1. or stn_found.shape[0]==0:
        xstn_found = inv.select(station=ehb[i, 0], channel="*HZ")
        if len(stn_found) == 0 and len(xstn_found) == 0:
            # we failed to find station anywhere and assign dummy values
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3],
                'SHZ', '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            min_dist = 0.
            filed = True
        else:
            # if station is found somewhere we try to iterate and see if XML
            # has data giving it preference through adding extra value to
            # min_dist found in ISC
            if len(xstn_found) > 0:
                # print "----------",len(xstn_found)
                # print xstn_found[0][0].latitude
                min_dist = min_dist + 0.1
                for j in xrange(len(xstn_found)):
                    dist = locations2degrees(xstn_found[j][0].latitude,
                                             xstn_found[j][0].longitude,
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                    if min_dist > dist:
                        min_dist = dist
                        record = xstn_found[j]
                        # print record
                        xml = True

        # last defence if stations have been done but distance between
        # declared and found locations are more than 1 degree
        if min_dist > 1:
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3],
                'SHZ', '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            filed = True

        if xml:
            xml = False
        else:
            if filed:
                # patch empty/short end dates with the sentinel far-future one
                if len(record[7]) < 5:
                    record[7] = '2599-12-31 23:59:59'
                catalogue.append(
                    record)  # Alexei: should be extend, not append
            else:
                stn_found = isc[(isc[:, 0] == record[0]) &
                                (isc[:, 1] == record[1]), :]
                for k in xrange(stn_found.shape[0]):
                    net = Network(code=stn_found[k, 1], stations=[],
                                  description=' ')
                    if len(stn_found[k, 7]) < 5:
                        stn_found[k, 7] = '2599-12-31 23:59:59'
                    catalogue.append(stn_found[
                        k, :])  # Alexei: should be extend, not append

    stn_found = np.unique(np.array(catalogue), axis=0)
    if len(stn_found[stn_found == '']) > 0 or len(
            stn_found[stn_found == ' ']) > 0:
        print "Some elements are empty, check the list"

    # we composed our inventory. However some stations from ISC list can be
    # left behind. We check if some stations in ISC are forgotten
    lost = []
    for j in xrange(isc.shape[0]):
        # is there any common station name?
        common_st = stn_found[isc[j, 0] == stn_found[:, 0]]
        if common_st.shape[0] > 0:
            # is network code the same?
            common_net = common_st[common_st[:, 1] == isc[j, 1]]
            if common_net.shape[0] < 1:
                # ok we found forgotten one, check the XML
                if len(inv.select(station=isc[j, 0],
                                  network=isc[j, 1])) <= 0:
                    # Bingo...
                    lost.append(isc[j, :])
        else:
            if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                # Bingo...
                lost.append(isc[j, :])

    stn_found = np.vstack((stn_found, np.array(lost)))

    # Group the final stations per network; reuse the network object from
    # the IRIS inventory when one exists (stations stripped).
    netDict = defaultdict(list)
    for k in xrange(stn_found.shape[0]):
        result = inv.select(network=stn_found[k, 1])
        if (len(result.networks)):
            net = result.networks[0]
            net.stations = []
        else:
            net = Network(code=stn_found[k, 1], stations=[], description=' ')
        # print stn_found[k, 1]
        if len(stn_found[k, 7]) < 5:
            stn_found[k, 7] = '2599-12-31 23:59:59'
        catalogue.append(stn_found[k, :])
        sta = Station(code=stn_found[k, 0],
                      creation_date=utcdatetime.UTCDateTime(stn_found[k, 6]),
                      termination_date=utcdatetime.UTCDateTime(
                          stn_found[k, 7]),
                      site=Site(name=' '),
                      latitude=np.float(stn_found[k, 2]),
                      longitude=np.float(stn_found[k, 3]),
                      elevation=np.float(stn_found[k, 4]))
        # only keep channels whose code has a known response
        if (stn_found[k, 5] in responseDict.keys()):
            r = responseDict[stn_found[k, 5]]
            cha = Channel(code=stn_found[k, 5],
                          depth=0.,
                          azimuth=0.,
                          dip=-90.,
                          location_code='',
                          latitude=np.float(stn_found[k, 2]),
                          longitude=np.float(stn_found[k, 3]),
                          elevation=np.float(stn_found[k, 4]),
                          # sensor=sensorDict[stn_found[k,5]],
                          response=r)
            sta.channels.append(cha)
            # reuse the network already registered for this code, else
            # register the one built above
            if (type(netDict[stn_found[k, 1]]) == Network):
                netDict[stn_found[k, 1]].stations.append(sta)
            else:
                net.stations.append(sta)
                netDict[stn_found[k, 1]] = net
            # print 'np',stn_found[k,:]
        # end if

    our_xml = Inventory(networks=netDict.values(), source='EHB')
    print 'Writing output files..'

    output_folder = "output_old"
    pathlib.Path(output_folder).mkdir(exist_ok=True)
    for net in our_xml.networks:
        currInv = Inventory(networks=[net], source='EHB')
        fname = "network_{0}.xml".format(net.code)
        try:
            currInv.write(os.path.join(output_folder, fname),
                          format="stationxml", validate=True)
        except Exception as e:
            print("FAILED writing file {0} for network {1}, "
                  "continuing".format(fname, net.code))
            continue

    # our_xml.write("station.xml",format="stationxml", validate=True)
    our_xml.write("station.txt", format="stationtxt")
def test_reading_unicode_file(self):
    """
    Tests reading a file with non ASCII characters.
    """
    # The inventory the reader is expected to produce, built by hand.
    expected_inv = Inventory(
        source=None,
        networks=[
            Network(code="PR", stations=[
                Station(
                    code="CTN1", latitude=18.43718, longitude=-67.1303,
                    elevation=10.0,
                    site=Site(name="CATA¿O DEFENSA CIVIL"),
                    start_date=obspy.UTCDateTime("2004-01-27T00:00:00"),
                    end_date=obspy.UTCDateTime("2599-12-31T23:59:59")),
            ]),
        ])

    filename = os.path.join(self.data_dir, "unicode_example_fdsn.txt")

    def _check(inv):
        # Creation dates differ slightly per run, so align before comparing.
        inv.created = expected_inv.created
        self.assertEqual(inv, expected_inv)

    # Read from a filename.
    _check(read_fdsn_station_text_file(filename))
    _check(obspy.read_inventory(filename))

    # Read from open file in text mode.
    with open(filename, "rt", encoding="utf8") as fh:
        _check(read_fdsn_station_text_file(fh))
        fh.seek(0, 0)
        _check(obspy.read_inventory(fh))

    # Read from open file in binary mode.
    with open(filename, "rb") as fh:
        _check(read_fdsn_station_text_file(fh))
        fh.seek(0, 0)
        _check(obspy.read_inventory(fh))

    # Read from StringIO.
    with open(filename, "rt", encoding="utf8") as fh:
        with io.StringIO(fh.read()) as buf:
            buf.seek(0, 0)
            _check(read_fdsn_station_text_file(buf))
            buf.seek(0, 0)
            _check(obspy.read_inventory(buf))

    # Read from BytesIO.
    with open(filename, "rb") as fh:
        with io.BytesIO(fh.read()) as buf:
            buf.seek(0, 0)
            _check(read_fdsn_station_text_file(buf))
            buf.seek(0, 0)
            _check(obspy.read_inventory(buf))
def get_inventory():
    """
    Build a minimal test Inventory: network US, station ABCD, and three
    strong-motion channels HN1/HN2/HNZ at location code "11".

    :return: the assembled :class:`Inventory`.
    """
    # We'll first create all the various objects. These strongly follow the
    # hierarchy of StationXML files.
    inv = Inventory(
        # We'll add networks later.
        networks=[],
        # The source should be the id whoever create the file.
        source="ObsPy-Tutorial")

    net = Network(
        # This is the network code according to the SEED standard.
        code="US",
        # A list of stations. We'll add one later.
        stations=[],
        description="A test stations.",
        # Start-and end dates are optional.
        start_date=UTCDateTime(2016, 1, 2))

    sta = Station(
        # This is the station code according to the SEED standard.
        code="ABCD",
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        creation_date=UTCDateTime(2016, 1, 2),
        site=Site(name="First station"))

    def _make_channel(code, azimuth, dip):
        """Build one HN channel sharing the station coordinates."""
        return Channel(
            # Channel/location codes according to the SEED standard.
            code=code,
            location_code="11",
            # Note that these coordinates can differ from the station
            # coordinates.
            latitude=1.0,
            longitude=2.0,
            elevation=345.0,
            depth=10.0,
            azimuth=azimuth,
            dip=dip,
            sample_rate=1)

    # Fix: the horizontal components HN1 (azimuth 0) and HN2 (azimuth 90)
    # were previously declared with dip=-90.0, i.e. pointing straight down,
    # which contradicts their azimuths and the SEED orientation convention
    # (horizontals have dip 0, the vertical has dip -90).
    cha1 = _make_channel("HN1", azimuth=0.0, dip=0.0)
    cha2 = _make_channel("HN2", azimuth=90.0, dip=0.0)
    cha3 = _make_channel("HNZ", azimuth=0.0, dip=-90.0)

    # Now tie it all together.
    sta.channels.append(cha1)
    sta.channels.append(cha2)
    sta.channels.append(cha3)
    net.stations.append(sta)
    inv.networks.append(net)

    return inv
def test_write_stationtxt(self):
    """
    Test writing stationtxt at channel level

    Builds a two-network inventory by hand, serializes it with the
    STATIONTXT writer at CHANNEL, STATION and NETWORK levels, and checks
    that every expected pipe-separated line appears in the output.
    """
    # Manually create a test Inventory object.
    resp_1 = Response(
        instrument_sensitivity=InstrumentSensitivity(frequency=0.02,
                                                     input_units="M/S",
                                                     output_units=None,
                                                     value=8.48507E8))
    resp_2 = Response(
        instrument_sensitivity=InstrumentSensitivity(frequency=1.0,
                                                     input_units="M/S**2",
                                                     output_units=None,
                                                     value=53435.4))
    resp_3 = Response(
        instrument_sensitivity=InstrumentSensitivity(frequency=0.03,
                                                     input_units="M/S",
                                                     output_units=None,
                                                     value=6.27252E8))
    test_inv = Inventory(
        source=None,
        networks=[
            Network(
                code="IU",
                start_date=obspy.UTCDateTime("1988-01-01T00:00:00"),
                end_date=obspy.UTCDateTime("2500-12-31T23:59:59"),
                total_number_of_stations=1,
                description="Global Seismograph Network (GSN - IRIS/USGS)",
                stations=[
                    Station(code="ANMO",
                            latitude=34.9459,
                            longitude=-106.4572,
                            elevation=1850.0,
                            channels=[
                                Channel(code="BCI", location_code="",
                                        latitude=34.9459,
                                        longitude=-106.4572,
                                        elevation=1850.0,
                                        depth=100.0,
                                        azimuth=0.0,
                                        dip=0.0,
                                        sample_rate=0.0,
                                        sensor=Equipment(
                                            description=
                                            "Geotech KS-36000-I Borehole "
                                            "Seismometer"),
                                        start_date=obspy.UTCDateTime(
                                            "1989-08-29T00:00:00"),
                                        end_date=obspy.UTCDateTime(
                                            "1995-02-01T00:00:00"),
                                        response=resp_1),
                                Channel(
                                    code="LNZ",
                                    location_code="20",
                                    latitude=34.9459,
                                    longitude=-106.4572,
                                    elevation=1820.7,
                                    depth=0.0,
                                    azimuth=0.0,
                                    dip=-90.0,
                                    sample_rate=0.0,
                                    sensor=Equipment(
                                        description="Titan Accelerometer"),
                                    start_date=obspy.UTCDateTime(
                                        "2013-06-20T16:30:00"),
                                    response=resp_2),
                            ]),
                ]),
            Network(
                code="6E",
                start_date=obspy.UTCDateTime("2013-01-01T00:00:00"),
                end_date=obspy.UTCDateTime("2016-12-31T23:59:59"),
                total_number_of_stations=1,
                description="Wabash Valley Seismic Zone",
                stations=[
                    Station(
                        code="SH01",
                        latitude=37.7457,
                        longitude=-88.1368,
                        elevation=126.0,
                        channels=[
                            Channel(
                                code="LOG",
                                location_code="",
                                latitude=37.7457,
                                longitude=-88.1368,
                                elevation=126.0,
                                depth=0.0,
                                azimuth=0.0,
                                dip=0.0,
                                sample_rate=0.0,
                                sensor=Equipment(
                                    description="Reftek 130 Datalogger"),
                                start_date=obspy.UTCDateTime(
                                    "2013-11-23T00:00:00"),
                                end_date=obspy.UTCDateTime(
                                    "2016-12-31T23:59:59"),
                                response=resp_3)
                        ]),
                ])
        ])
    # CHANNEL level test
    stio = io.StringIO()
    test_inv.write(stio, format="STATIONTXT", level="CHANNEL")
    # check contents
    content = stio.getvalue()
    expected = [
        ("Network|Station|Location|Channel|Latitude|Longitude|"
         "Elevation|Depth|Azimuth|Dip|SensorDescription|Scale|"
         "ScaleFreq|ScaleUnits|SampleRate|StartTime|EndTime"),
        ("IU|ANMO||BCI|34.9459|-106.4572|1850.0|100.0|0.0|"
         "0.0|Geotech KS-36000-I Borehole Seismometer|"
         "848507000.0|0.02|M/S|0.0|1989-08-29T00:00:00|"
         "1995-02-01T00:00:00"),
        ("IU|ANMO|20|LNZ|34.9459|-106.4572|1820.7|0.0|0.0|"
         "-90.0|Titan Accelerometer|53435.4|1.0|M/S**2|0.0|"
         "2013-06-20T16:30:00|"),
        ("6E|SH01||LOG|37.7457|-88.1368|126.0|0.0|0.0|0.0|"
         "Reftek 130 Datalogger|627252000.0|0.03|M/S|0.0|"
         "2013-11-23T00:00:00|2016-12-31T23:59:59"),
    ]
    num_lines_written = 0
    for line in expected:
        self.assertIn(line, content)
        num_lines_written = num_lines_written + 1
    # assert that the number of lines written equals
    # the number of lines expected
    self.assertEqual(num_lines_written, len(expected))

    # STATION level test
    stio = io.StringIO()
    test_inv.write(stio, format="STATIONTXT", level="STATION")
    # check contents
    content = stio.getvalue()
    expected = [
        ("Network|Station|Latitude|Longitude|"
         "Elevation|SiteName|StartTime|EndTime"),
        ("IU|ANMO|34.9459|-106.4572|1850.0||"),
        ("6E|SH01|37.7457|-88.1368|126.0||"),
    ]
    num_lines_written = 0
    for line in expected:
        self.assertIn(line, content)
        num_lines_written = num_lines_written + 1
    # assert that the number of lines written equals
    # the number of lines expected
    self.assertEqual(num_lines_written, len(expected))

    # NETWORK level test
    stio = io.StringIO()
    test_inv.write(stio, format="STATIONTXT", level="NETWORK")
    # check contents
    content = stio.getvalue()
    expected = [
        ("Network|Description|StartTime|EndTime|TotalStations"),
        ("IU|Global Seismograph Network (GSN - IRIS/USGS)|"
         "1988-01-01T00:00:00|2500-12-31T23:59:59|1"),
        ("6E|Wabash Valley Seismic Zone|"
         "2013-01-01T00:00:00|2016-12-31T23:59:59|1"),
    ]
    num_lines_written = 0
    for line in expected:
        self.assertIn(line, content)
        num_lines_written = num_lines_written + 1
    # assert that the number of lines written equals
    # the number of lines expected
    self.assertEqual(num_lines_written, len(expected))
# Build a minimal obspy Inventory for an MT survey from a CSV summary file.
import obspy
from obspy.core.inventory import Inventory, Network, Station, Channel, Site
# NOTE(review): Equipment (and Channel) are imported but unused in this
# visible part of the script -- presumably used further down; verify.
from obspy.core.inventory.util import Equipment
import pandas as pd

# NOTE(review): hard-coded, user-specific Windows path -- consider making this
# a command-line argument or a config entry.
survey_csv = r"c:\Users\jpeacock\Documents\imush\Archive\survey_summary.csv"
survey_df = pd.read_csv(survey_csv)

# We'll first create all the various objects. These strongly follow the
# hierarchy of StationXML files.
inv = Inventory(networks=[], source="MT Test")

net = Network(
    code="MT",
    # A list of stations. We'll add one later.
    stations=[],
    description="Test stations.",
    # Start-and end dates are optional.
    start_date=obspy.UTCDateTime(2016, 1, 2),
)
inv.networks.append(net)

# One Station per CSV row; assumes the CSV has columns "siteID", "lat",
# "lon" and "nm_elev" -- TODO confirm against survey_summary.csv.
for row, station_df in survey_df.iterrows():
    sta = Station(
        code=station_df["siteID"],
        latitude=station_df["lat"],
        longitude=station_df["lon"],
        elevation=station_df["nm_elev"],
        creation_date=obspy.UTCDateTime(2016, 1, 2),
        site=Site(name=station_df["siteID"]),
    )
    # NOTE(review): `sta` is built but never attached to `net` in this
    # snippet; presumably `net.stations.append(sta)` follows (truncated view).
created=obspy.UTCDateTime(datetime.today())) # By default this accesses the NRL online nrl = NRL() # Loop to construct the filled-Inventory Networks = bank['Networks'] for network in bank['Networks'].keys(): # Write network to the Inventory _net_end_date = Networks[network]["To"] if _net_end_date == "None": _net_end_date = literal_eval(_net_end_date) else: _net_end_date = obspy.UTCDateTime(_net_end_date) _network = Network(code=network, start_date=obspy.UTCDateTime(Networks[network]["From"]), end_date=_net_end_date, description=Networks[network]["_description"]) inv.networks.append(_network) Stations = bank['Networks'][network]['Stations'] for station in Stations.keys(): print('---\n' + station + '\n') try: # Write the station to the Inventory _end_date = Stations[station]["To"] if _end_date == "None": _end_date = literal_eval(_end_date) else: _end_date = obspy.UTCDateTime(_end_date)
def df_to_inventory(df) -> obspy.Inventory:
    """
    Create a station inventory from a dataframe.

    Parameters
    ----------
    df
        A dataframe which must have the same columns as the ones produced by
        :func:`obsplus.stations_to_df`.

    Returns
    -------
    obspy.Inventory
        An inventory with one Network per network code and nested
        Station/Channel objects built from the grouped rows.

    Notes
    -----
    The dataframe can also contain columns named "sensor_keys" and
    "datalogger_keys" which will indicate the response information should
    be fetched using obspy's ability to interact with the nominal
    response library. Each of these columns should either contain tuples
    or strings where the keys are separated by double underscores (__).
    """

    def _make_key_mappings(cls):
        """ Create a mapping from columns in df to kwargs for cls. """
        base_params = set(inspect.signature(cls).parameters)
        new_map = mapping_keys[cls]
        base_map = {x: x for x in base_params - set(new_map)}
        base_map.update(new_map)
        return base_map

    def _groupby_if_exists(df, columns):
        """ Groupby columns if they exist on dataframe, else yield nothing. """
        cols = list(obsplus.utils.iterate(columns))
        if not set(cols).issubset(df.columns):
            return
        # copy df and set missing start/end times to reasonable values;
        # this is needed so NaT rows still get included in the groupby
        df = df.copy()
        isnan = df.isna()
        default_start = pd.Timestamp(SMALLDT64)
        default_end = pd.Timestamp(LARGEDT64)
        if "start_date" in columns:
            df["start_date"] = df["start_date"].fillna(default_start)
        if "end_date" in columns:
            df["end_date"] = df["end_date"].fillna(default_end)
        for ind, df_sub in df.groupby(cols):
            # restore NaN values that were filled above
            if isnan.any().any():
                df_sub[isnan.loc[df_sub.index]] = np.nan
            yield ind, df_sub

    def _get_kwargs(series, key_mapping):
        """ Create the kwargs for a class from a series and key mapping. """
        out = {}
        for k, v in key_mapping.items():
            # skip if requested kwarg is not in the series
            if v not in series:
                continue
            value = series[v]
            value = value if not pd.isnull(value) else None
            # cast the value if this kwarg has a registered type conversion
            if k in type_mappings and value is not None:
                value = type_mappings[k](value)
            out[k] = value
        return out

    @lru_cache()
    def get_nrl():
        """ Initiate a nominal response library object (network access). """
        from obspy.clients.nrl import NRL
        return NRL()

    @lru_cache()
    def get_response(datalogger_keys, sensor_keys):
        """ Fetch (and memoize) a response for the given NRL key tuples. """
        nrl = get_nrl()
        kwargs = dict(datalogger_keys=datalogger_keys, sensor_keys=sensor_keys)
        return nrl.get_response(**kwargs)

    def _get_resp_key(key):
        """
        Normalize response keys from various types to a tuple of strings.

        Returns an empty tuple when the key is missing (None, NaN or empty
        string) so callers can simply test truthiness.
        """
        # missing values: None, or a NaN float coming from pandas
        if key is None or (isinstance(key, float) and pd.isnull(key)):
            return ()
        if isinstance(key, str):
            # an empty string also means "no keys"; previously this produced
            # the truthy ("",) and triggered a bogus NRL lookup
            return tuple(key.split("__")) if key else ()
        return tuple(key)

    def _maybe_add_response(series, channel_kwargs):
        """ Maybe add the response information if required columns exist. """
        # bail out if required columns do not exist
        if not {"sensor_keys", "datalogger_keys"}.issubset(set(series.index)):
            return
        # determine if both required columns are populated, else bail out
        sensor_keys = _get_resp_key(series["sensor_keys"])
        datalogger_keys = _get_resp_key(series["datalogger_keys"])
        if not (sensor_keys and datalogger_keys):
            return
        # at this point all the required info for resp lookup should be there
        channel_kwargs["response"] = get_response(datalogger_keys, sensor_keys)

    # Deal with pandas dtype weirdness
    # TODO remove this when custom column functions are supported by DataFrame
    # Extractor (part of the big refactor in #131)
    for col in NSLC:
        # Strip only a *trailing* ".0" left over from float -> str casting.
        # The previous pattern ".0" was interpreted as a regex in which "."
        # matches any character, which could corrupt codes (e.g. "00" -> "0")
        # and also stripped matches in the middle of a code.
        df[col] = df[col].astype(str).str.replace(r"\.0$", "", regex=True)
    # first get key_mappings
    net_map = _make_key_mappings(Network)
    sta_map = _make_key_mappings(Station)
    cha_map = _make_key_mappings(Channel)
    # next define columns groupbys should be performed on
    net_columns = ["network"]
    sta_columns = ["station", "start_date", "end_date"]
    cha_columns = ["channel", "location", "start_date", "end_date"]
    # Ensure input is a dataframe
    df = obsplus.stations_to_df(df)
    # Iterate networks and create stations
    networks = []
    for net_code, net_df in _groupby_if_exists(df, net_columns):
        stations = []
        for st_code, sta_df in _groupby_if_exists(net_df, sta_columns):
            channels = []
            for ch_code, ch_df in _groupby_if_exists(sta_df, cha_columns):
                chan_series = ch_df.iloc[0]
                kwargs = _get_kwargs(chan_series, cha_map)
                # try to add the response information
                _maybe_add_response(chan_series, kwargs)
                channels.append(Channel(**kwargs))
            kwargs = _get_kwargs(sta_df.iloc[0], sta_map)
            stations.append(Station(channels=channels, **kwargs))
        kwargs = _get_kwargs(net_df.iloc[0], net_map)
        networks.append(Network(stations=stations, **kwargs))

    return obspy.Inventory(networks=networks,
                           source=f"ObsPlus_v{obsplus.__version__}")
def test_get_response(self):
    """Inventory.get_response must resolve the correct response per SEED id."""
    # One distinct response object per (network, station) so mix-ups show up.
    resp_map = {
        ('N1', 'N1S1'): Response('RESPN1S1'),
        ('N1', 'N1S2'): Response('RESPN1S2'),
        ('N2', 'N2S1'): Response('RESPN2S1'),
    }

    def _build_station(net_code, sta_code):
        # Single vertical channel carrying the station's response.
        chan = Channel(code='BHZ', location_code='', latitude=0.0,
                       longitude=0.0, elevation=0.0, depth=0.0,
                       response=resp_map[(net_code, sta_code)])
        return Station(code=sta_code, latitude=0.0, longitude=0.0,
                       elevation=0.0, channels=[chan])

    net_one = Network('N1', stations=[_build_station('N1', 'N1S1'),
                                      _build_station('N1', 'N1S2')])
    net_two = Network('N2', stations=[_build_station('N2', 'N2S1')])
    inv = Inventory(networks=[net_one, net_two], source='TEST')

    query_time = UTCDateTime('2010-01-01T12:00')
    for (net_code, sta_code), expected in resp_map.items():
        seed_id = '{}.{}..BHZ'.format(net_code, sta_code)
        self.assertEqual(inv.get_response(seed_id, query_time), expected)
channel='LH2').networks[0].stations[0].channels[0].copy() l3 = inv.select( station=ista.code, channel='LH3').networks[0].stations[0].channels[0].copy() l2.code = 'LHE' l2.azimuth = Azimuth(90.0) l3.code = 'LHN' l3.azimuth = Azimuth(0.0) sta.channels.append(l2) sta.channels.append(l3) YJ_net.stations.append(sta) inv_all.networks.append(YJ_net) # EN E_net = Network(code='EN',stations=[],\ description='ENAP from Rodrigo',\ start_date=UTCDateTime(2019,3,1)) E_file = glob('seed/dataless/ENAP*.xml') for f in E_file: nsta = read_inventory(f) for ch in nsta.networks[0].stations[0].channels: ch.code = 'L' + ch.code[1:] E_net.stations.append(nsta.networks[0].stations[0]) inv_all.networks.append(E_net) esta = read_inventory('seed/dataless/ENAP-ANMA.EN.xml') for ch in esta.networks[0].stations[0].channels: ch.code = 'L' + ch.code[1:] inv_all.networks[-1].stations.append(esta.networks[0].stations[0]) # write everything!