def _channel_from_stats(stats):
    """Build an ObsPy ``Channel`` from a trace ``stats`` object.

    The raw unit string is mapped through the module-level ``UNITS``
    table, sensor/serial-number equipment info is attached when present,
    the horizontal orientation (when valid) becomes the channel azimuth,
    and a simple instrument-sensitivity response is synthesized when no
    full response is attached to ``stats``.

    Parameters
    ----------
    stats :
        Trace stats with ``standard`` metadata and ``coordinates``.

    Returns
    -------
    Channel
    """
    std = stats.standard

    # Translate the raw unit string through the UNITS table when possible;
    # unknown units collapse to an empty string.
    units = UNITS[std.units] if std.units in UNITS else ''

    # Only attach equipment metadata when there is something to record.
    equipment = None
    instrument = std.instrument
    serialnum = std.sensor_serial_number
    if len(instrument) or len(serialnum):
        equipment = Equipment(type=instrument, serial_number=serialnum)

    depth = 0.0

    # Use the horizontal orientation as azimuth when present and not NaN.
    azimuth = 0
    has_orientation = ('horizontal_orientation' in std
                       and not np.isnan(std.horizontal_orientation))
    if has_orientation:
        azimuth = std.horizontal_orientation
    # Channel only accepts azimuths in [0, 360]; bad values reset to 0.
    if not (azimuth >= 0 and azimuth <= 360):
        azimuth = 0

    if 'response' in stats:
        response = stats['response']
    else:
        # we may have instrument sensitivity...
        frequency = 1 / stats['standard']['instrument_period']
        units = std.units
        if not np.isnan(stats['standard']['instrument_sensitivity']):
            sens = stats['standard']['instrument_sensitivity']
        else:
            sens = 1.0
        sensitivity = InstrumentSensitivity(sens, frequency=frequency,
                                            input_units=units,
                                            output_units='COUNTS')
        response = Response(instrument_sensitivity=sensitivity)

    comments = Comment(std.comments)
    logging.debug('channel: %s' % stats.channel)
    return Channel(stats.channel, stats.location,
                   stats.coordinates['latitude'],
                   stats.coordinates['longitude'],
                   stats.coordinates['elevation'],
                   depth,
                   azimuth=azimuth,
                   sample_rate=stats.sampling_rate,
                   storage_format=std.source_format,
                   calibration_units=units,
                   comments=[comments],
                   response=response,
                   sensor=equipment)
def _channel_from_stats(stats):
    """Construct an ObsPy Channel object from a trace Stats object.

    Maps the raw unit string through the module-level ``UNITS`` table,
    attaches sensor/serial-number equipment info when present, uses the
    horizontal orientation (when valid and in range) as the channel
    azimuth, and falls back to a simple instrument-sensitivity response
    when no full response is attached to ``stats``.

    Parameters
    ----------
    stats :
        Trace stats with ``standard`` metadata and ``coordinates``.

    Returns
    -------
    Channel
    """
    if stats.standard.units in UNITS:
        units = UNITS[stats.standard.units]
    else:
        units = ""
    instrument = stats.standard.instrument
    serialnum = stats.standard.sensor_serial_number
    # Only attach equipment info when at least one field is non-empty.
    if len(instrument) or len(serialnum):
        equipment = Equipment(type=instrument, serial_number=serialnum)
    else:
        equipment = None
    depth = 0.0
    c1 = "horizontal_orientation" in stats.standard
    c2 = c1 and not np.isnan(stats.standard.horizontal_orientation)
    if c2:
        azimuth = stats.standard.horizontal_orientation
    else:
        azimuth = 0
    # Channel only accepts azimuths in [0, 360]; bad values reset to 0.
    if not (azimuth >= 0 and azimuth <= 360):
        azimuth = 0
    if "response" in stats:
        response = stats["response"]
    else:
        # we may have instrument sensitivity...
        # Guard against a zero instrument period, which previously raised
        # ZeroDivisionError; a zero now behaves like a missing (NaN)
        # period, matching the NaN pass-through of the old code.
        period = stats["standard"]["instrument_period"]
        frequency = 1 / period if period else float("nan")
        units = stats.standard.units
        if not np.isnan(stats["standard"]["instrument_sensitivity"]):
            sens = stats["standard"]["instrument_sensitivity"]
        else:
            sens = 1.0
        sensitivity = InstrumentSensitivity(
            sens, frequency=frequency, input_units=units, output_units="COUNTS"
        )
        response = Response(instrument_sensitivity=sensitivity)
    comments = Comment(stats.standard.comments)
    logging.debug(f"channel: {stats.channel}")
    channel = Channel(
        stats.channel,
        stats.location,
        stats.coordinates["latitude"],
        stats.coordinates["longitude"],
        stats.coordinates["elevation"],
        depth,
        azimuth=azimuth,
        sample_rate=stats.sampling_rate,
        calibration_units=units,
        comments=[comments],
        response=response,
        sensor=equipment,
    )
    return channel
def _channel_from_stats(stats):
    """Construct an ObsPy Channel object from a trace Stats object.

    Maps the raw unit string through the module-level ``UNITS`` table,
    attaches sensor/serial-number equipment info when present, and uses
    the horizontal orientation (when valid) as the channel azimuth. The
    response is taken from ``stats`` when present, otherwise left None.

    Parameters
    ----------
    stats :
        Trace stats with ``standard`` metadata and ``coordinates``.

    Returns
    -------
    Channel
    """
    if stats.standard.units in UNITS:
        units = UNITS[stats.standard.units]
    else:
        units = ''
    instrument = stats.standard.instrument
    serialnum = stats.standard.sensor_serial_number
    # Only attach equipment info when at least one field is non-empty.
    if len(instrument) or len(serialnum):
        equipment = Equipment(type=instrument, serial_number=serialnum)
    else:
        equipment = None
    depth = 0.0
    c1 = 'horizontal_orientation' in stats.standard
    c2 = c1 and not np.isnan(stats.standard.horizontal_orientation)
    if c2:
        azimuth = stats.standard.horizontal_orientation
    else:
        azimuth = 0
    # Channel only accepts azimuths in [0, 360]; clamp out-of-range
    # values to 0 (consistent with the other _channel_from_stats
    # variants in this file) so bad orientation metadata cannot make
    # Channel construction fail.
    if not (azimuth >= 0 and azimuth <= 360):
        azimuth = 0
    response = None
    if 'response' in stats:
        response = stats['response']
    comments = Comment(stats.standard.comments)
    logging.debug('channel: %s' % stats.channel)
    channel = Channel(stats.channel, stats.location,
                      stats.coordinates['latitude'],
                      stats.coordinates['longitude'],
                      stats.coordinates['elevation'],
                      depth, azimuth=azimuth,
                      sample_rate=stats.sampling_rate,
                      storage_format=stats.standard.source_format,
                      calibration_units=units,
                      comments=[comments],
                      response=response,
                      sensor=equipment)
    return channel
def read_fdsn_station_text_file(path_or_file_object):
    """
    Function reading a FDSN station text file to an inventory object.

    Detects the level (network, station, or channel) from the header
    line, converts each column with the level-specific type converters,
    and assembles the rows into an :class:`Inventory`.

    :param path_or_file_object: File name or file like object.
    """
    def _read(obj):
        # Parse the pipe-separated text; the first row is the header.
        r = unicode_csv_reader(obj, delimiter=native_str("|"))
        header = next(r)
        header[0] = header[0].lstrip("#")
        header = [_i.strip().lower() for _i in header]
        # IRIS currently has a wrong header name. Just map it.
        header = [_i.replace("instrument", "sensordescription")
                  for _i in header]
        all_lines = []
        for line in r:
            # Skip comment lines.
            if line[0].startswith("#"):
                continue
            all_lines.append([_i.strip() for _i in line])
        return {"header": tuple(header), "content": all_lines}

    # Enable reading from files and buffers opened in binary mode.
    if (hasattr(path_or_file_object, "mode") and
            "b" in path_or_file_object.mode) or \
            isinstance(path_or_file_object, io.BytesIO):
        buf = io.StringIO(path_or_file_object.read().decode("utf-8"))
        buf.seek(0, 0)
        path_or_file_object = buf

    # Anything with a read() method is treated as an open file/buffer;
    # otherwise assume a path and open it ourselves.
    if hasattr(path_or_file_object, "read"):
        content = _read(path_or_file_object)
    else:
        with open(path_or_file_object, "rt", newline="",
                  encoding="utf8") as fh:
            content = _read(fh)

    # Figure out the type.
    # The header tuple determines both the level and the per-column
    # type converters.
    if content["header"] == network_components:
        level = "network"
        filetypes = network_types
    elif content["header"] == station_components:
        level = "station"
        filetypes = station_types
    elif content["header"] == channel_components:
        level = "channel"
        filetypes = channel_types
    else:
        raise ValueError("Unknown type of header.")

    content = content["content"]
    converted_content = []
    # Convert all types.
    for line in content:
        converted_content.append(
            [v_type(value) for value, v_type in zip(line, filetypes)])

    # Now convert to an inventory object.
    inv = Inventory(networks=[], source=None)
    if level == "network":
        # One row per network.
        for net in converted_content:
            network = Network(
                code=net[0], description=net[1], start_date=net[2],
                end_date=net[3], total_number_of_stations=net[4])
            inv.networks.append(network)
    elif level == "station":
        # Group stations by network code, preserving first-seen order.
        networks = collections.OrderedDict()
        for sta in converted_content:
            site = Site(name=sta[5])
            station = Station(
                code=sta[1], latitude=sta[2], longitude=sta[3],
                elevation=sta[4], site=site, start_date=sta[6],
                end_date=sta[7])
            if sta[0] not in networks:
                networks[sta[0]] = []
            networks[sta[0]].append(station)
        for network_code, stations in networks.items():
            net = Network(code=network_code, stations=stations)
            inv.networks.append(net)
    elif level == "channel":
        # Group channels under (network, station) pairs, creating the
        # containing Network/Station objects on first sight.
        networks = collections.OrderedDict()
        stations = collections.OrderedDict()
        for channel in converted_content:
            net, sta, loc, chan, lat, lng, ele, dep, azi, dip, inst, \
                scale, scale_freq, scale_units, s_r, st, et = channel
            if net not in networks:
                networks[net] = Network(code=net)
            if (net, sta) not in stations:
                station = Station(code=sta, latitude=lat, longitude=lng,
                                  elevation=ele)
                networks[net].stations.append(station)
                stations[(net, sta)] = station
            sensor = Equipment(type=inst)
            # Only build a response when both scale and frequency exist
            # (log channels typically have neither).
            if scale is not None and scale_freq is not None:
                resp = Response(
                    instrument_sensitivity=InstrumentSensitivity(
                        value=scale, frequency=scale_freq,
                        input_units=scale_units, output_units=None))
            else:
                resp = None
            try:
                channel = Channel(
                    code=chan, location_code=loc, latitude=lat,
                    longitude=lng, elevation=ele, depth=dep, azimuth=azi,
                    dip=dip, sensor=sensor, sample_rate=s_r,
                    start_date=st, end_date=et, response=resp)
            except Exception as e:
                # A malformed row should not abort the whole file.
                warnings.warn(
                    "Failed to parse channel %s.%s.%s.%s due to: %s" % (
                        net, sta, loc, chan, str(e)),
                    UserWarning)
                continue
            stations[(net, sta)].channels.append(channel)
        inv.networks.extend(list(networks.values()))
    else:
        # Cannot really happen - just a safety measure.
        raise NotImplementedError("Unknown level: %s" % str(level))
    return inv
def test_channel_str(self):
    """
    Tests the __str__ method of the channel object.
    """
    c = Channel(code="BHE", location_code="10", latitude=1, longitude=2,
                elevation=3, depth=4, azimuth=5, dip=6)
    # The expected output grows as attributes are added, so build it
    # incrementally from a list of lines.
    lines = [
        "Channel 'BHE', Location '10' \n",
        "\tTime range: -- - --\n",
        "\tLatitude: 1.00, Longitude: 2.00, Elevation: 3.0 m, "
        "Local Depth: 4.0 m\n",
        "\tAzimuth: 5.00 degrees from north, clockwise\n",
        "\tDip: 6.00 degrees down from horizontal\n",
    ]
    assert str(c) == "".join(lines)

    # Adding channel types.
    c.types = ["A", "B"]
    lines.append("\tChannel types: A, B\n")
    assert str(c) == "".join(lines)

    # Adding a sampling rate.
    c.sample_rate = 10.0
    lines.append("\tSampling Rate: 10.00 Hz\n")
    assert str(c) == "".join(lines)

    # "Adding" response
    response_line = "\tResponse information available"
    c.response = True
    assert str(c) == "".join(lines) + response_line

    # The sensor line is rendered between the sampling rate and the
    # response line. Exercise empty sensor, type only, description
    # only, and both.
    sensor_cases = [
        (dict(type=None),
         "\tSensor (Description): None (None)\n"),
        (dict(type="random"),
         "\tSensor (Description): random (None)\n"),
        (dict(description="some description"),
         "\tSensor (Description): None (some description)\n"),
        (dict(type="random", description="some description"),
         "\tSensor (Description): random (some description)\n"),
    ]
    for kwargs, sensor_line in sensor_cases:
        c.sensor = Equipment(**kwargs)
        assert str(c) == "".join(lines) + sensor_line + response_line
def test_write_stationtxt(self):
    """
    Test writing stationtxt at network, station, and channel level.
    """
    def assert_written(content, expected):
        # Each expected line must appear verbatim in the written output.
        # (The old num_lines_written counter compared len(expected)
        # against itself and could never fail, so it was removed.)
        for line in expected:
            self.assertIn(line, content)

    # Manually create a test Inventory object.
    resp_1 = Response(
        instrument_sensitivity=InstrumentSensitivity(
            frequency=0.02, input_units="M/S",
            output_units=None, value=8.48507E8))
    resp_2 = Response(
        instrument_sensitivity=InstrumentSensitivity(
            frequency=1.0, input_units="M/S**2",
            output_units=None, value=53435.4))
    resp_3 = Response(
        instrument_sensitivity=InstrumentSensitivity(
            frequency=0.03, input_units="M/S",
            output_units=None, value=6.27252E8))
    test_inv = Inventory(
        source=None,
        networks=[
            Network(
                code="IU",
                start_date=obspy.UTCDateTime("1988-01-01T00:00:00"),
                end_date=obspy.UTCDateTime("2500-12-31T23:59:59"),
                total_number_of_stations=1,
                description="Global Seismograph Network (GSN - IRIS/USGS)",
                stations=[
                    Station(
                        code="ANMO",
                        latitude=34.9459,
                        longitude=-106.4572,
                        elevation=1850.0,
                        channels=[
                            Channel(
                                code="BCI", location_code="",
                                latitude=34.9459,
                                longitude=-106.4572,
                                elevation=1850.0,
                                depth=100.0,
                                azimuth=0.0,
                                dip=0.0,
                                sample_rate=0.0,
                                sensor=Equipment(
                                    description="Geotech KS-36000-I "
                                                "Borehole Seismometer"),
                                start_date=obspy.UTCDateTime(
                                    "1989-08-29T00:00:00"),
                                end_date=obspy.UTCDateTime(
                                    "1995-02-01T00:00:00"),
                                response=resp_1),
                            Channel(
                                code="LNZ", location_code="20",
                                latitude=34.9459,
                                longitude=-106.4572,
                                elevation=1820.7,
                                depth=0.0,
                                azimuth=0.0,
                                dip=-90.0,
                                sample_rate=0.0,
                                sensor=Equipment(
                                    description="Titan Accelerometer"),
                                start_date=obspy.UTCDateTime(
                                    "2013-06-20T16:30:00"),
                                response=resp_2),
                        ]),
                ]),
            Network(
                code="6E",
                start_date=obspy.UTCDateTime("2013-01-01T00:00:00"),
                end_date=obspy.UTCDateTime("2016-12-31T23:59:59"),
                total_number_of_stations=1,
                description="Wabash Valley Seismic Zone",
                stations=[
                    Station(
                        code="SH01",
                        latitude=37.7457,
                        longitude=-88.1368,
                        elevation=126.0,
                        channels=[
                            Channel(
                                code="LOG", location_code="",
                                latitude=37.7457,
                                longitude=-88.1368,
                                elevation=126.0,
                                depth=0.0,
                                azimuth=0.0,
                                dip=0.0,
                                sample_rate=0.0,
                                sensor=Equipment(
                                    description="Reftek 130 Datalogger"),
                                start_date=obspy.UTCDateTime(
                                    "2013-11-23T00:00:00"),
                                end_date=obspy.UTCDateTime(
                                    "2016-12-31T23:59:59"),
                                response=resp_3)
                        ]),
                ])
        ])

    # CHANNEL level test
    stio = io.StringIO()
    test_inv.write(stio, format="STATIONTXT", level="CHANNEL")
    assert_written(stio.getvalue(), [
        ("Network|Station|Location|Channel|Latitude|Longitude|"
         "Elevation|Depth|Azimuth|Dip|SensorDescription|Scale|"
         "ScaleFreq|ScaleUnits|SampleRate|StartTime|EndTime"),
        ("IU|ANMO||BCI|34.9459|-106.4572|1850.0|100.0|0.0|"
         "0.0|Geotech KS-36000-I Borehole Seismometer|"
         "848507000.0|0.02|M/S|0.0|1989-08-29T00:00:00|"
         "1995-02-01T00:00:00"),
        ("IU|ANMO|20|LNZ|34.9459|-106.4572|1820.7|0.0|0.0|"
         "-90.0|Titan Accelerometer|53435.4|1.0|M/S**2|0.0|"
         "2013-06-20T16:30:00|"),
        ("6E|SH01||LOG|37.7457|-88.1368|126.0|0.0|0.0|0.0|"
         "Reftek 130 Datalogger|627252000.0|0.03|M/S|0.0|"
         "2013-11-23T00:00:00|2016-12-31T23:59:59"),
    ])

    # STATION level test
    stio = io.StringIO()
    test_inv.write(stio, format="STATIONTXT", level="STATION")
    assert_written(stio.getvalue(), [
        ("Network|Station|Latitude|Longitude|"
         "Elevation|SiteName|StartTime|EndTime"),
        ("IU|ANMO|34.9459|-106.4572|1850.0||"),
        ("6E|SH01|37.7457|-88.1368|126.0||"),
    ])

    # NETWORK level test
    stio = io.StringIO()
    test_inv.write(stio, format="STATIONTXT", level="NETWORK")
    assert_written(stio.getvalue(), [
        ("Network|Description|StartTime|EndTime|TotalStations"),
        ("IU|Global Seismograph Network (GSN - IRIS/USGS)|"
         "1988-01-01T00:00:00|2500-12-31T23:59:59|1"),
        ("6E|Wabash Valley Seismic Zone|"
         "2013-01-01T00:00:00|2016-12-31T23:59:59|1"),
    ])
def test_reading_channel_without_response_info(self):
    """
    Test reading a file at the channel level with missing scale,
    scale frequency and units. This is common for the log channel of
    instruments.
    """
    # Manually create an expected Inventory object.
    expected_inv = Inventory(
        source=None,
        networks=[
            Network(
                code="6E",
                stations=[
                    Station(
                        code="SH01",
                        latitude=37.7457,
                        longitude=-88.1368,
                        elevation=126.0,
                        channels=[
                            Channel(
                                code="LOG", location_code="",
                                latitude=37.7457,
                                longitude=-88.1368,
                                elevation=126.0,
                                depth=0.0,
                                azimuth=0.0,
                                dip=0.0,
                                sample_rate=0.0,
                                sensor=Equipment(
                                    type="Reftek 130 Datalogger"),
                                start_date=obspy.UTCDateTime(
                                    "2013-11-23T00:00:00"),
                                end_date=obspy.UTCDateTime(
                                    "2016-12-31T23:59:59"))
                        ]),
                ])
        ])

    def compare(inv):
        # Copy creation date as it will be slightly different otherwise.
        inv.created = expected_inv.created
        self.assertEqual(inv, expected_inv)

    filename = os.path.join(self.data_dir, "log_channel_fdsn.txt")

    # Read from a filename.
    compare(read_fdsn_station_text_file(filename))
    compare(obspy.read_inventory(filename))

    # Read from open file in text mode.
    with open(filename, "rt", encoding="utf8") as fh:
        compare(read_fdsn_station_text_file(fh))
        fh.seek(0, 0)
        compare(obspy.read_inventory(fh))

    # Read from open file in binary mode.
    with open(filename, "rb") as fh:
        compare(read_fdsn_station_text_file(fh))
        fh.seek(0, 0)
        compare(obspy.read_inventory(fh))

    # Read from StringIO.
    with open(filename, "rt", encoding="utf8") as fh:
        with io.StringIO(fh.read()) as buf:
            buf.seek(0, 0)
            compare(read_fdsn_station_text_file(buf))
            buf.seek(0, 0)
            compare(obspy.read_inventory(buf))

    # Read from BytesIO.
    with open(filename, "rb") as fh:
        with io.BytesIO(fh.read()) as buf:
            buf.seek(0, 0)
            compare(read_fdsn_station_text_file(buf))
            buf.seek(0, 0)
            compare(obspy.read_inventory(buf))
def test_reading_channel_file(self):
    """
    Test reading a file at the channel level.
    """
    # Shared instrument sensitivities; several channels below reuse
    # the same Response object.
    resp_1 = Response(
        instrument_sensitivity=InstrumentSensitivity(frequency=0.02,
                                                     input_units="M/S",
                                                     output_units=None,
                                                     value=4.88233E8))
    resp_2 = Response(
        instrument_sensitivity=InstrumentSensitivity(frequency=0.03,
                                                     input_units="M/S",
                                                     output_units=None,
                                                     value=4.98112E8))
    resp_3 = Response(
        instrument_sensitivity=InstrumentSensitivity(frequency=0.03,
                                                     input_units="M/S",
                                                     output_units=None,
                                                     value=6.27252E8))
    # Manually create an expected Inventory object.
    expected_inv = Inventory(
        source=None,
        networks=[
            Network(
                code="AK",
                stations=[
                    Station(
                        code="BAGL",
                        latitude=60.4896,
                        longitude=-142.0915,
                        elevation=1470,
                        channels=[
                            Channel(
                                code="LHZ", location_code="",
                                latitude=60.4896,
                                longitude=-142.0915,
                                elevation=1470,
                                depth=0.0,
                                azimuth=0.0,
                                dip=-90.0,
                                sample_rate=1.0,
                                sensor=Equipment(
                                    type="Nanometrics Trillium 240 Sec "
                                         "Response sn 400 and a"),
                                start_date=obspy.UTCDateTime(
                                    "2013-01-01T00:00:00"),
                                end_date=obspy.UTCDateTime(
                                    "2599-12-31T23:59:59"),
                                response=resp_1)
                        ]),
                    # BWN carries two epochs of the same channel code.
                    Station(
                        code="BWN",
                        latitude=64.1732,
                        longitude=-149.2991,
                        elevation=356.0,
                        channels=[
                            Channel(
                                code="LHZ", location_code="",
                                latitude=64.1732,
                                longitude=-149.2991,
                                elevation=356.0,
                                depth=0.0,
                                azimuth=0.0,
                                dip=-90.0,
                                sample_rate=1.0,
                                sensor=Equipment(
                                    type="Nanometrics Trillium 240 Sec "
                                         "Response sn 400 and a"),
                                start_date=obspy.UTCDateTime(
                                    "2010-07-23T00:00:00"),
                                end_date=obspy.UTCDateTime(
                                    "2014-05-28T23:59:59"),
                                response=resp_1),
                            Channel(
                                code="LHZ", location_code="",
                                latitude=64.1732,
                                longitude=-149.2991,
                                elevation=356.0,
                                depth=1.5,
                                azimuth=0.0,
                                dip=-90.0,
                                sample_rate=1.0,
                                sensor=Equipment(
                                    type="Nanometrics Trillium 120 Sec "
                                         "Response/Quanterra 33"),
                                start_date=obspy.UTCDateTime(
                                    "2014-08-01T00:00:00"),
                                end_date=obspy.UTCDateTime(
                                    "2599-12-31T23:59:59"),
                                response=resp_2)
                        ])
                ]),
            Network(
                code="AZ",
                stations=[
                    # BZN carries three consecutive epochs of LHZ.
                    Station(
                        code="BZN",
                        latitude=33.4915,
                        longitude=-116.667,
                        elevation=1301.0,
                        channels=[
                            Channel(
                                code="LHZ", location_code="",
                                latitude=33.4915,
                                longitude=-116.667,
                                elevation=1301.0,
                                depth=0.0,
                                azimuth=0.0,
                                dip=-90.0,
                                sample_rate=1.0,
                                sensor=Equipment(
                                    type="Streckeisen STS-2 G1/Quanterra 330 "
                                         "Linear Phase Be"),
                                start_date=obspy.UTCDateTime(
                                    "2010-07-26T17:22:00"),
                                end_date=obspy.UTCDateTime(
                                    "2013-07-15T21:22:23"),
                                response=resp_3),
                            Channel(
                                code="LHZ", location_code="",
                                latitude=33.4915,
                                longitude=-116.667,
                                elevation=1301.0,
                                depth=0.0,
                                azimuth=0.0,
                                dip=-90.0,
                                sample_rate=1.0,
                                sensor=Equipment(
                                    type="Streckeisen STS-2 G1/Quanterra 330 "
                                         "Linear Phase Be"),
                                start_date=obspy.UTCDateTime(
                                    "2013-07-15T21:22:23"),
                                end_date=obspy.UTCDateTime(
                                    "2013-10-22T19:30:00"),
                                response=resp_3),
                            Channel(
                                code="LHZ", location_code="",
                                latitude=33.4915,
                                longitude=-116.667,
                                elevation=1301.0,
                                depth=0.0,
                                azimuth=0.0,
                                dip=-90.0,
                                sample_rate=1.0,
                                sensor=Equipment(
                                    type="Streckeisen STS-2 G1/Quanterra 330 "
                                         "Linear Phase Be"),
                                start_date=obspy.UTCDateTime(
                                    "2013-10-22T19:30:00"),
                                end_date=obspy.UTCDateTime(
                                    "2599-12-31T23:59:59"),
                                response=resp_3)
                        ])
                ])
        ])
    # Read from a filename.
    filename = os.path.join(self.data_dir, "channel_level_fdsn.txt")
    inv = read_fdsn_station_text_file(filename)
    inv_obs = obspy.read_inventory(filename)
    # Copy creation date as it will be slightly different otherwise.
    inv.created = expected_inv.created
    inv_obs.created = expected_inv.created
    self.assertEqual(inv, expected_inv)
    self.assertEqual(inv_obs, expected_inv)
    # Read from open file in text mode.
    with open(filename, "rt", encoding="utf8") as fh:
        inv = read_fdsn_station_text_file(fh)
        fh.seek(0, 0)
        inv_obs = obspy.read_inventory(fh)
    inv.created = expected_inv.created
    inv_obs.created = expected_inv.created
    self.assertEqual(inv, expected_inv)
    self.assertEqual(inv_obs, expected_inv)
    # Read from open file in binary mode.
    with open(filename, "rb") as fh:
        inv = read_fdsn_station_text_file(fh)
        fh.seek(0, 0)
        inv_obs = obspy.read_inventory(fh)
    inv.created = expected_inv.created
    inv_obs.created = expected_inv.created
    self.assertEqual(inv, expected_inv)
    self.assertEqual(inv_obs, expected_inv)
    # Read from StringIO.
    with open(filename, "rt", encoding="utf8") as fh:
        with io.StringIO(fh.read()) as buf:
            buf.seek(0, 0)
            inv = read_fdsn_station_text_file(buf)
            buf.seek(0, 0)
            inv_obs = obspy.read_inventory(buf)
    inv.created = expected_inv.created
    inv_obs.created = expected_inv.created
    self.assertEqual(inv, expected_inv)
    self.assertEqual(inv_obs, expected_inv)
    # Read from BytesIO.
    with open(filename, "rb") as fh:
        with io.BytesIO(fh.read()) as buf:
            buf.seek(0, 0)
            inv = read_fdsn_station_text_file(buf)
            buf.seek(0, 0)
            inv_obs = obspy.read_inventory(buf)
    inv.created = expected_inv.created
    inv_obs.created = expected_inv.created
    self.assertEqual(inv, expected_inv)
    self.assertEqual(inv_obs, expected_inv)
def main():
    """Interactively build (or extend) a StationXML inventory.

    Prompts for a network code, an NRL datalogger and sensor, and one or
    more stations/channels, then writes the assembled inventory to the
    chosen StationXML file. Default values (``netc``, ``scode``,
    ``geolat``, ``geolon``, ``geoelev``, ``date``, ``longname``, ``sps``,
    ``azims``, ``dips``, ``locs``) come from module-level globals.
    """
    chans = "EHZ,EHN,EHE"
    # Get StationXML file
    print("Interactive StaXML builder")
    print("Work in progress...some things hardwired\n\n")
    inv_name = input("Enter StationXML file name: ")
    if os.path.isfile(inv_name):
        inv = read_inventory(inv_name)
    else:
        print(f"Making new inventory: {inv_name}\n")
        inv = Inventory(networks=[], source="Weston")
    # Net code
    ques = f"Enter Network Code ({str(netc)}) :"
    net_code = str(input(ques) or netc)
    net = Network(code=net_code, stations=[])
    print("\n")
    # connect to NRL
    nrl = NRL()
    # Datalogger info: loop until the NRL accepts the keys.
    ret = 0
    digi = "REF TEK|RT 130S & 130-SMHR|1|200"
    print("Input NRL Digi info ( | separated, careful with spaces)....")
    print("E.g manufacturer| model| gain| sps\n")
    while ret == 0:
        ques = f"Enter DIGI info ({digi}) :"
        digi = str(input(ques) or digi)
        print("\n")
        try:
            nrl.get_datalogger_response(digi.split('|'))
            ret = 1
            print("!!!!! DATA LOGGER SUCCESS!!!\n")
        except Exception as e:
            print(f"Try again ... {e}")
    # Sensor info: loop until the NRL accepts the keys.
    ret = 0
    # BUG FIX: the default was comma-separated ("Streckeisen,STS-1,...")
    # but the code splits on '|', so the default could never resolve.
    sensor = "Streckeisen|STS-1|360 seconds"
    print("Input NRL Sensor info ....\n")
    print("E.g Manufact|model|Sensitivy\n")
    print("Guralp|CMG-40T,30s - 100Hz|800")
    print("Sercel/Mark Products|L-22D|5470 Ohms|20000 Ohms")
    print("Streckeisen|STS-1|360 seconds")
    print("Nanometrics|Trillium Compact 120 (Vault, Posthole, OBS)|754 V/m/s")
    while ret == 0:
        ques = f"Enter sensor info {str(sensor)} :"
        sensor = str(input(ques) or sensor)
        try:
            nrl.get_sensor_response(sensor.split('|'))
            ret = 1
            inst_info = f"{sensor.split('|')[0]} {sensor.split('|')[1]}"
            print("Sensor success!!!!")
        except Exception as e:
            print(f"Try again ... {e}")
    print("Getting full response...")
    try:
        response = nrl.get_response(sensor_keys=sensor.split('|'),
                                    datalogger_keys=digi.split('|'))
        print("Full response success \n\n")
    except Exception as e:
        # Without a response we would crash later with a NameError when
        # assigning chan.response, so bail out cleanly instead.
        print(f"Oops .. {e}")
        return
    #
    nstas = int(
        input("Enter number of stations to add with same sensor/digitizer "
              "(default 1):") or 1)
    for _ in range(nstas):
        ques = "Station code (" + str(scode) + ") :"
        sta_code = str(input(ques) or scode)
        ques = "Station latitude (" + str(geolat) + ") :"
        sta_lat = float(input(ques) or geolat)
        ques = "Station longitude (" + str(geolon) + ") :"
        # BUG FIX: the longitude default previously fell back to geolat.
        sta_lon = float(input(ques) or geolon)
        ques = "Station elev(" + str(geoelev) + ") :"
        sta_elev = float(input(ques) or geoelev)
        ques = "Station ondate (" + str(date) + ") :"
        sta_ondate = str(input(ques) or date)
        ques = "Station offdate (" + str(date) + ") :"
        # NOTE(review): sta_offdate is collected but never used -- the
        # Station is built without an end date. Confirm intent.
        sta_offdate = str(input(ques) or date)
        ques = "Station long name (" + str(longname) + ") :"
        sta_sitename = str(input(ques) or longname)
        sta = Station(code=sta_code,
                      latitude=sta_lat,
                      longitude=sta_lon,
                      elevation=sta_elev,
                      creation_date=UTCDateTime(sta_ondate),
                      site=Site(name=sta_sitename))
        # add station to network
        net.stations.append(sta)
        # Default chan info; each value can be overridden per channel.
        coords = {
            'latitude': sta_lat,
            'longitude': sta_lon,
            'elevation': sta_elev,
            'depth': 0.0,
            'sample_rate': sps
        }
        ques = f"Enter channel names, comma separated ({chans}) :"
        chans = str(input(ques) or chans)
        # azims/dips/locs are positional per channel; entering more
        # channel names than those globals provide raises IndexError.
        for n, chantmp in enumerate(chans.split(',')):
            print("Doing channel ", chantmp)
            aztmp = azims[n]
            diptmp = dips[n]
            loc = locs[n]
            for k in coords.keys():
                ques = str(chantmp) + " enter " + k + "(" + str(
                    coords[k]) + "):"
                coords[k] = float(input(ques) or coords[k])
            chan = Channel(code=chantmp,
                           location_code=loc,
                           latitude=coords['latitude'],
                           longitude=coords['longitude'],
                           elevation=coords['elevation'],
                           depth=coords['depth'],
                           azimuth=aztmp,
                           dip=diptmp,
                           sample_rate=coords['sample_rate'],
                           sensor=Equipment(description=inst_info))
            chan.response = response
            sta.channels.append(chan)
    inv.networks.append(net)
    inv.write(inv_name, format="STATIONXML")