def test_get_response(self):
    response_n1_s1 = Response('RESPN1S1')
    response_n1_s2 = Response('RESPN1S2')
    response_n2_s1 = Response('RESPN2S1')
    channels_n1_s1 = [Channel(code='BHZ', location_code='',
                              latitude=0.0, longitude=0.0,
                              elevation=0.0, depth=0.0,
                              response=response_n1_s1)]
    channels_n1_s2 = [Channel(code='BHZ', location_code='',
                              latitude=0.0, longitude=0.0,
                              elevation=0.0, depth=0.0,
                              response=response_n1_s2)]
    channels_n2_s1 = [Channel(code='BHZ', location_code='',
                              latitude=0.0, longitude=0.0,
                              elevation=0.0, depth=0.0,
                              response=response_n2_s1)]
    stations_1 = [Station(code='N1S1', latitude=0.0, longitude=0.0,
                          elevation=0.0, channels=channels_n1_s1),
                  Station(code='N1S2', latitude=0.0, longitude=0.0,
                          elevation=0.0, channels=channels_n1_s2),
                  Station(code='N2S1', latitude=0.0, longitude=0.0,
                          elevation=0.0, channels=channels_n2_s1)]
    network = Network('N1', stations=stations_1)

    response = network.get_response('N1.N1S1..BHZ',
                                    UTCDateTime('2010-01-01T12:00'))
    self.assertEqual(response, response_n1_s1)
    response = network.get_response('N1.N1S2..BHZ',
                                    UTCDateTime('2010-01-01T12:00'))
    self.assertEqual(response, response_n1_s2)
    response = network.get_response('N1.N2S1..BHZ',
                                    UTCDateTime('2010-01-01T12:00'))
    self.assertEqual(response, response_n2_s1)
def test_get_response(self):
    response_n1_s1 = Response('RESPN1S1')
    response_n1_s2 = Response('RESPN1S2')
    response_n2_s1 = Response('RESPN2S1')
    channels_n1_s1 = [Channel(code='BHZ', location_code='',
                              latitude=0.0, longitude=0.0,
                              elevation=0.0, depth=0.0,
                              response=response_n1_s1)]
    channels_n1_s2 = [Channel(code='BHZ', location_code='',
                              latitude=0.0, longitude=0.0,
                              elevation=0.0, depth=0.0,
                              response=response_n1_s2)]
    channels_n2_s1 = [Channel(code='BHZ', location_code='',
                              latitude=0.0, longitude=0.0,
                              elevation=0.0, depth=0.0,
                              response=response_n2_s1)]
    stations_1 = [Station(code='N1S1', latitude=0.0, longitude=0.0,
                          elevation=0.0, channels=channels_n1_s1),
                  Station(code='N1S2', latitude=0.0, longitude=0.0,
                          elevation=0.0, channels=channels_n1_s2)]
    stations_2 = [Station(code='N2S1', latitude=0.0, longitude=0.0,
                          elevation=0.0, channels=channels_n2_s1)]
    networks = [Network('N1', stations=stations_1),
                Network('N2', stations=stations_2)]
    inv = Inventory(networks=networks, source='TEST')

    response = inv.get_response('N1.N1S1..BHZ',
                                UTCDateTime('2010-01-01T12:00'))
    assert response == response_n1_s1
    response = inv.get_response('N1.N1S2..BHZ',
                                UTCDateTime('2010-01-01T12:00'))
    assert response == response_n1_s2
    response = inv.get_response('N2.N2S1..BHZ',
                                UTCDateTime('2010-01-01T12:00'))
    assert response == response_n2_s1
def geophone_response(resonance_frequency, gain, damping=0.707,
                      output_resistance=np.inf, cable_length=np.inf,
                      cable_capacitance=np.inf, sensitivity=1,
                      stage_sequence_number=1):
    paz = corn_freq_2_paz(resonance_frequency, damp=damping)
    l = cable_length
    R = output_resistance
    C = cable_capacitance
    # A finite output resistance / cable length / capacitance adds a real
    # pole to the transfer function.
    if ((R * l * C) != np.inf) and ((R * l * C) != 0):
        pole_cable = -1 / (R * l * C)
        paz['poles'].append(pole_cable)
    i_s = InstrumentSensitivity(sensitivity, resonance_frequency,
                                input_units='M/S', output_units='M/S',
                                input_units_description='velocity',
                                output_units_description='velocity')
    pzr = PolesZerosResponseStage(stage_sequence_number, gain,
                                  resonance_frequency, 'M/S', 'M/S',
                                  'LAPLACE (RADIANS/SECOND)',
                                  resonance_frequency, paz['zeros'],
                                  paz['poles'])
    return Response(instrument_sensitivity=i_s, response_stages=[pzr])
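# A minimal usage sketch for the function above, with hypothetical values:
# a 4.5 Hz geophone with unit gain. The cable pole is skipped because the
# resistance/length/capacitance defaults are np.inf. Assumes the same
# module-level imports the function itself relies on (corn_freq_2_paz,
# InstrumentSensitivity, PolesZerosResponseStage, Response).
def _example_geophone_response():
    resp = geophone_response(resonance_frequency=4.5, gain=1.0)
    print(resp)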
def _channel_from_stats(stats):
    if stats.standard.units in UNITS:
        units = UNITS[stats.standard.units]
    else:
        units = ''
    instrument = stats.standard.instrument
    serialnum = stats.standard.sensor_serial_number
    if len(instrument) or len(serialnum):
        equipment = Equipment(type=instrument, serial_number=serialnum)
    else:
        equipment = None
    depth = 0.0
    azimuth = None
    c1 = 'horizontal_orientation' in stats.standard
    c2 = c1 and not np.isnan(stats.standard.horizontal_orientation)
    if c2:
        azimuth = stats.standard.horizontal_orientation
    else:
        azimuth = 0
    if not (azimuth >= 0 and azimuth <= 360):
        azimuth = 0
    response = None
    if 'response' in stats:
        response = stats['response']
    else:
        # we may have instrument sensitivity...
        frequency = 1 / stats['standard']['instrument_period']
        units = stats.standard.units
        if not np.isnan(stats['standard']['instrument_sensitivity']):
            sens = stats['standard']['instrument_sensitivity']
        else:
            sens = 1.0
        sensitivity = InstrumentSensitivity(sens, frequency=frequency,
                                            input_units=units,
                                            output_units='COUNTS')
        response = Response(instrument_sensitivity=sensitivity)
    comments = Comment(stats.standard.comments)
    logging.debug('channel: %s' % stats.channel)
    channel = Channel(stats.channel,
                      stats.location,
                      stats.coordinates['latitude'],
                      stats.coordinates['longitude'],
                      stats.coordinates['elevation'],
                      depth,
                      azimuth=azimuth,
                      sample_rate=stats.sampling_rate,
                      storage_format=stats.standard.source_format,
                      calibration_units=units,
                      comments=[comments],
                      response=response,
                      sensor=equipment)
    return channel
def _channel_from_stats(stats): if stats.standard.units in UNITS: units = UNITS[stats.standard.units] else: units = "" instrument = stats.standard.instrument serialnum = stats.standard.sensor_serial_number if len(instrument) or len(serialnum): equipment = Equipment(type=instrument, serial_number=serialnum) else: equipment = None depth = 0.0 azimuth = None c1 = "horizontal_orientation" in stats.standard c2 = c1 and not np.isnan(stats.standard.horizontal_orientation) if c2: azimuth = stats.standard.horizontal_orientation else: azimuth = 0 if not (azimuth >= 0 and azimuth <= 360): azimuth = 0 response = None if "response" in stats: response = stats["response"] else: # we may have instrument sensitivity... frequency = 1 / stats["standard"]["instrument_period"] units = stats.standard.units if not np.isnan(stats["standard"]["instrument_sensitivity"]): sens = stats["standard"]["instrument_sensitivity"] else: sens = 1.0 sensitivity = InstrumentSensitivity(sens, frequency=frequency, input_units=units, output_units="COUNTS") response = Response(instrument_sensitivity=sensitivity) comments = Comment(stats.standard.comments) logging.debug(f"channel: {stats.channel}") channel = Channel( stats.channel, stats.location, stats.coordinates["latitude"], stats.coordinates["longitude"], stats.coordinates["elevation"], depth, azimuth=azimuth, sample_rate=stats.sampling_rate, calibration_units=units, comments=[comments], response=response, sensor=equipment, ) return channel
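# A hedged usage sketch for _channel_from_stats. The function expects
# gmprocess-style Stats with ``standard`` and ``coordinates`` entries; all
# values below are hypothetical and exercise only the fallback path that
# builds a Response from the instrument sensitivity. Assumes the module's
# UNITS mapping and imports (np, logging, the inventory classes) are in
# scope.
def _example_channel_from_stats():
    from obspy.core.trace import Stats
    from obspy.core.util import AttribDict
    stats = Stats()
    stats.channel = 'HNZ'
    stats.location = ''
    stats.sampling_rate = 100.0
    stats.coordinates = {'latitude': 34.0, 'longitude': -118.0,
                         'elevation': 100.0}
    stats.standard = AttribDict(
        units='acc', instrument='', sensor_serial_number='',
        horizontal_orientation=np.nan, comments='',
        instrument_period=1.0, instrument_sensitivity=np.nan,
        source_format='mseed')
    return _channel_from_stats(stats)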
def test_ppsd_time_checks(self):
    """
    Some tests that make sure checking whether a new PSD slice to be added
    to an existing PPSD has an invalid overlap or not works as expected.
    """
    ppsd = PPSD(Stats(), Response())
    one_second = 1000000000
    t0 = 946684800000000000  # 2000-01-01T00:00:00
    time_diffs = [0, one_second, one_second * 2, one_second * 3,
                  one_second * 8, one_second * 9, one_second * 10]
    ppsd._times_processed = [t0 + td for td in time_diffs]
    ppsd.ppsd_length = 2
    ppsd.overlap = 0.5
    # valid time stamps to insert data for (i.e. data that overlaps with
    # existing data at most "overlap" times "ppsd_length")
    ns_ok = [
        t0 - 3 * one_second,
        t0 - 1.01 * one_second,
        t0 - one_second,
        t0 + 4 * one_second,
        t0 + 4.01 * one_second,
        t0 + 6 * one_second,
        t0 + 7 * one_second,
        t0 + 6.99 * one_second,
        t0 + 11 * one_second,
        t0 + 11.01 * one_second,
        t0 + 15 * one_second,
    ]
    for ns in ns_ok:
        t = UTCDateTime(ns=int(ns))
        # getting False means the time is not present yet and a PSD slice
        # would be added to the PPSD data
        self.assertFalse(ppsd._PPSD__check_time_present(t))
    # invalid time stamps to insert data for (i.e. data that overlaps with
    # existing data more than "overlap" times "ppsd_length")
    ns_bad = [
        t0 - 0.99 * one_second,
        t0 - 0.5 * one_second,
        t0,
        t0 + 1.1 * one_second,
        t0 + 3.99 * one_second,
        t0 + 7.01 * one_second,
        t0 + 7.5 * one_second,
        t0 + 8 * one_second,
        t0 + 8.8 * one_second,
        t0 + 10 * one_second,
        t0 + 10.99 * one_second,
    ]
    for ns in ns_bad:
        t = UTCDateTime(ns=int(ns))
        # getting True means the time is treated as already present and no
        # PSD slice would be added to the PPSD data
        self.assertTrue(ppsd._PPSD__check_time_present(t))
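# The arithmetic the test above exercises, restated as a standalone sketch:
# with ppsd_length=2 s and overlap=0.5, a candidate slice is rejected when
# its start lies strictly within overlap * ppsd_length = 1 s of an
# already-processed start time. This predicate is inferred from the
# ns_ok/ns_bad values above, not taken from PPSD's implementation.
def _would_overlap_too_much(t_ns, existing_ns, ppsd_length=2.0, overlap=0.5):
    max_overlap_ns = overlap * ppsd_length * 1e9
    return any(abs(t_ns - e_ns) < max_overlap_ns for e_ns in existing_ns)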
def accelerometer_response(resonance_frequency, gain, sensitivity=1,
                           stage_sequence_number=1, damping=0.707):
    i_s = InstrumentSensitivity(sensitivity, resonance_frequency,
                                input_units='M/S/S', output_units='M/S/S',
                                input_units_description='acceleration',
                                output_units_description='acceleration')
    paz = corn_freq_2_paz(resonance_frequency, damp=damping)
    paz['zeros'] = []
    # Build the poles-and-zeros stage from the passed-in stage number,
    # gain and resonance frequency.
    pzr = PolesZerosResponseStage(stage_sequence_number, gain,
                                  resonance_frequency, 'M/S/S', 'M/S/S',
                                  'LAPLACE (RADIANS/SECOND)',
                                  resonance_frequency, [], paz['poles'])
    return Response(instrument_sensitivity=i_s, response_stages=[pzr])
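# Usage sketch with hypothetical values: a 2.4 kHz accelerometer with unit
# gain and the default damping of 0.707, mirroring the geophone example.
def _example_accelerometer_response():
    resp = accelerometer_response(resonance_frequency=2400., gain=1.0)
    print(resp)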
def read_fdsn_station_text_file(path_or_file_object):
    """
    Function reading a FDSN station text file to an inventory object.

    :param path_or_file_object: File name or file like object.
    """
    def _read(obj):
        r = unicode_csv_reader(obj, delimiter=native_str("|"))
        header = next(r)
        header[0] = header[0].lstrip("#")
        header = [_i.strip().lower() for _i in header]
        # IRIS currently has a wrong header name. Just map it.
        header = [_i.replace("instrument", "sensordescription")
                  for _i in header]
        all_lines = []
        for line in r:
            # Skip comment lines.
            if line[0].startswith("#"):
                continue
            all_lines.append([_i.strip() for _i in line])
        return {"header": tuple(header), "content": all_lines}

    # Enable reading from files and buffers opened in binary mode.
    if (hasattr(path_or_file_object, "mode") and
            "b" in path_or_file_object.mode) or \
            isinstance(path_or_file_object, io.BytesIO):
        buf = io.StringIO(path_or_file_object.read().decode("utf-8"))
        buf.seek(0, 0)
        path_or_file_object = buf

    if hasattr(path_or_file_object, "read"):
        content = _read(path_or_file_object)
    else:
        with open(path_or_file_object, "rt", newline="",
                  encoding="utf8") as fh:
            content = _read(fh)

    # Figure out the type.
    if content["header"] == network_components:
        level = "network"
        filetypes = network_types
    elif content["header"] == station_components:
        level = "station"
        filetypes = station_types
    elif content["header"] == channel_components:
        level = "channel"
        filetypes = channel_types
    else:
        raise ValueError("Unknown type of header.")

    content = content["content"]
    converted_content = []
    # Convert all types.
    for line in content:
        converted_content.append(
            [v_type(value) for value, v_type in zip(line, filetypes)])

    # Now convert to an inventory object.
    inv = Inventory(networks=[], source=None)

    if level == "network":
        for net in converted_content:
            network = Network(
                code=net[0],
                description=net[1],
                start_date=net[2],
                end_date=net[3],
                total_number_of_stations=net[4])
            inv.networks.append(network)
    elif level == "station":
        networks = collections.OrderedDict()
        for sta in converted_content:
            site = Site(name=sta[5])
            station = Station(
                code=sta[1],
                latitude=sta[2],
                longitude=sta[3],
                elevation=sta[4],
                site=site,
                start_date=sta[6],
                end_date=sta[7])
            if sta[0] not in networks:
                networks[sta[0]] = []
            networks[sta[0]].append(station)
        for network_code, stations in networks.items():
            net = Network(code=network_code, stations=stations)
            inv.networks.append(net)
    elif level == "channel":
        networks = collections.OrderedDict()
        stations = collections.OrderedDict()
        for channel in converted_content:
            net, sta, loc, chan, lat, lng, ele, dep, azi, dip, inst, scale, \
                scale_freq, scale_units, s_r, st, et = channel
            if net not in networks:
                networks[net] = Network(code=net)
            if (net, sta) not in stations:
                station = Station(code=sta, latitude=lat, longitude=lng,
                                  elevation=ele)
                networks[net].stations.append(station)
                stations[(net, sta)] = station
            sensor = Equipment(type=inst)
            if scale is not None and scale_freq is not None:
                resp = Response(
                    instrument_sensitivity=InstrumentSensitivity(
                        value=scale, frequency=scale_freq,
                        input_units=scale_units, output_units=None))
            else:
                resp = None
            try:
                channel = Channel(
                    code=chan, location_code=loc, latitude=lat,
                    longitude=lng, elevation=ele, depth=dep, azimuth=azi,
                    dip=dip, sensor=sensor, sample_rate=s_r,
                    start_date=st, end_date=et, response=resp)
            except Exception as e:
                warnings.warn(
                    "Failed to parse channel %s.%s.%s.%s due to: %s" % (
                        net, sta, loc, chan, str(e)),
                    UserWarning)
                continue
            stations[(net, sta)].channels.append(channel)
        inv.networks.extend(list(networks.values()))
    else:
        # Cannot really happen - just a safety measure.
        raise NotImplementedError("Unknown level: %s" % str(level))
    return inv
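# Usage sketch: parse an FDSN station text file. The filename is the test
# fixture used further below and stands in for any real file. Note that
# obspy.read_inventory() auto-detects this text format and dispatches to
# the same reader.
def _example_read_station_text():
    inv = read_fdsn_station_text_file("channel_level_fdsn.txt")
    print(inv)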
def getStation(stationBlock, units, transFuncs):
    # Should probably do a check up here to see that the order given in
    # the block is consistent
    for entry in stationBlock:
        if entry.name == 'Station Identifier':
            # print('NewStation!', entry.station_call_letters)
            staDict = {
                'code': entry.station_call_letters,
                'latitude': entry.latitude,
                'longitude': entry.longitude,
                'elevation': entry.elevation,
                'channels': [],
                'site': Site(entry.site_name),
                # Allows for save
                'creation_date': UTCDateTime(entry.start_effective_date),
                'start_date': UTCDateTime(entry.start_effective_date),
                'end_date': UTCDateTime(entry.end_effective_date)
            }
            staNetCode = entry.network_code
        # If found a new channel, reset the stages
        elif entry.name == 'Channel Identifier':
            # print('NewChannel!', entry.channel_identifier)
            stages = []
            chaDict = {
                'code': entry.channel_identifier,
                'location_code': entry.location_identifier,
                'latitude': entry.latitude,
                'longitude': entry.longitude,
                'elevation': entry.elevation,
                'depth': entry.local_depth,
                'sample_rate': entry.sample_rate,
                'start_date': UTCDateTime(entry.start_date),
                'end_date': UTCDateTime(entry.end_date),
                'azimuth': entry.azimuth,
                'dip': entry.dip
            }
        # If on a new stage, set up the dictionary again
        # ...paz stage
        elif entry.name == 'Response Poles and Zeros':
            # Get units
            stageReqs = {}
            stageReqs['input_units'] = units[entry.stage_signal_input_units]
            stageReqs['output_units'] = units[entry.stage_signal_output_units]
            # Collect the poles and zeros
            lastType = 'paz'
            if entry.number_of_complex_zeros == 0:
                zeros = np.array([], dtype=float)
            else:
                zeros = (np.array(entry.real_zero, dtype=float) +
                         np.array(entry.imaginary_zero, dtype=float) * 1j)
            if entry.number_of_complex_poles == 0:
                poles = np.array([], dtype=float)
            else:
                poles = (np.array(entry.real_pole, dtype=float) +
                         np.array(entry.imaginary_pole, dtype=float) * 1j)
            # Form the paz response dictionary (also ensure arrays are 1D)
            pazDict = {
                'pz_transfer_function_type':
                    transFuncs[entry.transfer_function_types],
                'normalization_factor': entry.A0_normalization_factor,
                'normalization_frequency': entry.normalization_frequency,
                'zeros': setArrDim(zeros),
                'poles': setArrDim(poles)
            }
        # ...coeff stage
        elif entry.name == 'Response Coefficients':
            # Get units
            stageReqs = {}
            stageReqs['input_units'] = units[entry.signal_input_units]
            stageReqs['output_units'] = units[entry.signal_output_units]
            # Collect the coefficients
            lastType = 'coef'
            if entry.number_of_denominators == 0:
                denom = np.array([], dtype=float)
                denomErr = np.array([], dtype=float)
            else:
                denom = np.array(entry.denominator_coefficient, dtype=float)
                denomErr = np.array(entry.denominator_error, dtype=float)
            if entry.number_of_numerators == 0:
                numer = np.array([], dtype=float)
                numerErr = np.array([], dtype=float)
            else:
                numer = np.array(entry.numerator_coefficient, dtype=float)
                numerErr = np.array(entry.numerator_error, dtype=float)
            # Convert these arrays into lists of numbers which have
            # uncertainty (also ensure arrays are 1D)
            denomArr = genArrWithUncertainty(setArrDim(denom),
                                             setArrDim(denomErr))
            numerArr = genArrWithUncertainty(setArrDim(numer),
                                             setArrDim(numerErr))
            # Form the coefficient response dictionary
            coefDict = {
                'cf_transfer_function_type': transFuncs[entry.response_type],
                'numerator': numerArr,
                'denominator': denomArr
            }
        # Get the decimation sampling info
        elif entry.name == 'Decimation':
            stageReqs['decimation_input_sample_rate'] = Frequency(
                entry.input_sample_rate)
            stageReqs['decimation_factor'] = entry.decimation_factor
            stageReqs['decimation_offset'] = entry.decimation_offset
            stageReqs['decimation_delay'] = FloatWithUncertaintiesAndUnit(
                entry.estimated_delay)
            stageReqs['decimation_correction'] = \
                FloatWithUncertaintiesAndUnit(entry.correction_applied)
        # Get the stage sensitivity
        elif entry.name == 'Channel Sensitivity Gain':
            if entry.stage_sequence_number != 0:
                stageReqs['stage_sequence_number'] = \
                    entry.stage_sequence_number
                stageReqs['stage_gain'] = entry.sensitivity_gain
                stageReqs['stage_gain_frequency'] = entry.frequency
                # See what type of stage this was
                if lastType == 'paz':
                    pazDict.update(stageReqs)
                    stages.append(PolesZerosResponseStage(**pazDict))
                else:
                    coefDict.update(stageReqs)
                    stages.append(CoefficientsTypeResponseStage(**coefDict))
            # If on the last stage, send off the collected stage info
            else:
                if len(stages) > 0:
                    instrSens = InstrumentSensitivity(
                        entry.sensitivity_gain, entry.frequency,
                        stages[0].input_units, stages[-1].output_units)
                    # Finalize the channel dictionary
                    chaResp = Response(response_stages=stages,
                                       instrument_sensitivity=instrSens)
                    chaDict['response'] = chaResp
                # ...and append this channel to the station dictionary
                staDict['channels'].append(Channel(**chaDict))
    # Return the station to the list of stations (also track the network
    # code)
    return Station(**staDict), staNetCode
def surf_stations_to_inv(excel_file, debug=0):
    """
    Take Petr's orientation excel file for the hydrophones/accelerometers
    and build an inventory for later use.

    :param excel_file: path to Petr's excel file (formatting hard-coded)
    :return: obspy.core.Inventory
    """
    # Call coordinate converter
    converter = SURF_converter()
    sta_df = pd.read_excel(excel_file, skiprows=[0, 1, 2, 3], header=1,
                           nrows=90)
    # Assemble dictionary of {station: {channel: infoz}}
    # Create dict before, then build inventory from channel level upwards
    sta_dict = {}
    extra_dict = {}
    for i, row in sta_df.iterrows():
        # Station location
        # Convert from SURF coords to lat lon, but keep local for actual use
        lon, lat, elev = converter.to_lonlat((row['Easting(m)'],
                                              row['Northing(m)'],
                                              row['Elev(m)']))
        # Correct for arbitrary zero 'depth' of 130m
        elev -= 130
        # Already accounted for in the elevation but will include here as
        # it's a required arg for Channel()
        depth = row['Depth (m)']
        # Save HMC coords to custom attributes of Station and Channel
        extra = AttribDict({
            'hmc_east': {
                'value': row['Easting(m)'],
                'namespace': 'smi:local/hmc'
            },
            'hmc_north': {
                'value': row['Northing(m)'],
                'namespace': 'smi:local/hmc'
            },
            'hmc_elev': {
                'value': row['Elev(m)'],  # extra will preserve absolute elev
                'namespace': 'smi:local/hmc'
            }
        })
        # Sort out azimuth and dip for this channel (if it exists)
        if not np.isnan(row['Sx']):
            # TODO Something is real effed here. Answers are right though.
            dip_rad = np.arcsin(-row['Sz'])
            az_rad = np.arcsin(row['Sx'] / np.cos(dip_rad))
            dip = np.rad2deg(dip_rad)
            az = np.rad2deg(az_rad)
            # Force positive
            if az < 0:
                az += 360.
            # Correct
            if row['Sx'] < 0 and row['Sy'] < 0:
                az -= 270.
                az = 270. - az
            elif row['Sy'] < 0:
                az = 180 - az
            if debug > 0:
                print(np.array((row['Sx'], row['Sy'], row['Sz'])))
                print(az, dip)
        if row['Sensor'].endswith(('Z', 'X', 'Y')):
            chan = 'XN{}'.format(row['Sensor'][-1])
            # Geophones
            if row['Sensor'].startswith('G'):
                continue
            # Accelerometers
            else:
                no = row['Sensor'].split('_')[1]
            sta_name = '{}{}'.format(row['Desc'], no)
            if sta_name in ['OB14', 'OT17', 'PDT2', 'PDT5', 'PSB8',
                            'PST11']:
                # These are geode stations only, skip
                continue
            channel = Channel(code=chan, location_code='', latitude=lat,
                              longitude=lon, elevation=elev, depth=depth,
                              azimuth=az, dip=dip, response=Response())
            # channel.extra = extra
        elif row['Sensor'].startswith('Hydro'):
            chan = 'XN1'
            sta_name = '{}{}'.format(
                row['Desc'], row['Sensor'].split('-')[-1].zfill(2))
            channel = Channel(code=chan, location_code='', latitude=lat,
                              longitude=lon, elevation=elev, depth=depth,
                              response=Response())
        extra_dict[sta_name] = extra
        # channel.extra = extra
        if sta_name in sta_dict.keys():
            sta_dict[sta_name].append(channel)
        else:
            sta_dict[sta_name] = [channel]
    # Now loop station dict to create inventory
    stas = []
    for nm, chans in sta_dict.items():
        station = Station(code=nm, latitude=chans[0].latitude,
                          longitude=chans[0].longitude,
                          elevation=chans[0].elevation,
                          channels=chans)
        station.extra = extra_dict[nm]
        stas.append(station)
    # Build inventory
    inventory = Inventory(networks=[Network(code='SV', stations=stas)],
                          source='SURF')
    return inventory
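# Usage sketch (hypothetical path): build the SURF inventory from Petr's
# orientation spreadsheet and save it as StationXML for later use.
def _example_surf_inventory():
    inv = surf_stations_to_inv('petr_orientations.xlsx')
    inv.write('surf_stations.xml', format='STATIONXML')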
def surf_4100_to_inv(location_file, response_inv, plot=False):
    """
    Combine the xyz Homestake locations and MMF calibration responses into
    an Inventory object for the 4100L
    """
    converter = SURF_converter()
    sta_df = pd.read_csv(location_file)
    inv = Inventory()
    serial_map = {'GMF1': '21010', 'GMF2': '21015', 'GMF3': '21027'}
    inv.networks = [Network(code='CB')]
    for _, row in sta_df.iterrows():
        print(row)
        sta_code = row['Sensor name']
        # Station location
        # Convert from SURF coords to lat lon, but keep local for actual use
        lon, lat, elev = converter.to_lonlat((row['x_ft'] * 0.3048,
                                              row['y_ft'] * 0.3048,
                                              row['z_ft'] * 0.3048))
        print(lon, lat, elev)
        # Just leave as zero here and convert HMC feet elevation to m
        depth = 0.0
        # Save HMC coords to custom attributes of Station and Channel
        extra = AttribDict({
            'hmc_east': {
                'value': row['x_ft'],
                'namespace': 'smi:local/hmc'
            },
            'hmc_north': {
                'value': row['y_ft'],
                'namespace': 'smi:local/hmc'
            },
            'hmc_elev': {
                'value': row['z_ft'] * 0.3048,
                'namespace': 'smi:local/hmc'
            }
        })
        if sta_code.startswith('TS'):
            # Hydrophone or CASSM, wet well
            if 'SS' in sta_code:
                # CASSM (Y for unspecified instrument)
                chan_code = 'XY1'
                chans = [Channel(code=chan_code, location_code='',
                                 latitude=lat, longitude=lon,
                                 elevation=elev, depth=depth,
                                 response=Response())]
            else:
                # Hydrophone (D), Downhole (H) per SEED manual
                chan_code = 'XDH'
                chans = [Channel(code=chan_code, location_code='',
                                 latitude=lat, longitude=lon,
                                 elevation=elev, depth=depth,
                                 response=Response())]
        elif 'S' in sta_code:
            # Grouted CASSM
            chan_code = 'XY1'
            chans = [Channel(code=chan_code, location_code='',
                             latitude=lat, longitude=lon,
                             elevation=elev, depth=depth,
                             response=Response())]
        else:
            # Grouted accelerometer
            chans = []
            try:
                serial = serial_map[sta_code]
            except KeyError:
                serial = '9999'
            for chan_code in ['XNX', 'XNY', 'XNZ']:
                # Set samp_rate to 40 kHz so that Nyquist is below max
                # shake frequency
                chan = Channel(code=chan_code, location_code='',
                               latitude=lat, longitude=lon,
                               elevation=elev, depth=0.,
                               sample_rate=40000.,
                               sensor=Equipment(
                                   type='IEPE Accelerometer',
                                   description='Piezoelectric accelerometer',
                                   manufacturer='MMF',
                                   model='KS943B.100',
                                   serial_number=serial))
                # Apply the averaged response for all sensors here
                # (per-serial responses for the three tested units live in
                # response_inv as well)
                avg_resp = response_inv.select(
                    station='AVG', channel=chan_code)[0][0][0].response
                chan.response = avg_resp
                chans.append(chan)
        sta = Station(code=sta_code, latitude=chans[0].latitude,
                      longitude=chans[0].longitude,
                      elevation=chans[0].elevation,
                      channels=chans)
        sta.extra = extra
        inv[0].stations.append(sta)
    return inv
def fsb_to_inv(path, orientations=False, debug=0):
    """
    Take excel file of sensor locations and build an Inventory

    :param path: Path to excel spreadsheet
    :param orientations: False or dict of orientation info
    :param debug: Debug verbosity flag
    :return: obspy.core.Inventory
    """
    inventory = Inventory()
    inventory.networks = [Network(code='FS')]
    converter = FSB_converter()
    sens_dict = read_fsb_asbuilt(path)
    # Assemble dictionary of {station: {channel: infoz}}
    # Create dict before, then build inventory from channel level upwards
    sta_dict = {}
    extra_dict = {}
    for sta, loc in sens_dict.items():
        # Station location
        # Convert from SURF coords to lat lon, but keep local for actual use
        lon, lat, elev = converter.to_lonlat((loc[0], loc[1], loc[2]))
        depth = 0.0  # Until we do any orientations?
        # Save HMC coords to custom attributes of Station and Channel
        extra = AttribDict({
            'ch1903_east': {
                'value': loc[0],
                'namespace': 'smi:local/hmc'
            },
            'ch1903_north': {
                'value': loc[1],
                'namespace': 'smi:local/hmc'
            },
            'ch1903_elev': {
                'value': loc[2],  # extra will preserve absolute elev
                'namespace': 'smi:local/hmc'
            }
        })
        # Not yet implemented; pass orientations dict when we do
        if orientations:
            # TODO Something is real effed here. Answers are right though.
            dip_rad = np.arcsin(-orientations[sta]['Sz'])
            az_rad = np.arcsin(orientations[sta]['Sx'] / np.cos(dip_rad))
            dip = np.rad2deg(dip_rad)
            az = np.rad2deg(az_rad)
            # Force positive
            if az < 0:
                az += 360.
            # Correct
            if orientations[sta]['Sx'] < 0 and orientations[sta]['Sy'] < 0:
                az -= 270.
                az = 270. - az
            elif orientations[sta]['Sy'] < 0:
                az = 180 - az
            if debug > 0:
                print(np.array((orientations[sta]['Sx'],
                                orientations[sta]['Sy'],
                                orientations[sta]['Sz'])))
                print(az, dip)
        try:
            if orientations[sta]['Sensor'].endswith(('Z', 'X', 'Y')):
                chan = 'XN{}'.format(orientations[sta]['Sensor'][-1])
                # Geophones
                if orientations[sta]['Sensor'].startswith('G'):
                    no = orientations[sta]['Sensor'][-3]
                # Accelerometers
                else:
                    no = orientations[sta]['Sensor'].split('_')[1]
                sta_name = '{}{}'.format(orientations[sta]['Desc'], no)
                channel = Channel(code=chan, location_code='',
                                  latitude=lat, longitude=lon,
                                  elevation=elev, depth=depth,
                                  azimuth=az, dip=dip,
                                  response=Response())
                # channel.extra = extra
            elif orientations[sta]['Sensor'].startswith('Hydro'):
                chan = 'XN1'
                sta_name = '{}{}'.format(
                    orientations[sta]['Desc'],
                    orientations[sta]['Sensor'].split('-')[-1].zfill(2))
                channel = Channel(code=chan, location_code='',
                                  latitude=lat, longitude=lon,
                                  elevation=elev, depth=depth,
                                  response=Response())
            # Collect into a list so the station-building loop below sees
            # a consistent ``channels`` variable
            channels = [channel]
        except TypeError:
            # orientations=False indexes like a bool and lands here
            sta_name = sta
            if sta in fsb_accelerometers:
                channels = []
                for chan in ['XNZ', 'XNX', 'XNY']:
                    channels.append(
                        Channel(code=chan, location_code='', latitude=lat,
                                longitude=lon, elevation=elev, depth=depth,
                                response=Response()))
            else:
                channel = Channel(code='XN1', location_code='',
                                  latitude=lat, longitude=lon,
                                  elevation=elev, depth=depth,
                                  response=Response())
                channels = [channel]
        extra_dict[sta_name] = extra
        sta_dict[sta_name] = channels
    for nm, chans in sta_dict.items():
        station = Station(code=nm, latitude=chans[0].latitude,
                          longitude=chans[0].longitude,
                          elevation=chans[0].elevation,
                          channels=chans)
        station.extra = extra_dict[nm]
        inventory[0].stations.append(station)
    return inventory
def MMF_calibration_to_response(directory, plot=False):
    """
    Take directory of MMF calibration spreadsheets and convert to Obspy
    inventory object
    """
    inv = Inventory(networks=[Network(code='MMF')])
    lat = Latitude(0.)
    lon = Longitude(0.)
    chan_map = {'Tabellenblatt3': 'X',
                'Tabellenblatt4': 'Y',
                'Tabellenblatt5': 'Z'}
    calibs = glob('{}/*.xls'.format(directory))
    avg_amp = {'XNZ': [], 'XNY': [], 'XNX': []}
    avg_phase = {'XNZ': [], 'XNY': [], 'XNX': []}
    avg_sensitivity = {'XNZ': [], 'XNY': [], 'XNX': []}
    avg_freq = []
    for c in calibs:
        serial = c.split()[-2]
        sta = Station(code=serial[1:], latitude=lat, longitude=lon,
                      elevation=0.)
        # Tables slightly shifted for each channel due to comments
        dict_xyz = pd.read_excel(c, sheet_name=['Tabellenblatt3'],
                                 header=14, usecols=list(np.arange(4, 14)),
                                 nrows=37)
        dict_xyz.update(
            pd.read_excel(c, sheet_name=['Tabellenblatt4'], header=14,
                          usecols=list(np.arange(5, 15)), nrows=37))
        dict_xyz.update(
            pd.read_excel(c, sheet_name=['Tabellenblatt5'], header=13,
                          usecols=list(np.arange(9, 20)), nrows=37))
        # Get array of sensitivities at 80 Hz for X, Y, Z
        sens = pd.read_excel(
            c, sheet_name=['Tabellenblatt2'], header=84, usecols=[27],
            nrows=3)['Tabellenblatt2'].values.squeeze()
        # mV/m/s**2 to V/m/s**2
        sens_dict = {
            'Tabellenblatt3': float(sens[0].replace(',', '.')) * 1e-3,
            'Tabellenblatt4': float(sens[1].replace(',', '.')) * 1e-3,
            'Tabellenblatt5': float(sens[2].replace(',', '.')) * 1e-3
        }
        # Resp for each channel
        for nm, df in dict_xyz.items():
            # Dummy channel
            chan_code = 'XN{}'.format(chan_map[nm])
            # Set samp_rate to 40 kHz so that Nyquist is below max shake
            # freq
            chan = Channel(code=chan_code, location_code='', latitude=lat,
                           longitude=lon, elevation=0., depth=0.,
                           sample_rate=40000.,
                           sensor=Equipment(
                               type='IEPE Accelerometer',
                               description='Piezoelectric accelerometer',
                               manufacturer='MMF', model='KS943B.100',
                               serial_number=serial))
            values = df[['[Hz]', '[m/s²]', '[°]']].values
            # Add to dict for average channel estimate later
            avg_amp[chan_code].append(values[:, 1])
            avg_phase[chan_code].append(values[:, 2])
            avg_sensitivity[chan_code].append(float(sens_dict[nm]))
            avg_freq = values[:, 0]
            response_elements = [
                ResponseListElement(frequency=values[i][0],
                                    amplitude=values[i][1],
                                    phase=values[i][2])
                for i in range(values.shape[0])
            ]
            # Add a value at zero to avoid deconvolution errors
            response_elements.insert(
                0, ResponseListElement(frequency=0.,
                                       amplitude=values[0][1],
                                       phase=values[0][2]))
            resp_stage = ResponseListResponseStage(
                response_list_elements=response_elements,
                stage_gain=1,
                stage_gain_frequency=80.,
                input_units='M/S**2',
                output_units='V',
                stage_sequence_number=1)
            sensitivity = InstrumentSensitivity(
                value=float(sens_dict[nm]), frequency=80.,
                input_units='M/S**2', output_units='V',
                frequency_range_start=5, frequency_range_end=15850,
                frequency_range_db_variation=3)
            response = Response(instrument_sensitivity=sensitivity,
                                response_stages=[resp_stage])
            chan.response = response
            sta.channels.append(chan)
            # chan.response.plot(min_freq=2.4, sampling_rate=40000.)
        inv[0].stations.append(sta)
    # Now make an 'average' channel for the other sensors
    avg_sta = Station(code='AVG', latitude=lat, longitude=lon,
                      elevation=0.)
    for c in ['XNX', 'XNY', 'XNZ']:
        chan = Channel(code=c, location_code='', latitude=lat,
                       longitude=lon, elevation=0., depth=0.,
                       sample_rate=40000.,
                       sensor=Equipment(
                           type='IEPE Accelerometer',
                           description='Piezoelectric accelerometer',
                           manufacturer='MMF', model='KS943B.100',
                           serial_number='9999'))
        amp = np.array(avg_amp[c]).mean(axis=0)
        pha = np.array(avg_phase[c]).mean(axis=0)
        response_elements = [
            ResponseListElement(frequency=avg_freq[i], amplitude=amp[i],
                                phase=pha[i])
            for i in range(avg_freq.size)
        ]
        # Add a value at zero to avoid deconvolution errors
        response_elements.insert(
            0, ResponseListElement(frequency=0., amplitude=amp[0],
                                   phase=pha[0]))
        resp_stage = ResponseListResponseStage(
            response_list_elements=response_elements,
            stage_gain=1,
            stage_gain_frequency=80.,
            input_units='M/S**2',
            output_units='V',
            stage_sequence_number=1)
        sensitivity = InstrumentSensitivity(
            value=np.array(avg_sensitivity[c]).mean(), frequency=80.,
            input_units='M/S**2', output_units='V',
            frequency_range_start=5, frequency_range_end=15850,
            frequency_range_db_variation=3)
        response = Response(instrument_sensitivity=sensitivity,
                            response_stages=[resp_stage])
        chan.response = response
        avg_sta.channels.append(chan)
    inv[0].stations.append(avg_sta)
    if plot:
        inv.plot_response(min_freq=2.4, plot_degrees=True)
    return inv
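# Usage sketch (hypothetical paths): build the MMF calibration responses
# first, then combine them with the 4100L sensor locations as in
# surf_4100_to_inv above.
def _example_4100_inventory():
    resp_inv = MMF_calibration_to_response('mmf_calibration_sheets')
    inv = surf_4100_to_inv('4100_sensor_locations.csv', resp_inv)
    inv.write('surf_4100.xml', format='STATIONXML')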
def test_write_stationtxt(self):
    """
    Test writing stationtxt at channel level
    """
    # Manually create a test Inventory object.
    resp_1 = Response(
        instrument_sensitivity=InstrumentSensitivity(
            frequency=0.02, input_units="M/S", output_units=None,
            value=8.48507E8))
    resp_2 = Response(
        instrument_sensitivity=InstrumentSensitivity(
            frequency=1.0, input_units="M/S**2", output_units=None,
            value=53435.4))
    resp_3 = Response(
        instrument_sensitivity=InstrumentSensitivity(
            frequency=0.03, input_units="M/S", output_units=None,
            value=6.27252E8))
    test_inv = Inventory(
        source=None,
        networks=[
            Network(
                code="IU",
                start_date=obspy.UTCDateTime("1988-01-01T00:00:00"),
                end_date=obspy.UTCDateTime("2500-12-31T23:59:59"),
                total_number_of_stations=1,
                description="Global Seismograph Network (GSN - IRIS/USGS)",
                stations=[
                    Station(
                        code="ANMO",
                        latitude=34.9459,
                        longitude=-106.4572,
                        elevation=1850.0,
                        channels=[
                            Channel(
                                code="BCI",
                                location_code="",
                                latitude=34.9459,
                                longitude=-106.4572,
                                elevation=1850.0,
                                depth=100.0,
                                azimuth=0.0,
                                dip=0.0,
                                sample_rate=0.0,
                                sensor=Equipment(
                                    description="Geotech KS-36000-I "
                                                "Borehole Seismometer"),
                                start_date=obspy.UTCDateTime(
                                    "1989-08-29T00:00:00"),
                                end_date=obspy.UTCDateTime(
                                    "1995-02-01T00:00:00"),
                                response=resp_1),
                            Channel(
                                code="LNZ",
                                location_code="20",
                                latitude=34.9459,
                                longitude=-106.4572,
                                elevation=1820.7,
                                depth=0.0,
                                azimuth=0.0,
                                dip=-90.0,
                                sample_rate=0.0,
                                sensor=Equipment(
                                    description="Titan Accelerometer"),
                                start_date=obspy.UTCDateTime(
                                    "2013-06-20T16:30:00"),
                                response=resp_2),
                        ]),
                ]),
            Network(
                code="6E",
                start_date=obspy.UTCDateTime("2013-01-01T00:00:00"),
                end_date=obspy.UTCDateTime("2016-12-31T23:59:59"),
                total_number_of_stations=1,
                description="Wabash Valley Seismic Zone",
                stations=[
                    Station(
                        code="SH01",
                        latitude=37.7457,
                        longitude=-88.1368,
                        elevation=126.0,
                        channels=[
                            Channel(
                                code="LOG",
                                location_code="",
                                latitude=37.7457,
                                longitude=-88.1368,
                                elevation=126.0,
                                depth=0.0,
                                azimuth=0.0,
                                dip=0.0,
                                sample_rate=0.0,
                                sensor=Equipment(
                                    description="Reftek 130 Datalogger"),
                                start_date=obspy.UTCDateTime(
                                    "2013-11-23T00:00:00"),
                                end_date=obspy.UTCDateTime(
                                    "2016-12-31T23:59:59"),
                                response=resp_3)
                        ]),
                ])
        ])

    # CHANNEL level test
    stio = io.StringIO()
    test_inv.write(stio, format="STATIONTXT", level="CHANNEL")
    # check contents
    content = stio.getvalue()
    expected = [
        ("Network|Station|Location|Channel|Latitude|Longitude|"
         "Elevation|Depth|Azimuth|Dip|SensorDescription|Scale|"
         "ScaleFreq|ScaleUnits|SampleRate|StartTime|EndTime"),
        ("IU|ANMO||BCI|34.9459|-106.4572|1850.0|100.0|0.0|"
         "0.0|Geotech KS-36000-I Borehole Seismometer|"
         "848507000.0|0.02|M/S|0.0|1989-08-29T00:00:00|"
         "1995-02-01T00:00:00"),
        ("IU|ANMO|20|LNZ|34.9459|-106.4572|1820.7|0.0|0.0|"
         "-90.0|Titan Accelerometer|53435.4|1.0|M/S**2|0.0|"
         "2013-06-20T16:30:00|"),
        ("6E|SH01||LOG|37.7457|-88.1368|126.0|0.0|0.0|0.0|"
         "Reftek 130 Datalogger|627252000.0|0.03|M/S|0.0|"
         "2013-11-23T00:00:00|2016-12-31T23:59:59"),
    ]
    num_lines_written = 0
    for line in expected:
        self.assertIn(line, content)
        num_lines_written = num_lines_written + 1
    # assert that the number of lines written equals
    # the number of lines expected
    self.assertEqual(num_lines_written, len(expected))

    # STATION level test
    stio = io.StringIO()
    test_inv.write(stio, format="STATIONTXT", level="STATION")
    # check contents
    content = stio.getvalue()
    expected = [
        ("Network|Station|Latitude|Longitude|"
         "Elevation|SiteName|StartTime|EndTime"),
        ("IU|ANMO|34.9459|-106.4572|1850.0||"),
        ("6E|SH01|37.7457|-88.1368|126.0||"),
    ]
    num_lines_written = 0
    for line in expected:
        self.assertIn(line, content)
        num_lines_written = num_lines_written + 1
    # assert that the number of lines written equals
    # the number of lines expected
    self.assertEqual(num_lines_written, len(expected))

    # NETWORK level test
    stio = io.StringIO()
    test_inv.write(stio, format="STATIONTXT", level="NETWORK")
    # check contents
    content = stio.getvalue()
    expected = [
        ("Network|Description|StartTime|EndTime|TotalStations"),
        ("IU|Global Seismograph Network (GSN - IRIS/USGS)|"
         "1988-01-01T00:00:00|2500-12-31T23:59:59|1"),
        ("6E|Wabash Valley Seismic Zone|"
         "2013-01-01T00:00:00|2016-12-31T23:59:59|1"),
    ]
    num_lines_written = 0
    for line in expected:
        self.assertIn(line, content)
        num_lines_written = num_lines_written + 1
    # assert that the number of lines written equals
    # the number of lines expected
    self.assertEqual(num_lines_written, len(expected))
def test_reading_channel_file(self):
    """
    Test reading a file at the channel level.
    """
    resp_1 = Response(
        instrument_sensitivity=InstrumentSensitivity(
            frequency=0.02, input_units="M/S", output_units=None,
            value=4.88233E8))
    resp_2 = Response(
        instrument_sensitivity=InstrumentSensitivity(
            frequency=0.03, input_units="M/S", output_units=None,
            value=4.98112E8))
    resp_3 = Response(
        instrument_sensitivity=InstrumentSensitivity(
            frequency=0.03, input_units="M/S", output_units=None,
            value=6.27252E8))
    # Manually create an expected Inventory object.
    expected_inv = Inventory(
        source=None,
        networks=[
            Network(
                code="AK",
                stations=[
                    Station(
                        code="BAGL",
                        latitude=60.4896,
                        longitude=-142.0915,
                        elevation=1470,
                        channels=[
                            Channel(
                                code="LHZ",
                                location_code="",
                                latitude=60.4896,
                                longitude=-142.0915,
                                elevation=1470,
                                depth=0.0,
                                azimuth=0.0,
                                dip=-90.0,
                                sample_rate=1.0,
                                sensor=Equipment(
                                    type="Nanometrics Trillium 240 Sec "
                                         "Response sn 400 and a"),
                                start_date=obspy.UTCDateTime(
                                    "2013-01-01T00:00:00"),
                                end_date=obspy.UTCDateTime(
                                    "2599-12-31T23:59:59"),
                                response=resp_1)
                        ]),
                    Station(
                        code="BWN",
                        latitude=64.1732,
                        longitude=-149.2991,
                        elevation=356.0,
                        channels=[
                            Channel(
                                code="LHZ",
                                location_code="",
                                latitude=64.1732,
                                longitude=-149.2991,
                                elevation=356.0,
                                depth=0.0,
                                azimuth=0.0,
                                dip=-90.0,
                                sample_rate=1.0,
                                sensor=Equipment(
                                    type="Nanometrics Trillium 240 Sec "
                                         "Response sn 400 and a"),
                                start_date=obspy.UTCDateTime(
                                    "2010-07-23T00:00:00"),
                                end_date=obspy.UTCDateTime(
                                    "2014-05-28T23:59:59"),
                                response=resp_1),
                            Channel(
                                code="LHZ",
                                location_code="",
                                latitude=64.1732,
                                longitude=-149.2991,
                                elevation=356.0,
                                depth=1.5,
                                azimuth=0.0,
                                dip=-90.0,
                                sample_rate=1.0,
                                sensor=Equipment(
                                    type="Nanometrics Trillium 120 Sec "
                                         "Response/Quanterra 33"),
                                start_date=obspy.UTCDateTime(
                                    "2014-08-01T00:00:00"),
                                end_date=obspy.UTCDateTime(
                                    "2599-12-31T23:59:59"),
                                response=resp_2)
                        ])
                ]),
            Network(
                code="AZ",
                stations=[
                    Station(
                        code="BZN",
                        latitude=33.4915,
                        longitude=-116.667,
                        elevation=1301.0,
                        channels=[
                            Channel(
                                code="LHZ",
                                location_code="",
                                latitude=33.4915,
                                longitude=-116.667,
                                elevation=1301.0,
                                depth=0.0,
                                azimuth=0.0,
                                dip=-90.0,
                                sample_rate=1.0,
                                sensor=Equipment(
                                    type="Streckeisen STS-2 G1/Quanterra "
                                         "330 Linear Phase Be"),
                                start_date=obspy.UTCDateTime(
                                    "2010-07-26T17:22:00"),
                                end_date=obspy.UTCDateTime(
                                    "2013-07-15T21:22:23"),
                                response=resp_3),
                            Channel(
                                code="LHZ",
                                location_code="",
                                latitude=33.4915,
                                longitude=-116.667,
                                elevation=1301.0,
                                depth=0.0,
                                azimuth=0.0,
                                dip=-90.0,
                                sample_rate=1.0,
                                sensor=Equipment(
                                    type="Streckeisen STS-2 G1/Quanterra "
                                         "330 Linear Phase Be"),
                                start_date=obspy.UTCDateTime(
                                    "2013-07-15T21:22:23"),
                                end_date=obspy.UTCDateTime(
                                    "2013-10-22T19:30:00"),
                                response=resp_3),
                            Channel(
                                code="LHZ",
                                location_code="",
                                latitude=33.4915,
                                longitude=-116.667,
                                elevation=1301.0,
                                depth=0.0,
                                azimuth=0.0,
                                dip=-90.0,
                                sample_rate=1.0,
                                sensor=Equipment(
                                    type="Streckeisen STS-2 G1/Quanterra "
                                         "330 Linear Phase Be"),
                                start_date=obspy.UTCDateTime(
                                    "2013-10-22T19:30:00"),
                                end_date=obspy.UTCDateTime(
                                    "2599-12-31T23:59:59"),
                                response=resp_3)
                        ])
                ])
        ])

    # Read from a filename.
    filename = os.path.join(self.data_dir, "channel_level_fdsn.txt")
    inv = read_fdsn_station_text_file(filename)
    inv_obs = obspy.read_inventory(filename)
    # Copy creation date as it will be slightly different otherwise.
    inv.created = expected_inv.created
    inv_obs.created = expected_inv.created
    self.assertEqual(inv, expected_inv)
    self.assertEqual(inv_obs, expected_inv)

    # Read from open file in text mode.
    with open(filename, "rt", encoding="utf8") as fh:
        inv = read_fdsn_station_text_file(fh)
        fh.seek(0, 0)
        inv_obs = obspy.read_inventory(fh)
    inv.created = expected_inv.created
    inv_obs.created = expected_inv.created
    self.assertEqual(inv, expected_inv)
    self.assertEqual(inv_obs, expected_inv)

    # Read from open file in binary mode.
    with open(filename, "rb") as fh:
        inv = read_fdsn_station_text_file(fh)
        fh.seek(0, 0)
        inv_obs = obspy.read_inventory(fh)
    inv.created = expected_inv.created
    inv_obs.created = expected_inv.created
    self.assertEqual(inv, expected_inv)
    self.assertEqual(inv_obs, expected_inv)

    # Read from StringIO.
    with open(filename, "rt", encoding="utf8") as fh:
        with io.StringIO(fh.read()) as buf:
            buf.seek(0, 0)
            inv = read_fdsn_station_text_file(buf)
            buf.seek(0, 0)
            inv_obs = obspy.read_inventory(buf)
    inv.created = expected_inv.created
    inv_obs.created = expected_inv.created
    self.assertEqual(inv, expected_inv)
    self.assertEqual(inv_obs, expected_inv)

    # Read from BytesIO.
    with open(filename, "rb") as fh:
        with io.BytesIO(fh.read()) as buf:
            buf.seek(0, 0)
            inv = read_fdsn_station_text_file(buf)
            buf.seek(0, 0)
            inv_obs = obspy.read_inventory(buf)
    inv.created = expected_inv.created
    inv_obs.created = expected_inv.created
    self.assertEqual(inv, expected_inv)
    self.assertEqual(inv_obs, expected_inv)