def surf_4100_to_inv(location_file, response_inv, plot=False):
    """
    Combine the xyz Homestake locations and MMF calibration responses into
    an Inventory object for the 4100L.

    :param location_file: Path to a csv of sensor locations with columns
        'Sensor name', 'x_ft', 'y_ft', 'z_ft' (HMC coordinates, feet)
    :param response_inv: obspy Inventory of MMF calibration responses; must
        contain a station coded 'AVG' holding the average response for each
        of the XNX/XNY/XNZ channels
    :param plot: Unused; kept for backward compatibility with callers
    :return: obspy Inventory containing a single network 'CB'
    """
    converter = SURF_converter()
    sta_df = pd.read_csv(location_file)
    inv = Inventory()
    # Serial numbers of the three individually-calibrated accelerometers
    serial_map = {'GMF1': '21010', 'GMF2': '21015', 'GMF3': '21027'}
    inv.networks = [Network(code='CB')]
    for _, row in sta_df.iterrows():
        sta_code = row['Sensor name']
        # Station location: convert from SURF coords (HMC feet -> meters)
        # to lat/lon, but keep local coords for actual use
        lon, lat, elev = converter.to_lonlat((row['x_ft'] * 0.3048,
                                              row['y_ft'] * 0.3048,
                                              row['z_ft'] * 0.3048))
        # Just leave depth as zero; elevation carries the converted HMC value
        depth = 0.0
        # Save HMC coords to custom attributes of Station and Channel
        extra = AttribDict({
            'hmc_east': {
                'value': row['x_ft'],
                'namespace': 'smi:local/hmc'
            },
            'hmc_north': {
                'value': row['y_ft'],
                'namespace': 'smi:local/hmc'
            },
            'hmc_elev': {
                'value': row['z_ft'] * 0.3048,
                'namespace': 'smi:local/hmc'
            }
        })
        # Decide the single channel code for sources/hydrophones; None means
        # the station is a grouted accelerometer handled below.
        if sta_code.startswith('TS'):
            # Wet well: CASSM source ('SS'; Y for unspecified instrument) or
            # hydrophone (D), downhole (H) per SEED manual
            chan_code = 'XY1' if 'SS' in sta_code else 'XDH'
        elif 'S' in sta_code:
            # Grouted CASSM
            chan_code = 'XY1'
        else:
            chan_code = None
        if chan_code is not None:
            # Single-channel station with an empty placeholder response
            chans = [Channel(code=chan_code, location_code='', latitude=lat,
                             longitude=lon, elevation=elev, depth=depth,
                             response=Response())]
        else:
            # Grouted accelerometer: three components, MMF sensor metadata
            serial = serial_map.get(sta_code, '9999')
            chans = []
            for chan_code in ['XNX', 'XNY', 'XNZ']:
                # Set samp_rate to 40 kHz so that Nyquist is below max shake f
                chan = Channel(
                    code=chan_code, location_code='', latitude=lat,
                    longitude=lon, elevation=elev, depth=0.,
                    sample_rate=40000.,
                    sensor=Equipment(
                        type='IEPE Accelerometer',
                        description='Piezoelectric accelerometer',
                        manufacturer='MMF', model='KS943B.100',
                        serial_number=serial))
                # TODO(review): original comment said "apply exact response
                # for the three tested sensors, otherwise the average", but
                # only the 'AVG' response was ever applied — confirm whether
                # per-serial responses should be selected here.
                chan.response = response_inv.select(
                    station='AVG', channel=chan_code)[0][0][0].response
                chans.append(chan)
        sta = Station(code=sta_code, latitude=chans[0].latitude,
                      longitude=chans[0].longitude,
                      elevation=chans[0].elevation,
                      channels=chans)
        sta.extra = extra
        inv[0].stations.append(sta)
    return inv
def modify_invenory(self, gps_clock_corr_csv=None, orient_corr_json=None,
                    equipment_csv=None):
    """
    Modify the existing station XML files to include new metadata:

    - add equipment sensor digitizer
    - add extra metadata: GPS correction
    - add extra metadata: Orientation correction

    Args:
        gps_clock_corr_csv: csv file of GPS clock corrections, parsed by
            get_csv_correction_data()
        orient_corr_json: json file of orientation corrections, parsed by
            get_orientation_corr()
        equipment_csv: csv of sensor/digitizer equipment, fed to
            EquipmentExtractor

    Returns:
        the final station_xml file modified with new metadata: inv2_xml_file
    """
    # Construct a new inventory object of networks.
    # This will use new obspy version and new attributes:
    inv2 = Inventory(
        # We'll add networks later.
        networks=[],
        # The source should be the id whoever create the file.
        source="Geoscience Australia EFTF AusArray PST")

    # output dir for modified station inventory xml files
    out_dir = self.output_dir  # "/home/fzhang/tmpdir"

    net, sta, csv_data = get_csv_correction_data(gps_clock_corr_csv)
    net_sta, oricorr_json_data = get_orientation_corr(orient_corr_json)
    my_equip_obj = EquipmentExtractor(csvfile=equipment_csv)

    big_inv = self.inv_obj

    for a_net in big_inv.networks:
        print("The number of station-nodes in the network =",
              len(a_net.stations))
        for a_sta in a_net.stations:
            # print(a_net.code, a_sta.code)
            # this contains 328 pairs, but they are NOT unique, station code may repeat.
            a_inv = big_inv.select(
                network=a_net.code,
                station=a_sta.code)  # .copy appears to have no effect here
            # print (a_sta.code, " stations has %s channels"%len(a_sta))

            # Look up sensor equipment for this (net, sta); fall back to a
            # hard-coded default when the csv has no entry.
            _sensors = my_equip_obj.get_sensors(a_net.code, a_sta.code)
            if len(_sensors) > 0:
                sensor_desc = _sensors[0].get("Description")
                sensor_sernumb = _sensors[0].get("SerNumber")
            else:
                print("%s %s No sensors !" % (a_net.code, a_sta.code))
                # sensor_desc = "NA Sensor for (%s,%s)" % (a_net.code, a_sta.code)
                sensor_desc = "Nanometrics Trillium Compact 120s"
                sensor_sernumb = "N/A"

            # Same lookup-with-default for the digitizer.
            _digitizers = my_equip_obj.get_digitizer(
                a_net.code, a_sta.code)
            if len(_digitizers) > 0:
                dig_desc = _digitizers[0].get("Description")
                dig_sernumb = _digitizers[0].get("SerNumber")
            else:
                print("%s %s No digitizers !"
                      % (a_net.code, a_sta.code))
                #dig_desc = "NA Digitizer for (%s,%s)" % (a_net.code, a_sta.code)
                dig_desc = "Guralp Minimus"
                dig_sernumb = "N/A"

            # modify station metadata
            my_sensor = obspy.core.inventory.util.Equipment(
                type="Sensor", description=sensor_desc,
                serial_number=sensor_sernumb)
            # my_digitizer = obspy.core.inventory.util.Equipment(type="Digitizer", description="Guralp Minimus",serial_number="MIN-A456")
            my_digitizer = obspy.core.inventory.util.Equipment(
                type="Digitizer", description=dig_desc,
                serial_number=dig_sernumb)

            a_sta.equipments = [my_sensor, my_digitizer]

            # get station start_ end_date and split csv_data
            start_dt = a_sta.start_date
            end_dt = a_sta.end_date

            ajson = StationMetadataExtra(a_net.code, a_sta.code,
                                         start_datetime=start_dt,
                                         end_datetime=end_dt)

            # generate/format extra metadata from inputs
            # NOTE(review): the returned mpdf is never used — confirm
            # whether it can be dropped.
            mpdf = ajson.add_gps_correction_from_csv(csv_data)

            # updated the ajson object with more metadata, such as orientation corr
            ajson.add_orientation_correction(oricorr_json_data)

            # Dump the per-station extra metadata to a sidecar json file.
            ajson.write_metadata2json(
                os.path.join(
                    out_dir, "%s.%s_%s_extra_metadata.json" %
                    (a_net.code, a_sta.code, str(start_dt))))

            # Now, ready to write the ajson obj into new xml file
            mformat = "JSON"

            my_tag = AttribDict()
            my_tag.namespace = GA_NameSpace
            my_tag.value = ajson.make_json_string(
            )  # store all the extra metadata into a json string.

            a_sta.extra = AttribDict()
            a_sta.extra.GAMetadata = my_tag

            # prepare to write out a modified xml file
            stationxml_with_extra = '%s.%s_station_metadata_%s.xml' % (
                a_net.code, a_sta.code, mformat)

            if out_dir is not None and os.path.isdir(out_dir):
                stationxml_with_extra = os.path.join(
                    out_dir, stationxml_with_extra)

            a_inv.write(stationxml_with_extra,
                        format='STATIONXML',
                        nsmap={'GeoscienceAustralia': GA_NameSpace})

            # Problem:
            # sta_file_name2 = "%s_%s_station2.xml"%(a_net.code, a_sta.code)
            # # OA_CE28 was written 3-times!!!!!! due to multiple (OA,CE28)-station-nodes
            # There will be 119 xml files written in this loop of 328 items.
            # However, the final results missed 119 equipments!!
            # outxml2 = os.path.join(OUTPUT_DIR, sta_file_name2)
            # # inv2.networks = a_inv.networks
            # # inv2.write(outxml2,format="stationxml", validate=True)
            # nsmap={'GeoscienceAustralia': GA_NameSpace})

        # After the modification of ALL the station objects,
        # write the big inventory in new object inv2
        # NOTE(review): indentation reconstructed — this section uses a_net,
        # so it is placed at the per-network level; with more than one
        # network the output files would be overwritten per network. Confirm.
        inv2.networks = []
        inv2.networks.append(a_net)
        inv2_xml_file = os.path.join(out_dir,
                                     a_net.code + "_stations2.xml")
        inv2.write(inv2_xml_file,
                   format="stationxml",
                   nsmap={'GeoscienceAustralia': GA_NameSpace},
                   validate=True)  # every Station got equipment

        # Add responses:
        resp_obj = read_response()
        self.add_response_into_stationxml(inv2, resp_obj)

        # and the original write out again to check what has been modified?
        post_orig = os.path.join(out_dir,
                                 a_net.code + "_stations_post_orig.xml")
        big_inv.write(post_orig,
                      format="stationxml",
                      nsmap={'GeoscienceAustralia': GA_NameSpace},
                      validate=True)  # also has the Sensors etc

    return inv2_xml_file
def fsb_to_inv(path, orientations=False, debug=0):
    """
    Take excel file of sensor locations and build an Inventory

    :param path: Path to excel spreadsheet
    :param orientations: False or dict of orientation info
    :param debug: Debug verbosity; > 0 prints orientation vectors/angles
    :return: obspy Inventory with a single network 'FS'
    """
    inventory = Inventory()
    inventory.networks = [Network(code='FS')]
    converter = FSB_converter()
    sens_dict = read_fsb_asbuilt(path)
    # Assemble dictionary of {station: {channel: infoz}}
    # Create dict before, then build inventory from channel level upwards
    sta_dict = {}
    extra_dict = {}
    for sta, loc in sens_dict.items():
        # Station location
        # Convert from SURF coords to lat lon, but keep local for actual use
        lon, lat, elev = converter.to_lonlat((loc[0], loc[1], loc[2]))
        depth = 0.0  # Until we do any orientations?
        # Save HMC coords to custom attributes of Station and Channel
        extra = AttribDict({
            'ch1903_east': {
                'value': loc[0],
                'namespace': 'smi:local/hmc'
            },
            'ch1903_north': {
                'value': loc[1],
                'namespace': 'smi:local/hmc'
            },
            'ch1903_elev': {
                'value': loc[2],  # extra will preserve absolute elev
                'namespace': 'smi:local/hmc'
            }
        })
        # Not yet implemented; Pass orientations dict when we do
        if orientations:
            # TODO Something is real effed here. Answers are right though.
            # Dip from the vertical component of the unit sensor vector,
            # azimuth from the horizontal components.
            dip_rad = np.arcsin(-orientations[sta]['Sz'])
            az_rad = np.arcsin(orientations[sta]['Sx'] / np.cos(dip_rad))
            dip = np.rad2deg(dip_rad)
            az = np.rad2deg(az_rad)
            # Force positive
            if az < 0:
                az += 360.
            # Correct quadrant based on signs of Sx/Sy
            if orientations[sta]['Sx'] < 0 and orientations[sta]['Sy'] < 0:
                az -= 270.
                az = 270. - az
            elif orientations[sta]['Sy'] < 0:
                az = 180 - az
            if debug > 0:
                print(np.array((orientations[sta]['Sx'],
                                orientations[sta]['Sy'],
                                orientations[sta]['Sz'])))
                print(az, dip)
        # EAFP: when orientations is False (or the 'Sensor' entry is None),
        # subscripting/str-method calls raise TypeError and we fall back to
        # the generic per-station channel construction below.
        try:
            if orientations[sta]['Sensor'].endswith(('Z', 'X', 'Y')):
                chan = 'XN{}'.format(orientations[sta]['Sensor'][-1])
                # Geophones
                if orientations[sta]['Sensor'].startswith('G'):
                    no = orientations[sta]['Sensor'][-3]
                # Accelerometers
                else:
                    no = orientations[sta]['Sensor'].split('_')[1]
                sta_name = '{}{}'.format(orientations[sta]['Desc'], no)
                channel = Channel(code=chan, location_code='', latitude=lat,
                                  longitude=lon, elevation=elev, depth=depth,
                                  azimuth=az, dip=dip, response=Response())
                # channel.extra = extra
            elif orientations[sta]['Sensor'].startswith('Hydro'):
                chan = 'XN1'
                sta_name = '{}{}'.format(
                    orientations[sta]['Desc'],
                    orientations[sta]['Sensor'].split('-')[-1].zfill(2))
                channel = Channel(code=chan, location_code='', latitude=lat,
                                  longitude=lon, elevation=elev, depth=depth,
                                  response=Response())
        except TypeError as e:
            # No orientation info available: name the station after the raw
            # key and build generic channels.
            sta_name = sta
            if sta in fsb_accelerometers:
                # Three-component accelerometer
                channels = []
                for chan in ['XNZ', 'XNX', 'XNY']:
                    channels.append(
                        Channel(code=chan, location_code='', latitude=lat,
                                longitude=lon, elevation=elev, depth=depth,
                                response=Response()))
            else:
                channel = Channel(code='XN1', location_code='', latitude=lat,
                                  longitude=lon, elevation=elev, depth=depth,
                                  response=Response())
                # NOTE(review): indentation reconstructed — `channels` is
                # only (re)assigned here and in the accelerometer branch
                # above; on the try-success path (orientations provided)
                # `channels` appears to carry over from a previous
                # iteration. Confirm before relying on orientations=dict.
                channels = [channel]
        extra_dict[sta_name] = extra
        sta_dict[sta_name] = channels
    # Build Stations from the accumulated per-station channel lists; station
    # coordinates are taken from the first channel.
    for nm, chans in sta_dict.items():
        station = Station(code=nm, latitude=chans[0].latitude,
                          longitude=chans[0].longitude,
                          elevation=chans[0].elevation,
                          channels=chans)
        station.extra = extra_dict[nm]
        inventory[0].stations.append(station)
    return inventory