def create_obs_network(self):
    obs_stations = self.read_stations()
    for errmsg, logtype in sorted(list(self.unique_errors)):
        if logtype == 'error':
            LOGGER.error(errmsg)
        else:
            LOGGER.warning(errmsg)
    if obs_stations:
        obs_network = inventory.Network(
            self.experiment_t[0]['net_code_s'])
        obs_network.description = self.experiment_t[0]['longname_s']
        start_time, end_time = self.get_network_date()
        obs_network.start_date = UTCDateTime(start_time)
        obs_network.end_date = UTCDateTime(end_time)
        obs_network.total_number_of_stations = self.total_number_stations
        extra = AttribDict({
            'PH5ReportNum': {
                'value': self.experiment_t[0]['experiment_id_s'],
                'namespace': self.manager.iris_custom_ns,
                'type': 'attribute'
            }
        })
        obs_network.extra = extra
        obs_network.stations = obs_stations
        return obs_network
    else:
        return
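For context, here is a minimal, self-contained sketch of the same pattern: attaching a custom-namespace attribute to a Network through its extra AttribDict and registering the namespace prefix at write time via ObsPy's nsmap argument. The namespace URI, report number, and output file name below are placeholders, not values from the PH5 code above.

# Sketch: custom-namespace 'extra' attribute on a Network (illustrative values).
from obspy import UTCDateTime
from obspy.core import inventory
from obspy.core.util import AttribDict

CUSTOM_NS = "http://example.org/xmlns/custom/1.0"  # placeholder namespace URI

net = inventory.Network("XX")
net.start_date = UTCDateTime(2019, 1, 1)
net.extra = AttribDict({
    'PH5ReportNum': {
        'value': '19-001',          # placeholder report number
        'namespace': CUSTOM_NS,
        'type': 'attribute'         # serialize as XML attribute, not a child tag
    }
})
inv = inventory.Inventory(networks=[net], source="example")
# Register a readable prefix for the custom namespace in the output file.
inv.write("network_extra.xml", format="STATIONXML",
          nsmap={"custom": CUSTOM_NS})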
def test_parse_inventory(self):
    """
    test parsing inventory
    """
    # valid StationXML: open the file to pass a handle
    f = open("ph5/test_data/metadata/station.xml", "r")
    inventory_ = self.metadata.read_metadata(f, "station.xml")
    f.close()
    parsed_array = self.metadata.parse_inventory(inventory_)
    # expect an array kef with 3 channels: HHN, LHN, LOG
    self.assertTrue(parsed_array)
    self.assertEqual(3, len(parsed_array))
    self.assertEqual('5553', parsed_array[0]['das/serial_number_s'])
    self.assertEqual('5553', parsed_array[1]['das/serial_number_s'])
    self.assertEqual('5553', parsed_array[2]['das/serial_number_s'])
    self.assertEqual('H', parsed_array[0]['seed_band_code_s'])
    self.assertEqual('L', parsed_array[1]['seed_band_code_s'])
    self.assertEqual('L', parsed_array[2]['seed_band_code_s'])
    self.assertEqual('H', parsed_array[0]['seed_instrument_code_s'])
    self.assertEqual('H', parsed_array[1]['seed_instrument_code_s'])
    self.assertEqual('O', parsed_array[2]['seed_instrument_code_s'])
    self.assertEqual('N', parsed_array[0]['seed_orientation_code_s'])
    self.assertEqual('N', parsed_array[1]['seed_orientation_code_s'])
    self.assertEqual('G', parsed_array[2]['seed_orientation_code_s'])
    # check response manager for loaded responses
    sensor_keys = [
        parsed_array[0]['sensor/manufacturer_s'],
        parsed_array[0]['sensor/model_s']
    ]
    datalogger_keys = [
        parsed_array[0]['das/manufacturer_s'],
        parsed_array[0]['das/model_s'],
        parsed_array[0]['sample_rate_i']
    ]
    self.assertTrue(
        self.metadata.resp_manager.is_already_requested(
            sensor_keys, datalogger_keys))

    # empty inventory: parse_inventory should return an empty list
    net = [inventory.Network('XX')]
    created = UTCDateTime.now()
    inventory_ = inventory.Inventory(networks=net, source="", sender="",
                                     created=created, module="",
                                     module_uri="")
    parsed_array = self.metadata.parse_inventory(inventory_)
    self.assertFalse(parsed_array)

    # valid dataless SEED file
    f = open("ph5/test_data/metadata/1B.13.AAA.2018123.dataless", "r")
    inventory_ = self.metadata.read_metadata(
        f, "1B.13.AAA.2018123.dataless")
    f.close()
    parsed_array = self.metadata.parse_inventory(inventory_)
    self.assertTrue(parsed_array)
    self.assertEqual(19, len(parsed_array))
    # dataless doesn't have datalogger serial numbers
    self.assertEqual("", parsed_array[0]['das/serial_number_s'])
    self.assertEqual("", parsed_array[9]['das/serial_number_s'])
    self.assertEqual("", parsed_array[17]['das/serial_number_s'])
    # check response manager for loaded responses
    sensor_keys = [
        parsed_array[0]['sensor/manufacturer_s'],
        parsed_array[0]['sensor/model_s']
    ]
    datalogger_keys = [
        parsed_array[0]['das/manufacturer_s'],
        parsed_array[0]['das/model_s'],
        parsed_array[0]['sample_rate_i']
    ]
    self.assertFalse(
        self.metadata.resp_manager.is_already_requested(
            sensor_keys, datalogger_keys))
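For comparison, the same kind of StationXML file can be loaded directly with ObsPy's read_inventory; a minimal sketch using the test-data path from the test above:

from obspy import read_inventory

inv = read_inventory("ph5/test_data/metadata/station.xml",
                     format="STATIONXML")
for net in inv:
    for sta in net:
        for cha in sta:
            # band/instrument/orientation codes are the three characters
            # of the SEED channel code, e.g. "HHN"
            print(net.code, sta.code, cha.code,
                  cha.code[0], cha.code[1], cha.code[2])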
start_date = cha[u'start_date']
if start_date == '':
    start_date = UTCDateTime(1970, 1, 1, 0, 0)
end_date = cha[u'end_date']
if end_date == '':
    end_date = UTCDateTime(2999, 1, 1, 0, 0)
# create inventory for station
stan_channel = inventory.Channel(
    code=chn_name, location_code=site_name, depth=depth, azimuth=270,
    dip=0, sample_rate=sampling, clock_drift_in_seconds_per_sample=0,
    latitude=latitude, longitude=longitude, elevation=elevation)
channel_list.append(stan_channel)

site = inventory.Site(name=site_name, description=instrument)
station = inventory.Station(
    code=station_code, creation_date=start_date,
    termination_date=end_date, latitude=latitude, longitude=longitude,
    elevation=elevation, vault=station_id, channels=channel_list,
    site=site)
station_list.append(station)

network = inventory.Network(code=network_code,
                            alternate_code=network_name,
                            start_date=start_date,
                            stations=station_list)
network_list.append(network)

inv = inventory.Inventory(networks=network_list, source='YN.dataless')
# print(inv)
inv.write(path_or_file_object='yn_station' + '.xml', format='STATIONXML')
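The empty-string checks above implement open-ended epochs by falling back to sentinel dates. A small helper expressing the same default-date pattern might look like this; epoch_or_default is a hypothetical name, and the sample cha dict stands in for the channel-header dict used above:

from obspy import UTCDateTime

def epoch_or_default(value, default):
    # hypothetical helper: an empty string means "unknown", so fall back
    return UTCDateTime(value) if value else default

cha = {u'start_date': '', u'end_date': '2018-12-31T00:00:00'}  # sample data
start_date = epoch_or_default(cha[u'start_date'], UTCDateTime(1970, 1, 1))
end_date = epoch_or_default(cha[u'end_date'], UTCDateTime(2999, 1, 1))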
network_start_end = None
# go through the station start/end date dict and get the overall
# start/end date
for key, (start, end) in station_start_end_dict.items():
    if network_start_end is None:
        network_start_end = [start, end]
        continue
    # check start and end independently: one station can extend both
    if start < network_start_end[0]:
        network_start_end[0] = start
    if end > network_start_end[1]:
        network_start_end[1] = end

# now make the network-level inventory
network_inv = inventory.Network(
    code=FDSNnetwork[0:2],
    start_date=UTCDateTime(network_start_end[0]),
    end_date=UTCDateTime(network_start_end[1]),
    stations=station_inventories_list,
    total_number_of_stations=len(station_inventories_list))

# create the inventory
inv = inventory.Inventory(networks=[network_inv],
                          source="Geoscience Australia")

print("+==============================+++")
print("")
print(inv)
print(inv[0])
print(inv[0][0])
print(inv[0][0][0])
print(inv[0][0][1])
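The running min/max loop can also be collapsed with zip(), min(), and max(); a sketch assuming the same {station: (start, end)} layout, with sample values in place of the real dict:

station_start_end_dict = {'STA1': (1262304000.0, 1293840000.0),
                          'STA2': (1270000000.0, 1300000000.0)}  # sample data
starts, ends = zip(*station_start_end_dict.values())
network_start_end = [min(starts), max(ends)]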
def mt_survey_to_inventory_network(survey_obj, namespace="MT"):
    """
    Translate MT survey metadata to an inventory Network in StationXML.

    Metadata that does not fit under the StationXML schema is added
    as extra.

    :param survey_obj: MT survey metadata
    :type survey_obj: :class:`~mth5.metadata.Survey`
    :return: network object populated from the survey metadata
    :rtype: :class:`~obspy.core.inventory.network.Network`
    """
    network_obj = inventory.Network(
        survey_obj.get_attr_from_name(network_translator["code"]))

    used_list = [
        "northwest_corner.latitude",
        "northwest_corner.longitude",
        "southeast_corner.latitude",
        "southeast_corner.longitude",
        "time_period.start_date",
        "time_period.end_date",
        "archive_id",
        "country",
        "datum",
        "geographic_name",
        "name",
        "hdf5_reference",
        "mth5_type",
    ]
    for inv_key, mth5_key in network_translator.items():
        if mth5_key is None:
            msg = "cannot currently map mth5.survey to network.{0}".format(
                inv_key)
            logger.debug(msg)
            continue
        if inv_key == "operators":
            operator = inventory.Operator(
                agency=[survey_obj.project_lead.organization])
            person = inventory.Person(
                names=[survey_obj.project_lead.author],
                emails=[survey_obj.project_lead.email],
            )
            operator.contacts = [person]
            network_obj.operators = [operator]
            used_list.append("project_lead.author")
            used_list.append("project_lead.email")
            used_list.append("project_lead.organization")
        elif inv_key == "comments":
            if survey_obj.comments is not None:
                comment = inventory.Comment(survey_obj.comments, id=0)
                network_obj.comments.append(comment)
        elif inv_key == "restricted_status":
            network_obj.restricted_status = release_dict[
                survey_obj.release_license]
        elif inv_key == "identifiers":
            for s_key in mth5_key:
                doi = survey_obj.get_attr_from_name(s_key)
                network_obj.identifiers.append(f"doi: {doi}")
                used_list.append(s_key)
        else:
            setattr(network_obj, inv_key,
                    survey_obj.get_attr_from_name(mth5_key))
            used_list.append(mth5_key)

    # add any extra metadata that does not fit with the StationXML schema
    network_obj.extra = AttribDict()
    for mt_key in survey_obj.get_attribute_list():
        if mt_key not in used_list:
            add_custom_element(
                network_obj.extra,
                mt_key,
                survey_obj.get_attr_from_name(mt_key),
                units=survey_obj._attr_dict[mt_key]["units"],
                namespace=namespace,
            )
    return network_obj
channel_01.extra = AttribDict()

channel_01.extra.DipoleLength = AttribDict()
channel_01.extra.DipoleLength.value = '10'
channel_01.extra.DipoleLength.namespace = ns
channel_01.extra.DipoleLength.attrib = {'units': 'meters'}

channel_01.extra.FieldNotes = AttribDict({'namespace': ns})
channel_01.extra.FieldNotes.value = AttribDict()

channel_01.extra.FieldNotes.value.ContactResistanceA = AttribDict()
channel_01.extra.FieldNotes.value.ContactResistanceA.value = 1.2
channel_01.extra.FieldNotes.value.ContactResistanceA.namespace = ns
channel_01.extra.FieldNotes.value.ContactResistanceA.attrib = {
    'units': 'kOhms'
}

channel_01.extra.FieldNotes.value.ContactResistanceB = AttribDict()
channel_01.extra.FieldNotes.value.ContactResistanceB.value = 1.8
channel_01.extra.FieldNotes.value.ContactResistanceB.namespace = ns
channel_01.extra.FieldNotes.value.ContactResistanceB.attrib = {
    'units': 'kOhms'
}

# notes
station_01.channels.append(channel_01)

n = inventory.Network('MT666')
n.stations = [station_01]
inv = inventory.Inventory([n], code)
inv.write('my_inventory.xml', format='STATIONXML')
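One optional refinement: without an nsmap, ObsPy's StationXML writer auto-generates a prefix (e.g. ns0) for the custom namespace ns. A readable prefix can be registered at write time instead; a sketch, where 'mt' is an arbitrary prefix choice:

inv.write('my_inventory.xml', format='STATIONXML', nsmap={'mt': ns})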
site = inventory.Site(name=fields[1], description=fields[10])
station = inventory.Station(code=fields[1], creation_date=start_Date,
                            termination_date=end_Date,
                            latitude=fields[2], longitude=fields[3],
                            elevation=fields[4],
                            vault='Transportable Array',
                            channels=channels, site=site)
station_list.append(station)

if i == len(data) - 1:
    network = inventory.Network(code=surv_name,
                                alternate_code=fields[0],
                                start_date=start_Date,
                                stations=station_list)
    inv = inventory.Inventory(networks=[network],
                              source='Geoscience Australia')
    print(inv)
    inv.write(path_or_file_object='/media/obsuser/seismic_data_1/_ANU/' +
              surv_name + '.xml',
              format='STATIONXML')
    break

# compare the current iterated survey name to the next one in the text file
if surv_name != data[i + 1].split(' ')[9]: