Example #1
def run_ph5_to_stationxml(paths, nickname, out_format, level, uri,
                          args_dict_list):
    networks = []
    if paths:
        for path in paths:
            try:
                LOGGER.info("CHECKING %s" % os.path.join(path, nickname))
                n = execute(path, args_dict_list, nickname, level, out_format)
                if n is None:
                    LOGGER.info("NO STATIONXML DATA CREATED FOR %s" %
                                os.path.join(path, nickname))
                else:
                    networks.append(n)
                    LOGGER.info("STATIONXML DATA CREATED FOR %s" %
                                os.path.join(path, nickname))
            except PH5toStationXMLError as e:
                LOGGER.error(e.message)
                LOGGER.info("NO STATIONXML DATA CREATED FOR %s" %
                            os.path.join(path, nickname))

        if networks:
            inv = inventory.Inventory(networks=networks,
                                      source="PIC-PH5",
                                      sender="IRIS-PASSCAL-DMC-PH5",
                                      created=UTCDateTime.now(),
                                      module=("PH5 WEB SERVICE: metadata "
                                              "| version: 1"),
                                      module_uri=uri)
            return inv
    else:
        raise PH5toStationXMLError("No PH5 experiments were found "
                                   "under path(s) {0}".format(paths))
Example #2
def run_ph5_to_stationxml(paths,
                          nickname,
                          out_format,
                          level,
                          uri,
                          args_dict_list,
                          pool_size=5):
    results = []
    if paths:
        arguments = []
        for path in paths:
            arguments.append(
                (path, args_dict_list, nickname, level, out_format))

        pool = multiprocessing.Pool(processes=pool_size)
        results = pool.map(execute_unpack, arguments)
        pool.terminate()

        networks = [n for n in results if n is not None]

        if networks:
            inv = inventory.Inventory(networks=networks,
                                      source="PIC-PH5",
                                      sender="IRIS-PASSCAL-DMC-PH5",
                                      created=datetime.now(),
                                      module=("PH5 WEB SERVICE: metadata "
                                              "| version: 1"),
                                      module_uri=uri)
            return inv
        else:
            return
    else:
        raise PH5toStationXMLError("No PH5 experiments were found "
                                   "under path(s) {0}".format(paths))
Example #3
    def __init__(self, inventory_object=None):

        self.logger = setup_logger("{0}.{1}".format(__name__,
                                                    self.__class__.__name__))

        self.mt_namespace = r"http://emiw.org/xmlns/mt/1.0"
        self.namespace_map = {
            "xsi": r"http://www.w3.org/2001/XMLSchema-instance",
            "schemaLocation":
            "http://www.fdsn.org/xml/station/fdsn-station-1.1.xsd",
            "mt": self.mt_namespace,
        }

        if inventory_object is not None:
            if not isinstance(inventory_object, inventory.Inventory):
                msg = "Input must be obspy.Inventory object not type {0}"
                self.logger.error(msg.format(type(inventory_object)))
                raise TypeError(msg.format(type(inventory_object)))
            self.inventory_obj = inventory_object

        else:
            self.inventory_obj = inventory.Inventory(source="MT Metadata")
Example #4
    def test_parse_inventory(self):
        """
        test parsing inventory
        """
        # valid station xml
        # open file to pass handle
        f = open("ph5/test_data/metadata/station.xml", "r")

        # should be a valid StationXML file
        inventory_ = self.metadata.read_metadata(f, "station.xml")
        f.close()
        parsed_array = self.metadata.parse_inventory(inventory_)
        # expect an array kef with 3 channels HHN, LHN, LOG
        self.assertTrue(parsed_array)
        self.assertEqual(3, len(parsed_array))
        self.assertEqual('5553', parsed_array[0]['das/serial_number_s'])
        self.assertEqual('5553', parsed_array[1]['das/serial_number_s'])
        self.assertEqual('5553', parsed_array[2]['das/serial_number_s'])
        self.assertEqual('H', parsed_array[0]['seed_band_code_s'])
        self.assertEqual('L', parsed_array[1]['seed_band_code_s'])
        self.assertEqual('L', parsed_array[2]['seed_band_code_s'])
        self.assertEqual('H', parsed_array[0]['seed_instrument_code_s'])
        self.assertEqual('H', parsed_array[1]['seed_instrument_code_s'])
        self.assertEqual('O', parsed_array[2]['seed_instrument_code_s'])
        self.assertEqual('N', parsed_array[0]['seed_orientation_code_s'])
        self.assertEqual('N', parsed_array[1]['seed_orientation_code_s'])
        self.assertEqual('G', parsed_array[2]['seed_orientation_code_s'])

        # check response manager for loaded responses
        sensor_keys = [
            parsed_array[0]['sensor/manufacturer_s'],
            parsed_array[0]['sensor/model_s']
        ]
        datalogger_keys = [
            parsed_array[0]['das/manufacturer_s'],
            parsed_array[0]['das/model_s'], parsed_array[0]['sample_rate_i']
        ]
        self.assertTrue(
            self.metadata.resp_manager.is_already_requested(
                sensor_keys, datalogger_keys))

        # create empty inventory
        net = [inventory.Network('XX')]
        created = UTCDateTime.now()
        inventory_ = inventory.Inventory(networks=net,
                                         source="",
                                         sender="",
                                         created=created,
                                         module="",
                                         module_uri="")
        # should return empty list for parsed_array
        parsed_array = self.metadata.parse_inventory(inventory_)
        self.assertFalse(parsed_array)

        # test dataless seed
        # should be a dataless SEED file, valid
        f = open("ph5/test_data/metadata/1B.13.AAA.2018123.dataless", "r")
        inventory_ = self.metadata.read_metadata(f,
                                                 "1B.13.AAA.2018123.dataless")
        f.close()
        parsed_array = self.metadata.parse_inventory(inventory_)
        self.assertTrue(parsed_array)
        self.assertEqual(19, len(parsed_array))

        # dataless doesn't have datalogger serial numbers
        self.assertEqual("", parsed_array[0]['das/serial_number_s'])
        self.assertEqual("", parsed_array[9]['das/serial_number_s'])
        self.assertEqual("", parsed_array[17]['das/serial_number_s'])
        # check response manager for loaded responses
        sensor_keys = [
            parsed_array[0]['sensor/manufacturer_s'],
            parsed_array[0]['sensor/model_s']
        ]
        datalogger_keys = [
            parsed_array[0]['das/manufacturer_s'],
            parsed_array[0]['das/model_s'], parsed_array[0]['sample_rate_i']
        ]
        self.assertFalse(
            self.metadata.resp_manager.is_already_requested(
                sensor_keys, datalogger_keys))
Example #5
                    start_date = cha[u'start_date']
                    if start_date == '':
                        start_date = UTCDateTime(1970, 1, 1, 0, 0)
                    end_date = cha[u'end_date']
                    if end_date == '':
                        end_date = UTCDateTime(2999, 1, 1, 0, 0)

                    # create the inventory Channel and Station for this site
                    stan_channel = inventory.Channel(
                        code=chn_name, location_code=site_name, depth=depth,
                        azimuth=270, dip=0, sample_rate=sampling,
                        clock_drift_in_seconds_per_sample=0,
                        latitude=latitude, longitude=longitude,
                        elevation=elevation)

                    channel_list.append(stan_channel)
                    site = inventory.Site(name=site_name,
                                          description=instrument)
                    station = inventory.Station(
                        code=station_code, creation_date=start_date,
                        termination_date=end_date, latitude=latitude,
                        longitude=longitude, elevation=elevation,
                        vault=station_id, channels=channel_list, site=site)
                
            station_list.append(station)

    network = inventory.Network(code=network_code,
                                alternate_code=network_name,
                                start_date=start_date,
                                stations=station_list)
    network_list.append(network)

inv = inventory.Inventory(networks=network_list, source='YN.dataless')

# print inv

inv.write(path_or_file_object='yn_station' + '.xml', format='STATIONXML')
Example #6
    if start < network_start_end[0]:
        network_start_end[0] = start
    if end > network_start_end[1]:
        network_start_end[1] = end

# now make network level inventory
network_inv = inventory.Network(
    code=FDSNnetwork[0:2],
    start_date=UTCDateTime(network_start_end[0]),
    end_date=UTCDateTime(network_start_end[1]),
    stations=station_inventories_list,
    total_number_of_stations=len(station_inventories_list))

# create the inventory
inv = inventory.Inventory(networks=[network_inv],
                          source="Geoscience Australia")

print "+==============================+++"
print ""

print(inv)
print(inv[0])
print(inv[0][0])
print(inv[0][0][0])
print(inv[0][0][1])

XML_file = join(XML_path, FDSNnetwork + '.xml')

if exists(XML_file):
    remove(XML_file)
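
The excerpt stops at the existence check above; presumably the script then writes the rebuilt inventory to the same path, something like (not part of the original snippet):

inv.write(XML_file, format='STATIONXML')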
Example #7
channel_01.extra = AttribDict()
channel_01.extra.DipoleLength = AttribDict()
channel_01.extra.DipoleLength.value = '10'
channel_01.extra.DipoleLength.namespace = ns
channel_01.extra.DipoleLength.attrib = {'units': 'meters'}

channel_01.extra.FieldNotes = AttribDict({'namespace': ns})
channel_01.extra.FieldNotes.value = AttribDict()

channel_01.extra.FieldNotes.value.ContactResistanceA = AttribDict()
channel_01.extra.FieldNotes.value.ContactResistanceA.value = 1.2
channel_01.extra.FieldNotes.value.ContactResistanceA.namespace = ns
channel_01.extra.FieldNotes.value.ContactResistanceA.attrib = {
    'units': 'kOhms'
}

channel_01.extra.FieldNotes.value.ContactResistanceB = AttribDict()
channel_01.extra.FieldNotes.value.ContactResistanceB.value = 1.8
channel_01.extra.FieldNotes.value.ContactResistanceB.namespace = ns
channel_01.extra.FieldNotes.value.ContactResistanceB.attrib = {
    'units': 'kOhms'
}

# notes
station_01.channels.append(channel_01)

n = inventory.Network('MT666')
n.stations = [station_01]

inv = inventory.Inventory([n], code)
inv.write('my_inventory.xml', format='STATIONXML')
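
A possible refinement (an assumption, not in the original example): obspy's StationXML writer accepts an nsmap argument, so the custom namespace used for the extra attributes above can be registered with a readable prefix at write time:

inv.write('my_inventory.xml', format='STATIONXML', nsmap={'mt': ns})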
Example #8
                                latitude=fields[2],
                                longitude=fields[3],
                                elevation=fields[4],
                                vault='Transportable Array',
                                channels=channels,
                                site=site)

    station_list.append(station)

    if i == len(data) - 1:
        network = inventory.Network(code=surv_name,
                                    alternate_code=fields[0],
                                    start_date=start_Date,
                                    stations=station_list)

        inv = inventory.Inventory(networks=[network],
                                  source='Geoscience Australia')

        print(inv)

        inv.write(path_or_file_object='/media/obsuser/seismic_data_1/_ANU/' +
                  surv_name + '.xml',
                  format='STATIONXML')

        break

    # compare the current iterated survey name to the next one in the text file
    if not surv_name == data[i + 1].split(' ')[9]:
        # next line will be a new survey

        network = inventory.Network(code=surv_name,
                                    alternate_code=fields[0],