def read_networks(self):
    """
    Read the experiment table and return an obspy network object for it,
    or None when the experiment does not match the requested network
    codes / report numbers.

    :returns: the obs network built by ``create_obs_network``, or None
        when experiment_t is missing or no request pattern matches.
    """
    self.manager.ph5.read_experiment_t()
    self.experiment_t = self.manager.ph5.Experiment_t['rows']
    if self.experiment_t == []:
        LOGGER.error("No experiment_t in %s" % self.manager.ph5.filename)
        return
    # read network codes and compare to network list
    network_patterns = []
    for obj in self.manager.request_list:
        netcode_list = obj.network_list
        network_patterns.extend(netcode_list)
    if not ph5utils.does_pattern_exists(
            network_patterns, self.experiment_t[0]['net_code_s']):
        self.manager.ph5.close()
        return
    # read reportnums and compare to reportnum list
    reportnum_patterns = []
    for obj in self.manager.request_list:
        reportnum_list = obj.reportnum_list
        reportnum_patterns.extend(reportnum_list)
    # BUG FIX: previously passed reportnum_list (only the last request
    # object's patterns) instead of the aggregated reportnum_patterns,
    # so earlier requests' report numbers were silently ignored.
    if not ph5utils.does_pattern_exists(
            reportnum_patterns, self.experiment_t[0]['experiment_id_s']):
        self.manager.ph5.close()
        return
    # update requests list to include ph5 station ids
    self.add_ph5_stationids()
    obs_network = self.create_obs_network()
    self.manager.ph5.close()
    return obs_network
def get_slc_info(self, st_data, station, location, channel):
    """
    Match one station-table row against the requested station, location
    and channel patterns.

    :param st_data: dict-like station-table row.
    :param station: station pattern (wildcards allowed).
    :param location: location pattern.
    :param channel: channel pattern.
    :returns: (seed_station, location, channel) tuple on a full match,
        or -1 when any pattern fails to match this row.
    :raises PH5AvailabilityError: when the row carries neither a seed
        station name nor an id.
    """
    if 'seed_station_name_s' in st_data:
        ph5_seed_station = st_data['seed_station_name_s']
    elif 'id_s' in st_data:
        ph5_seed_station = st_data['id_s']
    else:
        raise PH5AvailabilityError(
            "PH5 data lacks of station information.")
    # track the widest station code seen so report columns line up
    self.sta_len = max(self.sta_len, len(ph5_seed_station))
    if not ph5utils.does_pattern_exists([station], ph5_seed_station):
        return -1
    # channel lookup is deferred until the station pattern matched
    ph5_channel = self.get_channel(st_data)
    if not ph5utils.does_pattern_exists([channel], ph5_channel):
        return -1
    ph5_loc = st_data['seed_location_code_s'] \
        if 'seed_location_code_s' in st_data else ""
    if not ph5utils.does_pattern_exists([location], ph5_loc):
        return -1
    return ph5_seed_station, ph5_loc, ph5_channel
def read_channels(self, sta_xml_obj, station_list):
    """
    Collect obs channel objects for every deployment in station_list
    that passes the receiver, component, channel-code, location and
    lat/lon filters of sta_xml_obj.

    :param sta_xml_obj: request object carrying the filter patterns.
    :param station_list: mapping of deployment -> list of station rows.
    :returns: list of unique obs channel objects.
    """
    all_channels = []
    cha_list_patterns = sta_xml_obj.channel_list
    component_list_patterns = sta_xml_obj.component_list
    receiver_list_patterns = sta_xml_obj.receiver_list
    location_patterns = sta_xml_obj.location_list
    for deployment in station_list:
        # only the first entry of each deployment is examined
        entry = station_list[deployment][0]
        receiver_id = str(entry['id_s'])
        if not ph5utils.does_pattern_exists(receiver_list_patterns,
                                            receiver_id):
            continue
        c_id = str(entry['channel_number_i'])
        if not ph5utils.does_pattern_exists(component_list_patterns,
                                            c_id):
            continue
        # SEED channel code = band + instrument + orientation
        seed_channel = (entry['seed_band_code_s'] +
                        entry['seed_instrument_code_s'] +
                        entry['seed_orientation_code_s'])
        for pattern in cha_list_patterns:
            if not fnmatch.fnmatch(seed_channel, pattern):
                continue
            if entry['seed_location_code_s']:
                location = entry['seed_location_code_s']
            else:
                location = ""
            if not ph5utils.does_pattern_exists(location_patterns,
                                                location):
                continue
            cha_longitude = entry['location/X/value_d']
            cha_latitude = entry['location/Y/value_d']
            cha_elevation = entry['location/Z/value_d']
            if not self.is_lat_lon_match(sta_xml_obj, cha_latitude,
                                         cha_longitude):
                continue
            obs_channel = self.create_obs_channel(
                station_list, deployment, seed_channel, location,
                cha_longitude, cha_latitude, cha_elevation,
                receiver_id)
            # avoid duplicate channel entries
            if obs_channel not in all_channels:
                all_channels.append(obs_channel)
    return all_channels
def read_networks(self):
    """
    Read the experiment table, validate response metadata and return an
    obspy network object, or None when nothing matches.

    :returns: the obs network built by ``create_obs_network``; None when
        no network/reportnum pattern matches, or when a response n_i
        error occurred and ``stationxml_on_error`` is not set.
    :raises PH5toStationXMLError: when experiment_t is missing or a
        response entry has no response file name.
    """
    has_error = False
    self.manager.ph5.read_experiment_t()
    self.experiment_t = self.manager.ph5.Experiment_t['rows']
    if self.experiment_t == []:
        raise PH5toStationXMLError("No experiment_t in %s"
                                   % self.manager.ph5.filename)
    # read network codes and compare to network list
    network_patterns = []
    for obj in self.manager.request_list:
        netcode_list = obj.network_list
        network_patterns.extend(netcode_list)
    if not ph5utils.does_pattern_exists(
            network_patterns, self.experiment_t[0]['net_code_s']):
        self.manager.ph5.close()
        return
    # read reportnums and compare to reportnum list
    reportnum_patterns = []
    for obj in self.manager.request_list:
        reportnum_list = obj.reportnum_list
        reportnum_patterns.extend(reportnum_list)
    # BUG FIX: previously passed reportnum_list (only the last request
    # object's patterns) instead of the aggregated reportnum_patterns,
    # so earlier requests' report numbers were silently ignored.
    if not ph5utils.does_pattern_exists(
            reportnum_patterns, self.experiment_t[0]['experiment_id_s']):
        self.manager.ph5.close()
        return
    # non-unique response n_i is reported but only fatal when
    # stationxml_on_error is off (see the tail of this method)
    unique_resp = validation.check_resp_unique_n_i(self.manager.ph5,
                                                   set(), None)
    if unique_resp is not True:
        LOGGER.error(unique_resp)
        has_error = True
    has_response_file = validation.check_has_response_filename(
        self.manager.ph5.Response_t, set(), None)
    if has_response_file is not True:
        raise PH5toStationXMLError(has_response_file)
    # update requests list to include ph5 station ids
    self.add_ph5_stationids()
    obs_network = self.create_obs_network()
    self.manager.ph5.close()
    if has_error:
        if self.manager.stationxml_on_error:
            return obs_network
        return
    return obs_network
def test_does_pattern_exists(self): """ Tests does_patter_exist """ # match via * self.assertTrue(ph5utils.does_pattern_exists(['*'], "Test")) # exact match self.assertTrue(ph5utils.does_pattern_exists(['Test'], "Test")) # match one value of list self.assertTrue(ph5utils.does_pattern_exists(['Test', 'random'], "Test")) # match via ? self.assertTrue(ph5utils.does_pattern_exists(['DP?'], 'DPZ')) # No match via ? self.assertFalse(ph5utils.does_pattern_exists(['DP?'], 'DHZ')) # No match multiple patterns self.assertFalse(ph5utils.does_pattern_exists(['DPZ', 'XXX', 'test'], 'DHZ'))
def read_channels(self, sta_xml_obj, station_entry, deployment, sta_code,
                  array_code):
    """
    Build (or update) obs channel objects for a single station-table row.

    A channel already registered under the same key only gets this
    array_code appended to its PH5Array extra; otherwise a new channel is
    created, registered with the manager, given its response, and added
    to the returned list.

    :param sta_xml_obj: request object carrying the filter patterns.
    :param station_entry: one station-table row (dict-like).
    :param deployment: deployment key (unused here beyond the signature).
    :param sta_code: SEED station code for the channel key.
    :param array_code: array code recorded in the channel's PH5Array extra.
    :returns: list of newly created obs channel objects (updated
        pre-existing channels are NOT re-added); None when the receiver
        or component filter rejects the row.
    :raises PH5toStationXMLError: when sample_rate_multiplier_i is 0.
    """
    all_channels = []
    cha_list_patterns = sta_xml_obj.channel_list
    component_list_patterns = sta_xml_obj.component_list
    receiver_list_patterns = sta_xml_obj.receiver_list
    location_patterns = sta_xml_obj.location_list
    longitude = station_entry['location/X/value_d']
    latitude = station_entry['location/Y/value_d']
    elevation = station_entry['location/Z/value_d']
    receiver_id = str(station_entry['id_s'])
    # NOTE: these two early exits return None, not [] — callers that
    # len() the result rely on the pattern lists matching first.
    if not ph5utils.does_pattern_exists(receiver_list_patterns,
                                        receiver_id):
        return
    c_id = str(station_entry['channel_number_i'])
    if not ph5utils.does_pattern_exists(component_list_patterns, c_id):
        return
    # SEED channel code = band + instrument + orientation
    cha_code = (station_entry['seed_band_code_s'] +
                station_entry['seed_instrument_code_s'] +
                station_entry['seed_orientation_code_s'])
    for pattern in cha_list_patterns:
        if fnmatch.fnmatch(cha_code, pattern):
            if station_entry['seed_location_code_s']:
                loc_code = station_entry['seed_location_code_s']
            else:
                loc_code = ""
            if not ph5utils.does_pattern_exists(location_patterns,
                                                loc_code):
                continue
            lat_lon_errs = self.is_lat_lon_match(sta_xml_obj,
                                                 station_entry)
            if lat_lon_errs != []:
                continue
            start_date = UTCDateTime(station_entry['deploy_time/epoch_l'])
            end_date = UTCDateTime(station_entry['pickup_time/epoch_l'])
            # compute sample rate
            sample_rate_multiplier = \
                float(station_entry['sample_rate_multiplier_i'])
            sample_rate_ration = float(station_entry['sample_rate_i'])
            try:
                sample_rate = sample_rate_ration / sample_rate_multiplier
            except ZeroDivisionError:
                raise PH5toStationXMLError(
                    "Error - Invalid sample_rate_multiplier_i == 0")
            receiver_table_n_i = station_entry['receiver_table_n_i']
            Receiver_t = self.manager.ph5.get_receiver_t_by_n_i(
                receiver_table_n_i)
            # key used to deduplicate channels across arrays
            cha_key = self.manager.get_channel_key(
                sta_code, loc_code, cha_code, start_date, end_date,
                longitude, latitude, elevation,
                station_entry['channel_number_i'],  # component
                receiver_id, sample_rate, sample_rate_ration,
                Receiver_t['orientation/azimuth/value_f'],
                Receiver_t['orientation/dip/value_f'],
                station_entry['sensor/manufacturer_s'],
                station_entry['sensor/model_s'],
                station_entry['sensor/serial_number_s'],
                station_entry['das/manufacturer_s'],
                station_entry['das/model_s'],
                station_entry['das/serial_number_s'])
            if self.manager.get_obs_channel(cha_key):
                # update existing channe entry: just record that this
                # array also contains the channel
                obs_cha = self.manager.get_obs_channel(cha_key)
                arrays_list = obs_cha.extra.PH5Array.value.split(",")
                if array_code not in arrays_list:
                    arrays_list.append(array_code)
                    arrays_list.sort()
                    obs_cha.extra.PH5Array.value = ",".join(arrays_list)
            else:
                # create new channel entry
                obs_channel = self.create_obs_channel(
                    sta_code, loc_code, cha_code,
                    start_date, end_date, longitude,
                    latitude, elevation,
                    station_entry['channel_number_i'],  # component
                    receiver_id, array_code,
                    sample_rate, sample_rate_ration,
                    Receiver_t['orientation/azimuth/value_f'],
                    Receiver_t['orientation/dip/value_f'],
                    station_entry['sensor/manufacturer_s'],
                    station_entry['sensor/model_s'],
                    station_entry['sensor/serial_number_s'],
                    station_entry['das/manufacturer_s'],
                    station_entry['das/model_s'],
                    station_entry['das/serial_number_s'])
                self.manager.set_obs_channel(cha_key, obs_channel)
                # read response and add it to obspy channel inventory
                self.response_table_n_i = \
                    station_entry['response_table_n_i']
                obs_channel.response = \
                    self.get_response_inv(obs_channel)
                all_channels.append(obs_channel)
    return all_channels
def read_stations(self):
    """
    Build obs station objects for every array/station/deployment entry
    that matches the current request list.

    Stations are deduplicated via the manager's station key: a repeat key
    re-uses the existing obs station (accumulating its channel counts)
    and is not appended to the returned list again.

    :returns: list of unique obs station objects.
    """
    all_stations = []
    for sta_xml_obj in self.manager.request_list:
        array_patterns = sta_xml_obj.array_list
        for array_name in self.array_names:
            # array code is the tail of the table name, e.g. Array_t_001
            array_code = array_name[8:]
            if not ph5utils.does_pattern_exists(array_patterns,
                                                array_code):
                continue
            arraybyid = self.manager.ph5.Array_t[array_name]['byid']
            arrayorder = self.manager.ph5.Array_t[array_name]['order']
            for sta_id in arrayorder:
                station_list = arraybyid.get(sta_id)
                obs_channels = []
                if sta_id not in sta_xml_obj.ph5_station_id_list:
                    continue
                # flatten deployment -> entries mapping into
                # (deployment_key, entry_list, entry) triples
                for deployment, station_epoch, station_entry in \
                        ((dk, dv, se)
                         for dk, dv in station_list.items()
                         for se in dv):
                    longitude = station_entry['location/X/value_d']
                    latitude = station_entry['location/Y/value_d']
                    elevation = station_entry['location/Z/value_d']
                    if station_entry['seed_station_name_s']:
                        station_code = \
                            station_entry['seed_station_name_s']
                    else:
                        station_code = sta_id
                    # collect lat/lon problems as warnings, then skip
                    # the entry if any were found
                    lat_lon_errs = self.is_lat_lon_match(
                        sta_xml_obj, station_entry)
                    for e in lat_lon_errs:
                        msg = "array %s, station %s, channel %s: %s" % \
                            (array_code, station_code,
                             station_entry['channel_number_i'], e)
                        self.unique_errors.add((msg, 'warning'))
                    if lat_lon_errs != []:
                        continue
                    start_date = UTCDateTime(
                        station_entry['deploy_time/epoch_l'])
                    end_date = UTCDateTime(
                        station_entry['pickup_time/epoch_l'])
                    if (sta_xml_obj.start_time and
                            sta_xml_obj.start_time > end_date):
                        # chosen start time after pickup
                        continue
                    elif (sta_xml_obj.end_time and
                            sta_xml_obj.end_time < start_date):
                        # chosen end time before pickup
                        continue
                    # run channel filters if necessary. we do this
                    # first to avoid creating a station that has no
                    # channels
                    if (self.manager.level.upper() == "RESPONSE" or
                            self.manager.level.upper() == "CHANNEL" or
                            sta_xml_obj.location_list != ['*'] or
                            sta_xml_obj.channel_list != ['*'] or
                            sta_xml_obj.component_list != ['*'] or
                            sta_xml_obj.receiver_list != ['*']):
                        obs_channels = self.read_channels(
                            sta_xml_obj, station_entry, deployment,
                            station_code, array_code)
                        # go to the next station if no channels were
                        # returned
                        if len(obs_channels) == 0:
                            continue
                    sta_key = self.manager.get_station_key(
                        station_code, start_date, end_date,
                        longitude, latitude, elevation,
                        station_entry['location/description_s'])
                    if self.manager.get_obs_station(sta_key):
                        # station already created and added to metadata
                        obs_station = \
                            self.manager.get_obs_station(sta_key)
                    else:
                        # create and add a new station
                        obs_station = self.create_obs_station(
                            station_code, start_date, end_date,
                            longitude, latitude, elevation,
                            start_date,  # creation_date
                            end_date,  # termination date
                            station_entry['location/description_s'])
                    # Add matching channels to station if necessary
                    if obs_channels:
                        obs_station.channels.extend(obs_channels)
                        obs_station.selected_number_of_channels = \
                            len(obs_station.channels)
                    else:
                        obs_station.selected_number_of_channels = 0
                    obs_station.total_number_of_channels += \
                        len(station_list)
                    # only register/return the station the first time
                    # its key is seen
                    if self.manager.get_obs_station(sta_key) is None:
                        all_stations.append(obs_station)
                        self.manager.set_obs_station(sta_key,
                                                     obs_station)
    return all_stations
def Parse_Networks(self, path):
    """
    Open the PH5 experiment at *path*, filter it against the requested
    network/reportnum/shotline/shotid patterns and time window, and build
    a Network object populated with its shot lines and shots.

    :param path: directory containing the PH5 file (nickname from args).
    :returns: Network object, or None when the event read fails or no
        network/reportnum pattern matches.
    """
    # NOTE(review): str is itself iterable, so a plain string
    # network_list takes the first branch unsplit — presumably the
    # callers always pass a list; confirm before relying on the
    # split(',') branch.
    network_list = self.args.get('network_list')
    if isinstance(network_list, collections.Iterable):
        network_patterns = network_list
    else:
        network_patterns = self.args.get('network_list').split(',')
    reportnum_list = self.args.get('reportnum_list')
    if isinstance(reportnum_list, collections.Iterable):
        reportnum_patterns = reportnum_list
    else:
        reportnum_patterns = self.args.get('reportnum_list').split(',')
    self.ph5 = ph5api.PH5(path=path, nickname=self.args.get('nickname'))
    self.ph5.read_experiment_t()
    self.experiment_t = self.ph5.Experiment_t['rows']
    self.ph5.read_event_t_names()
    self.ph5.read_array_t_names()
    # read_events returns -1 on failure; bail out early in that case
    test = self.read_events(None)
    shot_lines = sorted(self.ph5.Event_t_names)
    if test == -1:
        self.ph5.close()
        return None
    # when both pattern sets are given, reject only if BOTH fail
    if network_patterns and reportnum_patterns:
        if not ph5utils.does_pattern_exists(
                network_patterns,
                self.experiment_t[0]['net_code_s']) and \
           not ph5utils.does_pattern_exists(
                reportnum_patterns,
                self.experiment_t[0]['experiment_id_s']):
            self.ph5.close()
            return None
    elif network_patterns:
        # read network code and compare to network list
        if not ph5utils.does_pattern_exists(
                network_patterns, self.experiment_t[0]['net_code_s']):
            self.ph5.close()
            return None
    elif reportnum_patterns:
        # read reportnum and compare to reportnum list
        if not ph5utils.does_pattern_exists(
                reportnum_patterns,
                self.experiment_t[0]['experiment_id_s']):
            self.ph5.close()
            return None
    self.read_arrays(None)
    array_names = self.ph5.Array_t_names
    array_names.sort()
    # get the earliest deploy and latest pickup dates from the arrays table
    earliest_deploy = None
    latest_pickup = None
    for array_name in array_names:
        arraybyid = self.ph5.Array_t[array_name]['byid']
        arrayorder = self.ph5.Array_t[array_name]['order']
        for ph5_station in arrayorder:
            station_list = arraybyid.get(ph5_station)
            for deployment in station_list:
                station_len = len(station_list[deployment])
                for st_num in range(0, station_len):
                    # epoch seconds + microsecond fraction
                    micro = ph5utils.microsecs_to_sec(
                        station_list[deployment][st_num]
                        ['deploy_time/micro_seconds_i'])
                    deploy_time = (station_list[deployment][st_num]
                                   ['deploy_time/epoch_l'] + micro)
                    micro = ph5utils.microsecs_to_sec(
                        station_list[deployment][st_num]
                        ['pickup_time/micro_seconds_i'])
                    pickup_time = (station_list[deployment][st_num]
                                   ['pickup_time/epoch_l'] + micro)
                    if earliest_deploy is None or \
                       earliest_deploy > deploy_time:
                        earliest_deploy = deploy_time
                    if latest_pickup is None or \
                       latest_pickup < pickup_time:
                        latest_pickup = pickup_time
    # clamp the user-requested window to the experiment's actual span
    # NOTE(review): earliest_deploy stays None when the arrays are
    # empty, which would make fromtimestamp raise — confirm arrays are
    # always non-empty by this point.
    if self.args.get('start_time') and self.args.get(
            'start_time') < datetime.fromtimestamp(earliest_deploy):
        self.args['start_time'] = datetime.fromtimestamp(earliest_deploy)
    if self.args.get('stop_time') and self.args.get(
            'stop_time') > datetime.fromtimestamp(latest_pickup):
        self.args['stop_time'] = datetime.fromtimestamp(latest_pickup)
    network = Network(self.experiment_t[0]['net_code_s'],
                      self.experiment_t[0]['experiment_id_s'],
                      self.experiment_t[0]['longname_s'])
    shot_lines_ = []
    # NOTE(review): shots is initialized once, outside the shot_line
    # loop, so each Shotline receives the cumulative shot list —
    # presumably intentional; verify against downstream consumers.
    shots = []
    for shot_line in shot_lines:
        sl = Shotline(shot_line)
        event_t = self.ph5.Event_t[shot_line]['byid']
        # filter by shotline pattern (last 3 chars of the table name)
        if self.args.get('shotline') and \
           not ph5utils.does_pattern_exists(self.args.get('shotline'),
                                            str(shot_line[-3:])):
            continue
        for key, value in event_t.iteritems():
            if self.args.get('shotid') and \
               not ph5utils.does_pattern_exists(
                    self.args.get('shotid'), key):
                continue
            cha_longitude = float(value['location/X/value_d'])
            cha_latitude = float(value['location/Y/value_d'])
            if not self.is_lat_lon_match(cha_latitude, cha_longitude):
                continue
            # drop shots outside the requested time window
            if self.args.get('start_time') and (datetime.fromtimestamp(
                    value['time/epoch_l']) < self.args.get('start_time')):
                continue
            if self.args.get('stop_time') and (datetime.fromtimestamp(
                    value['time/epoch_l']) > self.args.get('stop_time')):
                continue
            # drop shots that fall inside a restricted interval for
            # this network
            restricted = self.args.get('restricted')
            if restricted:
                is_restricted = False
                for r in restricted:
                    if r.network == network.code and \
                       value['time/epoch_l'] >= r.starttime and \
                       value['time/epoch_l'] <= r.endtime:
                        is_restricted = True
                        break
                if is_restricted:
                    continue
            shot = Shot(key, value['size/value_d'],
                        value['size/units_s'],
                        self.get_fdsn_time(value['time/epoch_l'],
                                           value['time/micro_seconds_i']),
                        value['location/Y/value_d'],
                        value['location/X/value_d'],
                        value['location/Z/value_d'],
                        value['location/X/units_s'],
                        value['location/Z/units_s'],
                        value['description_s'])
            shot.depth = value['depth/value_d']
            shots.append(shot)
        sl.shots = shots
        shot_lines_.append(sl)
    network.shot_lines = shot_lines_
    self.ph5.close()
    return network
def create_list(self):
    """
    Walk every array/station/deployment entry and build a list of
    Station records joined with their DAS and Response table metadata.

    :returns: list of Station objects.
    """
    array_names = sorted(self.ph5.Array_t_names)
    stations = []
    for array_name in array_names:
        # NOTE(review): array is only assigned when self.array is
        # truthy, yet it is referenced in Station(...) below — confirm
        # this method is only called with self.array set.
        if self.array:
            array = str(int(array_name[-3:]))
            array_patterns = self.array
            if not ph5utils.does_pattern_exists(array_patterns,
                                                str(array)):
                continue
        self.read_arrays(array_name)
        arraybyid = self.ph5.Array_t[array_name]['byid']
        arrayorder = self.ph5.Array_t[array_name]['order']
        for ph5_station in arrayorder:
            station_list = arraybyid.get(ph5_station)
            for deployment in station_list:
                station_len = len(station_list[deployment])
                for st_num in range(0, station_len):
                    id_s = station_list[deployment][st_num]['id_s']
                    sample_rate = station_list[deployment][st_num][
                        'sample_rate_i']
                    sample_rate_multiplier = station_list[deployment][
                        st_num]['sample_rate_multiplier_i']
                    das_model = station_list[deployment][st_num][
                        'das/model_s']
                    das_model = das_model.replace(" ", "")
                    sensor_model = station_list[deployment][st_num][
                        'sensor/model_s']
                    sensor_model = sensor_model.replace(" ", "")
                    # ZLAND nodes have an integrated sensor
                    if das_model.startswith("ZLAND"):
                        sensor_model = ""
                    channel = station_list[deployment][st_num][
                        'channel_number_i']
                    serial = station_list[deployment][st_num][
                        'das/serial_number_s']
                    pickup = station_list[deployment][st_num][
                        'pickup_time/epoch_l']
                    deploy = station_list[deployment][st_num][
                        'deploy_time/epoch_l']
                    self.ph5.read_das_t(serial, deploy, pickup,
                                        reread=False)
                    try:
                        Das_t = ph5api.filter_das_t(
                            self.ph5.Das_t[serial]['rows'], channel)
                    except BaseException:
                        # missing DAS table: warn and skip the rest of
                        # this deployment
                        logging.warning("No DAS table found for das " +
                                        str(serial) + " channel " +
                                        str(channel) + ".\n")
                        sys.stderr.write(
                            "Error - No DAS table found for das " +
                            str(serial) + " channel " +
                            str(channel) + ".\n")
                        break
                    # pick the DAS row matching this sample rate,
                    # multiplier and channel.
                    # NOTE(review): if no row matches, response_n_i /
                    # receiver_n_i keep their value from a previous
                    # iteration (NameError on the very first miss) —
                    # confirm a matching row is guaranteed.
                    for entry in Das_t:
                        if (entry['sample_rate_i'] == sample_rate and
                                entry['sample_rate_multiplier_i'] ==
                                sample_rate_multiplier and
                                entry['channel_number_i'] == channel):
                            response_n_i = entry['response_table_n_i']
                            receiver_n_i = entry['receiver_table_n_i']
                            break
                    Response_t = self.ph5.get_response_t_by_n_i(
                        response_n_i)
                    # NOTE(review): when Response_t is falsy on the
                    # first iteration, gain/bit_weight/... are unbound
                    # at the Station(...) call below — confirm.
                    if Response_t:
                        gain = Response_t['gain/value_i']
                        bit_weight = Response_t['bit_weight/value_d']
                        bit_weight_units = Response_t[
                            'bit_weight/units_s']
                        gain_units = Response_t['gain/units_s']
                    else:
                        logging.warning(
                            "No Response table found for das " +
                            str(serial) + " channel " +
                            str(channel) + ".\n")
                        sys.stderr.write(
                            "Error - No Response table found for das " +
                            str(serial) + " channel " +
                            str(channel) + ".\n")
                    stations.append(
                        Station(id_s, array, channel, sample_rate,
                                sample_rate_multiplier,
                                das_model.strip(), sensor_model.strip(),
                                gain, response_n_i, receiver_n_i,
                                bit_weight, bit_weight_units,
                                gain_units, serial))
    return stations
def create_list(self):
    """
    Walk every array/station/deployment entry and build a list of
    Station records joined with their DAS and Response table metadata,
    logging (rather than failing on) missing models and tables.

    :returns: list of Station objects.
    """
    array_names = sorted(self.ph5.Array_t_names)
    stations = []
    for array_name in array_names:
        # NOTE(review): array is only assigned when self.array is
        # truthy but is used in the error messages below — confirm
        # self.array is always set here.
        if self.array:
            array = str(int(array_name[-3:]))
            array_patterns = self.array
            if not ph5utils.does_pattern_exists(array_patterns,
                                                str(array)):
                continue
        self.read_arrays(array_name)
        arraybyid = self.ph5.Array_t[array_name]['byid']
        arrayorder = self.ph5.Array_t[array_name]['order']
        for ph5_station in arrayorder:
            station_list = arraybyid.get(ph5_station)
            for deployment in station_list:
                station_len = len(station_list[deployment])
                for st_num in range(0, station_len):
                    # full row is passed into Station() below
                    station = station_list[deployment][st_num]
                    id_s = station_list[deployment][st_num]['id_s']
                    sample_rate = station_list[deployment][st_num][
                        'sample_rate_i']
                    sample_rate_multiplier = station_list[deployment][
                        st_num]['sample_rate_multiplier_i']
                    das_model = station_list[deployment][st_num][
                        'das/model_s']
                    das_model = das_model.replace(" ", "")
                    if das_model.strip() == "":
                        LOGGER.error(
                            "No Das model for array %s, station %s"
                            % (array, id_s))
                    sensor_model = station_list[deployment][st_num][
                        'sensor/model_s']
                    sensor_model = sensor_model.replace(" ", "")
                    # ZLAND nodes have an integrated sensor; otherwise
                    # an empty sensor model is worth an error
                    if das_model.startswith("ZLAND"):
                        sensor_model = ""
                    elif sensor_model.strip() == "":
                        LOGGER.error(
                            "No Sensor model for array %s, station %s"
                            % (array, id_s))
                    channel = station_list[deployment][st_num][
                        'channel_number_i']
                    serial = station_list[deployment][st_num][
                        'das/serial_number_s']
                    pickup = station_list[deployment][st_num][
                        'pickup_time/epoch_l']
                    deploy = station_list[deployment][st_num][
                        'deploy_time/epoch_l']
                    self.ph5.read_das_t(serial, deploy, pickup,
                                        reread=False)
                    try:
                        Das_t = ph5api.filter_das_t(
                            self.ph5.Das_t[serial]['rows'], channel)
                    except BaseException:
                        # missing DAS table: warn and skip the rest of
                        # this deployment
                        LOGGER.warning(
                            "No DAS table found for das {0} channel "
                            "{1}.\n".format(serial, channel))
                        break
                    # pick the DAS row matching this sample rate,
                    # multiplier and channel.
                    # NOTE(review): if no row matches, response_n_i /
                    # receiver_n_i may be unbound; the try/except
                    # around Station() below is what catches that.
                    for entry in Das_t:
                        if (entry['sample_rate_i'] == sample_rate and
                                entry['sample_rate_multiplier_i'] ==
                                sample_rate_multiplier and
                                entry['channel_number_i'] == channel):
                            response_n_i = entry['response_table_n_i']
                            receiver_n_i = entry['receiver_table_n_i']
                            break
                    if channel == -2:
                        # in metadata
                        # channel=-2 for no resp => n_i=-1
                        response_n_i = -1
                    Response_t = self.ph5.get_response_t_by_n_i(
                        response_n_i)
                    if Response_t:
                        gain = Response_t['gain/value_i']
                        bit_weight = Response_t['bit_weight/value_d']
                        bit_weight_units = Response_t[
                            'bit_weight/units_s']
                        gain_units = Response_t['gain/units_s']
                    else:
                        LOGGER.warning(
                            "No Response table found for das {0} channel "
                            "{1}.\n".format(serial, channel))
                    try:
                        stations.append(
                            Station(id_s, station, channel,
                                    sample_rate,
                                    sample_rate_multiplier,
                                    das_model.strip(),
                                    sensor_model.strip(), gain,
                                    response_n_i, receiver_n_i,
                                    bit_weight, bit_weight_units,
                                    gain_units, serial))
                    except BaseException:
                        # any unbound metadata above lands here
                        LOGGER.error("Couldn't add station.")
                        continue
    return stations
def create_cut_list(self):
    """
    Build a chained iterator of station cuts for the current request.

    For SHOT/RECEIVER requests the cut windows come from the matched
    shot times; for FDSN requests ``create_cut`` derives them from the
    deploy/pickup times and the user's time range.

    :returns: itertools.chain over the per-entry ``create_cut``
        generators.
    :raises PH5toMSAPIError: on missing net code, network-code mismatch,
        or when explicitly requested shotlines/shotids do not exist.
    """
    cuts_generator = []
    experiment_t = self.ph5.Experiment_t['rows']
    try:
        seed_network = experiment_t[0]['net_code_s']
    except BaseException:
        raise PH5toMSAPIError("Error-No net_code_s entry in Experiment_t. "
                              "Verify that this experiment is "
                              "PH5 version >= PN4.")
    if self.netcode and self.netcode != seed_network:
        raise PH5toMSAPIError(
            "Error - The requested SEED network code does "
            "not match this PH5 experiment network code. "
            "{0} != {1}".format(self.netcode, seed_network))
    experiment_id = experiment_t[0]['experiment_id_s']
    array_names = sorted(self.ph5.Array_t_names)
    self.read_events(None)
    if self.reqtype == "SHOT" or self.reqtype == "RECEIVER":
        # create list of all matched shotlines and shot-ids for request by
        # shot or receiver
        shot_lines = sorted(self.ph5.Event_t_names)
        matched_shot_lines = []
        matched_shots = []
        for shot_line in shot_lines:
            # shotline pattern matches the last 3 chars of the name
            if not self.shotline or ph5utils.does_pattern_exists(
                    self.shotline, shot_line[-3:]):
                matched_shot_lines.append(shot_line)
            else:
                continue
            event_t = self.ph5.Event_t[shot_line]['byid']
            for shot_id, _ in event_t.iteritems():
                if not self.eventnumbers or ph5utils.does_pattern_exists(
                        self.eventnumbers, shot_id):
                    matched_shots.append(shot_id)
                else:
                    continue
        if self.shotline and not matched_shot_lines:
            raise PH5toMSAPIError(
                "Error - requested shotline(s) do not exist.")
        elif self.eventnumbers and not matched_shots:
            raise PH5toMSAPIError(
                "Error - requested shotid(s) do not exist.")
    for array_name in array_names:
        array_code = array_name[8:]  # get 3 digit array code
        if self.array:
            array_patterns = self.array
            if not ph5utils.does_pattern_exists(
                    array_patterns, str(array_code)):
                continue
        self.read_arrays(array_name)
        arraybyid = self.ph5.Array_t[array_name]['byid']
        arrayorder = self.ph5.Array_t[array_name]['order']
        for ph5_station in arrayorder:
            if self.station_id:
                sta_list = self.station_id
                if not ph5utils.does_pattern_exists(sta_list,
                                                    ph5_station):
                    continue
            station_list = arraybyid.get(ph5_station)
            for deployment in station_list:
                # cut windows are accumulated per deployment
                station_cut_times = []
                station_len = len(station_list[deployment])
                for st_num in range(0, station_len):
                    if station_list[deployment][
                            st_num]['seed_station_name_s']:
                        seed_station = station_list[deployment][
                            st_num]['seed_station_name_s']
                    else:
                        seed_station = station_list[
                            deployment][st_num]['id_s']
                    if self.station:
                        sta_patterns = self.station
                        if not ph5utils.does_pattern_exists(
                                sta_patterns, seed_station):
                            continue
                    if (self.reqtype == "SHOT" or
                            self.reqtype == "RECEIVER"):
                        # request by shot
                        for shotline in matched_shot_lines:
                            for shot in matched_shots:
                                try:
                                    event_t = self.ph5.Event_t[
                                        shotline]['byid'][shot]
                                    # we add event info here for data
                                    # formats that use it, like SEG-Y
                                    sct = \
                                        StationCutTime(
                                            event_t['time/epoch_l'],
                                            shot_id=event_t['id_s'],
                                            shot_lat=event_t
                                            ['location/Y/value_d'],
                                            shot_lng=event_t
                                            ['location/X/value_d'],
                                            shot_elevation=event_t
                                            ['location/Z/value_d']
                                        )
                                    station_cut_times.append(sct)
                                except Exception:
                                    raise PH5toMSAPIError(
                                        "Error reading events table.")
                        cuts_generator.append(self.create_cut(
                            seed_network, ph5_station, seed_station,
                            station_cut_times, station_list, deployment,
                            st_num, array_code, experiment_id))
                    elif self.reqtype == "FDSN":
                        # fdsn request
                        cuts_generator.append(self.create_cut(
                            seed_network, ph5_station, seed_station,
                            station_cut_times, station_list, deployment,
                            st_num, array_code, experiment_id))
    return itertools.chain.from_iterable(cuts_generator)
def create_cut(self, seed_network, ph5_station, seed_station,
               station_cut_times, station_list, deployment, st_num,
               array_code, experiment_id):
    """
    Generator yielding StationCut objects for one station entry.

    Applies the sample-rate / component / channel / DAS-serial filters,
    derives the cut start times (FDSN: from the user window clamped to
    deploy/pickup; SHOT/RECEIVER: from station_cut_times), splits long
    windows into day- or cut_len-sized pieces, and deduplicates cuts via
    ``self.hash_list``.

    :yields: StationCut instances.
    :raises PH5toMSAPIError: when a SHOT/RECEIVER request has no length.
    """
    deploy = station_list[deployment][st_num]['deploy_time/epoch_l']
    deploy_micro = station_list[deployment][
        st_num]['deploy_time/micro_seconds_i']
    pickup = station_list[deployment][st_num]['pickup_time/epoch_l']
    pickup_micro = station_list[deployment][
        st_num]['pickup_time/micro_seconds_i']
    location = station_list[deployment][
        st_num]['seed_location_code_s']
    das = station_list[deployment][st_num]['das/serial_number_s']
    das_manufacturer = station_list[deployment][st_num][
        'das/manufacturer_s']
    das_model = station_list[deployment][st_num][
        'das/model_s']
    # "manufacturer model", skipping empty parts
    sensor_type = " ".join([x for x in
                            [station_list[deployment][st_num][
                                'sensor/manufacturer_s'],
                             station_list[deployment][st_num][
                                'sensor/model_s']] if x])
    receiver_n_i = station_list[deployment][st_num]['receiver_table_n_i']
    response_n_i = station_list[deployment][st_num]['response_table_n_i']
    # NOTE(review): presence is checked on entry [0] but the value is
    # read from entry [st_num] — presumably the schema is uniform
    # across a deployment; confirm.
    if 'sample_rate_i' in station_list[deployment][0]:
        sample_rate = station_list[deployment][st_num]['sample_rate_i']
    sample_rate_multiplier = 1
    if ('sample_rate_multiplier_i' in
            station_list[deployment][st_num]):
        sample_rate_multiplier = station_list[
            deployment][st_num]['sample_rate_multiplier_i']
    if self.sample_rate_list:
        sample_list = self.sample_rate_list
        if not ph5utils.does_pattern_exists(sample_list, sample_rate):
            return
    seed_channel, component = self.get_channel_and_component(
        station_list, deployment, st_num)
    if self.component:
        component_list = self.component
        if not ph5utils.does_pattern_exists(component_list, component):
            return
    if self.channel:
        cha_patterns = self.channel
        if not ph5utils.does_pattern_exists(cha_patterns, seed_channel):
            return
    if self.das_sn and self.das_sn != das:
        return
    if self.reqtype == "FDSN":
        # trim user defined time range if it extends beyond the
        # deploy/pickup times
        if self.start_time:
            # "T" distinguishes ISO datestrings from PASSCAL times
            if "T" not in self.start_time:
                check_start_time = passcal2epoch(
                    self.start_time, fepoch=True)
                if float(check_start_time) > float(deploy):
                    start_fepoch = self.start_time
                    sct = StationCutTime(
                        passcal2epoch(start_fepoch, fepoch=True)
                    )
                    station_cut_times.append(sct)
                else:
                    sct = StationCutTime(deploy)
                    station_cut_times.append(sct)
            else:
                check_start_time = ph5utils.datestring_to_epoch(
                    self.start_time)
                if float(check_start_time) > float(deploy):
                    sct = StationCutTime(
                        ph5utils.datestring_to_epoch(self.start_time))
                    station_cut_times.append(sct)
                else:
                    sct = StationCutTime(deploy)
                    station_cut_times.append(sct)
            # requested start entirely after pickup: nothing to cut
            if float(check_start_time) > float(pickup):
                return
        else:
            sct = StationCutTime(
                ph5api.fepoch(deploy, deploy_micro)
            )
            station_cut_times.append(sct)
    for sct in station_cut_times:
        start_fepoch = sct.time
        if self.reqtype == "SHOT" or self.reqtype == "RECEIVER":
            if self.offset:
                # adjust starttime by an offset
                start_fepoch += int(self.offset)
            if self.length:
                stop_fepoch = start_fepoch + self.length
            else:
                raise PH5toMSAPIError(
                    "Error - length is required for request by shot.")
        elif self.reqtype == "FDSN":
            if self.end_time:
                if "T" not in self.end_time:
                    check_end_time = passcal2epoch(
                        self.end_time, fepoch=True)
                    if float(check_end_time) < float(pickup):
                        stop_fepoch = self.end_time
                        stop_fepoch = passcal2epoch(
                            stop_fepoch, fepoch=True)
                    else:
                        stop_fepoch = pickup
                else:
                    check_end_time = ph5utils.datestring_to_epoch(
                        self.end_time)
                    if float(check_end_time) < float(pickup):
                        stop_fepoch = ph5utils.datestring_to_epoch(
                            self.end_time)
                    else:
                        stop_fepoch = pickup
                # requested end entirely before deploy: skip window
                if float(check_end_time) < float(deploy):
                    continue
            elif self.length:
                stop_fepoch = start_fepoch + self.length
            else:
                stop_fepoch = ph5api.fepoch(pickup, pickup_micro)
        if (self.use_deploy_pickup is True and not
                ((int(start_fepoch) >= deploy and
                  int(stop_fepoch) <= pickup))):
            # das not deployed within deploy/pickup time
            continue
        start_passcal = epoch2passcal(start_fepoch, sep=':')
        start_passcal_list = start_passcal.split(":")
        # PASSCAL time field 1 is the day-of-year
        start_doy = start_passcal_list[1]
        if self.doy_keep:
            if start_doy not in self.doy:
                continue
        midnight_fepoch, secondLeftInday = \
            ph5utils.inday_breakup(start_fepoch)
        # if (stop_fepoch - start_fepoch) > 86400:
        if (stop_fepoch - start_fepoch) > secondLeftInday:
            # window spans a day boundary: split into cut_len- or
            # day-aligned pieces
            seconds_covered = 0
            total_seconds = stop_fepoch - start_fepoch
            times_to_cut = []
            if self.cut_len != 86400:
                stop_time, seconds = ph5utils.doy_breakup(
                    start_fepoch, self.cut_len)
            else:
                stop_time, seconds = ph5utils.inday_breakup(
                    start_fepoch)
            seconds_covered = seconds_covered + seconds
            times_to_cut.append([start_fepoch, stop_time])
            start_time = stop_time
            while seconds_covered < total_seconds:
                if self.cut_len != 86400:
                    stop_time, seconds = ph5utils.doy_breakup(
                        start_time, self.cut_len)
                else:
                    stop_time, seconds = ph5utils.inday_breakup(
                        start_time)
                seconds_covered += seconds
                if stop_time > stop_fepoch:
                    times_to_cut.append([start_time, stop_fepoch])
                    break
                times_to_cut.append([start_time, stop_time])
                start_time = stop_time
        else:
            times_to_cut = [[start_fepoch, stop_fepoch]]
        # clamp the final piece to the requested stop time
        times_to_cut[-1][-1] = stop_fepoch
        latitude = station_list[deployment][
            st_num]['location/Y/value_d']
        longitude = station_list[deployment][
            st_num]['location/X/value_d']
        elev = station_list[deployment][
            st_num]['location/Z/value_d']
        for starttime, endtime in tuple(times_to_cut):
            # skip windows with no DAS data
            try:
                self.ph5.query_das_t(das, component, starttime,
                                     endtime, sample_rate,
                                     sample_rate_multiplier
                                     )
            except experiment.HDF5InteractionError:
                continue
            station_x = StationCut(
                seed_network, experiment_id, ph5_station, seed_station,
                array_code, das, das_manufacturer, das_model,
                sensor_type, component, seed_channel, starttime,
                endtime, sample_rate, sample_rate_multiplier,
                self.notimecorrect, location, latitude, longitude,
                elev, receiver_n_i, response_n_i,
                shot_id=sct.shot_id, shot_lat=sct.shot_lat,
                shot_lng=sct.shot_lng,
                shot_elevation=sct.shot_elevation)
            # deduplicate identical cuts across arrays/deployments
            station_hash = hash(frozenset([seed_station, das, latitude,
                                           longitude, sample_rate,
                                           sample_rate_multiplier,
                                           starttime, endtime]))
            if station_hash in self.hash_list:
                continue
            else:
                self.hash_list.append(station_hash)
                yield station_x
def read_stations(self):
    """
    Build obs station objects for every array/station/deployment entry
    matching the request list, deduplicated by a composite key of code,
    coordinates, dates, elevation and extras.

    :returns: list of unique obs station objects.
    """
    all_stations = []
    # set for O(1) dedup-key membership (was a list with O(n) lookups)
    all_stations_keys = set()
    for sta_xml_obj in self.manager.request_list:
        array_patterns = sta_xml_obj.array_list
        for array_name in self.array_names:
            # array code is the tail of the table name, e.g. Array_t_001
            array = array_name[8:]
            if not ph5utils.does_pattern_exists(array_patterns, array):
                continue
            # (removed a duplicated arraybyid assignment)
            arraybyid = self.manager.ph5.Array_t[array_name]['byid']
            arrayorder = self.manager.ph5.Array_t[array_name]['order']
            for x in arrayorder:
                station_list = arraybyid.get(x)
                obs_channels = []
                if x not in sta_xml_obj.ph5_station_id_list:
                    continue
                for deployment in station_list:
                    sta_longitude = station_list[deployment][0][
                        'location/X/value_d']
                    sta_latitude = station_list[deployment][0][
                        'location/Y/value_d']
                    sta_elevation = station_list[deployment][0][
                        'location/Z/value_d']
                    if not self.is_lat_lon_match(sta_xml_obj,
                                                 sta_latitude,
                                                 sta_longitude):
                        continue
                    if station_list[deployment][0][
                            'seed_station_name_s']:
                        station_name = station_list[deployment][0][
                            'seed_station_name_s']
                    else:
                        station_name = x
                    start_date = station_list[deployment][0][
                        'deploy_time/epoch_l']
                    start_date = UTCDateTime(start_date)
                    end_date = station_list[deployment][0][
                        'pickup_time/epoch_l']
                    end_date = UTCDateTime(end_date)
                    if sta_xml_obj.start_time and \
                            sta_xml_obj.start_time > end_date:
                        # chosen start time after pickup
                        continue
                    elif sta_xml_obj.end_time and \
                            sta_xml_obj.end_time < start_date:
                        # chosen end time before pickup
                        continue
                    obs_station = self.create_obs_station(
                        station_list, station_name, array_name,
                        start_date, end_date, sta_longitude,
                        sta_latitude, sta_elevation, deployment)
                    # run channel filters when the level or any
                    # non-wildcard pattern requires them
                    if self.manager.level.upper() == "RESPONSE" or \
                            self.manager.level.upper() == "CHANNEL" or \
                            sta_xml_obj.location_list != ['*'] or \
                            sta_xml_obj.channel_list != ['*'] or \
                            sta_xml_obj.component_list != ['*'] or \
                            sta_xml_obj.receiver_list != ['*']:
                        obs_channels = self.read_channels(
                            sta_xml_obj, station_list)
                        obs_station.channels = obs_channels
                        obs_station.total_number_of_channels = len(
                            station_list)
                        obs_station.selected_number_of_channels = len(
                            obs_channels)
                        # drop stations left with no channels
                        if obs_station and \
                                obs_station.selected_number_of_channels \
                                == 0:
                            continue
                    else:
                        obs_station.total_number_of_channels = len(
                            station_list)
                        obs_station.selected_number_of_channels = 0
                    # renamed from 'hash' to avoid shadowing the builtin
                    sta_key = "{}.{}.{}.{}.{}.{}.{}".format(
                        obs_station.code, obs_station.latitude,
                        obs_station.longitude, obs_station.start_date,
                        obs_station.end_date, obs_station.elevation,
                        obs_station.extra)
                    if sta_key not in all_stations_keys:
                        all_stations_keys.add(sta_key)
                        all_stations.append(obs_station)
    return all_stations