def test_check_lat_lon_elev(self):
    """Run check_lat_lon_elev against valid, out-of-range, and
    all-zero locations and verify the reported errors/warnings."""

    def build_location(x, x_units, y, y_units, z, z_units):
        # Assemble a station entry in the Experiment_t column layout
        # expected by validation.check_lat_lon_elev.
        return {'location/X/value_d': x,
                'location/X/units_s': x_units,
                'location/Y/value_d': y,
                'location/Y/units_s': y_units,
                'location/Z/value_d': z,
                'location/Z/units_s': z_units}

    # A fully valid location produces neither errors nor warnings.
    station = build_location(100.0, 'degrees', 70.0, 'degrees', 1047, 'm')
    errors, warnings = validation.check_lat_lon_elev(station)
    self.assertEqual(errors, [])
    self.assertEqual(warnings, [])

    # Out-of-range lon/lat are errors; missing units strings and a
    # zero elevation are only warnings.
    station = build_location(190.0, '', -100.0, '', 0.0, '')
    errors, warnings = validation.check_lat_lon_elev(station)
    self.assertEqual(
        errors,
        ['Channel longitude 190.0 not in range [-180,180]',
         'Channel latitude -100.0 not in range [-90,90]'])
    self.assertEqual(
        warnings,
        ['No Station location/X/units_s value found.',
         'No Station location/Y/units_s value found.',
         'Channel elevation seems to be 0. Is this correct???',
         'No Station location/Z/units_s value found.'])

    # All-zero coordinates are suspicious but not errors; empty-string
    # and None units are both reported as missing.
    station = build_location(0, '', 0, None, 0, None)
    errors, warnings = validation.check_lat_lon_elev(station)
    self.assertEqual(errors, [])
    self.assertEqual(
        warnings,
        ['Channel longitude seems to be 0. Is this correct???',
         'No Station location/X/units_s value found.',
         'Channel latitude seems to be 0. Is this correct???',
         'No Station location/Y/units_s value found.',
         'Channel elevation seems to be 0. Is this correct???',
         'No Station location/Z/units_s value found.'])
def is_lat_lon_match(self, sta_xml_obj, station):
    """
    Check whether the given station's latitude/longitude matches the
    geographic query constraints and the completeness of
    latitude/longitude/elevation.

    :param sta_xml_obj: a PH5toStationXMLRequest object for checking
        lat/lon box intersection and point/radius intersection
    :param station: a station entry of latitude, longitude and
        elevation to be checked
    :return: list of error strings; empty when the station matches
    """
    errors = []
    latitude = float(station['location/Y/value_d'])
    longitude = float(station['location/X/value_d'])
    # FIX: check_lat_lon_elev() returns an (errors, warnings) tuple --
    # the old in-place signature check_lat_lon_elev(station, errors)
    # silently discarded the result. Warnings are non-blocking at the
    # other call sites (see read_stations), so only errors are
    # collected here -- NOTE(review): confirm warnings should be
    # excluded from the match decision.
    loc_errors, _loc_warnings = validation.check_lat_lon_elev(station)
    errors.extend(loc_errors)
    # check if lat/lon box intersection
    if not ph5utils.is_rect_intersection(sta_xml_obj.minlatitude,
                                         sta_xml_obj.maxlatitude,
                                         sta_xml_obj.minlongitude,
                                         sta_xml_obj.maxlongitude,
                                         latitude,
                                         longitude):
        errors.append(
            box_intersection_err(latitude,
                                 sta_xml_obj.minlatitude,
                                 sta_xml_obj.maxlatitude,
                                 longitude,
                                 sta_xml_obj.minlongitude,
                                 sta_xml_obj.maxlongitude))
    # check if point/radius intersection
    if not ph5utils.is_radial_intersection(sta_xml_obj.latitude,
                                           sta_xml_obj.longitude,
                                           sta_xml_obj.minradius,
                                           sta_xml_obj.maxradius,
                                           latitude,
                                           longitude):
        errors.append(
            radial_intersection_err(latitude,
                                    longitude,
                                    sta_xml_obj.minradius,
                                    sta_xml_obj.maxradius,
                                    sta_xml_obj.latitude,
                                    sta_xml_obj.longitude))
    return errors
def read_stations(self):
    """
    Collect the stations matching every request in
    self.manager.request_list across all arrays.

    For each station entry: validate lat/lon/elev (recording problems
    in self.unique_errors), apply geographic and time-window filters,
    optionally filter channels first (so no empty stations are
    created), then create -- or reuse -- a station object keyed by
    self.manager.get_station_key().

    :return: list of newly created station objects (one per unique
        station key)
    """
    all_stations = []
    for sta_xml_obj in self.manager.request_list:
        array_patterns = sta_xml_obj.array_list
        for array_name in self.array_names:
            # assumes array names carry the code after 8 leading
            # characters (e.g. 'Array_t_XXX') -- TODO confirm
            array_code = array_name[8:]
            if not ph5utils.does_pattern_exists(array_patterns,
                                                array_code):
                continue
            arraybyid = self.manager.ph5.Array_t[array_name]['byid']
            arrayorder = self.manager.ph5.Array_t[array_name]['order']
            for sta_id in arrayorder:
                station_list = arraybyid.get(sta_id)
                obs_channels = []
                if sta_id not in sta_xml_obj.ph5_station_id_list:
                    continue
                # station_list maps deployment -> list of entries;
                # walk every (deployment, epoch, entry) triple
                for deployment, station_epoch, station_entry in \
                        ((dk, dv, se)
                         for dk, dv in station_list.items()
                         for se in dv):
                    longitude = station_entry['location/X/value_d']
                    latitude = station_entry['location/Y/value_d']
                    elevation = station_entry['location/Z/value_d']
                    if station_entry['seed_station_name_s']:
                        station_code = \
                            station_entry['seed_station_name_s']
                    else:
                        station_code = sta_id
                    # location problems are reported per severity;
                    # only errors (below) exclude the entry
                    errors, warnings = validation.check_lat_lon_elev(
                        station_entry)
                    header = "array %s, station %s, channel %s: " % \
                        (array_code,
                         station_code,
                         station_entry['channel_number_i'])
                    for e in errors:
                        msg = header + str(e)
                        self.unique_errors.add((msg, 'error'))
                    for e in warnings:
                        msg = header + str(e)
                        self.unique_errors.add((msg, 'warning'))
                    if errors != []:
                        continue
                    if not self.check_intersection(sta_xml_obj,
                                                   latitude,
                                                   longitude):
                        continue
                    start_date = UTCDateTime(
                        station_entry['deploy_time/epoch_l'])
                    end_date = UTCDateTime(
                        station_entry['pickup_time/epoch_l'])
                    if (sta_xml_obj.start_time and
                            sta_xml_obj.start_time > end_date):
                        # chosen start time after pickup
                        continue
                    elif (sta_xml_obj.end_time and
                            sta_xml_obj.end_time < start_date):
                        # chosen end time before pickup
                        continue
                    # run channel filters if necessary. we do this
                    # first to avoid creating a station that has no
                    # channels
                    if (self.manager.level.upper() == "RESPONSE" or
                            self.manager.level.upper() == "CHANNEL" or
                            sta_xml_obj.location_list != ['*'] or
                            sta_xml_obj.channel_list != ['*'] or
                            sta_xml_obj.component_list != ['*'] or
                            sta_xml_obj.receiver_list != ['*']):
                        obs_channels = self.read_channels(
                            sta_xml_obj,
                            station_entry,
                            deployment,
                            station_code,
                            array_code)
                        # go to the next station if no channels were
                        # returned
                        if len(obs_channels) == 0:
                            continue
                    sta_key = self.manager.get_station_key(
                        station_code,
                        start_date,
                        end_date,
                        longitude,
                        latitude,
                        elevation,
                        station_entry['location/description_s'])
                    if self.manager.get_obs_station(sta_key):
                        # station already created and added to metadata
                        obs_station = \
                            self.manager.get_obs_station(sta_key)
                    else:
                        # create and add a new station
                        obs_station = self.create_obs_station(
                            station_code,
                            start_date,
                            end_date,
                            longitude,
                            latitude,
                            elevation,
                            start_date,  # creation_date
                            end_date,  # termination date
                            station_entry['location/description_s'])
                    # Add matching channels to station if necessary
                    if obs_channels:
                        obs_station.channels.extend(obs_channels)
                        obs_station.selected_number_of_channels = \
                            len(obs_station.channels)
                    else:
                        obs_station.selected_number_of_channels = 0
                    obs_station.total_number_of_channels += \
                        len(station_list)
                    # only a brand-new station (not yet registered
                    # under sta_key) is appended and registered
                    if self.manager.get_obs_station(sta_key) is None:
                        all_stations.append(obs_station)
                        self.manager.set_obs_station(sta_key,
                                                     obs_station)
    return all_stations
def check_station_completeness(self, station):
    """
    Check a station/channel entry of Experiment_t for completeness.

    Station-level checks: id_s (0..65535), description_s,
    seed_station_name_s (1-5 characters).

    Channel-level checks: seed location code (0-2 characters), seed
    band/instrument/orientation codes, sample_rate_i > 0,
    sample_rate_multiplier_i > 0, response_table_n_i resolves to a
    Response table, lat/lon/elev completeness, deploy/pickup epochs
    (deploy must not be after pickup), das/sensor serial, manufacturer
    and model, duplicate/overlapping deploy windows for the same das,
    and that waveform data actually exists for the deploy window.

    :param station: a station entry (dict-like row of Experiment_t)
    :return: tuple of (info, warning, error) message lists
    """
    info = []
    warning = []
    error = []
    # STATION LEVEL CHECKS
    station_id = station['id_s']
    try:
        if not (0 <= int(station_id) <= 65535):
            error.append("Station ID not "
                         "between 0 and 65535")
    except ValueError:
        error.append("Station ID not a whole "
                     "number between 0 "
                     "and 65535 ")
    if not station['description_s']:
        warning.append("No station description found.")
    if not station['seed_station_name_s']:
        error.append("SEED station code required.")
    # FIX: 'or ""' guards against a None station name, which would
    # raise TypeError in len(); None behaves like an empty string here
    if not (1 <= len(station['seed_station_name_s'] or '') <= 5):
        error.append("SEED station code not "
                     "between 1 and 5 characters.")
    # CHANNEL LEVEL CHECKS
    if station['seed_location_code_s'] is None:
        error.append("SEED location code "
                     "required.")
    # FIX: same None guard -- the preceding branch already reported the
    # missing code, so len(None) must not crash the length check
    if not (0 <= len(station['seed_location_code_s'] or '') <= 2):
        error.append("SEED location code not "
                     "between 0 and 2 characters.")
    if not station['seed_band_code_s']:
        error.append("SEED band code required.")
    if not station['seed_instrument_code_s']:
        error.append("SEED instrument code required.")
    if not station['seed_orientation_code_s']:
        error.append("SEED orientation code "
                     "required.")
    if station['sample_rate_i'] <= 0:
        warning.append("Sample rate seems to be <= 0. "
                       "Is this correct???")
    if station['sample_rate_multiplier_i'] <= 0:
        warning.append("Sample rate multiplier <= 0. "
                       "Is this correct???")
    response_t = self.ph5.get_response_t_by_n_i(
        station['response_table_n_i'])
    if response_t is None:
        error.append("No Response table found. "
                     "Have you run resp_load yet?")
    # CHANNEL LOCATION
    # FIX: check_lat_lon_elev() returns an (errors, warnings) tuple;
    # the old in-place call check_lat_lon_elev(station, error) dropped
    # the result entirely
    loc_errors, loc_warnings = validation.check_lat_lon_elev(station)
    error += loc_errors
    warning += loc_warnings
    # CHANNEL DEPLOY/PICKUP
    deploy_time = station['deploy_time/epoch_l']
    if deploy_time is None:
        error.append("No deploy_time value "
                     "found for channel.")
    pickup_time = station['pickup_time/epoch_l']
    if pickup_time is None:
        error.append("No pickup_time value "
                     "found for channel.")
    das_serial = station['das/serial_number_s']
    sensor_serial = station['sensor/serial_number_s']
    if deploy_time > pickup_time:
        error.append("Deploy time is after pickup time")
    # CHANNEL SENSOR/DAS
    channel_id = station['channel_number_i']
    if das_serial is None:
        error.append("Das serial number is missing.")
    if sensor_serial is None:
        warning.append("Sensor serial number is missing.")
    self.ph5.read_das_t(das_serial, reread=False)
    sample_rate = station['sample_rate_i']
    nodata_err = None
    if das_serial not in self.ph5.Das_t:
        # FIX: remember the message in nodata_err so the extent check
        # below does not emit a second "no data" error for the same das
        # (the original never assigned nodata_err, defeating the
        # duplicate suppression its own comment describes)
        nodata_err = ("No data found for das serial number {0}. "
                      "You may need to reload the raw "
                      "data for this station.".format(str(das_serial)))
        error.append(nodata_err)
    dt = self.das_time[(das_serial, channel_id, sample_rate)]
    # add bound_errors if applicable
    if deploy_time == dt['min_deploy_time'][0]:
        try:
            warning.append(dt['min_deploy_time'][1])
        except IndexError:
            pass
    das_time_list = copy.copy(dt['time_windows'])
    # check for duplicates:
    item = (deploy_time, pickup_time, station_id)
    dups = [i for i, x in enumerate(das_time_list) if x == item]
    if len(dups) > 1:
        warning.append(
            "Station %s [%s, %s] is repeated %s time(s)" %
            (station_id, deploy_time, pickup_time, len(dups) - 1))
        # delete all duplicates except for the last one
        del das_time_list[dups[0]:dups[-1]]
    index = das_time_list.index((deploy_time, pickup_time, station_id))
    overlaps = []
    # check if there is any overlap time for this das
    for t in das_time_list:
        if ((t[0] <= deploy_time < t[1]) or
                (t[0] < pickup_time <= t[1])):
            overlaps.append(t[2])
    if len(overlaps) > 1:
        error.append("Overlap time on station(s): %s" %
                     ", ".join(overlaps))
    try:
        # don't need to read das_t because it will be read in get_extent
        if (index <= (len(das_time_list) - 1)):
            # current deploy time
            check_start = das_time_list[index][0]
            if index == len(das_time_list) - 1:
                # for last index, need to check if no data exist
                # so check from curr deploy time to current pickup time
                check_end = das_time_list[index][1]
            else:
                # -- check data from current deploy time to
                # next deploy time -1 (-1 to avoid include next
                # deploy time)
                check_end = das_time_list[index + 1][0] - 1
                i = 1
                # while loop to avoid using overlaping row
                while check_end < check_start:
                    i += 1
                    check_end = das_time_list[index + i][0] - 1
            try:
                # clear das to make sure get_extent consider
                # channel & sr
                self.ph5.forget_das_t(das_serial)
            except AttributeError:
                pass
            true_start, true_end = \
                self.ph5.get_extent(das=das_serial,
                                    component=channel_id,
                                    start=check_start,
                                    end=check_end,
                                    sample_rate=sample_rate)
            if true_start is None and nodata_err is None:
                # check nodata_err to avoid duplicate error
                error.append(
                    "No data found for das serial number {0} during "
                    "this "
                    "station's time. You may need to reload the raw "
                    "data for this station.".format(str(das_serial)))
            else:
                # don't check deploy time because the time sent to
                # get_extent() is limited from deploy time
                if pickup_time < true_end:
                    time = int(true_end - pickup_time)
                    warning.append(
                        "Data exists after pickup time: %s seconds."
                        % time)
    except KeyError:
        try:
            # avoid opening too many files
            self.ph5.forget_das_t(das_serial)
        except Exception:
            pass
        error.append("No data found for channel {0}. "
                     "Other channels seem to exist".format(
                         str(channel_id)))
    if not station['sensor/manufacturer_s']:
        warning.append("Sensor manufacturer is "
                       "missing. Is this correct???")
    if not station['sensor/model_s']:
        warning.append("Sensor model is missing. "
                       "Is this correct???")
    if not station['das/manufacturer_s']:
        warning.append("DAS manufacturer is missing. "
                       "Is this correct???")
    if not station['das/model_s']:
        warning.append("DAS model is missing. "
                       "Is this correct???")
    return info, warning, error