def test_mix_ins(self):
    """Exercise the mix-in helpers at the end of ph5api that the API
    itself never calls: is_in, fepoch and filter_das_t."""
    # is_in: window overlapping [0, 10000] -> True
    self.assertTrue(ph5api.is_in(0, 10000, 5, 1000))
    # is_in: window starting past the range -> False
    self.assertFalse(ph5api.is_in(99999, 10000, 5, 1000))
    # fepoch: combines an integer epoch and microseconds into a float
    self.assertEqual(1000000.009555, ph5api.fepoch(1000000, 9555))
    self.assertEqual(1978446, ph5api.fepoch(1978346, 100000000))
    # filter_das_t: keeps only the rows for the requested channel
    self.ph5API_object.read_das_t('5553')
    rows = self.ph5API_object.Das_t['5553']['rows']
    self.assertEqual(2, len(ph5api.filter_das_t(rows, 1)))
    self.assertEqual(1, len(ph5api.filter_das_t(rows, -2)))
    self.assertEqual(0, len(ph5api.filter_das_t(rows, -9)))
def get_time_das_t(self, das, start, end,
                   component=None, sample_rate=None):
    """
    Look up Das_t rows for a das inside a time window.

    :para das: name of das
    :para start: start time, can be None (no lower bound)
    :para end: end time, can be None (no upper bound)
    :para component: channel number, default None
    :para sample_rate: default None
    :return:
        + earliest_epoch, latest_epoch: time range for channel
        + new_das_t: list of data for das in the given time
          range (start-end) (all channels or only the channel
          required)
        or -1 when no matching Das_t rows are found.
    If component and sample_rate are None, channels are ignored; this
    mode only checks whether start/end intersect the das' time range.
    Useful when channel is '*' so each channel need not be checked.
    """
    # Open-ended bounds: 0 / a far-future epoch stand in for None.
    s = 0 if start is None else start
    e = 32509613590 if end is None else end
    # Human-readable description of the window, for log messages only.
    if start is None and end is None:
        rangestr = ""
    elif start is None:
        rangestr = " before %s " % end
    elif end is None:
        rangestr = " after %s " % start
    else:
        rangestr = " in range %s - %s " % (start, end)
    if sample_rate is not None:
        if component is None:
            raise PH5AvailabilityError(
                "get_time_das_t requires component when "
                "sample_rate is given")
        # Narrow query: only this channel at this sample rate.
        Das_t = self.ph5.query_das_t(
            das,
            chan=component,
            start_epoch=s,
            stop_epoch=e,
            sample_rate=sample_rate)
        if not Das_t:
            LOGGER.warning(
                "No Das table found for %s %s for "
                "component: %s, samplerate:%s"
                % (das, rangestr, component, sample_rate))
            return -1
    else:
        # include all channelnum and sample_rate
        self.ph5.read_das_t(das, s, e, reread=False)
        if das not in self.ph5.Das_t:
            LOGGER.warning("No Das table found for %s %s"
                           % (das, rangestr))
            return -1
        Das_t = self.ph5.Das_t[das]['rows']
        if component is not None:
            # include all sample_rate
            Das_t = ph5api.filter_das_t(Das_t, component)
    # Sort rows chronologically so first/last give the extent.
    new_das_t = sorted(Das_t, key=lambda k: k['time/epoch_l'])
    # if not new_das_t:
    #     LOGGER.warning("No Das table found for %s %s"
    #                    % (das, rangestr))
    #     self.ph5.forget_das_t(das)
    #     return -1
    # NOTE(review): with the guard above commented out, an empty
    # new_das_t would raise IndexError on the next line — confirm the
    # earlier returns make that impossible.
    earliest_epoch = self.get_start(new_das_t[0])
    latest_epoch_start = self.get_start(new_das_t[-1])
    true_sample_rate = self.get_sample_rate(new_das_t[-1])
    latest_epoch = self.get_end(new_das_t[-1], latest_epoch_start,
                                true_sample_rate)
    # don't need to compare start with latest_epoch (following lines)
    # because this case has been filtered out with read_das_t()
    # and query_das_t => comment out to exclude from testing
    # if start is not None and start > latest_epoch:
    #     self.ph5.forget_das_t(das)
    #     return -1
    # for the case end = time/epoch_l if there is time/micro_seconds_i,
    # this seem to not be considered correctly in ph5
    if end is not None and end < earliest_epoch:
        self.ph5.forget_das_t(das)
        return -1
    # Release the das table handle before returning (file-handle hygiene).
    self.ph5.forget_das_t(das)
    return earliest_epoch, latest_epoch, new_das_t
def create_list(self):
    """
    Build a list of Station objects, one per array/station/deployment/
    channel entry, joining Array_t rows with their Das_t and Response_t
    metadata.

    :return: list of Station
    """
    array_names = sorted(self.ph5.Array_t_names)
    stations = []
    for array_name in array_names:
        # Optional array filter: skip arrays not matching the patterns.
        if self.array:
            array = str(int(array_name[-3:]))
            array_patterns = self.array
            if not ph5utils.does_pattern_exists(array_patterns,
                                                str(array)):
                continue
        # NOTE(review): 'array' is only bound when self.array is
        # truthy, yet it is passed to Station(...) below — confirm
        # callers always set self.array, otherwise NameError.
        self.read_arrays(array_name)
        arraybyid = self.ph5.Array_t[array_name]['byid']
        arrayorder = self.ph5.Array_t[array_name]['order']
        for ph5_station in arrayorder:
            station_list = arraybyid.get(ph5_station)
            for deployment in station_list:
                station_len = len(station_list[deployment])
                for st_num in range(0, station_len):
                    id_s = station_list[deployment][st_num]['id_s']
                    sample_rate = station_list[deployment][st_num][
                        'sample_rate_i']
                    sample_rate_multiplier = station_list[deployment][
                        st_num]['sample_rate_multiplier_i']
                    das_model = station_list[deployment][st_num][
                        'das/model_s']
                    das_model = das_model.replace(" ", "")
                    sensor_model = station_list[deployment][st_num][
                        'sensor/model_s']
                    sensor_model = sensor_model.replace(" ", "")
                    # ZLAND nodes have a built-in sensor: blank it out.
                    if das_model.startswith("ZLAND"):
                        sensor_model = ""
                    channel = station_list[deployment][st_num][
                        'channel_number_i']
                    serial = station_list[deployment][st_num][
                        'das/serial_number_s']
                    pickup = station_list[deployment][st_num][
                        'pickup_time/epoch_l']
                    deploy = station_list[deployment][st_num][
                        'deploy_time/epoch_l']
                    self.ph5.read_das_t(serial, deploy, pickup,
                                        reread=False)
                    try:
                        Das_t = ph5api.filter_das_t(
                            self.ph5.Das_t[serial]['rows'], channel)
                    except BaseException:
                        # Missing Das_t: report and skip the rest of
                        # this deployment's entries.
                        logging.warning("No DAS table found for das " +
                                        str(serial) + " channel " +
                                        str(channel) + ".\n")
                        sys.stderr.write(
                            "Error - No DAS table found for das " +
                            str(serial) + " channel " +
                            str(channel) + ".\n")
                        break
                    # Find the Das_t row matching this channel's rates
                    # to obtain its response/receiver table indices.
                    for entry in Das_t:
                        if (entry['sample_rate_i'] == sample_rate and
                                entry['sample_rate_multiplier_i'] ==
                                sample_rate_multiplier and
                                entry['channel_number_i'] == channel):
                            response_n_i = entry['response_table_n_i']
                            receiver_n_i = entry['receiver_table_n_i']
                            break
                    # NOTE(review): if no entry matched above,
                    # response_n_i/receiver_n_i are unbound (or carry
                    # the previous iteration's values) — confirm a
                    # matching row always exists.
                    Response_t = self.ph5.get_response_t_by_n_i(
                        response_n_i)
                    if Response_t:
                        gain = Response_t['gain/value_i']
                        bit_weight = Response_t['bit_weight/value_d']
                        bit_weight_units = Response_t[
                            'bit_weight/units_s']
                        gain_units = Response_t['gain/units_s']
                    else:
                        # NOTE(review): on this path gain/bit_weight*
                        # stay unbound (or stale) yet are used in
                        # Station(...) below — verify.
                        logging.warning(
                            "No Response table found for das " +
                            str(serial) + " channel " +
                            str(channel) + ".\n")
                        sys.stderr.write(
                            "Error - No Response table found for das " +
                            str(serial) + " channel " +
                            str(channel) + ".\n")
                    stations.append(
                        Station(id_s, array, channel, sample_rate,
                                sample_rate_multiplier,
                                das_model.strip(),
                                sensor_model.strip(), gain,
                                response_n_i, receiver_n_i,
                                bit_weight, bit_weight_units,
                                gain_units, serial))
    return stations
def create_list(self):
    """
    Build a list of Station objects, one per array/station/deployment/
    channel entry, joining Array_t rows with Das_t and Response_t
    metadata. Entries that cannot be completed are logged and skipped.

    :return: list of Station
    """
    array_names = sorted(self.ph5.Array_t_names)
    stations = []
    for array_name in array_names:
        # Optional array filter: skip arrays not matching the patterns.
        if self.array:
            array = str(int(array_name[-3:]))
            array_patterns = self.array
            if not ph5utils.does_pattern_exists(array_patterns,
                                                str(array)):
                continue
        # NOTE(review): 'array' is only bound when self.array is
        # truthy but is referenced in the LOGGER.error calls below —
        # confirm callers always set self.array.
        self.read_arrays(array_name)
        arraybyid = self.ph5.Array_t[array_name]['byid']
        arrayorder = self.ph5.Array_t[array_name]['order']
        for ph5_station in arrayorder:
            station_list = arraybyid.get(ph5_station)
            for deployment in station_list:
                station_len = len(station_list[deployment])
                for st_num in range(0, station_len):
                    # Whole Array_t row; passed as-is to Station below.
                    station = station_list[deployment][st_num]
                    id_s = station_list[deployment][st_num]['id_s']
                    sample_rate = station_list[deployment][st_num][
                        'sample_rate_i']
                    sample_rate_multiplier = station_list[deployment][
                        st_num]['sample_rate_multiplier_i']
                    das_model = station_list[deployment][st_num][
                        'das/model_s']
                    das_model = das_model.replace(" ", "")
                    if das_model.strip() == "":
                        LOGGER.error(
                            "No Das model for array %s, station %s"
                            % (array, id_s))
                    sensor_model = station_list[deployment][st_num][
                        'sensor/model_s']
                    sensor_model = sensor_model.replace(" ", "")
                    # ZLAND nodes have a built-in sensor: blank it out.
                    if das_model.startswith("ZLAND"):
                        sensor_model = ""
                    elif sensor_model.strip() == "":
                        LOGGER.error(
                            "No Sensor model for array %s, station %s"
                            % (array, id_s))
                    channel = station_list[deployment][st_num][
                        'channel_number_i']
                    serial = station_list[deployment][st_num][
                        'das/serial_number_s']
                    pickup = station_list[deployment][st_num][
                        'pickup_time/epoch_l']
                    deploy = station_list[deployment][st_num][
                        'deploy_time/epoch_l']
                    self.ph5.read_das_t(serial, deploy, pickup,
                                        reread=False)
                    try:
                        Das_t = ph5api.filter_das_t(
                            self.ph5.Das_t[serial]['rows'], channel)
                    except BaseException:
                        # Missing Das_t: report and skip the rest of
                        # this deployment's entries.
                        LOGGER.warning(
                            "No DAS table found for das {0} channel "
                            "{1}.\n".format(serial, channel))
                        break
                    # Find the Das_t row matching this channel's rates
                    # to obtain its response/receiver table indices.
                    for entry in Das_t:
                        if (entry['sample_rate_i'] == sample_rate and
                                entry['sample_rate_multiplier_i'] ==
                                sample_rate_multiplier and
                                entry['channel_number_i'] == channel):
                            response_n_i = entry['response_table_n_i']
                            receiver_n_i = entry['receiver_table_n_i']
                            break
                    if channel == -2:  # in metadata
                        # channel=-2 for no resp => n_i=-1
                        response_n_i = -1
                    # NOTE(review): response_n_i/receiver_n_i may be
                    # unbound if no Das_t entry matched and channel is
                    # not -2 — the call below would raise NameError.
                    Response_t = self.ph5.get_response_t_by_n_i(
                        response_n_i)
                    if Response_t:
                        gain = Response_t['gain/value_i']
                        bit_weight = Response_t['bit_weight/value_d']
                        bit_weight_units = Response_t[
                            'bit_weight/units_s']
                        gain_units = Response_t['gain/units_s']
                    else:
                        LOGGER.warning(
                            "No Response table found for das {0} channel "
                            "{1}.\n".format(serial, channel))
                    try:
                        stations.append(
                            Station(id_s, station, channel,
                                    sample_rate,
                                    sample_rate_multiplier,
                                    das_model.strip(),
                                    sensor_model.strip(), gain,
                                    response_n_i, receiver_n_i,
                                    bit_weight, bit_weight_units,
                                    gain_units, serial))
                    except BaseException:
                        # e.g. unbound gain/response_n_i on the error
                        # paths above.
                        LOGGER.error("Couldn't add station.")
                        continue
    return stations
def check_station_completeness(self, station):
    """
    Check one Array_t station/channel row for completeness.

    STATION LEVEL: id_s (whole number 0..65535), description_s,
    seed_station_name_s (1..5 chars).
    CHANNEL LEVEL: seed_location_code_s (0..2 chars), seed band /
    instrument / orientation codes, sample_rate_i > 0,
    sample_rate_multiplier_i > 0, response_table_n_i resolvable.
    CHANNEL LOCATION: location X/Y/Z values and units.
    CHANNEL DEPLOY/PICKUP: deploy/pickup epochs present, deploy before
    pickup, recorded data within the deploy/pickup window.
    CHANNEL SENSOR/DAS: das and sensor serial/manufacturer/model.

    :param station: dict-like Array_t row
    :return: (info, warning, error) lists of message strings
    """
    info = []
    warning = []
    error = []
    # STATION LEVEL CHECKS
    station_id = station['id_s']
    try:
        if not (0 <= int(station_id) <= 65535):
            error.append("Station ID not "
                         "between 0 and 65535")
    except ValueError:
        error.append("Station ID not a whole "
                     "number between 0 "
                     "and 65535 ")
    if not station['description_s']:
        warning.append("No station description found.")
    # BUG FIX: elif so a missing/empty code is reported once and
    # len() is never called on None.
    if not station['seed_station_name_s']:
        error.append("SEED station code required.")
    elif not (1 <= len(station['seed_station_name_s']) <= 5):
        error.append("SEED station code not "
                     "between 1 and 5 characters.")
    # CHANNEL LEVEL CHECKS
    # BUG FIX: elif prevents TypeError (len(None)) when the location
    # code is missing.
    if station['seed_location_code_s'] is None:
        error.append("SEED location code "
                     "required.")
    elif not (0 <= len(station['seed_location_code_s']) <= 2):
        error.append("SEED location code not "
                     "between 0 and 2 characters.")
    if not station['seed_band_code_s']:
        error.append("SEED band code required.")
    if not station['seed_instrument_code_s']:
        error.append("SEED instrument code required.")
    if not station['seed_orientation_code_s']:
        error.append("SEED orientation code "
                     "required.")
    if station['sample_rate_i'] <= 0:
        warning.append("Sample rate seems to be <= 0. "
                       "Is this correct???")
    if station['sample_rate_multiplier_i'] <= 0:
        warning.append("Sample rate multiplier <= 0. "
                       "Is this correct???")
    response_t = self.ph5.get_response_t_by_n_i(
        station['response_table_n_i'])
    if response_t is None:
        error.append("No Response table found. "
                     "Have you run load_resp yet?")
    # CHANNEL LOCATION
    if station['location/X/value_d'] == 0:
        warning.append("Channel location/X/value_d "
                       "'longitude' seems to be 0. "
                       "Is this correct???")
    if station['location/X/units_s'] is None:
        warning.append("No Station location/X/units_s value "
                       "found.")
    if station['location/Y/value_d'] == 0:
        warning.append("Channel location/Y/value_d "
                       "'latitude' seems to be 0. "
                       "Is this correct???")
    if station['location/Y/units_s'] is None:
        warning.append("No Station location/Y/units_s value "
                       "found.")
    # NOTE: a legitimate elevation of 0 is also flagged here.
    if not station['location/Z/value_d']:
        warning.append("No Channel location/Z/value_d value")
    if station['location/Z/units_s'] is None:
        warning.append("No Station location/Z/units_s value "
                       "found.")
    # CHANNEL DEPLOY/PICKUP
    deploy_time = station['deploy_time/epoch_l']
    if deploy_time is None:
        error.append("No deploy_time value "
                     "found for channel.")
    pickup_time = station['pickup_time/epoch_l']
    if pickup_time is None:
        error.append("No pickup_time value "
                     "found for channel.")
    das_serial = station['das/serial_number_s']
    sensor_serial = station['sensor/serial_number_s']
    # BUG FIX: only compare when both times exist; comparing None
    # raised TypeError and masked the missing-time errors above.
    if (deploy_time is not None and pickup_time is not None and
            deploy_time > pickup_time):
        error.append("Deploy time is after pickup time")
    # CHANNEL SENSOR/DAS
    channel_id = station['channel_number_i']
    if das_serial is None:
        error.append("Das serial number is missing.")
    if sensor_serial is None:
        warning.append("Sensor serial number is missing.")
    self.ph5.read_das_t(das_serial, reread=False)
    if das_serial not in self.ph5.Das_t:
        error.append("No data found for das serial number {0}. "
                     "You may need to reload the raw "
                     "data for this station."
                     .format(str(das_serial)))
    try:
        # Lookup raises KeyError when the das has no table; the
        # filter_das_t result itself is unused.
        das_rows = self.ph5.Das_t[das_serial]['rows']
        ph5api.filter_das_t(das_rows, channel_id)
        true_deploy, true_pickup = \
            self.ph5.get_extent(das=das_serial,
                                component=channel_id,
                                sample_rate=station['sample_rate_i'])
        # BUG FIX: guard None on either side before comparing so a
        # missing time or empty extent cannot raise TypeError.
        if (deploy_time is not None and true_deploy is not None and
                deploy_time > true_deploy):
            time = int(deploy_time - true_deploy)
            warning.append("Data exists before deploy time: " +
                           str(time) + " seconds ")
        if (pickup_time is not None and true_pickup is not None and
                pickup_time < true_pickup):
            time = int(true_pickup - pickup_time)
            warning.append("Data exists after pickup time: " +
                           str(time) + " seconds ")
        self.ph5.forget_das_t(das_serial)
    except KeyError:
        try:
            # avoid opening too many files
            self.ph5.forget_das_t(das_serial)
        except Exception:
            pass
        error.append("No data found for channel {0}. "
                     "Other channels seem to exist"
                     .format(str(channel_id)))
    if not station['sensor/manufacturer_s']:
        warning.append("Sensor manufacturer is "
                       "missing. Is this correct???")
    if not station['sensor/model_s']:
        warning.append("Sensor model is missing. "
                       "Is this correct???")
    if not station['das/manufacturer_s']:
        warning.append("DAS manufacturer is missing. "
                       "Is this correct???")
    if not station['das/model_s']:
        warning.append("DAS model is missing. "
                       "Is this correct???")
    return info, warning, error
def create_trace(self, station_to_cut, mp=False):
    """
    Cut PH5 data for a station segment and return it as an obspy
    Stream.

    :param station_to_cut: StationCut describing das/channel/window
    :param mp: unused in this body — presumably a multiprocessing
        flag; TODO confirm against callers
    :return: obspy Stream, or None when no data is available
    """
    # Trim away any restricted portions of the request first.
    station_to_cut_segments = PH5toMSeed.get_nonrestricted_segments(
        [station_to_cut], self.restricted)
    obspy_stream = Stream()
    for stc in station_to_cut_segments:
        # Pad end by one sample period so the last sample is included.
        new_endtime = stc.endtime + (1 / float(stc.sample_rate))
        self.ph5.read_das_t(stc.das, stc.starttime, stc.endtime,
                            reread=False)
        if stc.das not in self.ph5.Das_t:
            return
        Das_t = ph5api.filter_das_t(self.ph5.Das_t[stc.das]['rows'],
                                    stc.component)
        # NOTE(review): this copy looks redundant — Das_t is already a
        # list; confirm before removing.
        Das_t = [x for x in Das_t]
        # First row (if any) gives the actual start of recorded data.
        Das_tf = next(iter(Das_t or []), None)
        if Das_tf is None:
            return
        else:
            das_t_start = (float(Das_tf['time/epoch_l']) +
                           float(Das_tf['time/micro_seconds_i']) /
                           1000000)
        # Cut from the later of requested start vs actual data start.
        if float(das_t_start) > float(stc.starttime):
            start_time = das_t_start
        else:
            start_time = stc.starttime
        nt = stc.notimecorrect
        actual_sample_rate = float(
            stc.sample_rate) / float(stc.sample_rate_multiplier)
        # NOTE(review): the notimecorrect flag is passed straight
        # through as apply_time_correction — verify the flag's
        # polarity against ph5api.cut's contract.
        traces = self.ph5.cut(stc.das, start_time,
                              new_endtime,
                              chan=stc.component,
                              sample_rate=actual_sample_rate,
                              apply_time_correction=nt,
                              das_t=Das_t)
        if not isinstance(traces, list):
            return
        for trace in traces:
            if trace.nsamples == 0:
                continue
            # if start time is before requested start time move up 1
            # sample and delete first sample of data
            if trace.start_time.epoch() < stc.starttime:
                trace.start_time = trace.start_time + \
                    (1 / float(stc.sample_rate))
                trace.data = trace.data[1:]
            try:
                obspy_trace = Trace(data=trace.data)
            except ValueError:
                continue
            if self.format == "SAC":
                # SAC headers carry coordinates and orientation.
                Receiver_t = \
                    self.ph5.get_receiver_t_by_n_i(stc.receiver_n_i)
                azimuth = Receiver_t['orientation/azimuth/value_f']
                dip = Receiver_t['orientation/dip/value_f']
                obspy_trace.stats.sac = {'kstnm': stc.seed_station,
                                         'kcmpnm': stc.seed_channel,
                                         'knetwk': stc.net_code,
                                         'stla': float(stc.latitude),
                                         'stlo': float(stc.longitude),
                                         'stel': float(stc.elev),
                                         'cmpaz': float(azimuth),
                                         'cmpinc': float(dip)}
            elif self.format == "GEOCSV":
                # GeoCSV output needs extra per-channel metadata.
                Receiver_t = \
                    self.ph5.get_receiver_t_by_n_i(stc.receiver_n_i)
                azimuth = Receiver_t['orientation/azimuth/value_f']
                dip = Receiver_t['orientation/dip/value_f']
                obspy_trace.stats.sensor_type = stc.sensor_type
                obspy_trace.stats.elevation = float(stc.elev)
                obspy_trace.stats.dip = float(dip)
                obspy_trace.stats.depth = 0
                obspy_trace.stats.back_azimuth = azimuth
                obspy_trace.stats.experiment_id = stc.experiment_id
                obspy_trace.stats.array = stc.array_code
                obspy_trace.stats.component = stc.component
                obspy_trace.stats.response = self.get_response_obj(stc)
            # Common SEED-style metadata for every output format.
            obspy_trace.stats.sampling_rate = actual_sample_rate
            obspy_trace.stats.location = stc.location
            obspy_trace.stats.station = stc.seed_station
            obspy_trace.stats.coordinates = AttribDict()
            obspy_trace.stats.coordinates.latitude = stc.latitude
            obspy_trace.stats.coordinates.longitude = stc.longitude
            obspy_trace.stats.channel = stc.seed_channel
            obspy_trace.stats.network = stc.net_code
            obspy_trace.stats.starttime = trace.start_time.getFdsnTime()
            if self.decimation:
                obspy_trace.decimate(int(self.decimation))
            obspy_stream.append(obspy_trace)
        # Drop cached das tables so file handles don't accumulate.
        self.ph5.Das_t = {}
    if len(obspy_stream.traces) < 1:
        return
    return obspy_stream
def checK_stations(self):
    """
    Log basic sanity checks for every station/channel in all arrays:
    SEED station name present, Response_t row resolvable, das data
    present for the deploy window and channel, non-zero sample rate
    and multiplier.

    NOTE: the capital 'K' in the method name is preserved for
    backward compatibility with existing callers.
    """
    logging.info("Checking Stations...")
    array_names = sorted(self.ph5.Array_t_names)
    for array_name in array_names:
        self.read_arrays(array_name)
        arraybyid = self.ph5.Array_t[array_name]['byid']
        arrayorder = self.ph5.Array_t[array_name]['order']
        for ph5_station in arrayorder:
            station_list = arraybyid.get(ph5_station)
            for deployment in station_list:
                station_len = len(station_list[deployment])
                for st_num in range(0, station_len):
                    station_id = station_list[deployment][st_num]['id_s']
                    serial = station_list[deployment][st_num][
                        'das/serial_number_s']
                    channel = station_list[deployment][st_num][
                        'channel_number_i']
                    logging.info("\n##############")
                    logging.info("Station " + str(station_id) +
                                 " Channel " + str(channel) + "\n")
                    if not station_list[deployment][st_num][
                            'seed_station_name_s']:
                        logging.error("SEED station name required.")
                    response_t = self.ph5.get_response_t_by_n_i(
                        station_list[deployment][st_num]
                        ['response_table_n_i'])
                    if response_t is None:
                        # BUG FIX: added the missing space between the
                        # two sentences of this message.
                        logging.error("No Response table found. " +
                                      "Have you run load_resp yet?")
                    deploy_time = station_list[deployment][st_num][
                        'deploy_time/epoch_l']
                    pickup_time = station_list[deployment][st_num][
                        'pickup_time/epoch_l']
                    self.ph5.read_das_t(serial, deploy_time,
                                        pickup_time, reread=False)
                    if serial not in self.ph5.Das_t:
                        logging.error("No Data found for: " +
                                      str(station_id) +
                                      " You may need " +
                                      "to reload the raw data for " +
                                      "this station.")
                    try:
                        # Raises KeyError when serial has no Das table.
                        ph5api.filter_das_t(
                            self.ph5.Das_t[serial]['rows'], channel)
                    # BUG FIX: bare 'except:' also swallowed
                    # SystemExit/KeyboardInterrupt; narrow to Exception.
                    except Exception:
                        logging.error("NO Data found for channel " +
                                      str(channel) +
                                      " Other channels seem to exist")
                    if station_list[deployment][st_num][
                            'sample_rate_i'] == 0:
                        logging.warning("Sample rate seems to be 0." +
                                        " Is this correct???")
                    if station_list[deployment][st_num][
                            'sample_rate_multiplier_i'] == 0:
                        logging.warning("Sample rate multiplier 0." +
                                        " Is this correct???")
    return