def test_mix_ins(self):
    """Exercise the ph5api helper functions that the API itself never calls."""
    # is_in(): window containment/overlap check — true then false case.
    self.assertTrue(ph5api.is_in(0, 10000, 5, 1000))
    self.assertFalse(ph5api.is_in(99999, 10000, 5, 1000))
    # fepoch(): combine integer epoch seconds and microseconds into a
    # float epoch (9555 us -> .009555 s; 100000000 us -> 100 s).
    self.assertEqual(1000000.009555, ph5api.fepoch(1000000, 9555))
    self.assertEqual(1978446, ph5api.fepoch(1978346, 100000000))
    # filter_das_t(): filter Das_t rows by channel; expected row counts
    # for channels 1, -2 and -9 of DAS 5553.
    self.ph5API_object.read_das_t('5553')
    rows = self.ph5API_object.Das_t['5553']['rows']
    for channel, expected in ((1, 2), (-2, 1), (-9, 0)):
        filtered = ph5api.filter_das_t(rows, channel)
        self.assertEqual(expected, len(filtered))
def print_new_Das_t(P5, n_i_map, family):
    '''
       Print Das_t corrected for Response_t n_i.

       For every DAS group in P5, writes a kef file
       (Das_t_response_n_i_<das>.kef) under
       ARGS.families_directory/RESPONSE_T_N_I with each row's
       response_table_n_i remapped through n_i_map[family].
       Rows whose n_i has no mapping are written unchanged and
       flagged as suspect (both to stderr and in the kef output).
    '''
    def _write_row(fh, das_g, keys, das_t):
        # Emit one Das_t row in kef format.
        fh.write("/Experiment_g/Receivers_g/{0}/Das_t\n".format(das_g))
        for k in keys:
            fh.write("\t{0}={1}\n".format(k, das_t[k]))

    P5.read_das_g_names()
    for das_g in P5.Das_g_names:
        das = P5.read_das_t(das_g)
        MAP = n_i_map[family]
        # Skip DAS groups that could not be read or have no Das_t entry.
        # (was: P5.Das_t.has_key(das) — removed in Python 3)
        if not das or das not in P5.Das_t:
            sys.stderr.write("#***\tMissing: {0}\n".format(das))
            continue
        out_dir = os.path.join(ARGS.families_directory, "RESPONSE_T_N_I")
        if not os.path.exists(out_dir):
            os.mkdir(out_dir)
        DAS_KEF = os.path.join(out_dir,
                               "Das_t_response_n_i_{0}.kef".format(das))
        sys.stderr.write("Creating: {0}\n".format(DAS_KEF))
        with open(DAS_KEF, 'w+') as fh:
            fh.write("# PH5VERSION: {0}\n".format(ph5api.PH5VERSION))
            keys = P5.Das_t[das]['keys']
            keys.sort()
            i = 1
            for das_t in P5.Das_t[das]['rows']:
                try:
                    M = MAP[das_t['response_table_n_i']]
                    das_t['response_table_n_i'] = M['n_i_all']
                    fh.write("# {0}\n".format(i))
                except IndexError:
                    # No mapping for this n_i: leave the row unchanged,
                    # warn on stderr, and flag it in the kef output.
                    sr = ph5api.fepoch(das_t['sample_rate_i'],
                                       das_t['sample_rate_multiplier_i'])
                    sys.stderr.write(
                        "# Index out of range for DAS: {0}, sample rate: {1}\n"
                        .format(das, sr))
                    sys.stderr.write("# Entry unchanged! Suspect data.\n")
                    fh.write(
                        "# {0} response_table_n_i entry suspect!\n".format(
                            i))
                # Both paths write the (possibly remapped) row.
                i += 1
                _write_row(fh, das_g, keys, das_t)
def create_cut(self, seed_network, ph5_station, seed_station,
               station_cut_times, station_list, deployment, st_num,
               array_code, experiment_id):
    """
    Generator: yield StationCut objects for one station entry.

    Pulls deploy/pickup times, DAS/sensor metadata and location from
    station_list[deployment][st_num], applies the request's filters
    (sample rate, component, channel, DAS serial), trims the requested
    time range against deploy/pickup for FDSN requests, splits the
    range into day-sized (or self.cut_len-sized) windows, and yields a
    StationCut per window that has data (checked via query_das_t).
    Duplicate cuts are suppressed with a hash of the cut's key fields.
    """
    # --- per-station metadata from the station_list row -----------------
    deploy = station_list[deployment][st_num]['deploy_time/epoch_l']
    deploy_micro = station_list[deployment][
        st_num]['deploy_time/micro_seconds_i']
    pickup = station_list[deployment][st_num]['pickup_time/epoch_l']
    pickup_micro = station_list[deployment][
        st_num]['pickup_time/micro_seconds_i']
    location = station_list[deployment][
        st_num]['seed_location_code_s']
    das = station_list[deployment][st_num]['das/serial_number_s']
    das_manufacturer = station_list[deployment][st_num][
        'das/manufacturer_s']
    das_model = station_list[deployment][st_num][
        'das/model_s']
    # Join manufacturer and model, skipping empty strings.
    sensor_type = " ".join([x for x in [station_list[deployment][st_num][
        'sensor/manufacturer_s'], station_list[deployment][st_num][
        'sensor/model_s']] if x])
    receiver_n_i = station_list[deployment][st_num]['receiver_table_n_i']
    response_n_i = station_list[deployment][st_num]['response_table_n_i']
    # NOTE(review): the membership test uses row [0] while the value is
    # read from row [st_num]; if 'sample_rate_i' is absent, sample_rate
    # is never bound and the query_das_t call below would raise
    # NameError — verify this key is always present in practice.
    if 'sample_rate_i' in station_list[deployment][0]:
        sample_rate = station_list[deployment][st_num]['sample_rate_i']
    sample_rate_multiplier = 1
    if ('sample_rate_multiplier_i' in
            station_list[deployment][st_num]):
        sample_rate_multiplier = station_list[
            deployment][st_num]['sample_rate_multiplier_i']
    # --- request filters: bail out early when this entry doesn't match --
    if self.sample_rate_list:
        sample_list = self.sample_rate_list
        if not ph5utils.does_pattern_exists(sample_list, sample_rate):
            return
    seed_channel, component = self.get_channel_and_component(
        station_list, deployment, st_num)
    if self.component:
        component_list = self.component
        if not ph5utils.does_pattern_exists(component_list, component):
            return
    if self.channel:
        cha_patterns = self.channel
        if not ph5utils.does_pattern_exists(cha_patterns, seed_channel):
            return
    if self.das_sn and self.das_sn != das:
        return
    if self.reqtype == "FDSN":
        # trim user defined time range if it extends beyond the
        # deploy/pickup times
        if self.start_time:
            if "T" not in self.start_time:
                # PASSCAL-style time string (no 'T' separator).
                check_start_time = passcal2epoch(
                    self.start_time, fepoch=True)
                if float(check_start_time) > float(deploy):
                    start_fepoch = self.start_time
                    sct = StationCutTime(
                        passcal2epoch(start_fepoch, fepoch=True)
                    )
                    station_cut_times.append(sct)
                else:
                    # Requested start precedes deploy: clamp to deploy.
                    sct = StationCutTime(deploy)
                    station_cut_times.append(sct)
            else:
                # ISO-style date string.
                check_start_time = ph5utils.datestring_to_epoch(
                    self.start_time)
                if float(check_start_time) > float(deploy):
                    sct = StationCutTime(
                        ph5utils.datestring_to_epoch(self.start_time))
                    station_cut_times.append(sct)
                else:
                    sct = StationCutTime(deploy)
                    station_cut_times.append(sct)
            # Start after pickup: nothing to cut for this station.
            if float(check_start_time) > float(pickup):
                return
        else:
            # No user start time: start at the (sub-second precise)
            # deploy time.
            sct = StationCutTime(
                ph5api.fepoch(deploy, deploy_micro)
            )
            station_cut_times.append(sct)
    for sct in station_cut_times:
        start_fepoch = sct.time
        if self.reqtype == "SHOT" or self.reqtype == "RECEIVER":
            if self.offset:
                # adjust starttime by an offset
                start_fepoch += int(self.offset)
            if self.length:
                stop_fepoch = start_fepoch + self.length
            else:
                raise PH5toMSAPIError(
                    "Error - length is required for request by shot.")
        elif self.reqtype == "FDSN":
            # Determine stop time: user end time (clamped to pickup),
            # else start + length, else precise pickup time.
            if self.end_time:
                if "T" not in self.end_time:
                    check_end_time = passcal2epoch(
                        self.end_time, fepoch=True)
                    if float(check_end_time) < float(pickup):
                        stop_fepoch = self.end_time
                        stop_fepoch = passcal2epoch(
                            stop_fepoch, fepoch=True)
                    else:
                        stop_fepoch = pickup
                else:
                    check_end_time = ph5utils.datestring_to_epoch(
                        self.end_time)
                    if float(check_end_time) < float(pickup):
                        stop_fepoch = ph5utils.datestring_to_epoch(
                            self.end_time)
                    else:
                        stop_fepoch = pickup
                # End before deploy: this cut start is unusable.
                if float(check_end_time) < float(deploy):
                    continue
            elif self.length:
                stop_fepoch = start_fepoch + self.length
            else:
                stop_fepoch = ph5api.fepoch(pickup, pickup_micro)
        if (self.use_deploy_pickup is True and not
                ((int(start_fepoch) >= deploy and
                  int(stop_fepoch) <= pickup))):
            # das not deployed within deploy/pickup time
            continue
        # Day-of-year filter: start_passcal is 'YYYY:DOY:...' so the
        # second field is the DOY.
        start_passcal = epoch2passcal(start_fepoch, sep=':')
        start_passcal_list = start_passcal.split(":")
        start_doy = start_passcal_list[1]
        if self.doy_keep:
            if start_doy not in self.doy:
                continue
        midnight_fepoch, secondLeftInday = \
            ph5utils.inday_breakup(start_fepoch)
        # if (stop_fepoch - start_fepoch) > 86400:
        if (stop_fepoch - start_fepoch) > secondLeftInday:
            # Range spans past midnight: split into windows of
            # self.cut_len seconds (or day boundaries when cut_len
            # is exactly 86400).
            seconds_covered = 0
            total_seconds = stop_fepoch - start_fepoch
            times_to_cut = []
            if self.cut_len != 86400:
                stop_time, seconds = ph5utils.doy_breakup(
                    start_fepoch, self.cut_len)
            else:
                stop_time, seconds = ph5utils.inday_breakup(start_fepoch)
            seconds_covered = seconds_covered + seconds
            times_to_cut.append([start_fepoch, stop_time])
            start_time = stop_time
            while seconds_covered < total_seconds:
                if self.cut_len != 86400:
                    stop_time, seconds = ph5utils.doy_breakup(
                        start_time, self.cut_len)
                else:
                    stop_time, seconds = ph5utils.inday_breakup(start_time)
                seconds_covered += seconds
                if stop_time > stop_fepoch:
                    # Last window: clamp to the requested stop.
                    times_to_cut.append([start_time, stop_fepoch])
                    break
                times_to_cut.append([start_time, stop_time])
                start_time = stop_time
        else:
            times_to_cut = [[start_fepoch, stop_fepoch]]
        # Force the final window to end exactly at stop_fepoch.
        times_to_cut[-1][-1] = stop_fepoch
        latitude = station_list[deployment][
            st_num]['location/Y/value_d']
        longitude = station_list[deployment][
            st_num]['location/X/value_d']
        elev = station_list[deployment][
            st_num]['location/Z/value_d']
        for starttime, endtime in tuple(times_to_cut):
            try:
                # Probe for data in this window; skip windows that
                # raise an HDF5 interaction error.
                self.ph5.query_das_t(das,
                                     component,
                                     starttime,
                                     endtime,
                                     sample_rate,
                                     sample_rate_multiplier
                                     )
            except experiment.HDF5InteractionError:
                continue
            station_x = StationCut(
                seed_network,
                experiment_id,
                ph5_station,
                seed_station,
                array_code,
                das,
                das_manufacturer,
                das_model,
                sensor_type,
                component,
                seed_channel,
                starttime,
                endtime,
                sample_rate,
                sample_rate_multiplier,
                self.notimecorrect,
                location,
                latitude,
                longitude,
                elev,
                receiver_n_i,
                response_n_i,
                shot_id=sct.shot_id,
                shot_lat=sct.shot_lat,
                shot_lng=sct.shot_lng,
                shot_elevation=sct.shot_elevation)
            # Deduplicate cuts on the identifying fields.
            station_hash = hash(frozenset([seed_station, das, latitude,
                                           longitude, sample_rate,
                                           sample_rate_multiplier,
                                           starttime, endtime]))
            if station_hash in self.hash_list:
                continue
            else:
                self.hash_list.append(station_hash)
                yield station_x