def assert_process(self, srm_txt):
    """
    Verify fix_srm.process() repairs sample_rate_multiplier_i one table
    at a time: first for a single Das table, then for a single Array table.

    :param srm_txt: expected description of the bad srm value in the error
        message (e.g. 'missing' or 'with value 0')
    """
    # --------------- DAS ----------------
    # before process(), throw srm error when running read_das()
    das = self.ph5object.ph5_g_receivers.getdas_g('1X1111')
    self.ph5object.ph5_g_receivers.setcurrent(das)
    with self.assertRaises(experiment.HDF5InteractionError) as context:
        self.ph5object.ph5_g_receivers.read_das()
    self.assertEqual(context.exception.errno, 7)
    self.assertEqual(
        context.exception.msg,
        ('Das_t_1X1111 has sample_rate_multiplier_i %s. '
         'Please run fix_srm to fix sample_rate_multiplier_i for PH5 data.'
         % srm_txt))
    # after process() for das_name=Das_t_1X1111
    # there are no more error when running read_das()
    with LogCapture():
        # process() may replace the underlying file, so it returns a fresh
        # ph5 object that must be used from here on
        self.ph5object = fix_srm.process(self.ph5object, 'master.ph5',
                                         self.tmpdir,
                                         das_name='Das_t_1X1111')
    # reopen to see the repaired table on disk
    self.ph5object.close()
    self.ph5object = ph5api.PH5(path=self.tmpdir, nickname='master.ph5',
                                editmode=True)
    das = self.ph5object.ph5_g_receivers.getdas_g('1X1111')
    self.ph5object.ph5_g_receivers.setcurrent(das)
    rows, keys = self.ph5object.ph5_g_receivers.read_das()
    self.assertEqual(len(rows), 9)
    self.assertIn('sample_rate_multiplier_i', keys)
    # ----------------- ARRAY -----------------
    # before process(), throw srm error when running read_arrays()
    with self.assertRaises(experiment.HDF5InteractionError) as context:
        self.ph5object.ph5_g_sorts.read_arrays('Array_t_001')
    self.assertEqual(context.exception.errno, 7)
    self.assertEqual(
        context.exception.msg,
        ('Array_t_001 has sample_rate_multiplier_i %s. '
         'Please run fix_srm to fix sample_rate_multiplier_i for PH5 data.'
         % srm_txt))
    # after process() for array_name=Array_t_001
    # there are no more error when running read_arrays()
    with LogCapture():
        fix_srm.process(self.ph5object, 'master.ph5', self.tmpdir,
                        array_name='Array_t_001')
    self.ph5object.close()
    self.ph5object = ph5api.PH5(path=self.tmpdir, nickname='master.ph5',
                                editmode=False)
    rows, keys = self.ph5object.ph5_g_sorts.read_arrays('Array_t_001')
    self.assertEqual(len(rows), 3)
    self.assertIn('sample_rate_multiplier_i', keys)
def main():
    """
    Main method for use for command line program.

    Resolves the ph5 file path from the CLI arguments, runs the
    availability analysis, and always closes the PH5 handle on the way
    out. Exits with -1 if the ph5 file does not exist.
    """
    args = get_args(sys.argv[1:])
    # accept the nickname with or without the '.ph5' suffix
    if args.nickname[-3:] == 'ph5':
        ph5file = os.path.join(args.ph5path, args.nickname)
    else:
        ph5file = os.path.join(args.ph5path, args.nickname + '.ph5')
        args.nickname += '.ph5'
    if not os.path.exists(ph5file):
        LOGGER.error("{0} not found.\n".format(ph5file))
        sys.exit(-1)
    ph5API_object = None
    try:
        ph5API_object = ph5api.PH5(path=args.ph5path,
                                   nickname=args.nickname)
        availability = PH5Availability(ph5API_object)
        availability.analyze_args(args)
        availability.process_all()
    except ph5api.APIError as err:
        LOGGER.error(err.msg)
    except PH5AvailabilityError as err:
        LOGGER.error(str(err))
    except Exception as err:
        LOGGER.error(str(err))
    finally:
        # BUG FIX: close() previously ran unconditionally after the
        # try/except; if ph5api.PH5() itself raised, ph5API_object was
        # never bound and close() raised NameError.
        if ph5API_object is not None:
            ph5API_object.close()
def setUp(self): super(TestPH5Validate_response_info, self).setUp() # copy ph5 data and tweak sensor model and sample rate in array 9 # to test for inconsistencies between filenames and info orgph5path = os.path.join(self.home, "ph5/test_data/ph5") shutil.copy(os.path.join(orgph5path, 'master.ph5'), os.path.join(self.tmpdir, 'master.ph5')) shutil.copy(os.path.join(orgph5path, 'miniPH5_00001.ph5'), os.path.join(self.tmpdir, 'miniPH5_00001.ph5')) testargs = ['delete_table', '-n', 'master.ph5', '-A', '9'] with patch.object(sys, 'argv', testargs): with OutputCapture(): f = StringIO('y') sys.stdin = f nuke_table.main() f.close() kefpath = os.path.join( self.home, 'ph5/test_data/metadata/array_9_test_resp_filename.kef') testargs = ['keftoph5', '-n', 'master.ph5', '-k', kefpath] with patch.object(sys, 'argv', testargs): kef2ph5.main() self.ph5API_object = ph5api.PH5(path=self.tmpdir, nickname='master.ph5') self.ph5validate = ph5validate.PH5Validate(self.ph5API_object, '.')
def main():
    """
    Command-line entry point: build the response-file list for a PH5
    experiment and either write a CSV template or load responses from a
    user-edited CSV.
    """
    args = get_args()
    # accept the nickname with or without the '.ph5' suffix
    if args.nickname[-3:] == 'ph5':
        ph5file = os.path.join(args.ph5path, args.nickname)
    else:
        ph5file = os.path.join(args.ph5path, args.nickname + '.ph5')
        args.nickname += '.ph5'
    if not os.path.exists(ph5file):
        logging.warning("{0} not found.\n".format(ph5file))
        sys.stderr.write("Error - {0} not found.\n".format(ph5file))
        sys.exit(-1)
    # -a takes a comma-separated list of array numbers
    if args.array:
        args.array = args.array.split(',')
    ph5API_object = ph5api.PH5(path=args.ph5path, nickname=args.nickname)
    fix_n_i = n_i_fix(ph5API_object, args.array)
    data = fix_n_i.create_list()
    ph5API_object.close()
    if args.input_csv is None:
        # no CSV supplied: emit a template for the user to fill in
        fix_n_i.create_template(data)
    else:
        new_data = fix_n_i.load_response(args.ph5path, args.nickname,
                                         data, args.input_csv)
        import time
        # NOTE(review): presumably waits for the HDF5 writes above to be
        # flushed before rewriting kefs — confirm whether this fixed
        # 5-second sleep is still required
        time.sleep(5)
        fix_n_i.update_kefs(args.ph5path, args.array, new_data)
def test_main(self):
    """
    End-to-end check of time_kef_gen.main(): create a das with SOH
    arrays, run the tool, and verify the emitted Time_t kef entries.
    """
    testargs = ['initialize_ph5', '-n', 'master.ph5']
    with patch.object(sys, 'argv', testargs):
        initialize_ph5.main()
    ph5 = ph5api.PH5(path='.', nickname='master.ph5', editmode=True)
    ph5.ph5_g_receivers.newdas('12537')
    for name, data in self.soh.items():
        ph5.ph5_g_receivers.newarray(name, data,
                                     description="Texan State of Health")
    ph5.close()
    # expected clock-drift values, one per SOH array
    slope_d = ['4.93494e-09', '-4.67895e-08', '-3.61306e-08']
    offset_d = ['0.000806093', '-0.01614', '-0.015794']
    testargs = ['time_kef_gen', '-n', 'master.ph5']
    with patch.object(sys, 'argv', testargs):
        with OutputCapture() as out:
            time_kef_gen.main()
            output = out.captured.strip()
    # one Time_t entry per SOH array
    self.assertEqual(output.count('/Experiment_g/Receivers_g/Time_t'), 3)
    outputlines = output.split("\n")
    i = 0
    for line in outputlines:
        if 'das/serial_number_s' in line:
            # BUG FIX: was assertTrue(value, '12357'), which always
            # passes (second argument is the failure message) and also
            # carried a '12357' typo for the das created above as
            # '12537'; use a real equality assertion.
            self.assertEqual(line.split("=")[1].strip(), '12537')
        if 'slope_d' in line:
            # BUG FIX: assertTrue -> assertEqual (same always-pass bug)
            self.assertEqual(line.split("=")[1].strip(), slope_d[i])
        if 'offset_d' in line:
            self.assertEqual(line.split("=")[1].strip(), offset_d[i])
            # offset is the last field of each entry, so advance here
            i += 1
def main():
    """
    Command-line entry point: run all PH5 validation checks and write
    the resulting blocks to the requested log file.
    """
    ph5API_object = None
    try:
        args = get_args()
        ph5API_object = ph5api.PH5(path=args.ph5path,
                                   nickname=args.nickname)
        ph5validate = PH5Validate(ph5API_object, args.ph5path,
                                  args.level.upper(), args.outfile)
        # gather validation blocks from every table type
        validation_blocks = []
        validation_blocks.extend(ph5validate.check_experiment_t())
        validation_blocks.extend(ph5validate.check_array_t())
        validation_blocks.extend(ph5validate.check_event_t())
        with open(args.outfile, "w") as log_file:
            for vb in validation_blocks:
                vb.write_to_log(log_file, args.level)
        sys.stdout.write("\nWarnings, Errors and suggestions "
                         "written to logfile: %s\n" % args.outfile)
    except ph5api.APIError as err:
        LOGGER.error(err)
    except PH5ValidateException as err:
        LOGGER.error(err)
    except Exception as e:
        LOGGER.error(e)
    finally:
        # BUG FIX: previously close() only ran on the success path; any
        # exception during validation left the PH5 file open.
        if ph5API_object is not None:
            ph5API_object.close()
def main():
    """
    Command-line entry point (Python 2): print a Time_t kef with one
    clock-drift entry per das, optionally followed by a clock report.
    """
    global P5
    get_args ()
    try :
        P5 = ph5api.PH5 (path=ARGS.ph5_path, nickname=ARGS.ph5_file_prefix)
    except Exception as e :
        sys.stderr.write ("Error: Can't open {0} at {1}.".format (ARGS.ph5_file_prefix, ARGS.ph5_path))
        sys.exit (-1)
    dasGroups = P5.ph5_g_receivers.alldas_g ()
    # Python 2: keys() returns a list that can be sorted in place
    dass = dasGroups.keys (); dass.sort ()
    # DAS, start time, end time, drift slope, offset
    stats = ([],[],[],[],[])
    no_cor = []  # das serials for which no correction could be computed
    print "# Written by time-gef-gen v{0}, PH5 v{1}".format (PROG_VERSION, ph5version)
    for d in dass :
        # group names look like 'Das_g_<serial>'; strip the prefix
        das = d[6:]
        soh = read_soh (dasGroups[d])
        tos, fos = process_soh (soh)
        clock = parse_tos_froms (tos, fos)
        print_kef (das, clock)
        # clock[0] is None when no time correction could be derived
        if clock[0] == None :
            no_cor.append (das)
            continue
        else :
            stats[0].append (das)
            stats[1].append (clock[0])
            stats[2].append (clock[1])
            stats[3].append (clock[2])
            stats[4].append (clock[3])
    if ARGS.clock_report :
        report (stats, no_cor)
    P5.close ()
def __init__(self, sta_xml_obj_list, ph5path, nickname, level, format):
    """
    Record the station-XML request parameters and open the PH5
    experiment.

    :param sta_xml_obj_list: list of station-xml request objects
    :param ph5path: directory containing the PH5 experiment
    :param nickname: PH5 master file name
    :param level: requested detail level (stored upper-cased)
    :param format: requested output format (stored upper-cased)
    """
    self.nickname = nickname
    self.request_list = sta_xml_obj_list
    # normalize the user-supplied options once up front
    self.level = level.upper()
    self.format = format.upper()
    # namespace for IRIS-specific extension attributes in StationXML
    self.iris_custom_ns = "http://www.fdsn.org/xml/station/1/iris"
    self.ph5 = ph5api.PH5(path=ph5path, nickname=nickname)
def __init__(self, sta_xml_obj_list, ph5path, nickname, level, format):
    """
    Record the station-XML request parameters, open the PH5 experiment
    and initialize the obspy station/channel caches.

    :param sta_xml_obj_list: list of station-xml request objects
    :param ph5path: directory containing the PH5 experiment
    :param nickname: PH5 master file name
    :param level: requested detail level (stored upper-cased)
    :param format: requested output format (stored upper-cased)
    """
    self.nickname = nickname
    self.request_list = sta_xml_obj_list
    # normalize the user-supplied options once up front
    self.level = level.upper()
    self.format = format.upper()
    # namespace for IRIS-specific extension attributes in StationXML
    self.iris_custom_ns = "http://www.iris.edu/xml/station/1/"
    # caches of already-built obspy objects, keyed per station/channel
    self._obs_stations = {}
    self._obs_channels = {}
    self.ph5 = ph5api.PH5(path=ph5path, nickname=nickname)
def setUp(self):
    """
    Create an empty master.ph5 in the working directory and build an
    n_i_fix instance over arrays 1-4 for the tests below.
    """
    super(Test_n_i_fix_simpleph5object, self).setUp()
    testargs = ['initialize_ph5', '-n', 'master.ph5']
    with patch.object(sys, 'argv', testargs):
        initialize_ph5.main()
    self.ph5API_object = ph5api.PH5(path='.', nickname='master.ph5',
                                    editmode=True)
    self.n_i_fix = resp_load.n_i_fix(self.ph5API_object, False, True,
                                     ['1', '2', '3', '4'])
def setUp(self):
    """
    Load das 12183 and array 9 test kefs into a fresh master.ph5 and
    build a PH5Validate instance logging at WARNING level.
    """
    super(TestPh5Validate, self).setUp()
    kef_to_ph5(
        self.tmpdir, 'master.ph5',
        os.path.join(self.home, 'ph5/test_data'),
        ['rt125a/das_t_12183.kef', 'metadata/array_t_9_validate.kef'],
        das_sn_list=['12183'])
    self.ph5_object = ph5api.PH5(path=self.tmpdir, nickname='master.ph5')
    self.ph5validate = ph5validate.PH5Validate(
        self.ph5_object, self.tmpdir, "WARNING",
        outfile="ph5_validate.log")
def test_create_trace(self):
    """
    For every cut produced by PH5toMSeed, any trace created from it
    must carry the cut's sample rate.
    """
    self.ph5_object = ph5api.PH5(path=os.path.join(self.home,
                                                   'ph5/test_data/ph5'),
                                 nickname='master.ph5')
    ph5toms = PH5toMSeed(self.ph5_object)
    ph5toms.process_all()
    cuts = ph5toms.create_cut_list()
    for cut in cuts:
        trace = ph5toms.create_trace(cut)
        # create_trace may return None for cuts with no data
        if trace is not None:
            self.assertEqual(trace[0].stats.sampling_rate,
                             cut.sample_rate)
def main():
    """
    Command-line entry point: run experiment and station validation on
    the requested PH5 experiment, writing findings to PH5Validate.log.
    """
    args = get_args()
    ph5API_object = ph5api.PH5(
        path=args.ph5path,
        nickname=args.nickname,
    )
    try:
        ph5validate = PH5Validate(ph5API_object, args.ph5path,
                                  level=args.level)
        ph5validate.check_experiment_t()
        # NOTE(review): 'checK_stations' (capital K) looks like a typo
        # but appears to be the name PH5Validate actually exposes —
        # confirm against the class before renaming.
        ph5validate.checK_stations()
    finally:
        # BUG FIX: close the PH5 handle even when a validation step
        # raises; previously an exception skipped close() entirely.
        ph5API_object.close()
    sys.stdout.write("\nWarnings, Errors and suggests written to logfile: "
                     + "PH5Validate.log\n\n")
def init_ph5(srcdir, destdir):
    '''
    Copy every .ph5 file from srcdir into destdir, open the copied
    master file in edit mode, and prime fix_srm's T2K machinery.

    :param srcdir: directory holding the original ph5 files
    :param destdir: working directory that receives the copies
    :return: editable ph5api.PH5 object rooted at destdir
    '''
    for entry in os.listdir(srcdir):
        if not entry.endswith('.ph5'):
            continue
        shutil.copy2(os.path.join(srcdir, entry), destdir)
    ph5object = ph5api.PH5(path=destdir, nickname='master.ph5',
                           editmode=True)
    fix_srm.init_T2K(ph5object)
    return ph5object
def assert_create_cut_list_trace(self, ph5path, errortype, errno, errmsg):
    """
    Helper: run PH5toMSeed's cut-list/trace pipeline against the given
    experiment and assert it raises the expected error.

    :param ph5path: directory containing the test master.ph5
    :param errortype: exception class expected from the pipeline
    :param errno: expected exception errno
    :param errmsg: expected exception msg
    """
    self.ph5_object = ph5api.PH5(path=ph5path, nickname='master.ph5')
    ph5toms = PH5toMSeed(self.ph5_object,
                         starttime='2019-06-29T18:03:13.000000',
                         component='1')
    ph5toms.process_all()
    # the error may surface while building the cut list or while
    # creating traces, so both steps run inside assertRaises
    with self.assertRaises(errortype) as context:
        cuts = ph5toms.create_cut_list()
        for cut in cuts:
            ph5toms.create_trace(cut)
    self.assertEqual(context.exception.errno, errno)
    self.assertEqual(context.exception.msg, errmsg)
    self.ph5_object.ph5.close()
def test_mismatch_sample_rate(self):
    """
    Traces built from the mismatched-sample-rate test data must still
    be produced (station 10075) and something must be logged.
    """
    ph5test_srpath = os.path.join(self.home,
                                  'ph5/test_data/ph5/samplerate')
    self.ph5_object = ph5api.PH5(path=ph5test_srpath,
                                 nickname='master.ph5')
    ph5toms = PH5toMSeed(self.ph5_object)
    ph5toms.process_all()
    cuts = ph5toms.create_cut_list()
    with LogCapture() as log:
        for cut in cuts:
            trace = ph5toms.create_trace(cut)
            # create_trace may return None for cuts with no data
            if trace is not None:
                self.assertEqual(trace[0].stats.station, '10075')
    self.assertIsNotNone(log)
def test_check_has_response_filename(self):
    """
    A Response_t loaded from segd data without resp_load must trigger
    the 'no response file names' message from
    validation.check_has_response_filename().
    """
    testargs = ['segdtoph5', '-n', 'master.ph5', '-U', '13N', '-r',
                os.path.join(self.home, 'ph5/test_data/segd/3ch.fcnt')]
    with patch.object(sys, 'argv', testargs):
        segd2ph5.main()
    self.ph5 = ph5api.PH5(path=self.tmpdir, nickname='master.ph5')
    self.ph5.read_response_t()
    has_response_file = validation.check_has_response_filename(
        self.ph5.Response_t, set(), None)
    self.assertEqual(
        has_response_file,
        "Response table does not contain any response file "
        "names. Check if resp_load has been run or if "
        "metadatatoph5 input contained response information.")
def setUp(self):
    """
    Open the shared test experiment read-only and build the
    response-check fixtures used by the tests in this class.
    """
    super(TestValidation_response, self).setUp()
    data_path = os.path.join(self.home, "ph5/test_data/ph5")
    self.ph5API_object = ph5api.PH5(path=data_path,
                                    nickname='master.ph5')
    # one fixture row per (array, station, channel) combination checked
    fields = ('n_i', 'array', 'sta', 'cha_code', 'spr', 'sprm',
              'cha_id', 'smodel', 'dmodel')
    rows = [
        (5, '002', '0407', 'HHN', 200, 1, 1, 'None CMG-3T',
         'None Q330'),
        (1, '008', '8001', 'HLZ', 100, 1, 1, 'cmg-3t', 'rt130'),
        (4, '009', '9001', 'DPZ', 500, 1, 1, 'gs11v', 'rt125a'),
    ]
    self.resp_check_info = [dict(zip(fields, row)) for row in rows]
def assert_main(self, srm_txt):
    """
    Verify fix_srm.main() repairs sample_rate_multiplier_i for the
    whole experiment and writes kef backups of the replaced tables.

    :param srm_txt: expected description of the bad srm value in the
        error message (e.g. 'missing' or 'with value 0')
    """
    # before running fix_srm, throw srm error when running read_das()
    das = self.ph5object.ph5_g_receivers.getdas_g('1X1111')
    self.ph5object.ph5_g_receivers.setcurrent(das)
    with self.assertRaises(experiment.HDF5InteractionError) as context:
        self.ph5object.ph5_g_receivers.read_das()
    self.assertEqual(context.exception.errno, 7)
    self.assertEqual(
        context.exception.msg,
        ('Das_t_1X1111 has sample_rate_multiplier_i %s. '
         'Please run fix_srm to fix sample_rate_multiplier_i for PH5 data.'
         % srm_txt))
    # before running fix_srm, throw srm error when running read_arrays()
    with self.assertRaises(experiment.HDF5InteractionError) as context:
        self.ph5object.ph5_g_sorts.read_arrays('Array_t_001')
    self.assertEqual(context.exception.errno, 7)
    self.assertEqual(
        context.exception.msg,
        ('Array_t_001 has sample_rate_multiplier_i %s. '
         'Please run fix_srm to fix sample_rate_multiplier_i for PH5 data.'
         % srm_txt))
    # close before fix_srm.main() rewrites the file
    self.ph5object.close()
    testargs = ['fix_srm', '-n', 'master.ph5']
    with patch.object(sys, 'argv', testargs):
        fix_srm.main()
    self.ph5object = ph5api.PH5(path=self.tmpdir, nickname='master.ph5',
                                editmode=False)
    # after running fix_srm for the whole ph5
    # there are no more error when running read_das(),
    # das_t include column 'sample_rate_multiplier_i'
    das = self.ph5object.ph5_g_receivers.getdas_g('1X1111')
    self.ph5object.ph5_g_receivers.setcurrent(das)
    rows, keys = self.ph5object.ph5_g_receivers.read_das()
    self.assertEqual(len(rows), 9)
    self.assertIn('sample_rate_multiplier_i', keys)
    # there are no more error when running read_arrays()
    # array_t include column 'sample_rate_multiplier_i'
    rows, keys = self.ph5object.ph5_g_sorts.read_arrays('Array_t_001')
    self.assertEqual(len(rows), 3)
    self.assertIn('sample_rate_multiplier_i', keys)
    # check backup files; yeardoy is a module-level date stamp used in
    # the backup file names
    indir = os.listdir(self.tmpdir)
    self.assertTrue('Das_t_1X1111_%s_00.kef' % yeardoy in indir)
    self.assertTrue('Array_t_001_%s_00.kef' % yeardoy in indir)
def reformat_das_t(ph5object, das_sn, ph5, path): ''' remove das_t and reformat from ph3 to pn4 :param ph5object: ph5 object where the das table will be deleted :param das_sn: serial number of das to be deleted Ex: '1X1111' Ex: 'Das_t_1X1111' :param ph5: name of ph5 file (str) :param path: path to ph5 file (str) :return backupfile: name of the kef file to backup the deleted table (str) datapath: path to the table in ph5 structure ph5object: ph5 object of which das table has been deleted ''' # get mini_file that keep the passed das's data index_rows, keys = ph5object.ph5_g_maps.read_index() for i in index_rows: if i['serial_number_s'] == das_sn: mini_filename = i['external_file_name_s'] break # open mini ph5 file to reformat das_t from pn3 to pn4 # because das_t is read-only when opened from ph5object exrec = experiment.ExperimentGroup(nickname=mini_filename, currentpath=path) exrec.ph5open(True) exrec.initgroup() # remove das_t and re-initialize das_t with pn4's structure das_t = exrec.ph5_g_receivers.ph5.get_node( '/Experiment_g/Receivers_g/Das_g_%s' % das_sn, name='Das_t', classname='Table') das_t.remove() experiment.initialize_table(exrec.ph5_g_receivers.ph5, '/Experiment_g/Receivers_g/Das_g_%s' % das_sn, 'Das_t', columns.Data, expectedrows=1000) exrec.ph5close() # The changes have happened on exrec, NOT on ph5object. # Now need to close and re-open ph5object to update all those changes. ph5object.close() ph5object = ph5api.PH5(path=path, nickname=ph5, editmode=True) # ph5object has been reopened, need to return for uses afterward return ph5object
def setUp(self):
    """
    Build a master.ph5 combining station metadata, miniseed das data
    and segd data, then create the n_i_fix instance under test.
    """
    super(Test_n_i_fix, self).setUp()
    """ use metadata to add metadata info
    array_002-st0407 - das5553 - response_n_i=0
    array_003-st0407 - das5553 - response_n_i=1
    array_004-st0407 - das5553 - response_n_i=-1
    response_t: n_i=0 response_file_das_a= NoneQ330_NoneCMG3T_200HHN
    response_t: n_i=1 response_file_das_a= NoneQ330_NoneCMG3T_100LHN
    response_t: n_i=-1
    """
    testargs = [
        'metadatatoph5', '-n', 'master.ph5', '-f',
        os.path.join(self.home, "ph5/test_data/metadata/station.xml")
    ]
    with patch.object(sys, 'argv', testargs):
        metadatatoph5.main()
    """ add das_t 5553's """
    testargs = [
        'mstoph5', '-n', 'master.ph5', '-d',
        os.path.join(self.home, "ph5/test_data/miniseed")
    ]
    with patch.object(sys, 'argv', testargs):
        obspytoph5.main()
    """ add create master.ph5 with the following tables:
    array_001-sta500-cha1,2,3-das3x500-modelZLAND_sr500-response_n_i0
    das_3x500
    response_t: n_i=0 bit_weight=1.88e-05
    """
    testargs = [
        'segdtoph5', '-n', 'master.ph5', '-r',
        os.path.join(self.home, "ph5/test_data/segd/3ch.fcnt")
    ]
    with patch.object(sys, 'argv', testargs):
        segd2ph5.main()
    self.ph5API_object = ph5api.PH5(path='.', nickname='master.ph5',
                                    editmode=True)
    self.n_i_fix = resp_load.n_i_fix(self.ph5API_object, False, True,
                                     ['1', '2', '3', '4'])
def main():
    """
    Command-line entry point: build the response-file list for a PH5
    experiment and either write a CSV template or load responses from a
    user-edited CSV. Logs to resp_load.log in the working directory.
    """
    args = get_args()
    # accept the nickname with or without the '.ph5' suffix
    if args.nickname[-3:] == 'ph5':
        ph5file = os.path.join(args.ph5path, args.nickname)
    else:
        args.nickname = '{0}.ph5'.format(args.nickname)
        ph5file = os.path.join(args.ph5path, args.nickname)
    if not os.path.exists(ph5file):
        LOGGER.warning("{0} not found.\n".format(ph5file))
        sys.exit(-1)
    else:
        # Set up logging
        # Write log to file
        ch = logging.FileHandler(os.path.join('.', "resp_load.log"))
        ch.setLevel(logging.INFO)
        # Add formatter
        formatter = logging.Formatter(LOGGING_FORMAT)
        ch.setFormatter(formatter)
        LOGGER.addHandler(ch)
    # -a takes a comma-separated list of array numbers
    if args.array:
        args.array = args.array.split(',')
    ph5API_object = ph5api.PH5(path=args.ph5path, nickname=args.nickname)
    fix_n_i = n_i_fix(ph5API_object, args.array)
    data = fix_n_i.create_list()
    ph5API_object.close()
    if args.input_csv is None:
        # no CSV supplied: emit a template for the user to fill in
        fix_n_i.create_template(data)
    else:
        new_data = fix_n_i.load_response(
            args.ph5path, args.nickname, data, args.input_csv)
        import time
        # NOTE(review): presumably waits for the HDF5 writes above to be
        # flushed before rewriting kefs — confirm whether this fixed
        # 5-second sleep is still required
        time.sleep(5)
        fix_n_i.update_kefs(args.ph5path, args.array, new_data)
def main():
    """
    Command-line entry point: print a Time_t kef with clock-drift
    entries per das (possibly several time windows each), optionally
    followed by a clock report, then close the PH5 file and any output
    file.
    """
    global P5, OFILE
    get_args()
    try:
        P5 = ph5api.PH5(path=ARGS.ph5_path,
                        nickname=ARGS.ph5_file_prefix)
    except Exception:
        LOGGER.error("Can't open {0} at {1}.".format(
            ARGS.ph5_file_prefix, ARGS.ph5_path))
        sys.exit(-1)
    dasGroups = P5.ph5_g_receivers.alldas_g()
    dass = sorted(dasGroups.keys())
    # DAS, start time, end time, drift slope, offset
    stats = ([], [], [], [], [])
    no_cor = []  # das serials for which no correction could be computed
    print_report("# Written by time-gef-gen v{0}, PH5 v{1}".format(
        PROG_VERSION, ph5version))
    for d in dass:
        # group names look like 'Das_g_<serial>'; strip the prefix
        das = d[6:]
        soh = read_soh(dasGroups[d])
        # process_soh yields one (tos, fos) pair per time window
        to_froms = process_soh(soh)
        count_cor = 0
        for tos, fos in to_froms:
            clock = parse_tos_froms(tos, fos)
            print_kef(das, clock)
            # clock[0] is None when no correction could be derived
            if clock[0] is not None:
                stats[0].append(das)
                stats[1].append(clock[0])
                stats[2].append(clock[1])
                stats[3].append(clock[2])
                stats[4].append(clock[3])
                count_cor += 1
        # only flag the das when no window produced a correction
        if count_cor == 0:
            no_cor.append(das)
    if ARGS.clock_report:
        report(stats, no_cor)
    P5.close()
    if OFILE is not None:
        OFILE.close()
def main():
    """
    Command-line entry point for fix_srm: repair the
    sample_rate_multiplier_i column in every Das table and every Array
    table of the experiment.
    """
    ph5, path = get_args()
    set_logger()
    ph5object = ph5api.PH5(path=path, nickname=ph5, editmode=True)
    LOGGER.info("fix_srm {0}".format(PROG_VERSION))
    LOGGER.info("{0}".format(sys.argv))
    init_T2K(ph5object)
    try:
        # fix all das tables; process() may reopen the file, so always
        # continue with the object it returns
        ph5object.read_das_g_names()
        for das_g_name in ph5object.Das_g_names.keys():
            das_t_name = das_g_name.replace('Das_g_', 'Das_t_')
            ph5object = process(ph5object, ph5, path,
                                das_name=das_t_name)
        # fix all array tables
        ph5object.ph5_g_sorts.read_sorts()
        for array_name in ph5object.ph5_g_sorts.names():
            process(ph5object, ph5, path, array_name=array_name)
    except Exception as e:
        # BUG FIX: was LOGGER.error(e.message); Exception.message does
        # not exist on Python 3 and would raise AttributeError here.
        LOGGER.error(str(e))
    finally:
        ph5object.close()
def main():
    """
    Command-line entry point: build the n_i map from kef dumps in the
    families directory, then print updated Das_t and Response_t tables
    for every family experiment.
    """
    get_args()
    os.chdir(ARGS.families_directory)
    dump_kefs()
    try:
        n_i_map = parse_kef()
    except BaseException:
        LOGGER.error("Cannot create n_i map. "
                     "Make sure the directory is correct using -F flag")
    else:
        for family in ALL_FAMILIES:
            ph5 = os.path.join(ARGS.families_directory, family)
            try:
                P5 = ph5api.PH5(path=ph5, nickname='master.ph5')
            except Exception as e:
                # BUG FIX: was LOGGER.warning(e.msg); only ph5 error
                # types carry .msg, so a generic Exception here raised
                # AttributeError instead of being logged. Fall back to
                # str(e) when .msg is absent.
                LOGGER.warning(getattr(e, 'msg', str(e)))
                continue
            print_new_Das_t(P5, n_i_map, family)
            P5.close()
        print_new_Response_t(n_i_map)
def test_read_arrays_nosrm(self):
    """
    read_arrays() on data whose sample_rate_multiplier_i column is
    missing must raise HDF5InteractionError by default and succeed with
    ignore_srm=True.
    """
    # test read_arrays with sample_rate_multiplier_i missing
    # => raise error if run with default ignore_srm=False
    # => pass assert_read_das() if ignore_srm=True
    nosrmpath = os.path.join(self.home,
                             'ph5/test_data/ph5_no_srm/array_das')
    self.ph5_object = ph5api.PH5(path=nosrmpath, nickname='master.ph5')
    with self.assertRaises(experiment.HDF5InteractionError) as context:
        self.ph5_object.ph5_g_sorts.read_arrays('Array_t_001')
    self.assertEqual(context.exception.errno, 7)
    self.assertEqual(
        context.exception.msg,
        ('Array_t_001 has sample_rate_multiplier_i missing. '
         'Please run fix_srm to fix sample_rate_multiplier_i for PH5 data.'
         ))
    # same condition but with ignore_srm=True, read_arrays return no error
    ret, keys = self.ph5_object.ph5_g_sorts.read_arrays('Array_t_001',
                                                        ignore_srm=True)
    self.assertEqual(len(ret), 3)
def test_main(self):
    """
    End-to-end check of resp_load.main(): load segd data, run resp_load
    with input.csv, then verify array_t n_i values, the response_t file
    name, and that all response nodes listed in input.csv were created.
    """
    testargs = [
        'segdtoph5', '-n', 'master.ph5', '-r',
        os.path.join(self.home, "ph5/test_data/segd/3ch.fcnt")
    ]
    with patch.object(sys, 'argv', testargs):
        segd2ph5.main()
    testargs = [
        'resp_load', '-n', 'master.ph5', '-a', '1', '-i',
        os.path.join(self.home, 'ph5/test_data/metadata/input.csv')
    ]
    with patch.object(sys, 'argv', testargs):
        resp_load.main()
    self.ph5API_object = ph5api.PH5(path='.', nickname='master.ph5')
    # check array_t
    self.ph5API_object.read_array_t('Array_t_001')
    entries = self.ph5API_object.Array_t['Array_t_001']['byid']['500'][1]
    self.assertEqual(entries[0]['response_table_n_i'], 0)
    self.assertEqual(entries[0]['receiver_table_n_i'], 1)
    # check response_t
    response_t = self.ph5API_object.get_response_t_by_n_i(0)
    self.assertEqual(response_t['response_file_das_a'],
                     '/Experiment_g/Responses_g/ZLAND3C_500_1_24')
    # check response data loaded for all response files listed in
    # input.csv; get_node raises NoSuchNodeError if a node is absent
    try:
        self.ph5API_object.ph5.get_node('/Experiment_g/Responses_g/',
                                        'ZLAND3C_500_1_24')
        self.ph5API_object.ph5.get_node('/Experiment_g/Responses_g/',
                                        'cmg3t')
        self.ph5API_object.ph5.get_node('/Experiment_g/Responses_g/',
                                        'gs11v')
        self.ph5API_object.ph5.get_node('/Experiment_g/Responses_g/',
                                        'rt125a_500_1_32')
        self.ph5API_object.ph5.get_node('/Experiment_g/Responses_g/',
                                        'rt130_100_1_1')
    except tables.NoSuchNodeError as e:
        raise AssertionError(e)
def test_read_das_srm0(self):
    """
    read_das() on data whose sample_rate_multiplier_i is 0 must raise
    HDF5InteractionError by default and succeed with ignore_srm=True.
    """
    # test read_das with sample_rate_multiplier_i=0
    # => raise error if run with default ignore_srm=False
    # => pass assert_read_das() if ignore_srm=True
    ph5path = os.path.join(
        self.home, 'ph5/test_data/ph5/sampleratemultiplier0/array_das')
    self.ph5_object = ph5api.PH5(path=ph5path, nickname='master.ph5')
    self.set_current_das('1X1111')
    with self.assertRaises(experiment.HDF5InteractionError) as context:
        self.ph5_object.ph5_g_receivers.read_das()
    self.assertEqual(context.exception.errno, 7)
    self.assertEqual(
        context.exception.msg,
        ('Das_t_1X1111 has sample_rate_multiplier_i with value 0. '
         'Please run fix_srm to fix sample_rate_multiplier_i for PH5 data.'
         ))
    # same condition but with ignore_srm=True, read_das return no error
    ret, keys = self.ph5_object.ph5_g_receivers.read_das(
        ignore_srm=True)
    self.assertEqual(len(ret), 9)
def main():
    """
    Command-line entry point (Python 2): compute shot/receiver offsets
    for every array/event pair and print them as kef, logging offset
    statistics to geod2kef.log.
    """
    global P5, N, LOG
    N = 0
    get_args()
    try:
        P5 = ph5api.PH5(path=ARGS.ph5_path,
                        nickname=ARGS.ph5_file_prefix)
    except Exception as e:
        sys.stderr.write("Error: Can't open {0} at {1}.".format(
            ARGS.ph5_file_prefix, ARGS.ph5_path))
        sys.exit(-1)
    P5.read_array_t_names()
    P5.read_event_t_names()
    # offsets need both arrays and events; bail out early otherwise
    if not P5.Array_t_names or not P5.Event_t_names:
        print >> sys.stderr, "No arrays or no events defined in ph5 file. Can not continue!"
        P5.close()
        sys.exit()
    print "# geod2kef v{0}, PH5 v{1}".format(PROG_VERSION, ph5version)
    with open("geod2kef.log", 'w+') as LOG:
        print >> LOG, sys.argv
        print >> LOG, "***\nOffset statistics:"
        for Array_t_name in P5.Array_t_names:
            # table names look like 'Array_t_NNN'; strip the prefix
            array_num = int(Array_t_name[8:])
            P5.read_array_t(Array_t_name)
            for Event_t_name in P5.Event_t_names:
                event_num = int(Event_t_name[8:])
                P5.read_event_t(Event_t_name)
                order = P5.Event_t[Event_t_name]['order']
                for shot_id in order:
                    Offset_t = P5.calc_offsets(Array_t_name, shot_id,
                                               shot_line=Event_t_name)
                    offsets = print_kef(array_num, event_num, Offset_t)
                    write_log(Array_t_name, Event_t_name, event_num,
                              offsets)
    P5.close()
def test_check_response_t(self):
    """
    check_response_t() on a Response_t without file names must both
    return the 'no response file names' error block and log it at
    ERROR level.
    """
    testargs = [
        'segdtoph5', '-n', 'master.ph5', '-U', '13N', '-r',
        os.path.join(self.home, 'ph5/test_data/segd/3ch.fcnt')
    ]
    with patch.object(sys, 'argv', testargs):
        segd2ph5.main()
    self.ph5API_object = ph5api.PH5(path=self.tmpdir,
                                    nickname='master.ph5')
    self.ph5validate = ph5validate.PH5Validate(self.ph5API_object, '.')
    with LogCapture() as log:
        log.setLevel(logging.ERROR)
        ret = self.ph5validate.check_response_t()
    self.assertEqual(ret[0].error, [
        "Response table does not contain any response file names. "
        "Check if resp_load has been run or if metadatatoph5 input "
        "contained response information."
    ])
    self.assertEqual(
        log.records[0].msg,
        "Response table does not contain any response file names. "
        "Check if resp_load has been run or if metadatatoph5 input "
        "contained response information.")