def main(): global RH, TH TH = [] RH = segdreader.ReelHeaders() try: sd = segdreader.Reader(infile=sys.argv[1]) general_headers(sd) channel_set_descriptors(sd) extended_headers(sd) external_header(sd) trace_headers(sd) print "{0} bytes read.".format(sd.bytes_read) except BaseException: print "Usage: dumpfair fairfield_seg-d_file" sys.exit()
def main(): global RH, TH TH = [] RH = segdreader.ReelHeaders() try: sd = segdreader.Reader(infile=sys.argv[1]) except: print "Usage: dumpfair fairfield_seg-d_file.rg16" print "To also print traces: set environment variable fairprint. This will run slowly." sys.exit() general_headers(sd) channel_set_descriptors(sd) extended_headers(sd) external_header(sd) trace_headers(sd) print "{0} bytes read.".format(sd.bytes_read)
def test_process_traces(self):
    """Exercise segd2ph5.process_traces on one trace of the 3-channel
    Fairfield test file and verify the module-level tables it updates
    (ARRAY_T, DAS_INFO, MAP_INFO, RESP).
    """
    segd2ph5.setLogger()
    # Parse the SEG-D test file up through all of its header blocks.
    segd2ph5.SD = SD = segdreader.Reader(
        infile=os.path.join(self.home, 'ph5/test_data/segd/3ch.fcnt'))
    SD.process_general_headers()
    SD.process_channel_set_descriptors()
    SD.process_extended_headers()
    SD.process_external_headers()
    SIZE = os.path.getsize(
        os.path.join(self.home, 'ph5/test_data/segd/3ch.fcnt'))
    # need to use relative path './miniPH5_00001.ph5' because
    # index_t's 'external_file_name_s will be chopped off if the path's
    # length is greater than 32
    segd2ph5.DAS_INFO = {
        '3X500': [
            segd2ph5.Index_t_Info('3X500', './miniPH5_00001.ph5',
                                  '/Experiment_g/Receivers_g/Das_g_3X500',
                                  1502294400.38, 1502294430.38)
        ]
    }
    segd2ph5.MAP_INFO = {
        '3X500': [
            segd2ph5.Index_t_Info('3X500', './miniPH5_00001.ph5',
                                  '/Experiment_g/Maps_g/Das_g_3X500',
                                  1502294400.38, 1502294430.38)
        ]
    }
    # Seed the module-level state that process_traces reads and updates.
    segd2ph5.Das = '3X500'
    segd2ph5.ARRAY_T = {}
    segd2ph5.TSPF = False
    segd2ph5.UTM = 0
    segd2ph5.LON = None
    segd2ph5.LAT = None
    segd2ph5.RH = False
    segd2ph5.writeINDEX()
    segd2ph5.RESP = segd2ph5.Resp(segd2ph5.EX.ph5_g_responses)
    segd2ph5.EXREC = segd2ph5.get_current_data_only(SIZE, segd2ph5.Das)
    segd2ph5.TRACE_JSON = []
    # prepare trace
    trace, cs = SD.process_trace()
    T = segd2ph5.Trace(trace, SD.trace_headers)
    # ___________test process_traces ____________________
    segd2ph5.process_traces(SD.reel_headers, T.headers, T.trace)
    # check ARRAY_T [array][das][deploy_time][chan]
    array_line = segd2ph5.ARRAY_T.keys()[0]
    self.assertEqual(array_line, 1)
    das = segd2ph5.ARRAY_T[array_line].keys()[0]
    self.assertEqual(das, '3X500')
    deploy_time = segd2ph5.ARRAY_T[array_line][das].keys()[0]
    self.assertEqual(deploy_time, 1502293592)
    chan = segd2ph5.ARRAY_T[array_line][das][deploy_time].keys()[0]
    self.assertEqual(chan, 1)
    # DAS_INFO
    self.assertEqual(segd2ph5.DAS_INFO.keys()[0], '3X500')
    das_info = segd2ph5.DAS_INFO['3X500'][0]
    self.assertEqual(das_info.ph5file, './miniPH5_00001.ph5')
    self.assertEqual(das_info.ph5path,
                     "/Experiment_g/Receivers_g/Das_g_3X500")
    self.assertEqual(das_info.startepoch, 1502294400.38)
    self.assertEqual(das_info.stopepoch, 1502294430.38)
    self.assertEqual(das_info.das, '3X500')
    # MAP_INFO
    self.assertEqual(segd2ph5.MAP_INFO.keys()[0], '3X500')
    map_info = segd2ph5.MAP_INFO['3X500'][0]
    self.assertEqual(map_info.ph5file, './miniPH5_00001.ph5')
    self.assertEqual(map_info.ph5path, "/Experiment_g/Maps_g/Das_g_3X500")
    self.assertEqual(map_info.startepoch, 1502294400.38)
    self.assertEqual(map_info.stopepoch, 1502294430.38)
    self.assertEqual(map_info.das, '3X500')
    # RESP -- expected single response-table entry for this file.
    response = {
        'gain/value_i': 24,
        'response_file_das_a': '',
        'bit_weight/units_s': 'mV/count',
        'bit_weight/value_d': 1.880399419308285e-05,
        'gain/units_s': 'dB',
        'response_file_a': '',
        'response_file_sensor_a': '',
        'n_i': 0
    }
    self.assertEqual(sorted(segd2ph5.RESP.keys), sorted(response.keys()))
    self.assertEqual(len(segd2ph5.RESP.lines), 1)
    for k in response.keys():
        # Compare floats approximately (bit weight is a computed value).
        if isinstance(segd2ph5.RESP.lines[0][k], float):
            self.assertAlmostEqual(segd2ph5.RESP.lines[0][k],
                                   response[k], places=5)
        else:
            self.assertEqual(segd2ph5.RESP.lines[0][k], response[k])
def main():
    """Convert Fairfield SEG-D files (FILES) into the PH5 experiment.

    The whole body lives in the local function prof() so it can easily be
    run under a profiler; main() defines it and then calls it once.
    Returns 1 on argument errors, otherwise None.
    """
    import time
    then = time.time()
    from numpy import append as npappend

    def prof():
        global RESP, INDEX_T_DAS, INDEX_T_MAP, SD, EXREC, MINIPH5, Das, SIZE,\
            ARRAY_T, RH, LAT, LON, F, TRACE_JSON, APPEND

        MINIPH5 = None
        ARRAY_T = {}

        def get_das(sd):
            # Return line_station or das#[-9:]
            try:
                das = "{0}X{1}".format(
                    sd.reel_headers.extended_header_3.line_number,
                    sd.reel_headers.extended_header_3.receiver_point)
            except Exception:
                try:
                    das = "{0}X{1}".format(
                        sd.reel_headers.external_header.receiver_line,
                        sd.reel_headers.external_header.receiver_point)
                except Exception:
                    das = "sn" + \
                        str(sd.reel_headers.general_header_block_1.
                            manufactures_sn)
                    if das == 0:
                        das = "id" + \
                            str(sd.reel_headers
                                .extended_header_1.id_number)[-9:]
            return das

        def get_node(sd):
            # Return node part number, node id, and number of channels
            pn = None  # Part Number
            id = None  # Node ID
            nc = None  # Number of channel sets
            try:
                nc = sd.reel_headers.general_header_block_1[
                    'chan_sets_per_scan']
                pn = sd.reel_headers.extended_header_1['part_number']
                id = sd.reel_headers.extended_header_1['id_number']
            except Exception:
                pass
            return pn, id, nc

        try:
            get_args()
        # Modernized from the legacy "except Exception, err_msg" form for
        # consistency with the "as e" handlers used below.
        except Exception as err_msg:
            LOGGER.error(err_msg)
            return 1
        initializeExperiment()
        LOGGER.info("segd2ph5 {0}".format(PROG_VERSION))
        LOGGER.info("{0}".format(sys.argv))
        if len(FILES) > 0:
            RESP = Resp(EX.ph5_g_responses)
            rows, keys = EX.ph5_g_receivers.read_index()
            INDEX_T_DAS = Rows_Keys(rows, keys)
            rows, keys = EX.ph5_g_maps.read_index()
            INDEX_T_MAP = Rows_Keys(rows, keys)
        for f in FILES:
            F = f
            traces = []
            TRACE_JSON = []
            try:
                SIZE = os.path.getsize(f)
            except Exception as e:
                LOGGER.error("Failed to read {0}, {1}.\
                Skipping...\n".format(f, str(e.message)))
                continue
            SD = segdreader.Reader(infile=f)
            LAT = None
            LON = None
            RH = False
            if not SD.isSEGD(expected_manufactures_code=MANUFACTURERS_CODE):
                LOGGER.error(
                    "{0} is not a Fairfield SEG-D file. Skipping.".format(
                        SD.name()))
                continue
            try:
                SD.process_general_headers()
                SD.process_channel_set_descriptors()
                SD.process_extended_headers()
                SD.process_external_headers()
            except segdreader.InputsError as e:
                LOGGER.error("Possible bad SEG-D file -- {0}".format("".join(
                    e.message)))
                continue
            # nleft counts how many traces may still be buffered before the
            # next flush to process_traces().
            nleft = APPEND
            Das = get_das(SD)
            part_number, node_id, number_of_channels = get_node(SD)
            EXREC = get_current_data_only(SIZE, Das)
            LOGGER.info(":<Processing>: {0}\n".format(SD.name()))
            LOGGER.info("Processing: {0}... Size: {1}\n".format(
                SD.name(), SIZE))
            if EXREC.filename != MINIPH5:
                LOGGER.info("Opened: {0}...\n".format(EXREC.filename))
                LOGGER.info(
                    "DAS: {0}, Node ID: {1}, PN: {2}, Channels: {3}".format(
                        Das, node_id, part_number, number_of_channels))
                MINIPH5 = EXREC.filename
            n = 0
            trace_headers_list = []
            while True:
                if SD.isEOF():
                    # Flush whatever is still buffered, one channel set at
                    # a time, then leave the trace loop.
                    if n != 0:
                        thl = []
                        chan_set = None
                        t = None
                        new_traces = []
                        for T in traces:
                            thl.append(T.headers)
                            if chan_set is None:
                                chan_set = T.headers.trace_header.channel_set
                            if chan_set == T.headers.trace_header.channel_set:
                                if isinstance(t, type(None)):
                                    t = T.trace
                                else:
                                    t = npappend(t, T.trace)
                            else:
                                new_traces.append(T)
                        traces = new_traces
                        process_traces(SD.reel_headers, thl[0], t)
                        if DAS_INFO:
                            writeINDEX()
                    break
                try:
                    trace, cs = SD.process_trace()
                except segdreader.InputsError as e:
                    LOGGER.error("{0}\n".format(F))
                    LOGGER.error("Possible bad SEG-D file -- {0}".format(
                        "".join(e.message)))
                    break
                # Derive LAT/LON once, from the first trace that has them.
                if not LAT and not LON:
                    try:
                        if UTM:
                            # UTM
                            LAT, LON = utmcsptolatlon(
                                SD.trace_headers.trace_header_N[4].
                                receiver_point_Y_final / 10.,
                                SD.trace_headers.
                                trace_header_N[4].receiver_point_X_final
                                / 10.)
                        elif TSPF:
                            # Texas State Plane coordinates
                            LAT, LON = txncsptolatlon(
                                SD.trace_headers.trace_header_N[4].
                                receiver_point_Y_final / 10.,
                                SD.trace_headers.
                                trace_header_N[4].receiver_point_X_final
                                / 10.)
                        else:
                            LAT = SD.trace_headers.trace_header_N[
                                4].receiver_point_Y_final / 10.
                            LON = SD.trace_headers.trace_header_N[
                                4].receiver_point_X_final / 10.
                    except Exception as e:
                        LOGGER.warning(
                            "Failed to convert location: {0}.\n".format(
                                e.message))
                trace_headers_list.append(SD.trace_headers)
                if n == 0:
                    traces.append(Trace(trace, SD.trace_headers))
                    n = 1
                    Das = get_das(SD)
                else:
                    traces.append(Trace(trace, SD.trace_headers))
                if n >= nleft or EVERY is True:
                    thl = []
                    chan_set = None
                    chan_set_next = None
                    t = None
                    new_traces = []
                    # Need to check for gaps here!
                    for T in traces:
                        thl.append(T.headers)
                        if chan_set is None:
                            chan_set = T.headers.trace_header.channel_set
                        if chan_set == T.headers.trace_header.channel_set:
                            if isinstance(t, type(None)):
                                t = T.trace
                            else:
                                t = npappend(t, T.trace)
                        else:
                            new_traces.append(T)
                            if chan_set_next is None:
                                chan_set_next =\
                                    T.headers.trace_header.channel_set
                    traces = new_traces
                    process_traces(SD.reel_headers, thl[0], t)
                    if new_traces:
                        nleft = APPEND - len(new_traces)
                    else:
                        nleft = APPEND
                    chan_set = chan_set_next
                    chan_set_next = None
                    if DAS_INFO:
                        writeINDEX()
                    n = 0
                    trace_headers_list = []
                    continue
                n += 1
            update_external_references()
            if TRACE_JSON:
                log_array, name = getLOG()
                for line in TRACE_JSON:
                    log_array.append(line)
            LOGGER.info(":<Finished>: {0}\n".format(F))
        write_arrays(ARRAY_T)
        seconds = time.time() - then
        try:
            EX.ph5close()
            EXREC.ph5close()
        except Exception as e:
            LOGGER.warning("{0}\n".format("".join(e.message)))
        LOGGER.info("Done...{0:b}".format(int(seconds / 6.)))
        logging.shutdown()

    # BUG FIX: prof() was defined but never invoked, making main() a no-op.
    return prof()
def main(): global RH, TH TH = [] get_args() outpath = ARGS.linkdirectory with open(ARGS.segdfilelist) as fh: lh = open("unsimpleton.log", 'a+') while True: line = fh.readline() if not line: break filename = line.strip() if not os.path.exists(filename): sys.stderr.write("Warning: can't find: {0}\n".format(filename)) continue RH = segdreader.ReelHeaders() try: sd = segdreader.Reader(infile=filename) except: sys.stderr.write( "Failed to properly read {0}.\n".format(filename)) sys.exit() general_headers(sd) channel_set_descriptors(sd) extended_headers(sd) external_header(sd) #print "{0} bytes read.".format (sd.bytes_read) line_number = sd.reel_headers.extended_header_3['line_number'] receiver_point = sd.reel_headers.extended_header_3[ 'receiver_point'] version = sd.reel_headers.general_header_block_2[ 'file_version_number'] id_number = sd.reel_headers.extended_header_1['id_number'] outfile = "PIC_{0}_{1}_{3}.0.0.rg{2}".format( line_number, receiver_point, 16, id_number) linkname = os.path.join(outpath, outfile) i = 0 while os.path.exists(linkname): i += 1 outfile = "PIC_{0}_{1}_{4}.0.{3}.rg{2}".format( line_number, receiver_point, 16, i, id_number) linkname = os.path.join(outpath, outfile) try: if ARGS.hardlinks == True: print filename, 'hard->', linkname try: os.link(filename, linkname) except Exception as e: sys.stderr.write( "Failed to create HARD link:\n{0}\n".format( e.message)) sys.exit() else: print filename, 'soft->', linkname try: os.symlink(filename, linkname) except Exception as e: sys.stderr.write( "Failed to create soft link:\n{0}\n".format( e.message)) sys.exit() lh.write("{0} -> {1}\n".format(filename, linkname)) except Exception as e: print e.message lh.close()