def show_block(filename, offset):
    """Hex-dump one raw block of a DAT file, plus a peek at what follows.

    Args:
        filename: path to the DAT archive.
        offset: absolute byte offset of the block to dump.
    """
    f = DatFile(filename)
    f.stream.seek(offset)
    block_data = f.stream.read(f.block_size)
    dump(block_data)
    # Separator between the block proper and the trailing peek.
    # (print() form matches the rest of the file and behaves identically
    # for a single string argument on Python 2 as well.)
    print("---")
    # Show the 0x40 bytes immediately after the block for context.
    dump(f.stream.read(0x40))
def load_dat(self, target):
    """Coroutine: receive log items, locate the matching .dat file, forward it.

    For each item sent in, derives a ``.dat`` filename from the item's
    ``ImageLocation``, waits until the file's size has stopped changing
    (the acquisition software may still be writing it), loads it as a
    DatFile and sends it downstream.

    Args:
        target: downstream coroutine; receives DatFile objects via .send().
    """
    while True:
        item = (yield)
        directory, filename = os.path.split(item['ImageLocation'])
        filename = ''.join((os.path.splitext(filename)[0], '.dat'))
        if self.exp_directory == 'beamline':
            # Beamline mode: map the image path onto the detector's
            # raw_dat directory on the shared filesystem.
            patharray = ['/data/pilatus1M'] + directory.split('/')[2:-1] + ['raw_dat', filename]
            print(patharray)
        else:
            # Offline mode: raw_dat lives under the experiment directory.
            patharray = [self.exp_directory, 'raw_dat', filename]
            # NOTE(review): source indentation was lost; this print may
            # originally have been at function level (both modes) — confirm.
            print(os.path.join(*patharray))
        try:
            # Poll until the reported size is stable (up to ~3 s) so we do
            # not parse a file that is still being written out.
            filesize = 0
            for count in range(30):
                filesizetemp = os.path.getsize(os.path.join(*patharray))
                if filesizetemp == filesize:
                    break
                filesize = filesizetemp
                time.sleep(0.1)
            dat = DatFile(os.path.join(*patharray))
            try:
                # Best effort: propagate the flush flag when present.
                dat.setuserdata({'flush': item['flush']})
            except Exception:
                pass
            target.send(dat)
        except EnvironmentError:
            # File missing or unreadable: drop this item, wait for the next.
            pass
def show_header(filename):
    """Display the header fields of the given DAT file in hex."""
    dat = DatFile(filename)
    header_lines = (
        "File Header Information:",
        " Version: %08X %08X" % (dat.version, dat.version_2),
        " Block Size: %08X bytes" % dat.block_size,
        " File Size: %08X bytes" % dat.file_size,
        " Root Directory Block: %08X" % dat.directory_offset,
        " Free Block Chain Head: %08X" % dat.free_head,
        " Free Blocks: %08X blocks" % dat.free_size,
    )
    for line in header_lines:
        print(line)
def show_file_block(filename, offset):
    """Dump the payload of the file block stored at *offset* in the DAT."""
    dat = DatFile(filename)
    stream = dat.stream
    stream.seek(offset)
    header_block = stream.read(dat.block_size)
    # Block header layout: two zero words, then file id and payload size.
    zero1, zero2, file_id, size = struct.unpack("<LLLL", header_block[:0x10])
    assert zero1 == 0
    assert zero2 == 0
    print("%08X %08X" % (file_id, size))
    payload = stream.read(size)
    # Skip the first 0x10 bytes of the payload before dumping.
    dump(payload[0x10:])
def show_directory(filename, offset=None):
    """Print the directory at the given offset, or the root directory.

    Interior directory nodes interleave subdirectory pointers with file
    entries; leaf nodes contain only file entries.

    Args:
        filename: path to the DAT archive.
        offset: directory block offset, or None for the root directory.
            NOTE(review): currently unused by the body — f.directory() is
            called with no arguments; confirm intended behavior.
    """
    f = DatFile(filename)
    d = f.directory()
    if d.subdir_ptrs:
        # Interior node: print each child-directory offset, followed by the
        # file entry that separates it from the next child (B-tree style).
        for i, block_size, dir_offset in d.subdir_ptrs:
            print(" %08X" % dir_offset)
            if i < d.count:
                print_entry(d.file_ptrs[i])
    else:
        # Leaf node: only file entries.
        for entry in d.file_ptrs:
            print_entry(entry)
#!/usr/bin/env python
import struct
import zlib

from dat import DatFile
import png

filename = "LOTRO/client_local_English.dat"
f = DatFile(filename)


def image_0x15(header_id, width, height, data):
    """Decode a type-0x15 raw image and write colour + alpha PNGs.

    Args:
        header_id: file id, used to name the output files.
        width, height: image dimensions in pixels.
        data: packed 4-bytes-per-pixel byte string
            (presumably B, G, R, A order given the channel swap below —
            TODO confirm against the format spec).
    """
    pixels = {}
    apixels = {}
    for y in range(height):
        for x in range(width):
            o = (y * width + x) * 4
            c1, c2, c3, c4 = map(ord, data[o:o + 4])
            # Swap to RGB for the colour image; alpha becomes a grey image.
            pixels[(x, y)] = (c3, c2, c1)
            apixels[(x, y)] = (c4, c4, c4)
    png.output_png("local/%08X_015.png" % header_id, width, height, pixels)
    png.output_png("local/%08X_015_a.png" % header_id, width, height, apixels)


def image_0x1F4(header_id, data):
    """Write a type-0x1F4 entry (embedded JPEG data) out as a .jpg file."""
    # BUG FIX: JPEG data is binary — open in "wb", not "w", so text-mode
    # newline translation cannot corrupt the output.
    with open("local/%08X.jpg" % header_id, "wb") as nf:
        nf.write(data)
engine.movingAvWindow = movingAvWindow = 5 lastname = '' pipeline = Pipeline.Pipeline(config) r = redis.StrictRedis(host='localhost', port=6379, db=0) if offline is True: redis_dat = engine.no_op else: ##Load last buffer from redis in case this is a recovery redisBuffer = r.get('logline:avg_buf') try : bufferQ, bufferProfile, bufferErrors = zip(*pickle.loads(redisBuffer)) bufferDat = DatFile() bufferDat.q = bufferQ bufferDat.intensities = bufferProfile bufferDat.errors = bufferErrors Buffer = Buffer() Buffer.value = bufferDat except: pass ## buffer pipeline # Use this line if wanting to use autowater as buffer: buffers = engine.filter_on_attr('SampleType', ['0', '3', '8'], engine.load_dat(engine.average_water(engine.broadcast(engine.save_dat('avg'), redis_dat('avg_buf'), engine.store_obj(Buffer))))) buffers = engine.filter_on_attr('SampleType', ['0', '3'], engine.load_dat(engine.average(engine.broadcast(engine.save_dat('avg'), redis_dat('avg_buf'), engine.store_obj(Buffer))))) repbuffers = engine.filter_on_attr('SampleType', ['2', '5'], engine.load_dat(engine.average(engine.broadcast(engine.save_dat('avg'), redis_dat('avg_rep_buf'))))) ## samples pipeline if args.none is False:
def show_find_file(filename, file_id):
    """Locate and print the directory entry matching *file_id*."""
    dat_file = DatFile(filename)
    entry = dat_file.find_file(file_id)
    print_entry(entry)
def show_list(filename):
    """Print every file entry contained in the DAT archive."""
    DatFile(filename).visit_file_entries(print_entry)
def subtract(self, readpath, sample, blank, fraction=0, writepath='../manual', analysispath='../analysis', analyse=True):
    """Batch-subtract blank profiles from sample profiles.

    Starting from the given *sample*/*blank* pair, repeatedly loads the
    files with successive file indices, subtracts blank from sample,
    saves the result under *writepath*, and (optionally) runs the
    analysis pipeline on each output. The loop ends when the next file
    in either sequence is missing (IOError/OSError from DatFile).

    Args:
        readpath: directory containing the input .dat files.
        sample, blank: filenames of the first sample and blank files.
        fraction: if > 0, the blank is a weighted average of the named
            blank and the next-indexed blank (weights fraction, 1-fraction).
        writepath: output directory; if it does not exist it is resolved
            relative to readpath and created if needed.
        analysispath: analysis output directory, handled like writepath.
        analyse: when True, run PipelineLite on each subtracted file.

    Returns:
        Number of pairs successfully processed, or -1 if the initial
        sample/blank files could not be opened.
    """
    if fraction > 0:
        # Keep the blank-mixing weight to one decimal place.
        fraction = round(fraction, 1)
    try:
        sampleDat = DatFile(os.path.join(readpath, sample))
        blankDat = DatFile(os.path.join(readpath, blank))
    except IOError:
        print "File Doesn't Exist"
        return -1
    # Numeric index and its zero-padded width, taken from each filename.
    sampleNum = int(sampleDat.fileindex)
    blankNum = int(blankDat.fileindex)
    sampleNumLen = len(sampleDat.fileindex)
    blankNumLen = len(blankDat.fileindex)
    sampleRoot = os.path.splitext(sampleDat.rootname)[0]
    blankRoot = os.path.splitext(blankDat.rootname)[0]
    # Resolve writepath relative to readpath when it does not exist as
    # given; create it if it is still missing.
    if not os.path.exists(writepath):
        writepath = os.path.abspath(os.path.join(readpath, writepath))
        if not os.path.exists(writepath):
            try:
                os.mkdir(writepath)
            except OSError:
                # Tolerate a concurrent mkdir; re-raise anything else.
                if not os.path.isdir(writepath):
                    print 'dat write path ' + writepath + ' invalid, file not written'
                    raise
    if analyse:
        # Same resolve-then-create dance for the analysis output directory.
        if not os.path.exists(analysispath):
            analysispath = os.path.abspath(
                os.path.join(readpath, analysispath))
            if not os.path.exists(analysispath):
                try:
                    os.mkdir(analysispath)
                except OSError:
                    if not os.path.isdir(analysispath):
                        print 'analysis write path invalid, file not written'
                        raise
    num = 0
    # Process pairs until the next file in either sequence is missing;
    # termination is driven by the IOError/OSError handlers below.
    while True:
        try:
            sampleTemp = DatFile(datfile=os.path.join(
                readpath, ''.join([
                    sampleRoot, '_',
                    str(sampleNum).zfill(sampleNumLen), '.dat'
                ])))
            blankTemp = DatFile(datfile=os.path.join(
                readpath, ''.join([
                    blankRoot, '_',
                    str(blankNum).zfill(blankNumLen), '.dat'
                ])))
            fraction_string = ''
            if fraction > 0:
                # Blend this blank with the next-indexed blank.
                blank2 = '{}_{:04d}.dat'.format(
                    blankTemp.rootname_rmext, int(blankTemp.fileindex) + 1)
                blankTemp2 = DatFile(os.path.join(readpath, blank2))
                blankTemp = dat.average([blankTemp, blankTemp2],
                                        [fraction, 1 - fraction])
                fraction_string = '_{:.0f}'.format(100 * fraction)
            subtractedDat = dat.subtract(sampleTemp, blankTemp)
            # Output name encodes both source indices (and the blend
            # percentage, when used).
            filename = os.path.join(
                writepath, ''.join([
                    os.path.splitext(subtractedDat.rootname)[0], '_',
                    str(sampleNum).zfill(sampleNumLen), '_',
                    str(blankNum).zfill(blankNumLen), fraction_string,
                    '.dat'
                ]))
            subtractedDat.save(filename)
            if analyse:
                pipeline = PipelineLite.PipelineLite(
                    filename, analysispath)
                pipeline.runPipeline()
            sampleNum += 1
            blankNum += 1
            num += 1
        except IOError as e:
            # Expected end of the sequence: next file does not exist.
            print 'IO'
            break
        except OSError as e:
            print 'OS'
            break
        except Exception:
            # Anything else is a genuine error — propagate it.
            raise
    return num