def main():
    """Huffman-compress a user-chosen image and write results to 'results/'.

    Prompts for an image path, builds the Huffman code table from the
    image's pixel array, persists the histogram and codes, and runs a
    benchmark over the encoding.
    """
    image_path = input('>> Caminho para a imagem com extensão: ')
    image = img_process.ImageProcess(image_path)
    huffman_obj = huffman.Huffman(image.img_array)
    # Persist the histogram and code table alongside the results.
    save_data.SaveData('results/', image.img_array, huffman_obj.hist,
                       huffman_obj.codes)
    # Benchmark is constructed for its side effects; the original bound it
    # to an unused local ('bench'), which has been dropped.
    benchmark.Benchmark(image, image.img_array, huffman_obj.codes)
    print('\nRESULTADOS PODEM SER VISUALIZADOS EM "/results"')
def __init__(self, options, is_request, params):  # 'params' is ignored
    """Set up paired SPDY4 codec state sharing one Huffman frequency table.

    The compressor and decompressor each get their own Huffman table
    instance, built from the request or response frequency table depending
    on *is_request*.
    """
    self.compressor = spdy4_codec_impl.Spdy4CoDe()
    self.decompressor = spdy4_codec_impl.Spdy4CoDe()
    self.options = options
    self.hosts = {}
    self.group_ids = common_utils.IDStore(2**31)
    self.wf = self.compressor.wf
    self.name = "delta2_bohe"
    if is_request:
        freq_table = header_freq_tables.request_freq_table
    else:
        freq_table = header_freq_tables.response_freq_table
    # Two distinct Huffman instances, mirroring the original behavior.
    for codec in (self.compressor, self.decompressor):
        codec.huffman_table = huffman.Huffman(freq_table)
def __init__(self, options, is_request, params):
    """Initialize the processor, delegating common setup to BaseProcessor.

    Builds a SPDY4 compressor/decompressor pair (parameterized by *params*)
    and assigns both the Huffman table matching the message direction.
    """
    BaseProcessor.__init__(self, options, is_request, params)
    self.compressor = spdy4_codec_impl.Spdy4CoDe(params)
    self.decompressor = spdy4_codec_impl.Spdy4CoDe(params)
    self.hosts = {}
    self.group_ids = common_utils.IDStore(255)
    self.wf = self.compressor.wf
    # Pick the frequency table once; build a separate Huffman per codec.
    freq_table = (header_freq_tables.request_freq_table
                  if is_request
                  else header_freq_tables.response_freq_table)
    self.compressor.huffman_table = huffman.Huffman(freq_table)
    self.decompressor.huffman_table = huffman.Huffman(freq_table)
def __init__(self, inputfile):
    """Load *inputfile* and prepare Huffman symbol state for its contents."""
    self.inputFile = inputfile
    self.message = self.ReadFile()
    # Fresh Huffman coder dedicated to this message.
    self.huffman = huffman.Huffman()
    # Distinct byte values present in the message.
    self.values = list(set(self.message))
    # Symbol objects; populated by GenerateSymbols() below.
    self.symbols = []
    self.GenerateSymbols()
def main():
    """Round-trip a file through Huffman compression.

    Usage: prog <filename>. When no argument is given, prints usage and
    falls back to the hard-coded 'test.txt' (kept for quick manual testing,
    as in the original).

    Fix: the original used Python-2-only print statements; the
    parenthesized single-argument form below behaves identically on
    Python 2 and is valid Python 3.
    """
    if len(sys.argv) < 2:
        print("Usage:")
        print(sys.argv[0] + " <filename>")
    if len(sys.argv) > 1:
        filename = sys.argv[1]
    else:
        # Hard-coded fallback case for testing.
        filename = "test.txt"
    print("Compressing " + filename + "...")
    huff = huffman.Huffman()
    huff.encode(filename, filename + ".out")
    print("Decompressing " + filename + ".out...")
    huff.decode(filename + ".out", filename + ".org")
def __init__(self):
    """Build the Tkinter GUI for Huffman compression plus encryption."""
    self.key = ""        # set later (presumably from the password box) — TODO confirm
    self.data = None     # loaded file contents; filled by load_file — TODO confirm
    self.h = huffman.Huffman()
    self.root = Tk()
    self.root.title("Huffman Compression & Encrption")
    self.root.geometry("1000x1000")
    self.root.resizable(0, 0)
    self.root.configure(background='LIGHT blue')
    self.com_s = None
    # self.com_s.pack()
    # File Select Button
    rf_button = Button(self.root, text="Select File")
    rf_button.configure(command=self.load_file)
    rf_button.place(x=250, y=200, width=200)
    # Enter Password Label
    # NOTE(review): the next statement is corrupted — the label text and the
    # password Entry construction appear mangled into one expression
    # ('******'), and self.password_box is never created here. Reconstruct
    # from the original source before running; left byte-identical.
    ep_label = Label(self.root, text="Enter Password:"******"\u2022", bd=7)
    self.password_box.bind('<Return>', self.get_password)
    self.password_box.place(x=250 + 200, y=200 + 20, width=200)
    enter_b = Label(self.root, text="Press Enter")
    enter_b.place(x=250 + 200, y=200 + 50, width=200)
    enter_b.configure(background="lightblue")
    # Compression / Encyption Button
    ce_button = Button(self.root, text="Encrypt/Compress")
    ce_button.bind("<Button-1>", self.compress_encrypt)
    ce_button.place(x=250, y=25 + 200, width=200)
    # Decompression / Decryption Button
    dd_button = Button(self.root, text="Decrypt/Decompress")
    dd_button.bind("<Button-1>", self.decrypt_decompress)
    dd_button.place(x=250, y=50 + 200, width=200)
    # Console Text Box (terminal-style green-on-black log area)
    self.tb = Text(self.root, bg="black", fg="#00ff37", font=("Arial", 10))
    self.tb.place(x=250, y=75 + 200, width=400, height=500)
def __init__(self):
    """Build the compact 250x250 Tkinter GUI for HuffCrypt."""
    self.key = ""        # set later (presumably from the password box) — TODO confirm
    self.data = None     # loaded file contents; filled by load_file — TODO confirm
    self.h = huffman.Huffman()
    self.root = Tk()
    self.root.title("HuffCrypt")
    self.root.geometry("250x250")
    self.root.resizable(0, 0)
    # File Select Button
    rf_button = Button(self.root, text="Select File")
    rf_button.configure(command=self.load_file)
    rf_button.place(x=0, y=0, width=122)
    # Enter Password Label
    # NOTE(review): the next statement is corrupted — the label text and the
    # password Entry construction appear mangled into one expression
    # ('******'), and self.password_box is never created here. Reconstruct
    # from the original source before running; left byte-identical.
    ep_label = Label(self.root, text="Enter Password:"******"\u2022", bd=3)
    self.password_box.bind('<Return>', self.get_password)
    self.password_box.place(x=125, y=25, width=120)
    # Compression / Encyption Button
    ce_button = Button(self.root, text="Encrypt/Compress")
    ce_button.bind("<Button-1>", self.compress_encrypt)
    ce_button.place(x=0, y=25, width=122)
    # Decompression / Decryption Button
    dd_button = Button(self.root, text="Decrypt/Decompress")
    dd_button.bind("<Button-1>", self.decrypt_decompress)
    dd_button.place(x=0, y=50, width=122)
    # Console Text Box (terminal-style green-on-black log area)
    self.tb = Text(self.root, bg="black", fg="#00ff37", font=("Helvetica", 8))
    self.tb.place(x=0, y=75, width=250, height=175)
def __init__(self):
    """Initialize codec state: quantization/Huffman tables, geometry, flags.

    The field names (qtables, htables, SOS table, restart interval) suggest
    JPEG-style decoding — TODO confirm against the rest of the class.
    """
    # Eight 64-entry quantization tables.
    self._qtables = [[0] * 64 for _ in range(8)]
    self._htables = [0] * 8
    self._planes_count = 0
    # BUG FIX: the original used [{...}] * 256, which puts ONE shared dict
    # in all 256 slots — mutating plane i would mutate every plane. Build
    # an independent dict per slot instead.
    self._planes = [{'fh': -1, 'fv': -1, 'q': -1} for _ in range(256)]
    self._width = 0
    self._height = 0
    self._pwidth = 0
    self._pheight = 0
    self._psum = 3
    self._lsum = 0
    self._sos_table = [(0, 0)]
    self._use_ri = False   # restart-interval usage flag — TODO confirm
    self._ri = 0
    self._bmpdata = None
    self._huf = huffman.Huffman()
    self._inverse_dir = False
def __init__(self, inputfile):
    """Load *inputfile*, append an EOF marker, and prepare Huffman symbols."""
    self.inputFile = inputfile
    print('Encoder constructor')
    self.message = self.ReadFile()
    # Append the end-of-file sentinel to the message before encoding.
    self.AddEOF()
    # Fresh Huffman coder dedicated to this message.
    self.huffman = huffman.Huffman()
    # Distinct byte values present in the message.
    self.values = list(set(self.message))
    # NOTE: a dead debug loop that merely walked self.values (its print was
    # commented out) has been removed — it had no effect.
    # Symbol objects; populated by GenerateSymbols() below.
    self.symbols = []
    self.GenerateSymbols()
def write_file_parallel(file_ind, i, obsid, obs_ind, daflags, TODs,
                        gain_guesses, band_labels, band, psi_A, psi_B,
                        pix_A, pix_B, fknee, alpha, n_per_day, ntodsigma,
                        npsi, psiBins, nside, fsamp, pos, vel, time,
                        compress=False):
    """Write one day-chunk of WMAP TOD for *band* into an HDF5 file.

    Two passes over the band's detector labels: the first builds
    delta-encoded pixel/psi/flag/TOD streams to train Huffman coders; the
    second writes the (optionally Huffman-compressed) datasets plus common
    metadata. Skips silently if the output file already exists. Appends an
    entry to the band's filelist on completion.

    NOTE(review): the HDF5 handle 'f' is never closed explicitly here;
    presumably relies on interpreter cleanup — TODO confirm.
    """
    file_out = prefix + f'data/wmap_{band}_{str(file_ind+1).zfill(6)}_v5.h5'
    if os.path.exists(file_out):
        return
    # Mean sample spacing of the (undivided) time axis.
    dt0 = np.diff(time).mean()
    det_list = []
    # make huffman code tables
    # Pixel, Psi, Flag
    pixArray_A = [[], [], []]
    pixArray_B = [[], [], []]
    todArray = []
    # --- Pass 1: collect delta streams to train the Huffman coders. ---
    for j in range(len(band_labels)):
        label = band_labels[j]
        if label[:-2] == band.upper():
            TOD = TODs[j]
            gain = gain_guesses[j]
            sigma_0 = TOD.std()
            # NOTE(review): 'scalars' computed here is unused in this pass
            # (recomputed in pass 2) — left as in the original.
            scalars = np.array([gain, sigma_0, fknee, alpha])
            # Interleave TOD columns into a single flat stream.
            tod = np.zeros(TOD.size)
            for n in range(len(TOD[0])):
                tod[n::len(TOD[0])] = TOD[:, n]
            todi = np.array_split(tod, n_per_day)[i]
            # Quantize TOD in units of sigma/ntodsigma for compressibility.
            todInd = np.int32(ntodsigma * todi / (sigma_0 * gain))
            delta = np.diff(todInd)
            delta = np.insert(delta, 0, todInd[0])
            todArray.append(delta)
            # Delta-encode pixel indices for horns A and B.
            pix = np.array_split(pix_A[j // 4], n_per_day)[i]
            delta = np.diff(pix)
            delta = np.insert(delta, 0, pix[0])
            pixArray_A[0].append(delta)
            pix = np.array_split(pix_B[j // 4], n_per_day)[i]
            delta = np.diff(pix)
            delta = np.insert(delta, 0, pix[0])
            pixArray_B[0].append(delta)
            # Wrap psi into [0, 2pi), digitize, then delta-encode (horn A).
            psi = np.array_split(psi_A[j // 4], n_per_day)[i]
            psi = np.where(psi < 0, 2 * np.pi + psi, psi)
            psi = np.where(psi >= 2 * np.pi, psi - 2 * np.pi, psi)
            psiIndexes = np.digitize(psi, psiBins)
            delta = np.diff(psiIndexes)
            delta = np.insert(delta, 0, psiIndexes[0])
            pixArray_A[1].append(delta)
            # Same for horn B.
            psi = np.array_split(psi_B[j // 4], n_per_day)[i]
            psi = np.where(psi < 0, 2 * np.pi + psi, psi)
            psi = np.where(psi >= 2 * np.pi, psi - 2 * np.pi, psi)
            psiIndexes = np.digitize(psi, psiBins)
            delta = np.diff(psiIndexes)
            delta = np.insert(delta, 0, psiIndexes[0])
            pixArray_B[1].append(delta)
            # Flags are sampled coarser than TOD: resample with a
            # 'previous'-value interpolation onto the TOD grid.
            flags = np.array_split(daflags[:, j // 4], n_per_day)[i]
            t0 = np.arange(len(flags))
            t = np.linspace(t0.min(), t0.max(), len(todi))
            func = interp1d(t0, flags, kind='previous')
            flags = func(t)
            delta = np.diff(flags)
            delta = np.insert(delta, 0, flags[0])
            pixArray_A[2].append(delta)
            pixArray_B[2].append(delta)
    # Train one Huffman coder per horn and one for the TOD stream.
    h_A = huffman.Huffman("", nside)
    h_A.GenerateCode(pixArray_A)
    h_B = huffman.Huffman("", nside)
    h_B.GenerateCode(pixArray_B)
    h_Tod = huffman.Huffman("", nside)
    h_Tod.GenerateCode(todArray)
    # Serialize each tree as [node_max, left_nodes..., right_nodes...].
    huffarray_A = np.append(np.append(np.array(h_A.node_max), h_A.left_nodes),
                            h_A.right_nodes)
    huffarray_B = np.append(np.append(np.array(h_B.node_max), h_B.left_nodes),
                            h_B.right_nodes)
    huffarray_Tod = np.append(
        np.append(np.array(h_Tod.node_max), h_Tod.left_nodes),
        h_Tod.right_nodes)
    #with h5py.File(file_out, 'a') as f:
    #with h5py.File(file_out, 'w') as f:
    f = h5py.File(file_out, 'a')
    # --- Pass 2: recompute the streams and write datasets. ---
    for j in range(len(band_labels)):
        label = band_labels[j]
        if label[:-2] == band.upper():
            TOD = TODs[j]
            gain = gain_guesses[j]
            sigma_0 = TOD.std()
            scalars = np.array([gain, sigma_0, fknee, alpha])
            tod = np.zeros(TOD.size)
            for n in range(len(TOD[0])):
                tod[n::len(TOD[0])] = TOD[:, n]
            todi = np.array_split(tod, n_per_day)[i]
            todInd = np.int32(ntodsigma * todi / (sigma_0 * gain))
            deltatod = np.diff(todInd)
            deltatod = np.insert(deltatod, 0, todInd[0])
            pixA = np.array_split(pix_A[j // 4], n_per_day)[i]
            deltapixA = np.diff(pixA)
            deltapixA = np.insert(deltapixA, 0, pixA[0])
            pixB = np.array_split(pix_B[j // 4], n_per_day)[i]
            deltapixB = np.diff(pixB)
            deltapixB = np.insert(deltapixB, 0, pixB[0])
            psiA = np.array_split(psi_A[j // 4], n_per_day)[i]
            psiA = np.where(psiA < 0, 2 * np.pi + psiA, psiA)
            psiA = np.where(psiA >= 2 * np.pi, psiA - 2 * np.pi, psiA)
            psiIndexesA = np.digitize(psiA, psiBins)
            deltapsiA = np.diff(psiIndexesA)
            deltapsiA = np.insert(deltapsiA, 0, psiIndexesA[0])
            psiB = np.array_split(psi_B[j // 4], n_per_day)[i]
            psiB = np.where(psiB < 0, 2 * np.pi + psiB, psiB)
            psiB = np.where(psiB >= 2 * np.pi, psiB - 2 * np.pi, psiB)
            psiIndexesB = np.digitize(psiB, psiBins)
            deltapsiB = np.diff(psiIndexesB)
            deltapsiB = np.insert(deltapsiB, 0, psiIndexesB[0])
            flags = np.array_split(daflags[:, j // 4], n_per_day)[i]
            t0 = np.arange(len(flags))
            t = np.linspace(t0.min(), t0.max(), len(todi))
            func = interp1d(t0, flags, kind='previous')
            flags = func(t)
            deltaflag = np.diff(flags)
            deltaflag = np.insert(deltaflag, 0, flags[0])
            # 'KA' band is spelled 'Ka' in the output schema.
            f.create_dataset(obsid + '/' + label.replace('KA', 'Ka') + '/flag',
                             data=np.void(bytes(h_A.byteCode(deltaflag))))
            f.create_dataset(obsid + '/' + label.replace('KA', 'Ka') + '/pixA',
                             data=np.void(bytes(h_A.byteCode(deltapixA))))
            f.create_dataset(obsid + '/' + label.replace('KA', 'Ka') + '/pixB',
                             data=np.void(bytes(h_B.byteCode(deltapixB))))
            f.create_dataset(obsid + '/' + label.replace('KA', 'Ka') + '/psiA',
                             data=np.void(bytes(h_A.byteCode(deltapsiA))))
            f.create_dataset(obsid + '/' + label.replace('KA', 'Ka') + '/psiB',
                             data=np.void(bytes(h_B.byteCode(deltapsiB))))
            # TOD itself is stored compressed only when requested.
            if compress:
                f.create_dataset(obsid + '/' + label.replace('KA', 'Ka') + '/tod',
                                 data=np.void(bytes(h_Tod.byteCode(deltatod))))
            else:
                f.create_dataset(obsid + '/' + label.replace('KA', 'Ka') + '/tod',
                                 data=todInd)
            det_list.append(label.replace('KA', 'Ka'))
            f.create_dataset(obsid + '/' + label.replace('KA', 'Ka') + '/scalars',
                             data=scalars)
            f[obsid + '/' + label.replace('KA', 'Ka') +
              '/scalars'].attrs['legend'] = 'gain, sigma0, fknee, alpha'
            # filler
            f.create_dataset(obsid + '/' + label.replace('KA', 'Ka') + '/outP',
                             data=np.array([0, 0]))
    # --- Per-scan common metadata: coder trees, satellite pos/vel, time. ---
    f.create_dataset(obsid + '/common/hufftree_A', data=huffarray_A)
    f.create_dataset(obsid + '/common/huffsymb_A', data=h_A.symbols)
    f.create_dataset(obsid + '/common/hufftree_B', data=huffarray_B)
    f.create_dataset(obsid + '/common/huffsymb_B', data=h_B.symbols)
    if compress:
        f.create_dataset(obsid + '/common/todtree', data=huffarray_Tod)
        f.create_dataset(obsid + '/common/todsymb', data=h_Tod.symbols)
    f.create_dataset(obsid + '/common/satpos',
                     data=np.array_split(pos, n_per_day)[i][0])
    f[obsid + '/common/satpos'].attrs['info'] = '[x, y, z]'
    f[obsid + '/common/satpos'].attrs['coords'] = 'galactic'
    f.create_dataset(obsid + '/common/vsun',
                     data=np.array_split(vel, n_per_day)[i][0])
    f[obsid + '/common/vsun'].attrs['info'] = '[x, y, z]'
    f[obsid + '/common/vsun'].attrs['coords'] = 'galactic'
    # Time axis rebuilt at the interleaved (per-sample) rate; uses TOD/tod
    # from the last label iteration above.
    dt = dt0 / len(TOD[0])
    time_band = np.arange(time.min(), time.min() + dt * len(tod), dt)
    f.create_dataset(obsid + '/common/time',
                     data=[np.array_split(time_band, n_per_day)[i][0], 0, 0])
    f[obsid + '/common/time'].attrs['type'] = 'MJD, null, null'
    f.create_dataset(obsid + '/common/ntod',
                     data=len(np.array_split(tod, n_per_day)[i]))
    # --- File-level metadata: written once per file. ---
    if "/common/fsamp" not in f:
        f.create_dataset('/common/fsamp', data=fsamp * len(TOD[0]))
        f.create_dataset('/common/nside', data=nside)
        f.create_dataset('/common/npsi', data=npsi)
        f.create_dataset('/common/det', data=np.string_(', '.join(det_list)))
        f.create_dataset('/common/datatype', data='WMAP')
        # fillers
        #f.create_dataset('/common/mbang', data=0)
        f.create_dataset('/common/ntodsigma', data=100)
        #f.create_dataset('/common/polang', data=0)
    # Register this chunk in the band's filelist.
    with open(prefix + f'data/filelist_{band}_v5.txt', 'a') as file_list:
        file_list.write(f'{str(obs_ind).zfill(6)}\t"{file_out}"\t1\t0\t0\n')
    return
def make_od(freq, od, args, outbuf):
    """Convert one Planck LFI operational day (OD) of L2 data to HDF5.

    For frequency *freq* and day *od*: reads the per-horn L2 files, writes a
    Commander-style HDF5 file with per-PID pointing (Huffman-compressed pixel,
    psi, flag streams), TOD, and noise/gain scalars, and records each PID in
    *outbuf* for the filelist.

    Fixes vs. the original:
      * ``len(endIndex[0]) is not 0`` (identity comparison on an int — works
        only via CPython small-int caching) -> ``!= 0``;
      * ``len(pixels > 0)`` (length of a boolean array, always == len(pixels))
        -> ``len(pixels) > 0``, matching the evident intent;
      * ``except (OSError)`` -> ``except OSError``;
      * removed the unused local ``cut1``.

    NOTE(review): ``velFile`` is only opened when ``args.velocity_file`` is
    set but is read unconditionally per PID — presumably callers always pass
    a velocity file; TODO confirm. Behavior left unchanged.
    """
    print(freq, od)
    horns = {30: [27, 28], 44: [24, 25, 26], 70: [18, 19, 20, 21, 22, 23]}
    # psi_uv from https://www.aanda.org/articles/aa/full_html/2016/10/aa25818-15/T5.html
    mbangs = {
        27: -22.46,
        28: 22.45,
        24: 0.01,
        25: -113.23,
        26: 113.23,
        18: 22.15,
        19: 22.4,
        20: 22.38,
        21: -22.38,
        22: -22.34,
        23: -22.08
    }
    nsides = {30: 512, 44: 512, 70: 1024}
    nside = nsides[freq]
    npsi = 4096
    outName = os.path.join(
        args.out_dir, 'LFI_0' + str(freq) + '_' + str(od).zfill(6) + '.h5')
    try:
        exFile = h5py.File(
            os.path.join(
                args.planck_dir, 'LFI_0' + str(freq) + '_' +
                str(horns[freq][0]) + '_L2_002_OD' + str(od).zfill(4) + '.h5'),
            'r')
    except OSError:
        # Missing input OD: nothing to convert.
        return
    if (args.restart and os.path.exists(outName)):
        # Restart mode: output already exists; just re-register its PIDs.
        for pid, index in zip(exFile['AHF_info/PID'],
                              range(len(exFile['AHF_info/PID']))):
            startIndex = np.where(
                exFile['Time/OBT'] > exFile['AHF_info/PID_start'][index])
            endIndex = np.where(
                exFile['Time/OBT'] > exFile['AHF_info/PID_end'][index])
            if len(startIndex[0]) > 0:
                pid_start = startIndex[0][0]
            else:  # catch days with no pids
                continue
            if len(endIndex[0]) != 0:
                pid_end = endIndex[0][0]
            else:  # catch final pid per od
                pid_end = len(exFile['Time/OBT'])
            if pid_start == pid_end:
                # catch chunks with no data like od 1007
                continue
            outbuf['id' + str(pid)] = str(pid) + ' "' + outName + '" ' + '1\n'
        return
    outFile = h5py.File(outName, 'w')
    rimo = fits.open(args.rimo)
    if args.velocity_file is not None:
        velFile = fits.open(args.velocity_file)
    # make common group for things we only read once
    # polang, mbeamang, nside, fsamp, npsi
    prefix = '/common'
    rimo_i = np.where(rimo[1].data.field('detector').flatten() == 'LFI' +
                      str(horns[freq][0]) + 'M')
    # sampling frequency
    fsamp = rimo[1].data.field('f_samp')[rimo_i]
    outFile.create_dataset(prefix + '/fsamp', data=fsamp)
    # nside
    outFile.create_dataset(prefix + '/nside', data=[nside])
    # psi angle resolution
    outFile.create_dataset(prefix + '/npsi', data=[npsi])
    detNames = ''
    polangs = []
    mainbeamangs = []
    for horn in horns[freq]:
        for hornType in ['M', 'S']:
            rimo_i = np.where(rimo[1].data.field('detector').flatten() ==
                              'LFI' + str(horn) + hornType)
            detNames += str(horn) + hornType + ', '
            polangs.append(
                math.radians(rimo[1].data.field('psi_pol')[rimo_i]))
            mainbeamangs.append(math.radians(mbangs[horn]))
    # make detector names lookup
    outFile.create_dataset(prefix + '/det',
                           data=np.string_(detNames[0:-2]))
    # make polarization angle
    outFile.create_dataset(prefix + '/polang', data=polangs)
    outFile[prefix + '/polang'].attrs['legend'] = detNames[0:-2]
    # make main beam angle
    outFile.create_dataset(prefix + '/mbang', data=mainbeamangs)
    outFile[prefix + '/mbang'].attrs['legend'] = detNames[0:-2]
    # huffman coded bits
    for pid, index in zip(exFile['AHF_info/PID'],
                          range(len(exFile['AHF_info/PID']))):
        # Locate this PID's sample range in the OBT time axis.
        startIndex = np.where(
            exFile['Time/OBT'] > exFile['AHF_info/PID_start'][index])
        endIndex = np.where(
            exFile['Time/OBT'] > exFile['AHF_info/PID_end'][index])
        if len(startIndex[0]) > 0:
            pid_start = startIndex[0][0]
        else:  # catch days with no pids
            continue
        if len(endIndex[0]) != 0:
            pid_end = endIndex[0][0]
        else:  # catch final pid per od
            pid_end = len(exFile['Time/OBT'])
        if pid_start == pid_end:
            # catch chunks with no data like od 1007
            continue
        obt = exFile['Time/OBT'][pid_start]
        # common fields
        prefix = str(pid).zfill(6) + '/common'
        # time field
        outFile.create_dataset(prefix + '/time',
                               data=[exFile['Time/MJD'][pid_start]])
        outFile[prefix + '/time'].attrs['type'] = 'MJD'
        # velocity field
        velIndex = np.where(
            velFile[1].data.scet > exFile['Time/SCET'][pid_start])[0][0]
        # rotate from ecliptic to galactic
        r = hp.Rotator(coord=['E', 'G'])
        outFile.create_dataset(prefix + '/vsun',
                               data=r([
                                   velFile[1].data.xvel[velIndex],
                                   velFile[1].data.yvel[velIndex],
                                   velFile[1].data.zvel[velIndex]
                               ]))
        outFile[prefix + '/vsun'].attrs['info'] = '[x, y, z]'
        outFile[prefix + '/vsun'].attrs['coords'] = 'galactic'
        # make huffman code table from all detectors' delta streams
        pixArray = [[], [], []]
        for horn in horns[freq]:
            for hornType in ['M', 'S']:
                fileName = h5py.File(
                    os.path.join(
                        args.planck_dir,
                        'LFI_0' + str(freq) + '_' + str(horn) +
                        '_L2_002_OD' + str(od).zfill(4) + '.h5'), 'r')
                rimo_i = np.where(
                    rimo[1].data.field('detector').flatten() == 'LFI' +
                    str(horn) + hornType)
                # get all pointing data
                newTheta, newPhi = r(
                    fileName[str(horn) + hornType + '/THETA'][pid_start:pid_end],
                    fileName[str(horn) + hornType + '/PHI'][pid_start:pid_end])
                pixels = hp.pixelfunc.ang2pix(nside, newTheta, newPhi)
                if len(pixels) > 0:
                    delta = np.diff(pixels)
                    delta = np.insert(delta, 0, pixels[0])
                    pixArray[0].append(delta)
                # get all pol angle data
                psiArray = fileName[
                    str(horn) + hornType +
                    '/PSI'][pid_start:pid_end] + r.angle_ref(
                        fileName[str(horn) + hornType +
                                 '/THETA'][pid_start:pid_end],
                        fileName[str(horn) + hornType +
                                 '/PHI'][pid_start:pid_end]) + math.radians(
                                     rimo[1].data.field('psi_pol')[rimo_i])
                # wrap psi into [0, 2*pi)
                psiArray = np.where(psiArray < 0, 2 * np.pi + psiArray,
                                    psiArray)
                psiArray = np.where(psiArray >= 2 * np.pi,
                                    psiArray - 2 * np.pi, psiArray)
                psiBins = np.linspace(0, 2 * np.pi, num=4096)
                psiIndexes = np.digitize(psiArray, psiBins)
                if (len(psiIndexes) > 0):
                    delta = np.diff(psiIndexes)
                    delta = np.insert(delta, 0, psiIndexes[0])
                    pixArray[1].append(delta)
                # get all flag data
                flagArray = fileName[str(horn) + hornType +
                                     '/FLAG'][pid_start:pid_end]
                if (len(flagArray) > 0):
                    delta = np.diff(flagArray)
                    delta = np.insert(delta, 0, flagArray[0])
                    pixArray[2].append(delta)
        h = huffman.Huffman("", nside)
        h.GenerateCode(pixArray)
        # Serialize the tree as [node_max, left_nodes..., right_nodes...].
        huffarray = np.append(
            np.append(np.array(h.node_max), h.left_nodes), h.right_nodes)
        outFile.create_dataset(prefix + '/hufftree', data=huffarray)
        outFile.create_dataset(prefix + '/huffsymb', data=h.symbols)
        # open per freq npipe gains file if required
        if "npipe" in args.gains_dir:  # this is a shitty test
            gainsFile = fits.open(
                os.path.join(args.gains_dir,
                             'gains_0' + str(freq) + '_iter01.fits'))
        for horn in horns[freq]:
            fileName = h5py.File(
                os.path.join(
                    args.planck_dir, 'LFI_0' + str(freq) + '_' + str(horn) +
                    '_L2_002_OD' + str(od).zfill(4) + '.h5'), 'r')
            for hornType in ['S', 'M']:
                prefix = str(pid).zfill(6) + '/' + str(horn) + hornType
                # get RIMO index
                rimo_i = np.where(
                    rimo[1].data.field('detector').flatten() == 'LFI' +
                    str(horn) + hornType)
                # make tod data
                outFile.create_dataset(
                    prefix + '/tod',
                    data=fileName[str(horn) + hornType +
                                  '/SIGNAL'][pid_start:pid_end],
                    dtype='f4')
                # make flag data
                flagArray = fileName[str(horn) + hornType +
                                     '/FLAG'][pid_start:pid_end]
                if (len(flagArray) > 0):
                    delta = np.diff(flagArray)
                    delta = np.insert(delta, 0, flagArray[0])
                    if (args.no_compress):
                        outFile.create_dataset(prefix + '/flag',
                                               data=flagArray)
                    else:
                        outFile.create_dataset(
                            prefix + '/flag',
                            data=np.void(bytes(h.byteCode(delta))))
                # make pixel number
                newTheta, newPhi = r(
                    fileName[str(horn) + hornType + '/THETA'][pid_start:pid_end],
                    fileName[str(horn) + hornType + '/PHI'][pid_start:pid_end])
                pixels = hp.pixelfunc.ang2pix(nside, newTheta, newPhi)
                if len(pixels) > 0:
                    delta = np.diff(pixels)
                    delta = np.insert(delta, 0, pixels[0])
                    if (args.no_compress):
                        outFile.create_dataset(prefix + '/pix', data=pixels)
                        outFile.create_dataset(prefix + '/theta',
                                               data=newTheta)
                        outFile.create_dataset(prefix + '/phi', data=newPhi)
                    else:
                        outFile.create_dataset(
                            prefix + '/pix',
                            data=np.void(bytes(h.byteCode(delta))))
                # make pol angle
                psiArray = fileName[
                    str(horn) + hornType +
                    '/PSI'][pid_start:pid_end] + r.angle_ref(
                        fileName[str(horn) + hornType +
                                 '/THETA'][pid_start:pid_end],
                        fileName[str(horn) + hornType +
                                 '/PHI'][pid_start:pid_end]) + math.radians(
                                     rimo[1].data.field('psi_pol')[rimo_i])
                psiArray = np.where(psiArray < 0, 2 * np.pi + psiArray,
                                    psiArray)
                psiArray = np.where(psiArray >= 2 * np.pi,
                                    psiArray - 2 * np.pi, psiArray)
                psiBins = np.linspace(0, 2 * np.pi, num=4096)
                psiIndexes = np.digitize(psiArray, psiBins)
                if (len(psiIndexes) > 0):
                    delta = np.diff(psiIndexes)
                    delta = np.insert(delta, 0, psiIndexes[0])
                    if (args.no_compress):
                        outFile.create_dataset(prefix + '/psi',
                                               data=psiArray)
                    else:
                        outFile.create_dataset(
                            prefix + '/psi',
                            data=np.void(bytes(h.byteCode(delta))))
                # scalars
                gain = 1
                # make gain
                if "npipe" in args.gains_dir:  # this is a shitty test
                    baseGain = fits.getdata(
                        os.path.join(
                            args.gains_dir, 'C0' + str(freq) +
                            '-0000-DX11D-20150209_uniform.fits'),
                        extname='LFI' + str(horn) + hornType)[0][0]
                    gainArr = gainsFile['LFI' + str(horn) +
                                        hornType].data.cumulative
                    # OBT tick -> seconds conversion for the gain time axis.
                    obtArr = (1e-9 * pow(2, 16)) * gainsFile[1].data.OBT
                    gainI = np.where(obtArr <= obt)[0][-1]
                    gain = np.array([1.0 / (baseGain * gainArr[gainI])])
                elif args.gains_dir is not None:
                    gainFile = fits.open(
                        os.path.join(
                            args.gains_dir, 'LFI_0' + str(freq) + '_LFI' +
                            str(horn) + hornType + '_001.fits'))
                    gain = 1.0 / gainFile[1].data.GAIN[np.where(
                        gainFile[1].data.PID == pid)]
                    gainFile.close()
                # TODO: fix this
                if (gain.size == 0):
                    gain = [0.06]
                # make white noise
                sigma0 = rimo[1].data.field('net')[rimo_i] * math.sqrt(fsamp)
                # make f_knee
                fknee = rimo[1].data.field('f_knee')[rimo_i]
                # make 1/f noise exponent
                alpha = rimo[1].data.field('alpha')[rimo_i]
                outFile.create_dataset(
                    prefix + '/scalars',
                    data=np.array([gain, sigma0, fknee, alpha]).flatten())
                outFile[prefix +
                        '/scalars'].attrs['legend'] = 'gain, sigma0, fknee, alpha'
                # make psd noise
                # make other
        # write to output file
        outbuf['id' + str(pid)] = str(pid) + ' "' + outName + '" ' + '1\n'
# Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import zlib import re import header_freq_tables import spdy4_codec_impl import huffman import common_utils from .. import BaseProcessor # There are a number of TODOS in the spdy4 # have near indices. Possibly renumber whever something is referenced) request_huffman = huffman.Huffman(header_freq_tables.request_freq_table) response_huffman = huffman.Huffman(header_freq_tables.response_freq_table) class Processor(BaseProcessor): """ This class formats header frames in SPDY4 wire format, and then reads the resulting wire-formatted data and restores the data. Thus, it compresses and decompresses header data. It also keeps track of letter frequencies so that better frequency tables can eventually be constructed for use with the Huffman encoder. """ def __init__(self, options, is_request, params): BaseProcessor.__init__(self, options, is_request, params) description = "request" if not is_request:
delta = np.diff(pixels) delta = np.insert(delta, 0, pixels[0]) pix_array[0].append(delta) # psi psi_bins = np.linspace(0, 2*np.pi, num=npsi) psi_index = np.digitize(det_file['psi'][i_start:i_stop], psi_bins) delta = np.diff(psi_index) delta = np.insert(delta, 0, psi_index[0]) pix_array[1].append(delta) # flag flag = np.ones(nsamp) delta = np.diff(flag) delta = np.insert(delta, 0, flag[0]) pix_array[2].append(delta) h = huffman.Huffman("", nside) h.GenerateCode(pix_array) huffarray = np.append(np.append(np.array(h.node_max), h.left_nodes), h.right_nodes) out_f.create_dataset(prefix + 'hufftree', data=huffarray) out_f.create_dataset(prefix + 'huffsymb', data=h.symbols) for det in det_list: prefix = '/' + str(chunk+1).zfill(6) + '/' + det + '/' # signal out_f.create_dataset(prefix + 'tod', data=det_file['signal'][i_start:i_stop]) # flag flag = np.ones(nsamp) delta = np.diff(flag) delta = np.insert(delta, 0, flag[0]) out_f.create_dataset(prefix + 'flag', data=np.void(bytes(h.byteCode(delta)))) # pixels
def process_client_packet(self, data, trace):
    """Parse (and optionally rewrite) one Quake3/QuakeLive client packet.

    Out-of-band packets (sequence 0xffffffff) are inspected for the
    'connect ' command to learn the protocol version; in-band packets are
    decrypted, their clc ops walked and traced, and — when the aimbot is
    enabled — re-encoded from the passthru buffer. Returns the bytes to
    forward (the original *data* unless rewritten).
    """
    # Little-endian 32-bit packet sequence number at offset 0.
    (sequence, ) = struct.unpack_from('<I', data)
    trace.add("sequence", sequence)
    if sequence == 0xffffffff:
        # Out-of-band (connectionless) packet.
        if data[4:12] == b'connect ':
            trace.add("command", '"connect "')
            # Big-endian 16-bit length of the Huffman-coded userinfo blob.
            (userinfo_length, ) = struct.unpack_from('>H', data, 12)
            huff = huffman.Huffman()
            userinfo_string = huff.decode(buffers.Buffer(data[14:]),
                                          userinfo_length)
            trace.add(
                "userinfo",
                '"' + userinfo_string.decode(
                    'ascii', errors='backslashreplace') + '"')
            # Userinfo is backslash-delimited key\value pairs; pair them up.
            userinfo_list = userinfo_string.strip(b'"').split(b'\\')[1:]
            userinfo = dict(zip(*[userinfo_list[i::2] for i in range(2)]))
            # Protocol 68 = Quake3, 91 = QuakeLive.
            if userinfo[b'protocol'] == b'68':
                self.protocol = 'quake3'
            elif userinfo[b'protocol'] == b'91':
                self.protocol = 'quakelive'
            else:
                if self.config.debug_level >= 1:
                    print("Unknown protocol: {}, proceeding as if 68.".
                          format(userinfo[b'protocol'].decode(
                              'ascii', errors='backslashreplace')))
        else:
            trace.add(
                "command",
                '"' + data[4:].decode('ascii', errors='backslashreplace') +
                '"')
        encoded_data = data
    else:
        if not self.challenge or not self.checksum_feed:
            # Mid-connection attach: lacking the handshake secrets, the
            # payload can't be decrypted — pass packets through unchanged.
            if self.config.debug_level >= 2:
                print(
                    "Looks like we didn't see the connection from the "
                    "beginning. Can't inspect the packets so falling back "
                    "to dumb proxy mode.")
            return data
    (qport, ) = struct.unpack_from('<H', data, 4)
    trace.add("qport", qport)
    buffer = buffers.Buffer(data[6:], trace)
    # Passthru output buffer used only when rewriting (aimbot mode).
    output = buffers.Buffer() if self.config.aimbot else None
    server_id = buffer.read_bits(32, "server_id", passthru=output)
    server_message_sequence = buffer.read_bits(
        32, "server_message_sequence", passthru=output)  # messageAcknowledge
    server_command_sequence = buffer.read_bits(
        32, "server_command_sequence", passthru=output)  # reliable_acknowledge
    last_command = self.server_commands[server_command_sequence &
                                        (defs.MAX_RELIABLE_COMMANDS - 1)]
    # The stream past byte 12 is XOR-obfuscated with this key plus the
    # last reliable server command.
    key = self.challenge ^ server_id ^ server_message_sequence
    buffer.xor(12, key, last_command)
    if self.protocol == 'quakelive':
        # QuakeLive carries one extra byte here.
        buffer.read_bits(8, "unknown", passthru=output)
    stop = False
    while not stop:
        trace.begin("_clc_op")
        clc_op = buffer.read_bits(8, "clc_op", passthru=output)
        if clc_op == 5:  # clc_EOF
            stop = True
        elif clc_op == 4:  # clc_clientCommand
            command_sequence = buffer.read_bits(32, "command_sequence",
                                                passthru=output)
            command = buffer.read_string("command", passthru=output)
            self.client_commands[command_sequence &
                                 (defs.MAX_RELIABLE_COMMANDS - 1)] = command
        elif clc_op in [2, 3]:  # clc_move, clc_moveNoDelta
            # Usercmds are encrypted with a different, per-message key.
            partial_key = (self.checksum_feed ^ server_message_sequence ^
                           hash_string(last_command, 32))
            self.process_usercmds(buffer, output, partial_key, trace)
        else:
            raise Exception("unknown clc_op {}".format(clc_op))
            #break
        trace.end()
    if self.config.aimbot:
        # Re-obfuscate the rewritten payload and splice it after the
        # 6-byte header (sequence + qport).
        output.xor(12, key, last_command)
        encoded_data = data[0:6] + output.data
    else:
        encoded_data = data
    return encoded_data