def map_addr_int(s, d):
    """Map the current source and destination addresses to topology IDs.

    Each address is hashed with CRC32; the two checksums are returned as a
    (source, destination) pair.
    """
    return crc32(s), crc32(d)
def test_crc32(self):
    """CRC32 must match a known vector and hash empty input to zero."""
    payload = ('\x03\x00\n\x00rcpg125B2\n\xf3 hel\t\x00 bw\x05\xb0ld}\n\x80'
               '\x0f\xa0')
    self.assertEqual(crc32(payload), 0xa7c7c5f1)
    # Empty input is defined to produce a zero checksum.
    self.assertEqual(crc32(''), 0x00000000)
def next_hop_hash_set(cur_hop, pre_hop, s, d, hash_str, table, seeds, polys,
                      flow_paths, select_dict=None, test_hop=''):
    """Recursively walk the routing table from `cur_hop` toward `d`,
    recording the path of flows whose CRC32 falls below the select range.

    Args:
        cur_hop: current node being visited.
        pre_hop: node visited immediately before `cur_hop`.
        s, d: flow source and destination nodes (`s` is carried for
            signature compatibility; only `d` drives the lookup).
        hash_str: flow key; the first 13 bytes (header = 4+4+2+2+1 bytes)
            are hashed, any remainder is an appended marker.
        table: per-node next-hop table, `table[node][dest]` -> hop list.
        seeds, polys: per-node CRC8 seed/polynomial used for ECMP selection.
        flow_paths: accumulator passed to `append_path` for selected flows.
        select_dict: optional ordered dict recording selected flows seen at
            `test_hop`. Defaults to a fresh OrderedDict per top-level call
            (the original used a mutable default, silently shared across
            calls — a classic Python bug).
        test_hop: node whose selected flows are mirrored into `select_dict`.
    """
    if select_dict is None:
        select_dict = OrderedDict()
    select_range = 1 << 28

    # Header = 4+4+2+2+1 bytes: only the first 13 bytes feed the hash.
    hash_str0 = hash_str
    hash_str = hash_str0[0:13]
    marker = hash_str0[13:]  # trailing marker bytes, excluded from the hash

    hash_mid = crc32(hash_str)
    if hash_mid < select_range:
        append_path(hash_str, pre_hop, cur_hop, d, flow_paths)
        if cur_hop == test_hop:
            select_dict[hash_str] = 1

    # Destination reached: nothing further to forward.
    if cur_hop == d:
        return

    # ECMP: with a single next hop take it directly, otherwise pick one
    # via a per-node CRC8 of the flow key.
    nhop = table[cur_hop][d][0]
    n = len(table[cur_hop][d])
    if n > 1:
        ni = crc8(seeds[cur_hop], hash_str, polys[cur_hop]) % n
        nhop = table[cur_hop][d][ni]

    next_hop_hash_set(nhop, cur_hop, s, d, hash_str0, table, seeds, polys,
                      flow_paths, select_dict, test_hop=test_hop)
def _read_image(self, image_file):
    """Load and validate the firmware image payload from `image_file`.

    Checks that the file is exactly `start offset + image length` bytes,
    reads the payload into ``self.image`` and verifies its CRC32 against
    the stored checksum.

    Raises:
        FirmwareImageInvalidSize: file size does not match expected layout.
        FirmwareImageInvalidChecksum: payload CRC32 differs from the
            recorded checksum.
    """
    # Seek to the end to learn the total file size.
    image_file.seek(0, 2)
    total_size = image_file.tell()
    expected = self._start_offset + self._image_length
    if expected != total_size:
        raise FirmwareImageInvalidSize()
    # Jump to the payload and read exactly `_image_length` bytes.
    image_file.seek(self._start_offset)
    self.image = image_file.read(self._image_length)
    if self._checksum != crc32(self.image):
        raise FirmwareImageInvalidChecksum()
def routing(file_name, seeds, polys, port_list, link_loads, node_loads, paths):
    """Replay a packet trace from `file_name`, feeding each flow into the
    controller.

    Processes at most 10,000,000 lines and stops after one second of trace
    time has elapsed (relative to the first timestamp seen). Each line's
    fields 2..6 form the flow key, which is hashed with CRC32.
    """
    nodes, host_nodes, tor_nodes, aggr_nodes, spine_nodes = topo.getnodes()
    count = 0
    start_time = 0
    is_update = True
    with open(file_name, 'r') as trace:
        for line in trace:
            count += 1
            if count >= 10000000:
                break
            fields = line.split()
            # Latch the first timestamp as the monitoring window start.
            if is_update:
                start_time = float(fields[0])
                is_update = False
            # Monitor every second: stop once the window is exceeded.
            if float(fields[0]) - start_time > 1:
                is_update = True
                print('Greater than 1.')
                break
            hash_str = '\t'.join(fields[2:7])
            ev = crc32(hash_str)
            size = int(float(fields[1]))
            port = int(fields[7])
            next_hop = host_nodes[port]
            con.host_up(next_hop, ev, hash_str, size, seeds, polys,
                        port_list, link_loads, node_loads, paths)
def upgrade_flag(filepath):
    """Compare `filepath`'s CRC32 and size against the values persisted in
    the ini area, setting the module-level `Flag`.

    Sets ``Flag = 0`` when both match (no upgrade needed); otherwise writes
    the fresh CRC32/size back to the ini area and sets ``Flag = 1``.
    """
    global Flag
    file_crc = crc32(filepath).lower()
    current_size = ReadFileSize(filepath)
    current_size = struct.pack('>i', current_size).hex()
    # Stored CRC32 starts at offset 16 of the ini area.
    stored_crc = ini_data(1, 16, 0, 4)
    stored_crc = str(binascii.b2a_hex(stored_crc))[2:-1]
    # Stored size starts at offset 20.
    stored_size = ini_data(1, 20, 0, 4)
    stored_size = str(binascii.b2a_hex(stored_size))[2:-1]
    print(file_crc)
    print(current_size)
    print(stored_crc)
    print(stored_size)
    if (file_crc == stored_crc) & (current_size == stored_size):
        Flag = 0
    else:
        ini_data(0, 16, file_crc, 4)
        ini_data(0, 20, current_size, 4)
        Flag = 1
def get_descriptor(self):
    """Generate the CamiDataTable entry for this function.

    Returns:
        bytes: the CamiDataTable entry (a _CAMI_WIN_FUNCTION structure).

    Raises:
        FilePointerException: If this method is called before generating
            the binary form of its code (with serialize).
    """
    # Assemble the structure fields, then pack them in one shot; the three
    # trailing zeros are reserved/padding words in the layout.
    fields = (
        crc32.crc32(self.name),
        self.patterns.get_entry_count(),
        self.patterns.get_file_pointer(),
        self.arguments.get_count(),
        self.arguments.get_file_pointer(),
        0,
        0,
        0,
    )
    return struct.pack(self.descriptor_layout, *fields)
def upgrade_function(data, size, BUFSIZ):
    """Handle one firmware-upgrade protocol message and build the reply.

    `data` is the hex-encoded request, `size` its length, `BUFSIZ` the path
    of the upgrade package file. Returns the hex reply string, or '00' when
    the request fails its CRC check or is of an unknown type.
    """
    msg = '00'
    global Flag  # Flag is set by upgrade_flag() as a side effect
    if (data[2] == 'a') & (data[3] == 'b'):  # 0xAB: request to upgrade
        Send_num1 = str(data)[4:6]  # reserved
        Send_num1 = int(Send_num1, 16)  # reserved
        # Trailing bytes carry the request's checksum; recompute over the
        # header and compare.
        crc11 = str(data)[6:int(size)]
        data1 = str(data)[0:6]
        data1 = crc2hex(data1)
        if data1 == crc11:  # send down the upgrade package size
            upgrade_flag(BUFSIZ)
            if Flag == 1:
                msg1 = b'\xAA\xAB'
            else:
                msg1 = b'\xAA\xAD'
            print(Flag)
            msg1 = str(binascii.b2a_hex(msg1))[2:-1]
            msg2 = ReadFileSize(BUFSIZ)
            #print(msg2)
            msg2 = struct.pack('>i', msg2).hex()  # data size
            msg3 = crc32(BUFSIZ)
            msg2 = '%s%s%s' % (msg1, msg2, msg3)
            msg4 = crc2hex(msg2)  # CRC32 checksum over the reply
            msg = '%s%s' % (msg2, msg4)
            #else:print("服务端返回的内容:%s" % (data_i))
            #else:print("服务端返回的内容:%s" % (data_i))
    elif (data[2] == 'a') & (data[3] == 'c'):  # 0xAC: request a packet number
        data_i = str(data)[4:6]
        data_i = int(data_i, 16)
        crc11 = str(data)[6:int(size)]
        data1 = str(data)[0:6]
        data1 = crc2hex(data1)
        print("服务端返回的内容:%s" % (data1))
        print("服务端返回的内容:%s" % (crc11))
        if data1 == crc11:  # send down the requested packet
            print("服务端返回的内容:%s" % (data_i))
            # NOTE(review): `Send_num` is not assigned in this function
            # (only `Send_num1` is) — presumably a module-level global;
            # confirm against the rest of the file.
            msg = ReadFile(data_i - 1, Send_num, BUFSIZ)
        else:
            msg = '00'
    return msg
def picture_function(data, size, BUFSIZ):
    """Handle one picture-transfer protocol message and build the reply.

    Three message types are dispatched on bytes 2-3 of the hex request:
      * 0xAB - start a transfer (record size/name and a 0xF6 in-progress flag)
      * 0xAC - receive one data packet and append it to the .jpg file
      * 0xAD - final verification (CRC + size), mark transfer complete (0xFF)
    Returns the hex reply string, or '00' on CRC failure / unknown type.
    State is persisted through ini_data(); mode 0 writes, mode 1 reads.
    """
    msg = '00'
    if (data[2] == 'a') & (data[3] == 'b'):  # 0xAB: request to start transfer
        pic_size = str(data)[4:12]
        pic_size1 = int(pic_size, 16)
        crc11 = str(data)[12:int(size)]
        data1 = str(data)[0:12]
        data1 = crc2hex(data1)
        if data1 == crc11:  # acknowledge with the package size
            msg1 = b'\xBB\xAB'
            ini_flag = b'\xF6\x00'  # 0xF6 = transfer in progress
            ini_flag = str(binascii.b2a_hex(ini_flag))[2:-1]
            msg1 = str(binascii.b2a_hex(msg1))[2:-1]
            msg2 = BUFSIZ
            msg2 = struct.pack('>i', msg2).hex()  # data size
            msg2 = '%s%s' % (msg1, msg2)
            msg3 = crc2hex(msg2)  # CRC32 checksum over the reply
            msg = '%s%s' % (msg2, msg3)
            name = time.strftime("%Y%m%d%H%M%S")  # timestamped picture name
            ini_data(0, 0, ini_flag, 0)  # store in-progress flag
            ini_data(0, 2, pic_size, 0)  # store picture size
            ini_data(0, 6, name, 0)  # store picture name
            print("1")
            print(pic_size1)
    elif (data[2] == 'a') & (data[3] == 'c'):  # 0xAC: one picture data packet
        print("22")
        ini_flag1 = ini_data(1, 0, 0, 1)
        ini_flag1 = str(binascii.b2a_hex(ini_flag1))[2:-1]
        if ini_flag1 == 'f6':  # only accept packets while a transfer is open
            data_pack = str(data)[4:6]
            #data_pack1 = int(data_pack,16)
            # Last 8 hex chars are the packet CRC.
            crc11 = str(data)[(int(size) - 8):int(size)]
            data1 = str(data)[0:(int(size) - 8)]
            data1 = crc2hex(data1)
            print("1")
            print(data1)
            print(crc11)
            if data1 == crc11:
                pic_pack = ini_data(1, 1, 0, 1)  # expected packet number
                pic_pack = str(binascii.b2a_hex(pic_pack))[2:-1]
                print(pic_pack)
                print(data_pack)
                if pic_pack == data_pack:  # in-order packet: accept it
                    print("2")
                    msg1 = b'\xBB\xAC'
                    msg1 = str(binascii.b2a_hex(msg1))[2:-1]
                    msg2 = data_pack
                    #msg2 = struct.pack('B',msg2).hex()
                    msg2 = '%s%s' % (msg1, msg2)
                    msg3 = crc2hex(msg2)  # CRC32 checksum over the reply
                    msg = '%s%s' % (msg2, msg3)
                    # Advance the expected packet counter and persist it.
                    pic_pack1 = int(pic_pack, 16) + 1
                    print(pic_pack1 - 1)
                    pic_pack1 = "%0.2x" % (pic_pack1)
                    #pic_pack1 = str.encode(hex(pic_pack1))
                    #pic_pack1=pic_pack1.HexstrToBytes(1,'little')
                    #pic_pack1 = str(pic_pack1)[4:-1]
                    ini_data(0, 1, pic_pack1, 0)  # store next packet number
                    # Payload sits between the header and the trailing CRC.
                    pic_data = str(data)[14:(int(size) - 8)]
                    pic_data = binascii.a2b_hex(pic_data)
                    name = ini_data(1, 6, 0, 7)
                    name = str(binascii.b2a_hex(name))[2:-1]
                    name_path = 'pic/' + name + '.jpg'
                    # Append this chunk; the .jpg accumulates across packets.
                    with open(name_path, "ab") as f:
                        f.write(pic_data)
                    print("3")
    elif (data[2] == 'a') & (data[3] == 'd'):  # 0xAD: final verification
        pic_crc_client = str(data)[4:12]
        print("pic_crc_client:%s" % (pic_crc_client))
        crc11 = str(data)[12:int(size)]
        data1 = str(data)[0:12]
        data1 = crc2hex(data1)
        if data1 == crc11:
            name = ini_data(1, 6, 0, 7)
            name = str(binascii.b2a_hex(name))[2:-1]
            name_path = 'pic/' + name + '.jpg'
            #name_path = 'pic/20201017114938.jpg'
            pic_crc = crc32(name_path)
            pic_crc = pic_crc.lower()  # normalize uppercase hex to lowercase
            pic_size = ini_data(1, 2, 0, 4)
            pic_size = str(binascii.b2a_hex(pic_size))[2:-1]
            pic_size = int(pic_size, 16)
            pic_size_read = os.path.getsize(name_path)  # actual size on disk
            print("pic_crc_client:%s" % (pic_crc))
            print("pic_crc_client:%s" % (pic_size))
            print("pic_crc_client:%s" % (pic_size_read))
            if (pic_crc == pic_crc_client) and (pic_size == pic_size_read):
                msg1 = b'\xBB\xAD\xAA'
                msg1 = str(binascii.b2a_hex(msg1))[2:-1]
                msg2 = crc2hex(msg1)  # CRC32 checksum over the reply
                msg = '%s%s' % (msg1, msg2)
                ini_flag = b'\xFF\x00'  # 0xFF = transfer complete
                ini_flag = str(binascii.b2a_hex(ini_flag))[2:-1]
                ini_data(0, 0, ini_flag, 0)  # mark picture storage complete
                print("3")
                #lena = mpimg.imread(name_path)
                #lena.shape #(512, 512, 3)
                #plt.imshow(lena)
                #plt.axis('off')
                #plt.show()
    return msg
def decompress(data): """ Decompress data """ # set init dict init_dict = list(INIT_DICT) init_dict += ' ' * (MAX_DICT_SIZE - INIT_DICT_SIZE) if len(data) < 16: raise Exception('Data must be at least 16 bytes long') write_offset = INIT_DICT_SIZE output_buffer = '' # make stream in_stream = StringIO(data) # read compressed RTF header comp_size = struct.unpack('<I', in_stream.read(4))[0] raw_size = struct.unpack('<I', in_stream.read(4))[0] comp_type = in_stream.read(4) crc_value = struct.unpack('<I', in_stream.read(4))[0] # get only data contents = StringIO(in_stream.read(comp_size - 12)) if comp_type == COMPRESSED: # check CRC if crc_value != crc32(contents.read()): raise Exception('CRC is invalid! The file is corrupt!') contents.seek(0) end = False while not end: val = contents.read(1) if not val: break control = '{0:08b}'.format(ord(val)) # check bits from LSB to MSB for i in xrange(1, 9): if control[-i] == '1': # token is reference (16 bit) val = contents.read(2) if not val: break token = struct.unpack('>H', val)[0] # big-endian # extract [12 bit offset][4 bit length] offset = (token >> 4) & 0b111111111111 length = token & 0b1111 # end indicator if write_offset == offset: end = True break actual_length = length + 2 for step in xrange(actual_length): read_offset = (offset + step) % MAX_DICT_SIZE char = init_dict[read_offset] output_buffer += char init_dict[write_offset] = char write_offset = (write_offset + 1) % MAX_DICT_SIZE else: # token is literal (8 bit) val = contents.read(1) if not val: break output_buffer += val init_dict[write_offset] = val write_offset = (write_offset + 1) % MAX_DICT_SIZE elif comp_type == UNCOMPRESSED: return contents.read(raw_size) else: raise Exception('Unknown type of RTF compression!') return output_buffer
def compress(data, compressed=True):
    """Compress `data` into the LZFu (compressed RTF) container format.

    When `compressed` is False the payload is stored verbatim with an
    UNCOMPRESSED type tag. Python 2 code (byte strings). Returns the
    16-byte header (comp size, raw size, type, CRC32) + payload.
    """
    output_buffer = ''
    # Seed the sliding dictionary with the standard RTF init dictionary,
    # padded with spaces to the full dictionary size.
    init_dict = list(INIT_DICT + ' ' * (MAX_DICT_SIZE - INIT_DICT_SIZE))
    write_offset = INIT_DICT_SIZE
    # compressed
    if compressed:
        comp_type = COMPRESSED
        # make stream
        in_stream = StringIO(data)
        # init params: one control byte flags up to 8 following tokens
        control_byte = 0
        control_bit = 1
        token_offset = 0
        token_buffer = ''
        match_len = 0  # NOTE(review): assigned but never used
        longest_match = 0
        while True:
            # find longest dictionary match at the current input position
            dict_offset, longest_match, write_offset = \
                _find_longest_match(init_dict, in_stream, write_offset)
            char = in_stream.read(longest_match if longest_match > 1 else 1)
            # EOF input stream: emit the end-of-stream reference, whose
            # offset equals the current write position.
            if not char:
                # update params
                control_byte |= 1 << control_bit - 1
                control_bit += 1
                token_offset += 2
                # add dict reference
                dict_ref = (write_offset & 0xfff) << 4
                token_buffer += struct.pack('>H', dict_ref)
                # add to output
                output_buffer += struct.pack('B', control_byte)
                output_buffer += token_buffer[:token_offset]
                break
            else:
                if longest_match > 1:
                    # Matched >=2 chars: emit a [12-bit offset][4-bit
                    # length-2] reference token.
                    control_byte |= 1 << control_bit - 1
                    control_bit += 1
                    token_offset += 2
                    # add dict reference
                    dict_ref = (dict_offset & 0xfff) << 4 | (
                        longest_match - 2) & 0xf
                    token_buffer += struct.pack('>H', dict_ref)
                else:
                    # character is not found in dictionary: add it
                    if longest_match == 0:
                        init_dict[write_offset] = char
                        write_offset = (write_offset + 1) % MAX_DICT_SIZE
                    # update params (literal flag bit is 0)
                    control_byte |= 0 << control_bit - 1
                    control_bit += 1
                    token_offset += 1
                    # add literal
                    token_buffer += char
                longest_match = 0
                if control_bit > 8:
                    # Control byte full: flush it plus its 8 tokens.
                    output_buffer += struct.pack('B', control_byte)
                    output_buffer += token_buffer[:token_offset]
                    # reset params
                    control_byte = 0
                    control_bit = 1
                    token_offset = 0
                    token_buffer = ''
    else:
        # if uncompressed - copy data to output
        comp_type = UNCOMPRESSED
        output_buffer = data
    # write compressed RTF header; comp_size counts the 12 header bytes
    # that follow it, and the CRC covers the payload only.
    comp_size = struct.pack('<I', len(output_buffer) + 12)
    raw_size = struct.pack('<I', len(data))
    crc_value = struct.pack('<I', crc32(output_buffer))
    return comp_size + raw_size + comp_type + crc_value + output_buffer
def map_port(sp, dp):
    """Map source/destination ports onto 16-bit values.

    Each port is hashed with CRC32 and the low 16 bits of the checksum are
    kept; returns the (source, destination) pair.
    """
    low16 = 0x0000ffff
    return crc32(sp) & low16, crc32(dp) & low16
def compress(data, compressed=True):
    """Compress `data` into the LZFu (compressed RTF) container format.

    When `compressed` is False the payload is stored verbatim with an
    UNCOMPRESSED type tag. Python 2 code (byte strings). Returns the
    16-byte header (comp size, raw size, type, CRC32) + payload.
    """
    output_buffer = ''
    # Seed the sliding dictionary with the standard RTF init dictionary,
    # padded with spaces to the full dictionary size.
    init_dict = list(INIT_DICT + ' ' * (MAX_DICT_SIZE - INIT_DICT_SIZE))
    write_offset = INIT_DICT_SIZE
    # compressed
    if compressed:
        comp_type = COMPRESSED
        # make stream
        in_stream = StringIO(data)
        # init params: one control byte flags up to 8 following tokens
        control_byte = 0
        control_bit = 1
        token_offset = 0
        token_buffer = ''
        match_len = 0  # NOTE(review): assigned but never used
        longest_match = 0
        while True:
            # find longest dictionary match at the current input position
            dict_offset, longest_match, write_offset = \
                _find_longest_match(init_dict, in_stream, write_offset)
            char = in_stream.read(longest_match if longest_match > 1 else 1)
            # EOF input stream: emit the end-of-stream reference, whose
            # offset equals the current write position.
            if not char:
                # update params
                control_byte |= 1 << control_bit - 1
                control_bit += 1
                token_offset += 2
                # add dict reference
                dict_ref = (write_offset & 0xfff) << 4
                token_buffer += struct.pack('>H', dict_ref)
                # add to output
                output_buffer += struct.pack('B', control_byte)
                output_buffer += token_buffer[:token_offset]
                break
            else:
                if longest_match > 1:
                    # Matched >=2 chars: emit a [12-bit offset][4-bit
                    # length-2] reference token.
                    control_byte |= 1 << control_bit - 1
                    control_bit += 1
                    token_offset += 2
                    # add dict reference
                    dict_ref = (dict_offset & 0xfff) << 4 | (longest_match - 2) & 0xf
                    token_buffer += struct.pack('>H', dict_ref)
                else:
                    # character is not found in dictionary: add it
                    if longest_match == 0:
                        init_dict[write_offset] = char
                        write_offset = (write_offset + 1) % MAX_DICT_SIZE
                    # update params (literal flag bit is 0)
                    control_byte |= 0 << control_bit - 1
                    control_bit += 1
                    token_offset += 1
                    # add literal
                    token_buffer += char
                longest_match = 0
                if control_bit > 8:
                    # Control byte full: flush it plus its 8 tokens.
                    output_buffer += struct.pack('B', control_byte)
                    output_buffer += token_buffer[:token_offset]
                    # reset params
                    control_byte = 0
                    control_bit = 1
                    token_offset = 0
                    token_buffer = ''
    else:
        # if uncompressed - copy data to output
        comp_type = UNCOMPRESSED
        output_buffer = data
    # write compressed RTF header; comp_size counts the 12 header bytes
    # that follow it, and the CRC covers the payload only.
    comp_size = struct.pack('<I', len(output_buffer) + 12)
    raw_size = struct.pack('<I', len(data))
    crc_value = struct.pack('<I', crc32(output_buffer))
    return comp_size + raw_size + comp_type + crc_value + output_buffer
def crc32_int(buf):
    """Return the CRC32 checksum of `buf` as an integer."""
    checksum = crc32(buf)
    return checksum
def get_descriptor(self):
    """Pack this entry's descriptor structure.

    The layout holds the CRC32 of the name, the handler, the skip-on-boot
    flag, and two reserved zero words.
    """
    name_hash = crc32.crc32(self.name)
    return struct.pack(
        self.descriptor_layout,
        name_hash,
        self.handler,
        self.skip_on_boot,
        0,
        0,
    )
os.chdir(WORK_DIR) compress_given_files(WORK_DIR) try: d = os.listdir(WORK_DIR) except OSError: print "error: cannot open path \"%s\"" % WORK_DIR sys.exit(1) table = init_file_table(WORK_DIR, d) if table == None: sys.exit(2) buffer = pack_files(table) crc = crc32.crc32(buffer) backend_version = sys.argv[1] h = open(FTFS_IMG, "wb") h.write(MAGIC) h.write(struct.pack("I", crc & 0xffffffffL)) h.write(struct.pack("I", 0 & 0xffffffffL)) h.write(struct.pack("I", int(backend_version, 0) & 0xffffffffL)) h.write(VERSION) h.write(buffer) h.close() st = os.stat(FTFS_IMG) if VERBOSE == 1: print "FTFS image created \"%s\"" % FTFS_IMG
def get_checksum(self, arch, addr, size):
    """Return the CRC32 of `size` bytes read from `addr` via `arch`.

    The checksum is seeded with 0.
    """
    chunk = arch.read(addr, size)
    return crc32(0, chunk)