def _get_udp_scrape_data(self, host, port, connection_id):
    # build scrape request payload
    transaction_id = int(randrange(0, 255))
    buff = struct.pack('!q', connection_id)
    buff += struct.pack('!i', ACTION_SCRAPE)
    buff += struct.pack('!i', transaction_id)
    buff += struct.pack('!20s', self.torrent_hash)

    # send payload and get response
    self._socket.sendto(buff, (host, port))
    try:
        response = self._socket.recv(2048)
    except socket.timeout:
        return None
    if len(response) < 20:
        # TODO: issue warning here
        print "wrong response length"
        return None

    # extract response information
    resp_action, resp_transaction_id = struct.unpack_from('!ii', response, 0)
    if transaction_id != resp_transaction_id:
        # TODO: issue warning instead
        raise ValueError('Transaction IDs do not match (req=%d resp=%d)'
                         % (transaction_id, resp_transaction_id))
    if resp_action == ACTION_ERROR:
        error = struct.unpack_from('!s', response, 8)[0]
        # TODO: issue warning instead
        raise RuntimeError('Unable to get scrape data: %s' % error)
    elif resp_action == ACTION_SCRAPE:
        seeds, complete, leeches = struct.unpack_from('!iii', response, 8)
        return seeds, complete, leeches
def __init__(self, filename, perm):
    self.f = file(filename, perm)
    header = self.f.read(78)
    self.ident = header[0x3C:0x3C + 8]
    self.num_sections, = struct.unpack_from('>H', header, 76)
    sections = self.f.read(self.num_sections * 8)
    self.sections = struct.unpack_from('>%dL' % (self.num_sections * 2),
                                       sections, 0)[::2] + (0xfffffff, )
def get_images(filename):
    bin_file = open(filename, 'rb')
    buf = bin_file.read()  # the whole file is read into memory
    bin_file.close()  # release the OS file handle
    index = 0
    magic, num_images, num_rows, num_colums = struct.unpack_from(
        big_endian + four_bytes, buf, index)
    index += struct.calcsize(big_endian + four_bytes)  # TODO: why not multiply by 4?
    print num_images
    images = []  # temp images as tuple
    for x in range(num_images):
        im = struct.unpack_from(big_endian + picture_bytes, buf, index)
        index += struct.calcsize(big_endian + picture_bytes)
        im = list(im)
        # quantize each pixel into one of five intensity buckets (an elif
        # chain is required here: with separate ifs, the final else would
        # clobber the earlier assignments)
        for i in range(len(im)):
            if im[i] >= 1 and im[i] < 64:
                im[i] = 1
            elif im[i] >= 64 and im[i] < 128:
                im[i] = 2
            elif im[i] >= 128 and im[i] < 192:
                im[i] = 3
            elif im[i] >= 192 and im[i] < 256:
                im[i] = 4
            else:
                im[i] = 0
        images.append(im)
    a = np.array(images)
    return a
def _get_udp_connection(self, host, port):
    # build connection request payload
    transaction_id = int(randrange(0, 255))
    buff = struct.pack('!q', CONNECTION_ID)
    buff += struct.pack('!i', ACTION_CONNECT)
    buff += struct.pack('!i', transaction_id)

    # send payload and get response
    self._socket.sendto(buff, (host, port))
    try:
        response = self._socket.recv(2048)
    except socket.timeout:
        # TODO: issue warning here
        print "tracker down: %s" % host
        return None
    if len(response) < 16:
        # TODO: issue warning here
        print "wrong response length"
        return None

    # extract response information
    resp_action, resp_transaction_id = struct.unpack_from('!ii', response, 0)
    if transaction_id != resp_transaction_id:
        # TODO: issue warning instead
        raise ValueError('Transaction IDs do not match (req=%d resp=%d)'
                         % (transaction_id, resp_transaction_id))
    if resp_action == ACTION_ERROR:
        error = struct.unpack_from('!s', response, 8)[0]
        # TODO: issue warning instead
        raise RuntimeError('Unable to set up a connection: %s' % error)
    elif resp_action == ACTION_CONNECT:
        connection_id = struct.unpack_from('!q', response, 8)[0]
        return connection_id
    return None
def unpack(self, info):
    self._tag, = unpack_from('>c', info)
    skip = 1

    if self._tag in b'BCDFIJSZs':
        self._value, = unpack_from('>H', info[skip:])
        skip += 2
    elif self._tag == b'e':
        self._value = unpack_from('>HH', info[skip:])
        skip += 4
    elif self._tag == b'c':
        self._value, = unpack_from('>H', info[skip:])
        skip += 2
    elif self._tag == b'@':
        annotation = RuntimeVisibleAnnotation(self._cf)
        skip += annotation.unpack(info[skip:])
        self._value = annotation
    elif self._tag == b'[':
        num_values, = unpack_from('>H', info[skip:])
        skip += 2
        values = []
        for n in range(num_values):
            value = ElementValue(self._cf)
            skip += value.unpack(info[skip:])
            values.append(value)
        self._value = values
    else:
        raise ValueError("Unknown ElementValue tag {}".format(self._tag))

    return skip
def _parse_directional_spectrum(self, offset, rules):
    """
    Convert the binary data into particle data for the Directional Spectrum Data Type
    """
    # Unpack the unpacking rules
    (num_freq_name, num_dir_name, good_name, dat_name),\
        (num_freq_fmt, num_dir_fmt, good_fmt, dat_fmt) = zip(*rules)

    # First unpack the array lengths and single length values
    (num_freq_data, num_dir_data, dspec_good_data) = struct.unpack_from(
        '<%s%s%s' % (num_freq_fmt, num_dir_fmt, good_fmt), self.raw_data, offset)

    # Then unpack the array using the retrieved lengths values
    next_offset = offset + struct.calcsize(num_freq_fmt) + struct.calcsize(num_dir_fmt) + \
        struct.calcsize(good_fmt)
    dspec_dat_list_data = struct.unpack_from(
        '<%s%s' % (num_freq_data * num_dir_data, dat_fmt), self.raw_data, next_offset)

    # convert to numpy array and reshape the data per IDD spec
    transformed_dat_data = numpy.array(dspec_dat_list_data).reshape(
        (num_freq_data, num_dir_data)).tolist()

    # Add to the collected parameter data
    self.final_result.extend(
        ({DataParticleKey.VALUE_ID: num_freq_name, DataParticleKey.VALUE: num_freq_data},
         {DataParticleKey.VALUE_ID: num_dir_name, DataParticleKey.VALUE: num_dir_data},
         {DataParticleKey.VALUE_ID: good_name, DataParticleKey.VALUE: dspec_good_data},
         {DataParticleKey.VALUE_ID: dat_name, DataParticleKey.VALUE: transformed_dat_data}))
def _parse_hpr_time_series(self, offset, rules):
    """
    Convert the binary data into particle data for the Heading, Pitch, Roll
    Time Series Data Type
    """
    # Unpack the unpacking rules
    (hpr_num_name, beam_angle_name, spare_name, hpr_time_names),\
        (hpr_num_fmt, beam_angle_fmt, spare_fmt, hpr_time_fmt) = zip(*rules)

    # First unpack the array length and single length value, no need to unpack spare
    (hpr_num_data, beam_angle_data) = struct.unpack_from(
        '<%s%s' % (hpr_num_fmt, beam_angle_fmt), self.raw_data, offset)

    # Then unpack the array using the retrieved lengths value
    next_offset = offset + struct.calcsize(hpr_num_fmt) + struct.calcsize(beam_angle_fmt) + \
        struct.calcsize(spare_fmt)
    hpr_time_list_data = struct.unpack_from(
        '<%s%s' % (hpr_num_data * HPR_TIME_SERIES_ARRAY_SIZE, hpr_time_fmt),
        self.raw_data, next_offset)

    # convert to numpy array and reshape the data to a 2d array per IDD spec
    transformed_hpr_time_data = numpy.array(hpr_time_list_data).reshape(
        (hpr_num_data, HPR_TIME_SERIES_ARRAY_SIZE)).transpose().tolist()

    # Add to the collected parameter data
    self.final_result.extend(
        ({DataParticleKey.VALUE_ID: hpr_num_name, DataParticleKey.VALUE: hpr_num_data},
         {DataParticleKey.VALUE_ID: beam_angle_name, DataParticleKey.VALUE: beam_angle_data},
         {DataParticleKey.VALUE_ID: hpr_time_names[HEADING_TIME_SERIES_IDX],
          DataParticleKey.VALUE: transformed_hpr_time_data[HEADING_TIME_SERIES_IDX]},
         {DataParticleKey.VALUE_ID: hpr_time_names[PITCH_TIME_SERIES_IDX],
          DataParticleKey.VALUE: transformed_hpr_time_data[PITCH_TIME_SERIES_IDX]},
         {DataParticleKey.VALUE_ID: hpr_time_names[ROLL_TIME_SERIES_IDX],
          DataParticleKey.VALUE: transformed_hpr_time_data[ROLL_TIME_SERIES_IDX]}))
def _parse(self, data):
    assert 104 <= len(data)
    (self.magic, self.checksum, self.signature) = struct.unpack_from("8sI20s", data, 0)
    (self.string_ids_size, self.string_ids_off) = struct.unpack_from("II", data, 56)
    (self.type_ids_size, self.type_ids_off) = struct.unpack_from("II", data, 64)
    (self.class_defs_size, self.class_defs_off) = struct.unpack_from("II", data, 96)
    (self.method_ids_size, self.method_ids_off) = struct.unpack_from("II", data, 88)
def Run(self, args):
    """Run."""
    # This action might crash the box so we need to flush the transaction log.
    self.SyncTransactionLog()

    # Do any initialization we need to do.
    logging.debug("Querying device %s", args.path)

    fd = win32file.CreateFile(
        args.path,
        win32file.GENERIC_READ | win32file.GENERIC_WRITE,
        win32file.FILE_SHARE_READ | win32file.FILE_SHARE_WRITE,
        None,
        win32file.OPEN_EXISTING,
        win32file.FILE_ATTRIBUTE_NORMAL,
        None)

    data = win32file.DeviceIoControl(fd, INFO_IOCTRL, "", 1024, None)
    fmt_string = "QQl"
    cr3, _, number_of_runs = struct.unpack_from(fmt_string, data)

    result = rdfvalue.MemoryInformation(
        cr3=cr3,
        device=rdfvalue.PathSpec(
            path=args.path,
            pathtype=rdfvalue.PathSpec.PathType.MEMORY))

    offset = struct.calcsize(fmt_string)
    for x in range(number_of_runs):
        start, length = struct.unpack_from("QQ", data, x * 16 + offset)
        result.runs.Append(offset=start, length=length)

    self.SendReply(result)
def get_spi_regions(fd):
    pos = fd.find(SPI_FLASH_DESCRIPTOR_SIGNATURE)
    if not (pos == 0x10):
        return None

    flmap0 = struct.unpack_from('=I', fd[0x14:0x18])[0]
    # Flash Region Base Address (bits [23:16])
    frba = ((flmap0 & 0x00FF0000) >> 12)
    # Number of Regions (bits [26:24])
    nr = (((flmap0 & 0xFF000000) >> 24) & 0x7)

    flregs = [None] * spi.SPI_REGION_NUMBER_IN_FD
    for r in range(spi.SPI_REGION_NUMBER_IN_FD):
        flreg_off = frba + r * 4
        flreg = struct.unpack_from('=I', fd[flreg_off:flreg_off + 0x4])[0]
        (base, limit) = spi.get_SPI_region(flreg)
        notused = (base > limit)
        flregs[r] = (r, spi.SPI_REGION_NAMES[r], flreg, base, limit, notused)

    fd_size = flregs[spi.FLASH_DESCRIPTOR][4] - flregs[spi.FLASH_DESCRIPTOR][3] + 1
    fd_notused = flregs[spi.FLASH_DESCRIPTOR][5]
    if fd_notused or (fd_size != SPI_FLASH_DESCRIPTOR_SIZE):
        return None

    return flregs
def script_GetOp(bytes):
    i = 0
    while i < len(bytes):
        vch = None
        opcode = ord(bytes[i])
        i += 1
        if opcode >= opcodes.OP_SINGLEBYTE_END:
            opcode <<= 8
            opcode |= ord(bytes[i])
            i += 1

        if opcode <= opcodes.OP_PUSHDATA4:
            nSize = opcode
            if opcode == opcodes.OP_PUSHDATA1:
                nSize = ord(bytes[i])
                i += 1
            elif opcode == opcodes.OP_PUSHDATA2:
                (nSize,) = struct.unpack_from('<H', bytes, i)
                i += 2
            elif opcode == opcodes.OP_PUSHDATA4:
                (nSize,) = struct.unpack_from('<I', bytes, i)
                i += 4
            vch = bytes[i:i + nSize]
            i += nSize

        yield (opcode, vch, i)
def __read_block15(self, size):
    low = self.blksize * ((self.blksize / 4) ** 2) + self.blksize * 12
    blk = []
    ndx = self.blksize / 4
    if self._cur_pos >= low:
        if self.inode.i_block[14] == 0:
            return blk
        cpos = self._cur_pos - low
        n3 = cpos / self.blksize
        r = cpos % self.blksize
        n2 = n3 / ndx
        n1 = n2 / ndx
        n0 = n1 / ndx
        fmt = "<%dI" % ndx
        b15 = struct.unpack_from(fmt, buffer(bytearray(
            self.filesys.read_block(self.inode.i_block[14]))))
        for x in b15[n0:]:
            assert (x > 0)
            c15 = struct.unpack_from(fmt, buffer(bytearray(
                self.filesys.read_block(x))))
            for y in c15[n1:]:
                d15 = struct.unpack_from(fmt, buffer(bytearray(
                    self.filesys.read_block(y))))
                bk = self._do_read(d15[n2:], r, size)
                r = 0
                n2 = 0
                blk += bk
                size -= len(bk)
                if size == 0:
                    break
            n1 = 0
            if size == 0:
                break
    return blk
def __init__(self, idx, buf):
    self.inode_no = idx
    sb = buffer(bytearray(buf))
    sz = 0
    fmt = "<2H5I2H3I"
    (self.i_mode, self.i_uid, self.i_size, self.i_atime, self.i_ctime,
     self.i_mtime, self.i_dtime, self.i_gid, self.i_links_count,
     self.i_blocks, self.i_flags, self.i_osd1) = struct.unpack_from(fmt, sb, sz)
    sz += struct.calcsize(fmt)
    fmt = "<15I"
    self.i_block = struct.unpack_from(fmt, sb, sz)
    sz += struct.calcsize(fmt)
    fmt = "<4I12s"
    (self.i_gneration, self.i_file_acl, self.i_dir_acl, self.i_faddr,
     self.i_osd2) = struct.unpack_from(fmt, sb, sz)
def load_image_data_set(self, img_data_dir):
    logging.info("Load image data set from {0}.".format(img_data_dir))
    with open(img_data_dir, "rb") as binary_file_handle:
        image_data_buffer = binary_file_handle.read()

    # '>IIII' reads four big-endian unsigned int32 values.
    # unpack_from(...)
    # Unpack the buffer, containing packed C structure data, according to
    # fmt, starting at offset. Requires len(buffer[offset:]) >= calcsize(fmt).
    head = struct.unpack_from('>IIII', image_data_buffer, 0)
    logging.info("head:{0}".format(head))

    header_size = struct.calcsize('>IIII')  # size of the header, in bytes
    img_num = head[1]
    img_width = head[2]
    img_height = head[3]
    logging.info("header_size:{0}".format(header_size))
    logging.info("img_num:{0}".format(img_num))
    logging.info("img_width:{0}".format(img_width))
    logging.info("img_height:{0}".format(img_height))

    # [60000]*28*28
    all_img_bytes = img_num * img_width * img_height
    all_img_fmt_string = '>' + str(all_img_bytes) + 'B'  # like '>47040000B'
    logging.info("all_img_fmt_string:{0}".format(all_img_fmt_string))
    all_image_2d_ndarray = struct.unpack_from(all_img_fmt_string,
                                              image_data_buffer, header_size)
    all_image_2d_ndarray = np.reshape(all_image_2d_ndarray,
                                      [img_num, img_width, img_height])
    return all_image_2d_ndarray
def load_label_data_set(self, label_data_dir):
    logging.info("load label set from {0}.".format(label_data_dir))
    with open(label_data_dir, "rb") as binary_file_handle:
        label_data_buffer = binary_file_handle.read()

    head = struct.unpack_from('>II', label_data_buffer, 0)
    logging.info("head:{0}".format(head))
    label_num = head[1]
    logging.info("label_num:{0}".format(label_num))
    offset = struct.calcsize('>II')
    logging.info("offset:{0}".format(offset))
    label_fmt_string = '>' + str(label_num) + "B"
    logging.info("label_fmt_string:{0}".format(label_fmt_string))
    all_label_2d_ndarray = struct.unpack_from(label_fmt_string, label_data_buffer, offset)
    all_label_2d_ndarray = np.reshape(all_label_2d_ndarray, [label_num, 1])
    logging.info("len(all_label_2d_ndarray):{0}".format(len(all_label_2d_ndarray)))
    logging.info("type(all_label_2d_ndarray):{0}".format(type(all_label_2d_ndarray)))
    logging.info("all_label_2d_ndarray[0]:{0}".format(all_label_2d_ndarray[0]))
    logging.info("type(all_label_2d_ndarray[0]):{0}".format(type(all_label_2d_ndarray[0])))
    logging.info("all_label_2d_ndarray[0][0]:{0}".format(all_label_2d_ndarray[0][0]))
    logging.info("type(all_label_2d_ndarray[0][0]):{0}".format(type(all_label_2d_ndarray[0][0])))
    logging.info("Load label finished.")
    return all_label_2d_ndarray
def _get_desktops(self, *args):
    conn = self.parent.connection
    scr = self.parent.screen

    totalc = conn.core.GetProperty(0, scr.root,
                                   self._NET_NUMBER_OF_DESKTOPS,
                                   xproto.Atom.CARDINAL, 0, 12)
    namesc = conn.core.GetProperty(0, scr.root,
                                   self._NET_DESKTOP_NAMES,
                                   self.UTF8_STRING, 0, 32)

    totalr = totalc.reply()
    self.num_desktops = struct.unpack_from("I", totalr.value.buf())[0]
    namesr = namesc.reply()
    self.desktops = struct.unpack_from(
        "%ds" % namesr.value_len,
        namesr.value.buf())[0].strip("\x00").split("\x00")

    self._update()
def _tag_aux(fp, tb):
    bytes_read = 1
    tag = tb & CBOR_TYPE_MASK
    tag_aux = tb & CBOR_INFO_BITS
    if tag_aux <= 23:
        aux = tag_aux
    elif tag_aux == CBOR_UINT8_FOLLOWS:
        data = fp.read(1)
        aux = struct.unpack_from("!B", data, 0)[0]
        bytes_read += 1
    elif tag_aux == CBOR_UINT16_FOLLOWS:
        data = fp.read(2)
        aux = struct.unpack_from("!H", data, 0)[0]
        bytes_read += 2
    elif tag_aux == CBOR_UINT32_FOLLOWS:
        data = fp.read(4)
        aux = struct.unpack_from("!I", data, 0)[0]
        bytes_read += 4
    elif tag_aux == CBOR_UINT64_FOLLOWS:
        data = fp.read(8)
        aux = struct.unpack_from("!Q", data, 0)[0]
        bytes_read += 8
    else:
        assert tag_aux == CBOR_VAR_FOLLOWS, "bogus tag {0:02x}".format(tb)
        aux = None

    return tag, tag_aux, aux, bytes_read
def test_unpack_from(self):
    test_string = b'abcd01234'
    fmt = '4s'
    s = struct.Struct(fmt)

    for cls in (bytes, bytearray):
        data = cls(test_string)
        self.assertEqual(s.unpack_from(data), (b'abcd',))
        self.assertEqual(s.unpack_from(data, 2), (b'cd01',))
        self.assertEqual(s.unpack_from(data, 4), (b'0123',))
        for i in range(6):
            self.assertEqual(s.unpack_from(data, i), (data[i:i + 4],))
        for i in range(6, len(test_string) + 1):
            self.assertRaises(struct.error, s.unpack_from, data, i)

    for cls in (bytes, bytearray):
        data = cls(test_string)
        self.assertEqual(struct.unpack_from(fmt, data), (b'abcd',))
        self.assertEqual(struct.unpack_from(fmt, data, 2), (b'cd01',))
        self.assertEqual(struct.unpack_from(fmt, data, 4), (b'0123',))
        for i in range(6):
            self.assertEqual(struct.unpack_from(fmt, data, i), (data[i:i + 4],))
        for i in range(6, len(test_string) + 1):
            self.assertRaises(struct.error, struct.unpack_from, fmt, data, i)

    # keyword arguments
    self.assertEqual(s.unpack_from(buffer=test_string, offset=2), (b'cd01',))
def _decode_var_string(self, data):
    firstByte = struct.unpack_from("B", data)[0]
    # At the moment, all strings are treated the same
    # regardless of type
    stringType = firstByte & ~0xc0
    stringSize = (firstByte & 0xc0) >> 6

    size = 0
    nextPointer = 0
    stringOut = ""
    # 1 byte
    if stringSize == 0:
        size = struct.unpack_from("B", data, 1)[0]
        nextPointer = 1 + 1 + size
        stringOut = data[2:2 + size]
    # 2 bytes
    elif stringSize == 1:
        size = struct.unpack_from("H", data, 1)[0]
        nextPointer = 1 + 2 + size
        stringOut = data[3:3 + size]
    # 4 bytes
    elif stringSize == 2:
        size = struct.unpack_from("I", data, 1)[0]
        nextPointer = 1 + 4 + size
        stringOut = data[5:5 + size]
    else:
        raise GCAPFormatError("unsupported variable string type")

    return (stringOut, nextPointer)
def parse(self, input):
    zip_buffer = StringIO.StringIO(input)
    with zipfile.ZipFile(zip_buffer, 'r') as zip:
        payload = zip.read('z')

    magic, payload_len = struct.unpack_from('<II', payload)
    if magic != DepotManifest.PROTOBUF_PAYLOAD_MAGIC:
        raise Exception("Expecting protobuf payload")
    self.payload = content_manifest_pb2.ContentManifestPayload()
    self.payload.ParseFromString(payload[8:8 + payload_len])

    pos_1 = 8 + payload_len
    magic, meta_len = struct.unpack_from('<II', payload[pos_1:])
    if magic != DepotManifest.PROTOBUF_METADATA_MAGIC:
        raise Exception("Expecting protobuf metadata")
    self.metadata = content_manifest_pb2.ContentManifestMetadata()
    self.metadata.ParseFromString(payload[8 + pos_1:8 + pos_1 + meta_len])

    pos_2 = 8 + pos_1 + meta_len
    magic, sig_len = struct.unpack_from('<II', payload[pos_2:])
    if magic != DepotManifest.PROTOBUF_SIGNATURE_MAGIC:
        raise Exception("Expecting protobuf signature")
    self.signature = content_manifest_pb2.ContentManifestSignature()
    self.signature.ParseFromString(payload[8 + pos_2:8 + pos_2 + sig_len])
def read_encoded_int(content, offset):
    i = struct.unpack_from('>B', content, offset)[0]
    if i < 0x80:
        return offset + 1, i
    return offset + 4, struct.unpack_from('>I', content, offset)[0] & ~0x80000000
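# A minimal sketch of the matching encoder for the format read_encoded_int
# decodes (assumption: values below 0x80 are stored in one byte, larger values
# in four big-endian bytes with the top bit set). write_encoded_int is a
# hypothetical helper, not part of the original source.
def write_encoded_int(value):
    if value < 0x80:
        return struct.pack('>B', value)
    return struct.pack('>I', value | 0x80000000)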
def load_mnist(im_path, lb_path):
    # loading images
    binfile = open(im_path, 'rb')
    buf = binfile.read()
    index = 0
    magic, numImages, numRows, numColumns = \
        struct.unpack_from('>IIII', buf, index)
    index += struct.calcsize('>IIII')
    if magic != 2051:
        raise NameError('MNIST TRAIN-IMAGE INCORRECT!')
    ims = np.zeros([numImages, numRows * numColumns])
    for i in range(numImages):
        ims[i, :] = struct.unpack_from('>784B', buf, index)
        index += struct.calcsize('>784B')

    # loading labels
    binfile = open(lb_path, 'rb')
    buf = binfile.read()
    index = 0
    magic, numLabels = struct.unpack_from('>II', buf, index)
    index += struct.calcsize('>II')
    if magic != 2049:
        raise NameError('MNIST TRAIN-LABEL INCORRECT!')
    lbs = np.zeros(numLabels)
    lbs[:] = struct.unpack_from('>' + str(numLabels) + 'B', buf, index)
    return [ims, numRows, numColumns, lbs]
def __init__(self, data):
    self.ident = data[:4]
    self.record_size, self.type, self.count, self.encoding = unpack_from(
        b'>IHHI', data, 4)
    self.encoding = {
        1252: 'cp1252',
        65001: 'utf-8',
    }.get(self.encoding, repr(self.encoding))
    rest = list(unpack_from(b'>IIIIIIII', data, 16))
    self.num_of_resource_records = rest[2]
    self.num_of_non_dummy_resource_records = rest[3]
    self.offset_to_href_record = rest[4]
    self.unknowns1 = rest[:2]
    self.unknowns2 = rest[5]
    self.header_length = rest[6]
    self.title_length = rest[7]
    self.resources = []
    self.hrefs = []

    if data[48:52] == b'EXTH':
        self.exth = EXTHHeader(data[48:])
        self.title = data[48 + self.exth.length:][:self.title_length].decode(self.encoding)
        self.is_image_container = self.exth[539] == 'application/image'
    else:
        self.exth = ' No EXTH header present '
        self.title = ''
        self.is_image_container = False

    self.bytes_after_exth = data[self.header_length + self.title_length:]
    self.null_bytes_after_exth = len(self.bytes_after_exth) - len(
        self.bytes_after_exth.replace(b'\0', b''))
def ReadRecord(d, offset=0x0):
    id = d[0]
    d = d[1:]  # Eat id
    if id == 0xff or id == 0x4:  # Normal end of Data
        return id, None, None
    sztotal = 1
    assert RecPack.has_key(id), "Unknown record ID %i at offset %i" % (id, offset)
    if RecRepeat.has_key(id):
        sz = struct.calcsize(RecPack[id])
        init = struct.unpack_from(RecRepeat[id][1], d)
        szinit = struct.calcsize(RecRepeat[id][1])
        d = d[szinit:]
        sztotal += szinit
        res = []
        for i in range(0, RecRepeat[id][0]):
            res.append(struct.unpack_from(RecPack[id], d))
            d = d[sz:]
            sztotal += sz
    elif type(RecPack[id]) == str:
        sz = struct.calcsize(RecPack[id])
        res = struct.unpack_from(RecPack[id], d)
        sztotal += sz
    elif type(RecPack[id]) == int:
        # 12-bit field array
        # A padding byte 0xFF may be present
        sz = RecPack[id] - 1
        res = ReadPacked12Bit(d[:sz])
        sztotal += sz
    return id, sztotal, res
def _read_tag_data_info(dmfile):
    # DM4 specifies this property as always big endian
    tag_array_length = struct.unpack_from('>Q', dmfile.read(8))[0]
    format_str = '>' + tag_array_length * 'q'  # Big endian signed long
    tag_array_types = struct.unpack_from(format_str, dmfile.read(8 * tag_array_length))
    return (tag_array_length, tag_array_types)
def sec_info(self, secnum):
    start_offset, flgval = struct.unpack_from('>2L', self.datain, 78 + (secnum * 8))
    if secnum == self.num_sections:
        next_offset = len(self.datain)
    else:
        next_offset, nflgval = struct.unpack_from('>2L', self.datain, 78 + ((secnum + 1) * 8))
    return start_offset, flgval, next_offset
def parseString(db, offset):
    existence = unpack_from('b', db, offset)[0]
    if existence == 0x00:
        return ("", offset + 1)
    elif existence == 0x0b:
        # decode ULEB128
        length = 0
        shift = 0
        offset += 1
        while True:
            val = unpack_from('B', db, offset)[0]
            length |= ((val & 0x7F) << shift)
            offset += 1
            if (val & (1 << 7)) == 0:
                break
            shift += 7
        string = unpack_from(str(length) + 's', db, offset)[0]
        offset += length
        unic = u''
        try:
            unic = unicode(string, 'utf-8')
        except UnicodeDecodeError:
            print "Could not parse UTF-8 string, returning empty string."
        return (unic, offset)
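# For reference, a minimal ULEB128 encoder that mirrors the decode loop in
# parseString above; encode_uleb128 is a hypothetical helper, not part of the
# original parser.
def encode_uleb128(value):
    out = bytearray()
    while True:
        byte = value & 0x7F
        value >>= 7
        if value:
            out.append(byte | 0x80)  # set the continuation bit
        else:
            out.append(byte)
            return bytes(out)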
def script_GetOp(bytes):
    i = 0
    while i < len(bytes):
        vch = None
        opcode = ord(bytes[i])
        i += 1

        if opcode <= opcodes.OP_PUSHDATA4:
            nSize = opcode
            if opcode == opcodes.OP_PUSHDATA1:
                nSize = ord(bytes[i])
                i += 1
            elif opcode == opcodes.OP_PUSHDATA2:
                (nSize,) = struct.unpack_from('<H', bytes, i)
                i += 2
            elif opcode == opcodes.OP_PUSHDATA4:
                (nSize,) = struct.unpack_from('<I', bytes, i)
                i += 4
            if i + nSize > len(bytes):
                vch = "_INVALID_" + bytes[i:]
                i = len(bytes)
            else:
                vch = bytes[i:i + nSize]
                i += nSize

        yield (opcode, vch, i)
def parse_resp(self, buf):
    header_offset = 2
    avp_offset = 8
    nr = 0
    ns = 0

    # read the header
    (cflag,) = struct.unpack_from('!H', buf)
    cflag_bin = int2bin(cflag, 16)
    ptype = cflag_bin[0]
    blen = cflag_bin[1]
    sbit = cflag_bin[4]
    obit = cflag_bin[6]
    pbit = cflag_bin[7]
    ver = cflag_bin[12:16]

    if self.debug:
        print "<- l2tp packet dump"
        print "<-: l2tp cflag bits : %s|%s|%s|%s|%s|%s" % (ptype, blen, sbit, obit, pbit, ver)

    if ver != '0010':
        # print '!! Not a valid l2tp packet : discarding'
        return None

    if blen == '1':
        (plen,) = struct.unpack_from('!H', buf, offset=header_offset)
        if self.debug:
            print "<-: l2tp length : %d" % plen
        header_offset += 2

    (tid, sid) = struct.unpack_from('!HH', buf, offset=header_offset)
    if self.debug:
        print "<-: l2tp tunnel_id : %d, session_id : %d" % (tid, sid)
    header_offset += 4

    if sbit == '1':
        (ns, nr) = struct.unpack_from('!HH', buf, offset=header_offset)
        if self.debug:
            print "<-: l2tp ns : %d, nr : %d" % (ns, nr)
        header_offset += 4
        avp_offset += 4

    if obit == '1':
        (offset_size, offset_pad) = struct.unpack_from('!HH', buf, offset=header_offset)
        if self.debug:
            print "<-: l2tp offset_size : %d, offset_pad : %d" % (offset_size, offset_pad)
        header_offset += 4
        avp_offset += 4

    if ptype == '0':
        # data packet: write to pppd
        data = buf[header_offset:]
        try:
            async_buf = self.pppd_sync_to_async(data)
            pty._writen(self.pppd_fd, async_buf)
        except OSError, se:
            if se.args[0] not in (errno.EAGAIN, errno.EINTR):
                raise
def receptionListUser(self, packet, status):
    self.listUser = []
    lengthMsg = struct.unpack_from(">LH", packet)[1]
    print "receiving the user list"
    lengthData = lengthMsg - 8
    offset = 8
    i = 0
    while i < lengthData:
        lengthNameUser_Dispo = struct.unpack_from(">B", packet, offset)[0]
        lengthNameUser = lengthNameUser_Dispo >> 1
        S = lengthNameUser_Dispo & 1
        Data = ">BH" + str(lengthNameUser) + "s"
        userName = struct.unpack_from(Data, packet, offset)[2]
        userId = struct.unpack_from(Data, packet, offset)[1]
        if (S == 1):
            ROOM = ROOM_IDS.MAIN_ROOM
        else:
            ROOM = ROOM_IDS.MOVIE_ROOM
        self.listUser.append((userName, ROOM))
        self.listUser_Id.append((userName, userId))
        i += (lengthNameUser + 3)
        offset += (lengthNameUser + 3)

    liste = []
    if status == userStatus['waitingMainRoomUserList'] or status == userStatus['mainRoom']:
        liste = self.listUser
    elif status == userStatus['waitingfMovieRoomUserList'] or status == userStatus['movieRoom']:
        print "entering the movie-room branch"
        i = 0
        while i < len(self.listUser):
            liste.append((self.listUser[i][0], self.RoomName))  # was previously self.thisRoomName?
            i += 1
    if status == userStatus['mainRoom'] or status == userStatus['movieRoom']:
        self.clientProxy.setUserListONE(liste)
        print "update ok"
import socket
import struct

# sock = socket.socket(socket.AF_INET,
#                      socket.SOCK_DGRAM)  # UDP
# print ("UDP target IP:", UDP_IP)
# print ("UDP target port:", UDP_PORT)
# data = struct.pack("<HH", 1, 1)
# sock.sendto(data, (UDP_IP, UDP_PORT))

TCP_IP = "192.168.1.148"
TCP_PORT = 5006

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((TCP_IP, TCP_PORT))

data = struct.pack("<H", 1)  # start raw lidar readings stream
sock.send(data)
print("Data sent")

recv = sock.recv(4)  # readings message length
READINGS_LENGTH = struct.unpack_from("<H", recv)[0]
print(READINGS_LENGTH)

import matplotlib.pyplot as plt
import numpy as np

plt.ion()
fig, ax = plt.subplots()
x, y = [0 for i in range(READINGS_LENGTH)], [0 for i in range(READINGS_LENGTH)]
sc = ax.scatter(x, y)
plt.grid(True)
plt.xlim(-5, 5)
plt.ylim(-5, 5)
plt.draw()

while True:
def decode_navdata(packet):
    """Decode a navdata packet."""
    offset = 0
    _ = struct.unpack_from("IIII", packet, offset)
    drone_state = dict()
    drone_state['fly_mask'] = _[1] & 1  # FLY MASK : (0) ardrone is landed, (1) ardrone is flying
    drone_state['video_mask'] = _[1] >> 1 & 1  # VIDEO MASK : (0) video disable, (1) video enable
    drone_state['vision_mask'] = _[1] >> 2 & 1  # VISION MASK : (0) vision disable, (1) vision enable
    drone_state['control_mask'] = _[1] >> 3 & 1  # CONTROL ALGO : (0) euler angles control, (1) angular speed control
    drone_state['altitude_mask'] = _[1] >> 4 & 1  # ALTITUDE CONTROL ALGO : (0) altitude control inactive, (1) altitude control active
    drone_state['user_feedback_start'] = _[1] >> 5 & 1  # USER feedback : Start button state
    drone_state['command_mask'] = _[1] >> 6 & 1  # Control command ACK : (0) None, (1) one received
    drone_state['fw_file_mask'] = _[1] >> 7 & 1  # Firmware file is good (1)
    drone_state['fw_ver_mask'] = _[1] >> 8 & 1  # Firmware update is newer (1)
    drone_state['fw_upd_mask'] = _[1] >> 9 & 1  # Firmware update is ongoing (1)
    drone_state['navdata_demo_mask'] = _[1] >> 10 & 1  # Navdata demo : (0) All navdata, (1) only navdata demo
    drone_state['navdata_bootstrap'] = _[1] >> 11 & 1  # Navdata bootstrap : (0) options sent in all or demo mode, (1) no navdata options sent
    drone_state['motors_mask'] = _[1] >> 12 & 1  # Motor status : (0) Ok, (1) Motors problem
    drone_state['com_lost_mask'] = _[1] >> 13 & 1  # Communication lost : (1) com problem, (0) Com is ok
    drone_state['vbat_low'] = _[1] >> 15 & 1  # VBat low : (1) too low, (0) Ok
    drone_state['user_el'] = _[1] >> 16 & 1  # User Emergency Landing : (1) User EL is ON, (0) User EL is OFF
    drone_state['timer_elapsed'] = _[1] >> 17 & 1  # Timer elapsed : (1) elapsed, (0) not elapsed
    drone_state['angles_out_of_range'] = _[1] >> 19 & 1  # Angles : (0) Ok, (1) out of range
    drone_state['ultrasound_mask'] = _[1] >> 21 & 1  # Ultrasonic sensor : (0) Ok, (1) deaf
    drone_state['cutout_mask'] = _[1] >> 22 & 1  # Cutout system detection : (0) Not detected, (1) detected
    drone_state['pic_version_mask'] = _[1] >> 23 & 1  # PIC Version number OK : (0) a bad version number, (1) version number is OK
    drone_state['atcodec_thread_on'] = _[1] >> 24 & 1  # ATCodec thread ON : (0) thread OFF, (1) thread ON
    drone_state['navdata_thread_on'] = _[1] >> 25 & 1  # Navdata thread ON : (0) thread OFF, (1) thread ON
    drone_state['video_thread_on'] = _[1] >> 26 & 1  # Video thread ON : (0) thread OFF, (1) thread ON
    drone_state['acq_thread_on'] = _[1] >> 27 & 1  # Acquisition thread ON : (0) thread OFF, (1) thread ON
    drone_state['ctrl_watchdog_mask'] = _[1] >> 28 & 1  # CTRL watchdog : (1) delay in control execution (> 5ms), (0) control is well scheduled
    drone_state['adc_watchdog_mask'] = _[1] >> 29 & 1  # ADC Watchdog : (1) delay in uart2 dsr (> 5ms), (0) uart2 is good
    drone_state['com_watchdog_mask'] = _[1] >> 30 & 1  # Communication Watchdog : (1) com problem, (0) Com is ok
    drone_state['emergency_mask'] = _[1] >> 31 & 1  # Emergency landing : (0) no emergency, (1) emergency
    data = dict()
    data['drone_state'] = drone_state
    data['header'] = _[0]
    data['seq_nr'] = _[2]
    data['vision_flag'] = _[3]
    offset += struct.calcsize("IIII")
    while 1:
        try:
            id_nr, size = struct.unpack_from("HH", packet, offset)
            offset += struct.calcsize("HH")
        except struct.error:
            break
        values = []
        for i in range(size - struct.calcsize("HH")):
            values.append(struct.unpack_from("c", packet, offset)[0])
            offset += struct.calcsize("c")
        # navdata_tag_t in navdata-common.h
        if id_nr == 0:
            values = struct.unpack_from("IIfffIfffI", "".join(values))
            values = dict(zip(['ctrl_state', 'battery', 'theta', 'phi', 'psi',
                               'altitude', 'vx', 'vy', 'vz', 'num_frames'],
                              values))
            # convert the millidegrees into degrees and round to int, as they
            # are not so precise anyways
            for i in 'theta', 'phi', 'psi':
                values[i] = int(values[i] / 1000)
                #values[i] /= 1000
        data[id_nr] = values
    return data
def _parse_jp2_header(fp):
    """Parse the JP2 header box to extract size, component count and
    color space information, returning a PIL (size, mode) tuple."""

    # Find the JP2 header box
    header = None
    while True:
        lbox, tbox = struct.unpack('>I4s', fp.read(8))
        if lbox == 1:
            lbox = struct.unpack('>Q', fp.read(8))[0]
            hlen = 16
        else:
            hlen = 8

        if lbox < hlen:
            raise SyntaxError('Invalid JP2 header length')

        if tbox == b'jp2h':
            header = fp.read(lbox - hlen)
            break
        else:
            fp.seek(lbox - hlen, os.SEEK_CUR)

    if header is None:
        raise SyntaxError('could not find JP2 header')

    size = None
    mode = None
    bpc = None
    nc = None

    hio = io.BytesIO(header)
    while True:
        lbox, tbox = struct.unpack('>I4s', hio.read(8))
        if lbox == 1:
            lbox = struct.unpack('>Q', hio.read(8))[0]
            hlen = 16
        else:
            hlen = 8

        content = hio.read(lbox - hlen)

        if tbox == b'ihdr':
            height, width, nc, bpc, c, unkc, ipr = struct.unpack('>IIHBBBB', content)
            size = (width, height)
            if unkc:
                if nc == 1 and (bpc & 0x7f) > 8:
                    mode = 'I;16'
                elif nc == 1:
                    mode = 'L'
                elif nc == 2:
                    mode = 'LA'
                elif nc == 3:
                    mode = 'RGB'
                elif nc == 4:
                    mode = 'RGBA'
                break
        elif tbox == b'colr':
            meth, prec, approx = struct.unpack_from('>BBB', content)
            if meth == 1:
                cs = struct.unpack_from('>I', content, 3)[0]
                if cs == 16:  # sRGB
                    if nc == 1 and (bpc & 0x7f) > 8:
                        mode = 'I;16'
                    elif nc == 1:
                        mode = 'L'
                    elif nc == 3:
                        mode = 'RGB'
                    elif nc == 4:
                        mode = 'RGBA'
                    break
                elif cs == 17:  # grayscale
                    if nc == 1 and (bpc & 0x7f) > 8:
                        mode = 'I;16'
                    elif nc == 1:
                        mode = 'L'
                    elif nc == 2:
                        mode = 'LA'
                    break
                elif cs == 18:  # sYCC
                    if nc == 3:
                        mode = 'RGB'
                    elif nc == 4:
                        mode = 'RGBA'
                    break

    if size is None or mode is None:
        raise SyntaxError("Malformed jp2 header")

    return (size, mode)
def dump(f, fmt):
    length = struct.calcsize(fmt)
    data = f.read(length)
    value = struct.unpack_from(fmt, data)
    return value
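# A short usage sketch for the dump() helper above, run against an in-memory
# buffer so it needs no capture file (the values are illustrative only).
import io
import struct

buf = io.BytesIO(struct.pack('<IHH', 0xdeadbeef, 640, 480))
(frame_id,) = dump(buf, '<I')     # -> 0xdeadbeef
width, height = dump(buf, '<HH')  # -> (640, 480)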
file_size = f.tell()
f.seek(0)
print "File size: %d, (0x%08x)" % (file_size, file_size)

# All chunks in a frame in the isoc stream start with the same "frame id"
# see http://www.usb.org/developers/devclass_docs/USB_Video_Class_1_5.zip
frame_id = dump(f, "<I")[0]
eof_frame_id = frame_id | (0x02 << 8)
f.seek(0)
print "frame id %s eof %s" % (hex(frame_id), hex(eof_frame_id))

while f.tell() < file_size:
    data = f.read(4)
    tmp_frame_id = struct.unpack_from("<I", data)[0]
    # When a new chunk is met, skip the timestamps and read the next 4
    # useful bytes
    if tmp_frame_id == frame_id or tmp_frame_id == eof_frame_id:
        f.read(8)
        # timestamp1_high = dump(f, "<H")
        # timestamp1_low = dump(f, "<H")
        # #print "timestamp1: ", (timestamp1_high << 16) + timestamp1_low
        # timestamp2_high = dump(f, "<H")
        # timestamp2_low = dump(f, "<H")
        # #print "timestamp2: ", (timestamp2_high << 16) + timestamp2_low
        data = f.read(4)
    g.write(data)
def __init__(self, kind="nand-retail", data=None):
    self.kind = kind
    if data is None:
        self.priority, self.arm11Entrypoint, self.arm9Entrypoint = 0, 0, 0
        self.sections = [FirmSectionHeader(i, kind) for i in range(4)]
        self.reserved, self.signature = b'\x00' * 0x30, b'\x00' * 0x100
    else:
        if data[:4] != b"FIRM":
            raise ValueError("Not a FIRM file")
        self.priority, self.arm11Entrypoint, self.arm9Entrypoint, self.reserved = unpack_from(
            "<3I48s", data, 4)
        self.sections = [FirmSectionHeader(i, kind, data) for i in range(4)]
        self.signature = data[0x100:0x200]
        self.check()
def export(self, basePath, extractModules=False, secretSector=None):
    if self.guessedType == "Kernel11 modules" and extractModules:
        pos = 0
        if not os.path.isdir(os.path.join(basePath, "modules")):
            os.mkdir(os.path.join(basePath, "modules"))
        while pos < self.size:
            size = unpack_from("<I", self.sectionData, pos + 0x104)[0] * 0x200
            name = self.sectionData[pos + 0x200:pos + 0x208].decode("ascii")
            nullBytePos = name.find('\x00')
            name = name if nullBytePos == -1 else name[:nullBytePos]
            name = "{0}.cxi".format(name)
            with open(os.path.join(basePath, "modules", name), "wb+") as f:
                f.write(self.sectionData[pos:pos + size])
            pos += size

        with open(os.path.join(basePath, "section{0}.bin".format(self.num)), "wb+") as f:
            f.write(self.sectionData)

    elif self.guessedType.startswith("K9L") and secretSector is not None:
        # kek is in keyslot 0x11, as "normal key"
        encKeyX = self.sectionData[:0x10] if self.guessedType[3] == '0' \
            else self.sectionData[0x60:0x70]
        kek = secretSector[:0x10] if self.guessedType[3] != '2' \
            else secretSector[0x10:0x20]
        keyX = AES.new(kek, AES.MODE_ECB).decrypt(encKeyX)
        keyY = self.sectionData[0x10:0x20]

        key = unhexlify("{0:032X}".format(
            keyscrambler(int(hexlify(keyX), 16), int(hexlify(keyY), 16))))
        ctr = self.sectionData[0x20:0x30]

        sizeDec = self.sectionData[0x30:0x38].decode("ascii")
        size = int(sizeDec[:sizeDec.find('\x00')], 10)

        data = self.sectionData
        if 0x800 + size <= self.size:
            cipher = AES.new(key, AES.MODE_CTR, initial_value=ctr, nonce=b'')
            decData = cipher.decrypt(self.sectionData[0x800:0x800 + size])
            data = b''.join((self.sectionData[:0x800], decData,
                             self.sectionData[0x800 + size:]))

        if extractModules:
            exportP9(basePath, data)

        with open(os.path.join(basePath, "section{0}.bin".format(self.num)), "wb+") as f:
            f.write(data)

    elif self.guessedType == "Kernel9":
        if extractModules:
            exportP9(basePath, self.sectionData)

        with open(os.path.join(basePath, "section{0}.bin".format(self.num)), "wb+") as f:
            f.write(self.sectionData)

    else:
        with open(os.path.join(basePath, "section{0}.bin".format(self.num)), "wb+") as f:
            f.write(self.sectionData)
def __getDport(self, sid):
    p = PcapIter(self.captureFile, 'host %s' % self.ip)
    for plen, pkt, ts in p:
        if sid in pkt:
            offset = sum(p.getOffsets(pkt)[0:2])
            return unpack_from('!H', pkt, offset + 2)[0]
def read_uint32(self):
    result = struct.unpack_from('<I', self._data, self._position)[0]
    self._position += 4
    return result
def __parseTCP(self, rtspSport, rtspDport, ptype):
    '''
    Parse RTP over TCP session.
    '''
    # Parse packets
    p = PcapIter(self.captureFile,
                 'host %s and tcp and src port %s and dst port %s'
                 % (self.ip, rtspSport, rtspDport))
    offsets = None
    packetlist = []
    seqlist = []
    lenlist = []
    tslist = []
    for plen, pkt, ts in p:
        if plen > 74 and 'RTSP/1.0' not in pkt and 'GStreamer' not in pkt:
            if not offsets:
                offsets = p.getOffsets(pkt)
            packetlist.append(pkt)
            seq = unpack_from('!xxxxI', pkt, sum(offsets[0:2]))[0]
            seqlist.append(seq)
            lenlist.append(plen - offsets[0])
            tslist.append(ts)
            VTLOG.debug('TCP packet appended. Sequence: %s' % seq)
    seqlist, packetlist, lenlist, tslist = \
        multiSort(seqlist, packetlist, lenlist, tslist)
    VTLOG.debug('Sequence list sorted')

    # Locate packet losses
    fill = [0 for i in range(0, len(seqlist))]
    for i in range(0, len(seqlist) - 1):
        if seqlist[i] + lenlist[i] < seqlist[i + 1]:
            fill[i] = 1

    # Assemble the complete stream
    stream = ''
    for i in range(0, len(packetlist)):
        stream += packetlist[i][sum(offsets):]
        # Mark ENDOFPACKET and save time and length
        stream += 'ENDOFPACKET'
        stream += str(int(tslist[i] * 1000000))
        stream += 'ENDOFPACKET'
        stream += str(lenlist[i])
        stream += 'ENDOFPACKET'
        if fill[i]:
            # Mark PACKETLOSS
            VTLOG.debug('PACKETLOSS!')
            stream += 'PACKETLOSS'
    VTLOG.debug('TCP payloads assembled')

    # Parse the stream
    offset = 0
    parsing = True
    while parsing:
        plen = unpack_from('!xxH', stream, offset)[0]
        loss = stream[offset + 4:offset + plen].find('PACKETLOSS')
        if loss == -1:
            # No loss: look inside then
            if ptype == unpack_from('!xB', stream, offset + 4)[0] & 0x7F:
                aux = stream[offset:].split('ENDOFPACKET')
                self.lengths.append(int(aux[2]))
                self.times.append(float(aux[1]) / 1000000)
                seq = unpack_from('!xxH', stream, offset + 4)[0]
                self.sequences.append(seq + self.__add)
                self.timestamps.append(unpack_from('!xxxxI', stream, offset + 4)[0])
                VTLOG.debug('TCP/RTP packet found. Sequence: %s' % seq)
                if seq == 65535:
                    self.__add += seq
        else:
            # Avoid PACKETLOSS
            plen = loss + 10
            VTLOG.debug('PACKETLOSS!')
        offset += 4 + plen
        # Let's find the next RTSP interleaved packet
        while parsing and not ((0x24, 0x00) == unpack_from('!BB', stream, offset)
                               and ptype == unpack_from('!xB', stream, offset + 4)[0] & 0x7F):
            if stream[offset:offset + 10] == 'PACKETLOSS':
                # Avoid PACKETLOSS
                offset += 10
                VTLOG.debug('PACKETLOSS!')
            else:
                # Find next packet
                offset += 1
            if len(stream) - offset <= 5:
                # Yep! We're done!
                parsing = False
    VTLOG.debug('RTP session parsed')
def read_uint16(self):
    result = struct.unpack_from('<H', self._data, self._position)[0]
    self._position += 2
    return result
def read_uint64(self):
    result = struct.unpack_from('<Q', self._data, self._position)[0]
    self._position += 8
    return result
def decode_frame(data_in):
    """Receives raw socket data and attempts to turn it into a frame.
    Returns bytes used to make the frame and the frame

    :param str data_in: The raw data stream
    :rtype: tuple(bytes consumed, frame)
    :raises: pika.exceptions.InvalidFrameError
    """
    # Look to see if it's a protocol header frame
    try:
        if data_in[0:4] == b'AMQP':
            major, minor, revision = struct.unpack_from('BBB', data_in, 5)
            return 8, ProtocolHeader(major, minor, revision)
    except (IndexError, struct.error):
        return 0, None

    # Get the Frame Type, Channel Number and Frame Size
    try:
        (frame_type, channel_number, frame_size) = struct.unpack('>BHL', data_in[0:7])
    except struct.error:
        return 0, None

    # Get the frame data
    frame_end = spec.FRAME_HEADER_SIZE + frame_size + spec.FRAME_END_SIZE

    # We don't have all of the frame yet
    if frame_end > len(data_in):
        return 0, None

    # The frame-end marker byte is incorrect
    if data_in[frame_end - 1] != spec.FRAME_END:
        raise exceptions.InvalidFrameError("Invalid FRAME_END marker")

    # Get the raw frame data
    frame_data = data_in[spec.FRAME_HEADER_SIZE:frame_end - 1]

    if frame_type == spec.FRAME_METHOD:
        # Get the Method ID from the frame data
        method_id = struct.unpack_from('>I', frame_data)[0]

        # Get a Method object for this method_id
        method = spec.methods[method_id]()

        # Decode the content
        method.decode(frame_data, 4)

        # Return the amount of data consumed and the Method object
        return frame_end, Method(channel_number, method)

    elif frame_type == spec.FRAME_HEADER:
        # Return the header class and body size
        class_id, weight, body_size = struct.unpack_from('>HHQ', frame_data)

        # Get the Properties type
        properties = spec.props[class_id]()

        # Decode the properties
        out = properties.decode(frame_data[12:])

        # Return a Header frame
        return frame_end, Header(channel_number, body_size, properties)

    elif frame_type == spec.FRAME_BODY:
        # Return the amount of data consumed and the Body frame w/ data
        return frame_end, Body(channel_number, frame_data)

    elif frame_type == spec.FRAME_HEARTBEAT:
        # Return the amount of data and a Heartbeat frame
        return frame_end, Heartbeat()

    raise exceptions.InvalidFrameError("Unknown frame type: %i" % frame_type)
def read_uint24(self):
    low, high = struct.unpack_from('<HB', self._data, self._position)
    self._position += 3
    return low + (high << 16)
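# The read_uint16/read_uint24/read_uint32/read_uint64 methods above all follow
# one pattern: unpack at self._position, then advance by the field size. A
# minimal generic sketch, assuming the same _data/_position attributes
# (read_unpacked is a hypothetical addition, not part of the original class):
def read_unpacked(self, fmt):
    values = struct.unpack_from(fmt, self._data, self._position)
    self._position += struct.calcsize(fmt)
    return values[0] if len(values) == 1 else values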
def main(args=None):
    parser = argparse.ArgumentParser(description="Parse Luma3DS exception dumps")
    parser.add_argument("filename")
    args = parser.parse_args()
    data = b""
    with open(args.filename, "rb") as f:
        data = f.read()
    if unpack_from("<2I", data) != (0xdeadc0de, 0xdeadcafe):
        raise SystemExit("Invalid file format")

    version, processor, exceptionType, _, nbRegisters, codeDumpSize, stackDumpSize, \
        additionalDataSize = unpack_from("<8I", data, 8)
    nbRegisters //= 4

    if version < (1 << 16) | 2:
        raise SystemExit("Incompatible format version, please use the appropriate parser.")

    registers = unpack_from("<{0}I".format(nbRegisters), data, 40)
    codeOffset = 40 + 4 * nbRegisters
    codeDump = data[codeOffset:codeOffset + codeDumpSize]
    stackOffset = codeOffset + codeDumpSize
    stackDump = data[stackOffset:stackOffset + stackDumpSize]
    additionalDataOffset = stackOffset + stackDumpSize
    additionalData = data[additionalDataOffset:additionalDataOffset + additionalDataSize]

    if processor == 9:
        print("Processor: ARM9")
    else:
        print("Processor: ARM11 (core {0})".format(processor >> 16))

    typeDetailsStr = ""
    if exceptionType == 2:
        if (registers[16] & 0x20) == 0 and codeDumpSize >= 4:
            instr = unpack_from("<I", codeDump[-4:])[0]
            if instr == 0xe12fff7e:
                typeDetailsStr = " (kernel panic)"
            elif instr == 0xef00003c:
                typeDetailsStr = " " + (svcBreakReasons[registers[0]]
                                        if registers[0] < 3 else "(svcBreak)")
        # Thumb mode: svc 0x3c is a 16-bit instruction, so read a halfword
        elif (registers[16] & 0x20) != 0 and codeDumpSize >= 2:
            instr = unpack_from("<H", codeDump[-2:])[0]
            if instr == 0xdf3c:
                typeDetailsStr = " " + (svcBreakReasons[registers[0]]
                                        if registers[0] < 3 else "(svcBreak)")
    elif processor != 9 and (registers[20] & 0x80000000) != 0:
        typeDetailsStr = " (VFP exception)"

    print("Exception type: {0}{1}".format(
        "unknown" if exceptionType >= len(handledExceptionNames)
        else handledExceptionNames[exceptionType], typeDetailsStr))

    if processor == 11 and exceptionType >= 2:
        xfsr = registers[18] if exceptionType == 2 else registers[17]
        print("Fault status: " + faultStatusSources[xfsr & 0xf])

    if additionalDataSize != 0:
        print("Current process: {0} ({1:016x})".format(
            additionalData[:8].decode("ascii"),
            unpack_from("<Q", additionalData, 8)[0]))

    print("\nRegister dump:\n")
    for i in range(0, nbRegisters - (nbRegisters % 2), 2):
        if i == 16:
            print("")
        print(makeRegisterLine(registerNames[i], registers[i],
                               registerNames[i + 1], registers[i + 1]))
    if nbRegisters % 2 == 1:
        print("{0:<15}{1:<20}".format(
            registerNames[nbRegisters - 1],
            "{0:08x}".format(registers[nbRegisters - 1])))

    if processor == 11 and exceptionType == 3:
        print("{0:<15}{1:<20}Access type: {2}".format(
            "FAR", "{0:08x}".format(registers[19]),
            "Write" if registers[17] & (1 << 11) != 0 else "Read"))

    thumb = registers[16] & 0x20 != 0
    addr = registers[15] - codeDumpSize + (2 if thumb else 4)

    print("\nCode dump:\n")
    objdump_res = ""
    try:
        path = os.path.join(os.environ["DEVKITARM"], "bin", "arm-none-eabi-objdump")
        if os.name == "nt" and path[0] == '/':
            path = ''.join((path[1], ':', path[2:]))
        objdump_res = subprocess.check_output((
            path, "-marm", "-b", "binary",
            "--adjust-vma=" + hex(addr - codeOffset),
            "--start-address=" + hex(addr),
            "--stop-address=" + hex(addr + codeDumpSize),
            "-D", "-z", "-M",
            "reg-names-std" + (",force-thumb" if thumb else ""),
            args.filename)).decode("utf-8")
        objdump_res = '\n'.join(
            objdump_res[objdump_res.find('<.data+'):].split('\n')[1:])
    except:
        objdump_res = ""

    print(objdump_res if objdump_res != "" else hexdump(addr, codeDump))

    print("\nStack dump:\n")
    print(hexdump(registers[13], stackDump))
def read_uint32_at(data, offset):
    return struct.unpack_from("I", data, offset)[0]
def create_character(self, gltf_node, gltf_skin, gltf_mesh, gltf_data):
    #print("Creating skinned mesh for", gltf_mesh['name'])
    skel_name = gltf_node['skeletons'][0]
    root = gltf_data['nodes'][skel_name]

    character = Character(gltf_mesh['name'])
    bundle = character.get_bundle(0)
    skeleton = PartGroup(bundle, "<skeleton>")
    jvtmap = {}

    bind_shape_mat = self.load_matrix(gltf_skin['bindShapeMatrix'])
    bind_shape_mat.invert_in_place()

    bind_mats = []
    ibmacc = gltf_data['accessors'][gltf_skin['inverseBindMatrices']]
    ibmbv = gltf_data['bufferViews'][ibmacc['bufferView']]
    ibmbuff = gltf_data['buffers'][ibmbv['buffer']]
    ibmdata = base64.b64decode(ibmbuff['uri'].split(',')[1])

    for i in range(ibmacc['count']):
        mat = struct.unpack_from('<{}'.format('f' * 16), ibmdata, i * 16 * 4)
        #print('loaded', mat)
        #mat = bind_shape_mat * self.load_matrix(mat)
        mat = self.load_matrix(mat)
        mat.invert_in_place()
        bind_mats.append(mat)

    def create_joint(parent, node, transform):
        #print("Creating joint for:", node['name'])
        inv_transform = LMatrix4(transform)
        inv_transform.invert_in_place()
        joint_index = None
        joint_mat = LMatrix4.ident_mat()
        if node['jointName'] in gltf_skin['jointNames']:
            joint_index = gltf_skin['jointNames'].index(node['jointName'])
            joint_mat = bind_mats[joint_index]

        # glTF uses an absolute bind pose, Panda wants it local
        # also take this opportunity to bake in the bind shape matrix
        bind_pose = joint_mat * bind_shape_mat * inv_transform
        joint = CharacterJoint(character, bundle, parent, node['name'], bind_pose)

        # Non-deforming bones are not in the skin's jointNames, don't add them to the jvtmap
        if joint_index is not None:
            jvtmap[joint_index] = JointVertexTransform(joint)

        for child in node.get('children', []):
            #print("Create joint for child", child)
            bone_node = gltf_data['nodes'][child]
            create_joint(joint, bone_node, bind_pose * transform)

    create_joint(skeleton, root, LMatrix4.ident_mat())

    #print("Adding skinned mesh to", gltf_node['name'])
    self.characters[gltf_node['name']] = character

    # convert animations
    #print("Looking for actions for", skel_name)
    anims = [
        anim for anim in gltf_data.get('animations', {}).values()
        if skel_name in {chan['target']['id'] for chan in anim['channels']}
    ]
    if anims:
        #print("Found anims for", gltf_node['name'])
        for gltf_anim in anims:
            #print("\t", gltf_anim['name'])
            self.create_anim(character, skel_name, gltf_anim, gltf_data)

    return jvtmap
def _getmp(self):
    # Extract MP information. This method was inspired by the "highly
    # experimental" _getexif version that's been in use for years now,
    # itself based on the ImageFileDirectory class in the TIFF plug-in.

    # The MP record essentially consists of a TIFF file embedded in a JPEG
    # application marker.
    try:
        data = self.info["mp"]
    except KeyError:
        return None
    file_contents = io.BytesIO(data)
    head = file_contents.read(8)
    endianness = ">" if head[:4] == b"\x4d\x4d\x00\x2a" else "<"
    # process dictionary
    try:
        info = TiffImagePlugin.ImageFileDirectory_v2(head)
        file_contents.seek(info.next)
        info.load(file_contents)
        mp = dict(info)
    except Exception as e:
        raise SyntaxError("malformed MP Index (unreadable directory)") from e
    # it's an error not to have a number of images
    try:
        quant = mp[0xB001]
    except KeyError as e:
        raise SyntaxError("malformed MP Index (no number of images)") from e
    # get MP entries
    mpentries = []
    try:
        rawmpentries = mp[0xB002]
        for entrynum in range(0, quant):
            unpackedentry = struct.unpack_from(
                "{}LLLHH".format(endianness), rawmpentries, entrynum * 16)
            labels = ("Attribute", "Size", "DataOffset", "EntryNo1", "EntryNo2")
            mpentry = dict(zip(labels, unpackedentry))
            mpentryattr = {
                "DependentParentImageFlag": bool(mpentry["Attribute"] & (1 << 31)),
                "DependentChildImageFlag": bool(mpentry["Attribute"] & (1 << 30)),
                "RepresentativeImageFlag": bool(mpentry["Attribute"] & (1 << 29)),
                "Reserved": (mpentry["Attribute"] & (3 << 27)) >> 27,
                "ImageDataFormat": (mpentry["Attribute"] & (7 << 24)) >> 24,
                "MPType": mpentry["Attribute"] & 0x00FFFFFF,
            }
            if mpentryattr["ImageDataFormat"] == 0:
                mpentryattr["ImageDataFormat"] = "JPEG"
            else:
                raise SyntaxError("unsupported picture format in MPO")
            mptypemap = {
                0x000000: "Undefined",
                0x010001: "Large Thumbnail (VGA Equivalent)",
                0x010002: "Large Thumbnail (Full HD Equivalent)",
                0x020001: "Multi-Frame Image (Panorama)",
                0x020002: "Multi-Frame Image: (Disparity)",
                0x020003: "Multi-Frame Image: (Multi-Angle)",
                0x030000: "Baseline MP Primary Image",
            }
            mpentryattr["MPType"] = mptypemap.get(mpentryattr["MPType"], "Unknown")
            mpentry["Attribute"] = mpentryattr
            mpentries.append(mpentry)
        mp[0xB002] = mpentries
    except KeyError as e:
        raise SyntaxError("malformed MP Index (bad MP Entry)") from e
    # Next we should try and parse the individual image unique ID list;
    # we don't because I've never seen this actually used in a real MPO
    # file and so can't test it.
    return mp
def unpack_multi(self, fmt, advance=True):
    fmt = fmt.encode('ascii')
    result = struct.unpack_from(fmt, self.buffer, self.offset)
    if advance:
        self.advance(struct.calcsize(fmt))
    return result
def load_mesh(self, meshid, gltf_mesh, gltf_data):
    node = self.meshes.get(meshid, GeomNode(gltf_mesh['name']))

    # Clear any existing mesh data
    node.remove_all_geoms()

    # Check for skinning data
    mesh_attribs = gltf_mesh['primitives'][0]['attributes']
    is_skinned = 'WEIGHT' in mesh_attribs

    # Describe the vertex data
    vert_array = GeomVertexArrayFormat()
    vert_array.add_column(InternalName.get_vertex(), 3,
                          GeomEnums.NT_float32, GeomEnums.C_point)
    vert_array.add_column(InternalName.get_normal(), 3,
                          GeomEnums.NT_float32, GeomEnums.C_normal)

    if is_skinned:
        # Find all nodes that use this mesh and try to find a skin
        gltf_nodes = [
            gltf_node for gltf_node in gltf_data['nodes'].values()
            if 'meshes' in gltf_node and meshid in gltf_node['meshes']
        ]
        gltf_node = [gltf_node for gltf_node in gltf_nodes if 'skin' in gltf_node][0]
        gltf_skin = gltf_data['skins'][gltf_node['skin']]
        jvtmap = self.create_character(gltf_node, gltf_skin, gltf_mesh, gltf_data)

        tb_va = GeomVertexArrayFormat()
        tb_va.add_column(InternalName.get_transform_blend(), 1,
                         GeomEnums.NTUint16, GeomEnums.CIndex)
        tbtable = TransformBlendTable()

    uv_layers = [
        i.replace('TEXCOORD_', '')
        for i in gltf_mesh['primitives'][0]['attributes']
        if i.startswith('TEXCOORD_')
    ]
    for uv_layer in uv_layers:
        vert_array.add_column(InternalName.get_texcoord_name(uv_layer), 2,
                              GeomEnums.NTFloat32, GeomEnums.CTexcoord)

    #reg_format = GeomVertexFormat.register_format(GeomVertexFormat(vert_array))
    vformat = GeomVertexFormat()
    vformat.add_array(vert_array)

    if is_skinned:
        vformat.add_array(tb_va)
        aspec = GeomVertexAnimationSpec()
        aspec.set_panda()
        vformat.set_animation(aspec)

    reg_format = GeomVertexFormat.register_format(vformat)
    vdata = GeomVertexData(gltf_mesh['name'], reg_format, GeomEnums.UH_stream)
    if is_skinned:
        vdata.set_transform_blend_table(tbtable)

    # Write the vertex data
    pacc_name = mesh_attribs['POSITION']
    pacc = gltf_data['accessors'][pacc_name]

    handle = vdata.modify_array(0).modify_handle()
    handle.unclean_set_num_rows(pacc['count'])

    buff_view = gltf_data['bufferViews'][pacc['bufferView']]
    buff = gltf_data['buffers'][buff_view['buffer']]
    buff_data = base64.b64decode(buff['uri'].split(',')[1])
    start = buff_view['byteOffset']
    end = buff_view['byteOffset'] + buff_view['byteLength']
    handle.copy_data_from(buff_data[start:end])
    handle = None
    #idx = start
    #while idx < end:
    #    s = struct.unpack_from('<ffffff', buff_data, idx)
    #    idx += 24
    #    print(s)

    # Write the transform blend table
    if is_skinned:
        tdata = GeomVertexWriter(vdata, InternalName.get_transform_blend())

        sacc = gltf_data['accessors'][mesh_attribs['WEIGHT']]
        sbv = gltf_data['bufferViews'][sacc['bufferView']]
        sbuff = gltf_data['buffers'][sbv['buffer']]
        sbuff_data = base64.b64decode(sbuff['uri'].split(',')[1])

        for i in range(0, sbv['byteLength'], 32):
            joints = struct.unpack_from('<BBBB', sbuff_data, i)
            weights = struct.unpack_from('<ffff', sbuff_data, i + 16)
            #print(i, joints, weights)
            tblend = TransformBlend()
            for j in range(4):
                joint = joints[j]
                weight = weights[j]
                try:
                    jvt = jvtmap[joint]
                except KeyError:
                    print("Could not find joint in jvtmap:\n\tjoint={}\n\tjvtmap={}"
                          .format(joint, jvtmap))
                    continue
                tblend.add_transform(jvt, weight)
            tdata.add_data1i(tbtable.add_blend(tblend))
        tbtable.set_rows(SparseArray.lower_on(vdata.get_num_rows()))

    geom_idx = 0
    for gltf_primitive in gltf_mesh['primitives']:
        # Grab the index data
        prim = GeomTriangles(GeomEnums.UH_stream)

        iacc_name = gltf_primitive['indices']
        iacc = gltf_data['accessors'][iacc_name]

        num_verts = iacc['count']
        if iacc['componentType'] == 5123:
            prim.set_index_type(GeomEnums.NTUint16)
        handle = prim.modify_vertices(num_verts).modify_handle()
        handle.unclean_set_num_rows(num_verts)

        buff_view = gltf_data['bufferViews'][iacc['bufferView']]
        buff = gltf_data['buffers'][buff_view['buffer']]
        buff_data = base64.b64decode(buff['uri'].split(',')[1])
        start = buff_view['byteOffset']
        end = buff_view['byteOffset'] + buff_view['byteLength']
        handle.copy_data_from(buff_data[start:end])
        #idx = start
        #indbuf = []
        #while idx < end:
        #    s = struct.unpack_from('<HHH', buff_data, idx)
        #    idx += 6
        #    print(s)
        #print(prim.get_max_vertex(), vdata.get_num_rows())
        handle = None

        #ss = StringStream()
        #vdata.write(ss)
        #print(ss.getData())
        #prim.write(ss, 2)
        #print(ss.getData())

        # Get a material
        matid = gltf_primitive.get('material', None)
        if matid is None:
            print("Warning: mesh {} has a primitive with no material, using an empty RenderState"
                  .format(meshid))
            mat = RenderState.make_empty()
        elif matid not in self.mat_states:
            print("Warning: material with name {} has no associated mat state, using an empty RenderState"
                  .format(matid))
            mat = RenderState.make_empty()
        else:
            mat = self.mat_states[gltf_primitive['material']]
            self.mat_mesh_map[gltf_primitive['material']].append((meshid, geom_idx))

        # Now put it together
        geom = Geom(vdata)
        geom.add_primitive(prim)
        node.add_geom(geom, mat)

        geom_idx += 1

    self.meshes[meshid] = node
def __init__(self, header_byte_array, length):
    (self.Type,
     self.Length,
     self.Reserved,
     self.RegisterBaseAddress,
     self.ProximityDomain) = struct.unpack_from(
        DMARTable.RHSAStruct.STRUCT_FORMAT, header_byte_array)
def decode_hybi(buf, base64=False):
    """Decode HyBi style WebSocket packets."""
    f = {
        "fin": 0,
        "opcode": 0,
        "mask": 0,
        "hlen": 2,
        "length": 0,
        "payload": None,
        "left": 0,
        "close_code": None,
        "close_reason": None,
    }

    blen = len(buf)
    f["left"] = blen

    if blen < f["hlen"]:
        return f  # Incomplete frame header

    b1, b2 = struct.unpack_from(">BB", buf)
    f["opcode"] = b1 & 0x0F
    f["fin"] = (b1 & 0x80) >> 7
    has_mask = (b2 & 0x80) >> 7

    f["length"] = b2 & 0x7F

    if f["length"] == 126:
        f["hlen"] = 4
        if blen < f["hlen"]:
            return f  # Incomplete frame header
        (f["length"],) = struct.unpack_from(">xxH", buf)
    elif f["length"] == 127:
        f["hlen"] = 10
        if blen < f["hlen"]:
            return f  # Incomplete frame header
        (f["length"],) = struct.unpack_from(">xxQ", buf)

    full_len = f["hlen"] + has_mask * 4 + f["length"]

    if blen < full_len:
        return f  # Incomplete frame

    # Number of bytes that are part of the next frame(s)
    f["left"] = blen - full_len

    # Process 1 frame
    if has_mask:
        # unmask payload
        f["mask"] = buf[f["hlen"]:f["hlen"] + 4]
        b = c = ""
        if f["length"] >= 4:
            data = struct.unpack("<I", buf[f["hlen"]:f["hlen"] + 4])[0]
            of1 = f["hlen"] + 4
            b = ""
            for i in xrange(0, int(f["length"] / 4)):
                mask = struct.unpack("<I", buf[of1 + 4 * i:of1 + 4 * (i + 1)])[0]
                b += struct.pack("I", data ^ mask)

        if f["length"] % 4:
            l = f["length"] % 4
            of1 = f["hlen"]
            of2 = full_len - l
            c = ""
            for i in range(0, l):
                mask = struct.unpack("B", buf[of1 + i])[0]
                data = struct.unpack("B", buf[of2 + i])[0]
                c += chr(data ^ mask)

        f["payload"] = b + c
    else:
        f["payload"] = buf[(f["hlen"] + has_mask * 4):full_len]

    if base64 and f["opcode"] in [1, 2]:
        f["payload"] = b64decode(f["payload"])

    if f["opcode"] == 0x08:
        if f["length"] >= 2:
            f["close_code"] = struct.unpack_from(">H", f["payload"])[0]
        if f["length"] > 3:
            f["close_reason"] = f["payload"][2:]

    return f
def read(idx, server_connection, logger):
    connection = server_connection.connection
    data = connection.recv(2048)
    read_messages = []
    logger.log("Dispatcher {0}: buffer={1}".format(idx, server_connection.buffer))
    logger.log("Dispatcher {0}: read {1} bytes".format(idx, len(data)))
    logger.log("Dispatcher {0}: read {1}".format(idx, data))
    if not data:
        raise ValueError('Client disconnected')

    server_connection.buffer.extend(data)
    length = 0
    opcode = 0
    messageData = 0
    while server_connection.position < len(server_connection.buffer):
        left = len(server_connection.buffer) - server_connection.position
        logger.log("Dispatcher {0}: left={1}".format(idx, left))
        # there is a header
        if left >= 5:
            length = struct.unpack_from('<I', server_connection.buffer,
                                        server_connection.position)[0]
            server_connection.position += ClientMessage.INT_SIZE
            opcode = struct.unpack_from('<B', server_connection.buffer,
                                        server_connection.position)[0]
            server_connection.position += ClientMessage.BYTE_SIZE
            # there is also data
            if left - (ClientMessage.INT_SIZE + ClientMessage.BYTE_SIZE) >= length:
                start = ClientMessage.INT_SIZE + ClientMessage.BYTE_SIZE
                messageData = server_connection.buffer[start:length + start]
                server_connection.position += length
                logger.log("Dispatcher {0}: read message, opcode={1}, length={2} data={3}"
                           .format(idx, opcode, length, messageData))
                logger.log("Dispatcher {0}: at position: {1}".format(
                    idx, server_connection.position))
                read_messages.append({
                    "opcode": opcode,
                    "data": messageData,
                    "client": server_connection.id
                })
                if len(server_connection.buffer) > length + start:
                    logger.log("Dispatcher {0}: probably got parts of new message. Put it in buffer"
                               .format(idx))
                    logger.log("Dispatcher {0}: current buffer={1}".format(
                        idx, server_connection.buffer))
                    tmp = server_connection.buffer[length + start:len(server_connection.buffer)]
                    server_connection.buffer = bytearray()
                    server_connection.buffer.extend(tmp)
                    server_connection.position = 0
                    logger.log("Dispatcher {0}: current buffer length is {1}".format(
                        idx, len(server_connection.buffer)))
                else:
                    logger.log("Dispatcher {0}: reset buffer".format(idx))
                    server_connection.position = 0
                    server_connection.buffer = bytearray()
                    break
                # TODO: implement raising of message receive event
                #notifyClientReceivedListeners(opcode, messageData, connection.getClientID());
            # no data yet, so push the rest back into the buffer
            else:
                logger.log("Dispatcher {0}: opcode={1}, length={2}. Whole message not yet received"
                           .format(idx, opcode, length))
                logger.log("Dispatcher {0}: at position: {1}".format(
                    idx, server_connection.position))
                logger.log("Dispatcher {0}: message not complete, put received back to buffer"
                           .format(idx))
                server_connection.position = 0
                break
        # no header, so push the rest back into the buffer
        else:
            logger.log("Dispatcher {0}: message not complete, put received back to buffer"
                       .format(idx))
            break

    logger.log("Dispatcher {0}: buffer: {1}".format(idx, server_connection.buffer))
    return read_messages
def unpack_unsigned_int(self, length):
    # unsigned big-endian (MSB first)
    return struct.unpack_from(b'>Q', b'\0' * (8 - length) + self.extract(length))[0]
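# The zero-padding trick above parses a variable-length big-endian integer with
# a single fixed '>Q' format: left-padding with NUL bytes does not change the
# value. A standalone demonstration with a 3-byte field:
raw = b'\x01\x02\x03'
assert struct.unpack_from(b'>Q', b'\0' * (8 - len(raw)) + raw)[0] == 0x010203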
def print_exstop(raw_buf):
    data = struct.unpack_from("<I", raw_buf)
    flags = data[0]
    exact_ip = flags & 1
    print("IP: %u" % (exact_ip), end=' ')
def __init__(self, header_byte_array):
    (self.Type, self.Length) = struct.unpack_from(
        DMARTable.RemappingStructHeader.STRUCT_FORMAT, header_byte_array)
def print_cbr(raw_buf):
    data = struct.unpack_from("<BBBBII", raw_buf)
    cbr = data[0]
    f = (data[4] + 500) / 1000
    p = ((cbr * 1000 / data[2]) + 5) / 10
    print("%3u freq: %4u MHz (%3u%%)" % (cbr, f, p), end=' ')
def print_psb(raw_buf):
    data = struct.unpack_from("<IQ", raw_buf)
    offset = data[1]
    print("offset: %#x" % (offset), end=' ')
def print_ptwrite(raw_buf):
    data = struct.unpack_from("<IQ", raw_buf)
    flags = data[0]
    payload = data[1]
    exact_ip = flags & 1
    print("IP: %u payload: %#x" % (exact_ip, payload), end=' ')
def print_mwait(raw_buf):
    data = struct.unpack_from("<IQ", raw_buf)
    payload = data[1]
    hints = payload & 0xff
    extensions = (payload >> 32) & 0x3
    print("hints: %#x extensions: %#x" % (hints, extensions), end=' ')