def from_binary(data):
    nid = NodeId()
    encoding = ord(data.read(1))
    nid.NodeIdType = NodeIdType(encoding & 0b00111111)

    if nid.NodeIdType == NodeIdType.TwoByte:
        nid.Identifier = ord(data.read(1))
    elif nid.NodeIdType == NodeIdType.FourByte:
        nid.NamespaceIndex, nid.Identifier = struct.unpack("<BH", data.read(3))
    elif nid.NodeIdType == NodeIdType.Numeric:
        nid.NamespaceIndex, nid.Identifier = struct.unpack("<HI", data.read(6))
    elif nid.NodeIdType == NodeIdType.String:
        nid.NamespaceIndex = uatype_UInt16.unpack(data.read(2))[0]
        nid.Identifier = unpack_string(data)
    elif nid.NodeIdType == NodeIdType.ByteString:
        nid.NamespaceIndex = uatype_UInt16.unpack(data.read(2))[0]
        nid.Identifier = unpack_bytes(data)
    elif nid.NodeIdType == NodeIdType.Guid:
        nid.NamespaceIndex = uatype_UInt16.unpack(data.read(2))[0]
        nid.Identifier = Guid.from_binary(data)
    else:
        raise UaError("Unknown NodeId encoding: " + str(nid.NodeIdType))

    if test_bit(encoding, 7):
        nid.NamespaceUri = unpack_string(data)
    if test_bit(encoding, 6):
        nid.ServerIndex = uatype_UInt32.unpack(data.read(4))[0]

    return nid
def deserialize(self, f):
    self.nVersion = struct.unpack("<i", f.read(4))[0]
    self.vin = deser_vector(f, CTxIn)
    self.vout = deser_vector(f, CTxOut)
    self.nLockTime = struct.unpack("<I", f.read(4))[0]
    self.sha256 = None
    self.hash = None
def _parsekv2(self, s):
    pos = 0
    (strno, descriptorlen, descriptortype, valuelen) = struct.unpack('<2xHHHI', s[pos:pos + 12])
    pos += 12
    descriptorname = s[pos:pos + descriptorlen]
    pos += descriptorlen
    descriptorvalue = s[pos:pos + valuelen]
    pos += valuelen
    value = None
    #print "%d %s [%d]" % (strno, descriptorname, valuelen)
    if descriptortype == 0x0000:    # Unicode string
        value = descriptorvalue
    elif descriptortype == 0x0001:  # Byte Array
        value = descriptorvalue
    elif descriptortype == 0x0002:  # Bool
        value = struct.unpack('<H', descriptorvalue)[0] != 0
    elif descriptortype == 0x0003:  # DWORD
        value = struct.unpack('<I', descriptorvalue)[0]
    elif descriptortype == 0x0004:  # QWORD
        value = struct.unpack('<Q', descriptorvalue)[0]
    elif descriptortype == 0x0005:  # WORD
        value = struct.unpack('<H', descriptorvalue)[0]
    else:
        _print("Unknown Descriptor Type %d" % descriptortype)
    return (pos, descriptorname, value, strno)
def _ofs_from_idx(self, idx):
    ofs = struct.unpack('!I', str(buffer(self.ofstable, idx * 4, 4)))[0]
    if ofs & 0x80000000:
        idx64 = ofs & 0x7fffffff
        ofs = struct.unpack('!Q', str(buffer(self.ofs64table, idx64 * 8, 8)))[0]
    return ofs
def saveScreenshot(self, filename):
    """
    Takes a screenshot of what's being displayed on the device. Uses
    "screencap" on newer (Android 3.0+) devices (and some older ones with
    the functionality backported). This function also works on B2G.

    Throws an exception on failure. This will always fail on devices
    without the screencap utility.
    """
    screencap = '/system/bin/screencap'
    if not self.fileExists(screencap):
        raise DMError("Unable to capture screenshot on device: no screencap utility")

    with open(filename, 'w') as pngfile:
        # newer versions of screencap can write directly to a png, but some
        # older versions can't
        tempScreenshotFile = self.getDeviceRoot() + "/ss-dm.tmp"
        self.shellCheckOutput(["sh", "-c", "%s > %s" % (screencap, tempScreenshotFile)],
                              root=True)
        buf = self.pullFile(tempScreenshotFile)
        width = int(struct.unpack("I", buf[0:4])[0])
        height = int(struct.unpack("I", buf[4:8])[0])
        with open(filename, 'w') as pngfile:
            pngfile.write(self._writePNG(buf[12:], width, height))
        self.removeFile(tempScreenshotFile)
def read_item(self):
    ftype = ord(self.input.read(1))

    if ftype == 83:  # 'S'
        val = self.read_longstr()
    elif ftype == 73:  # 'I'
        val = unpack('>i', self.input.read(4))[0]
    elif ftype == 68:  # 'D'
        d = self.read_octet()
        n = unpack('>i', self.input.read(4))[0]
        val = Decimal(n) / Decimal(10 ** d)
    elif ftype == 84:  # 'T'
        val = self.read_timestamp()
    elif ftype == 70:  # 'F'
        val = self.read_table()  # recurse
    elif ftype == 65:  # 'A'
        val = self.read_array()
    elif ftype == 116:
        val = self.read_bit()
    elif ftype == 100:
        val = self.read_float()
    else:
        raise FrameSyntaxError(
            'Unknown value in table: {0!r} ({1!r})'.format(
                ftype, type(ftype)))
    return val
def _recv_dict(self, pipe):
    """Receive a serialized dict on a pipe

    Returns the dictionary.
    """
    # Wire format:
    #   // Pipe sees (all numbers are longs, except for the first):
    #   // - num bytes in a long (sent as a single unsigned char!)
    #   // - num elements of the MIME dictionary; Jupyter selects one to display.
    #   // For each MIME dictionary element:
    #   //   - length of MIME type key
    #   //   - MIME type key
    #   //   - size of MIME data buffer (including the terminating 0 for
    #   //     0-terminated strings)
    #   //   - MIME data buffer data
    data = {}
    b1 = os.read(pipe, 1)
    sizeof_long = struct.unpack('B', b1)[0]
    if sizeof_long == 8:
        fmt = 'Q'
    else:
        fmt = 'L'
    buf = os.read(pipe, sizeof_long)
    num_elements = struct.unpack(fmt, buf)[0]
    for i in range(num_elements):
        buf = os.read(pipe, sizeof_long)
        len_key = struct.unpack(fmt, buf)[0]
        key = os.read(pipe, len_key).decode('utf8')
        buf = os.read(pipe, sizeof_long)
        len_value = struct.unpack(fmt, buf)[0]
        value = os.read(pipe, len_value).decode('utf8')
        data[key] = value
    return data
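# A hypothetical sender-side sketch of the wire format documented in _recv_dict()
# above; the helper name _send_dict and its default arguments are assumptions for
# illustration, not part of the original module.
import os
import struct

def _send_dict(pipe, data, fmt='Q', sizeof_long=8):
    os.write(pipe, struct.pack('B', sizeof_long))   # size of a long, one unsigned char
    os.write(pipe, struct.pack(fmt, len(data)))     # number of MIME entries
    for key, value in data.items():
        k, v = key.encode('utf8'), value.encode('utf8')
        os.write(pipe, struct.pack(fmt, len(k)))    # length of MIME type key
        os.write(pipe, k)
        os.write(pipe, struct.pack(fmt, len(v)))    # size of MIME data buffer
        os.write(pipe, v)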
def patch(data):
    pos = data.find('\x7fELF', 1)
    if pos != -1 and data.find('\x7fELF', pos + 1) == -1:
        data2 = data[pos:]
        try:
            (id, a, b, c, d, e, f, offset, g, h, i, j,
             entrySize, count, index) = unpack('QQHHIIIIIHHHHHH', data2[:52])
            if id == 0x64010101464c457f and offset != 0:
                (a, b, c, d, nameTableOffset, size, e, f, g, h) = unpack(
                    'IIIIIIIIII', data2[offset + index * entrySize:offset + (index + 1) * entrySize])
                header = data2[offset:offset + count * entrySize]
                firstText = True
                for i in xrange(count):
                    entry = header[i * entrySize:(i + 1) * entrySize]
                    (nameIndex, a, b, c, offset, size, d, e, f, g) = unpack('IIIIIIIIII', entry)
                    nameOffset = nameTableOffset + nameIndex
                    name = data2[nameOffset:data2.find('\x00', nameOffset)]
                    if name == '.text':
                        if firstText:
                            firstText = False
                        else:
                            data2 = data2[offset:offset + size]
                            patched = ''
                            for i in xrange(len(data2) / 8):
                                instruction, = unpack('Q', data2[i * 8:i * 8 + 8])
                                if (instruction & 0x9003f00002001000) == 0x0001a00000000000:
                                    instruction ^= (0x0001a00000000000 ^ 0x0000c00000000000)
                                patched += pack('Q', instruction)
                            return ''.join([data[:pos + offset], patched, data[pos + offset + size:]])
        except error:
            pass
    return data
def do_checksum_buffer(self, buf, checksum):
    self._discFrameCounter += 1

    # on first track ...
    if self._trackNumber == 1:
        # ... skip first 4 CD frames
        if self._discFrameCounter <= 4:
            gst.debug('skipping frame %d' % self._discFrameCounter)
            return checksum
        # ... on 5th frame, only use last value
        elif self._discFrameCounter == 5:
            values = struct.unpack("<I", buf[-4:])
            checksum += common.SAMPLES_PER_FRAME * 5 * values[0]
            checksum &= 0xFFFFFFFF
            return checksum

    # on last track, skip last 5 CD frames
    if self._trackNumber == self._trackCount:
        discFrameLength = self._sampleLength / common.SAMPLES_PER_FRAME
        if self._discFrameCounter > discFrameLength - 5:
            self.debug('skipping frame %d', self._discFrameCounter)
            return checksum

    values = struct.unpack("<%dI" % (len(buf) / 4), buf)
    for i, value in enumerate(values):
        # self._bytes is updated after do_checksum_buffer
        checksum += (self._bytes / 4 + i + 1) * value
        checksum &= 0xFFFFFFFF
        # offset = self._bytes / 4 + i + 1
        # if offset % common.SAMPLES_PER_FRAME == 0:
        #     print 'frame %d, ends before %d, last value %08x, CRC %08x' % (
        #         offset / common.SAMPLES_PER_FRAME, offset, value, sum)
    return checksum
def _read_gzip_header(self):
    magic = self.fileobj.read(2)
    if magic != '\037\213':
        raise IOError, 'Not a gzipped file'
    method, flag, self.mtime = struct.unpack("<BBIxx", self._read_exact(8))
    if method != 8:
        raise IOError, 'Unknown compression method'

    if flag & FEXTRA:
        # Read & discard the extra field, if present
        extra_len, = struct.unpack("<H", self._read_exact(2))
        self._read_exact(extra_len)
    if flag & FNAME:
        # Read and discard a null-terminated string containing the filename
        while True:
            s = self.fileobj.read(1)
            if not s or s == '\000':
                break
    if flag & FCOMMENT:
        # Read and discard a null-terminated string containing a comment
        while True:
            s = self.fileobj.read(1)
            if not s or s == '\000':
                break
    if flag & FHCRC:
        self._read_exact(2)  # Read & discard the 16-bit header CRC
def _decode_one(buf):
    self = navio_imu_t()
    self.timestamp = struct.unpack(">q", buf.read(8))[0]
    self.imu_pos = struct.unpack('>3q', buf.read(24))
    self.imu_vel = struct.unpack('>3q', buf.read(24))
    self.imu_acc = struct.unpack('>3q', buf.read(24))
    return self
def receive_one_ping(my_socket, ID, timeout):
    """
    Receive the ping from the socket.
    """
    timeLeft = timeout
    while True:
        startedSelect = time.time()
        whatReady = select.select([my_socket], [], [], timeLeft)
        howLongInSelect = (time.time() - startedSelect)
        if whatReady[0] == []:  # Timeout
            return

        timeReceived = time.time()
        recPacket, addr = my_socket.recvfrom(1024)
        icmpHeader = recPacket[20:28]
        type, code, checksum, packetID, sequence = struct.unpack(
            "bbHHh", icmpHeader
        )
        if packetID == ID:
            bytesInDouble = struct.calcsize("d")
            timeSent = struct.unpack("d", recPacket[28:28 + bytesInDouble])[0]
            return timeReceived - timeSent

        timeLeft = timeLeft - howLongInSelect
        if timeLeft <= 0:
            return
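# Sketch of the sender side this function pairs with (an assumption, not the
# original code): the echo request payload starts with a double holding
# time.time(), which is exactly what the "d" unpack above reads back.
import struct
import time

def build_echo_payload():
    return struct.pack("d", time.time())

payload = build_echo_payload()
sent_at = struct.unpack("d", payload[:struct.calcsize("d")])[0]
print(time.time() - sent_at)  # elapsed seconds, i.e. the round-trip estimate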
def _version_response(self, endpoint, data):
    fw_names = {
        0: "normal_fw",
        1: "recovery_fw"
    }
    resp = {}
    for i in xrange(2):
        fwver_size = 47
        offset = i * fwver_size + 1
        fw = {}
        fw["timestamp"], fw["version"], fw["commit"], fw["is_recovery"], \
            fw["hardware_platform"], fw["metadata_ver"] = \
            unpack("!i32s8s?bb", data[offset:offset + fwver_size])
        fw["version"] = fw["version"].replace("\x00", "")
        fw["commit"] = fw["commit"].replace("\x00", "")
        fw_name = fw_names[i]
        resp[fw_name] = fw

    resp["bootloader_timestamp"], resp["hw_version"], resp["serial"] = \
        unpack("!L9s12s", data[95:120])
    resp["hw_version"] = resp["hw_version"].replace("\x00", "")

    btmac_hex = binascii.hexlify(data[120:126])
    resp["btmac"] = ":".join([btmac_hex[i:i + 2].upper() for i in reversed(xrange(0, 12, 2))])

    return resp
def sendCMDreceiveATT(self, data_length, code, data):
    checksum = 0
    total_data = ['$', 'M', '<', data_length, code] + data
    for i in struct.pack('<2B%dh' % len(data), *total_data[3:len(total_data)]):
        checksum = checksum ^ ord(i)
    total_data.append(checksum)
    try:
        start = time.time()
        b = None
        b = self.ser.write(struct.pack('<3c2B%dhB' % len(data), *total_data))
        while True:
            header = self.ser.read()
            if header == '$':
                header = header + self.ser.read(2)
                break
        datalength = struct.unpack('<b', self.ser.read())[0]
        code = struct.unpack('<b', self.ser.read())
        data = self.ser.read(datalength)
        temp = struct.unpack('<' + 'h' * (datalength / 2), data)
        self.ser.flushInput()
        self.ser.flushOutput()
        elapsed = time.time() - start
        self.attitude['angx'] = float(temp[0] / 10.0)
        self.attitude['angy'] = float(temp[1] / 10.0)
        self.attitude['heading'] = float(temp[2])
        self.attitude['elapsed'] = round(elapsed, 3)
        self.attitude['timestamp'] = "%0.2f" % (time.time(),)
        return self.attitude
    except Exception, error:
        #print "\n\nError in sendCMDreceiveATT."
        #print "("+str(error)+")\n\n"
        pass
def getRSSI(self):
    """Detects whether the device is nearby or not"""
    addr = self.address
    # Open hci socket
    hci_sock = bt.hci_open_dev()
    hci_fd = hci_sock.fileno()
    # Connect to device (to whatever you like)
    bt_sock = bluetooth.BluetoothSocket(bluetooth.L2CAP)
    bt_sock.settimeout(10)
    result = bt_sock.connect_ex((addr, 1))  # PSM 1 - Service Discovery
    try:
        # Get ConnInfo
        reqstr = struct.pack("6sB17s", bt.str2ba(addr), bt.ACL_LINK, "\0" * 17)
        request = array.array("c", reqstr)
        handle = fcntl.ioctl(hci_fd, bt.HCIGETCONNINFO, request, 1)
        handle = struct.unpack("8xH14x", request.tostring())[0]
        # Get RSSI
        cmd_pkt = struct.pack('H', handle)
        rssi = bt.hci_send_req(hci_sock, bt.OGF_STATUS_PARAM,
                               bt.OCF_READ_RSSI, bt.EVT_CMD_COMPLETE, 4, cmd_pkt)
        rssi = struct.unpack('b', rssi[3])[0]
        # Close sockets
        bt_sock.close()
        hci_sock.close()
        return rssi
    except Exception, e:
        return None
def handle_TRI_VERTEXL(self, lengthRemaining):
    count = struct.unpack("<H", self.file.read(2))[0]
    lengthRemaining -= 2
    for i in range(count):
        self.object_vertices.append(V3(struct.unpack("<fff", self.file.read(3 * 4))))
        lengthRemaining -= 3 * 4
    self.skip(lengthRemaining)
def recieve_data(self):
    buffer = ''
    # receive junk until the header is found
    prev_byte = '\x00'
    while 1:
        cur_byte = self.ser.read(1)
        if prev_byte + cur_byte == HEADER:
            # header found, stop
            break
        prev_byte = cur_byte

    length = struct.unpack('<H', self.get_n_bytes(2))[0]
    packet = self.get_n_bytes(length, True)

    reserved, command = struct.unpack('<HH', packet[:4])
    data = packet[4:-1]
    checksum = ord(packet[-1])
    #~ print self.tohex(packet[:-1])
    packet_int = map(ord, packet[:-1])
    checksum_calc = reduce(lambda x, y: x ^ y, packet_int)
    if data[0] == '\x00':
        if checksum != checksum_calc:
            raise Exception, "bad checksum"
    return command, data
def __init__(self, path):
    with open(path, "rb") as f:
        # Read Device Information struct (defined in FlashOS.H, declared in FlashDev.c).
        self.version = unpack("H", f.read(2))[0]
        self.devName = f.read(128).split(b'\0', 1)[0]
        self.devType = unpack("H", f.read(2))[0]
        self.devAddr = unpack("L", f.read(4))[0]
        self.szDev = unpack("L", f.read(4))[0]
        self.szPage = unpack("L", f.read(4))[0]
        skipped = f.read(4)
        self.valEmpty = unpack("B", f.read(1))[0]
        skipped = f.read(3)
        self.toProg = unpack("L", f.read(4))[0]
        self.toErase = unpack("L", f.read(4))[0]
        self.sectSize = []
        self.sectAddr = []
        while 1:
            addr = unpack("L", f.read(4))[0]
            size = unpack("L", f.read(4))[0]
            if addr == 0xffffffff:
                break
            elif size == 0xffffffff:
                break
            else:
                self.sectSize.append(size)
                self.sectAddr.append(addr)
def __init__(self, filename, myopen=open, swapYZ=False):
    super(MeshSTLBinary, self).__init__()
    with myopen(filename, "rb") as f:
        header = f.read(80)
        assert not header.startswith("solid")
        assert len(header) == 80
        vertexCount = 0
        numTriangles = struct.unpack("<I", f.read(4))[0]
        for i in range(numTriangles):
            assert len(f.read(12)) == 12  # skip normal
            for i in range(3):
                v = struct.unpack("<3f", f.read(12))
                if swapYZ:
                    v = (v[0], v[2], -v[1])
                self.vertices.append(V3(v))
            self.faces.append((0, (vertexCount, vertexCount + 1, vertexCount + 2)))
            vertexCount += 3
            assert len(f.read(2)) == 2  # skip attributes
    assert self.vertices
    assert self.faces
def __init__(self, raw_packet=None):
    if raw_packet is None:
        self.ethernet = Ethernet()
        self.hardware_type = None
        self.protocol_type = None
        self.hardware_size = None
        self.protocol_size = None
        self.operation = None
        self.sender_hardware_address = None
        self.sender_protocol_address = None
        self.target_hardware_address = None
        self.target_protocol_address = None
    else:
        self.ethernet = Ethernet(raw_packet)
        raw_arp = self.ethernet.data
        self.hardware_type = struct.unpack('!H', raw_arp[:2])[0]
        self.protocol_type = struct.unpack('!H', raw_arp[2:4])[0]
        self.hardware_size = struct.unpack('!B', raw_arp[4])[0]
        self.protocol_size = struct.unpack('!B', raw_arp[5])[0]
        self.operation = struct.unpack('!H', raw_arp[6:8])[0]
        self.sender_hardware_address = raw_arp[8:14]
        self.sender_protocol_address = raw_arp[14:18]
        self.target_hardware_address = raw_arp[18:24]
        self.target_protocol_address = raw_arp[24:28]
def dump_services(dev):
    services = sorted(dev.getServices(), key=lambda s: s.hndStart)
    for s in services:
        print("\t%04x: %s" % (s.hndStart, s))
        if s.hndStart == s.hndEnd:
            continue
        chars = s.getCharacteristics()
        for i, c in enumerate(chars):
            props = c.propertiesToString()
            h = c.getHandle()
            string = ''  # ensure "string" is defined for the print below
            if 'READ' in props:
                val = c.read()
                if c.uuid == btle.AssignedNumbers.device_name:
                    string = '\'' + val.decode('utf-8') + '\''
                elif c.uuid == btle.AssignedNumbers.device_information:
                    string = repr(val)
                elif 'NOTIFY' in props:
                    sensordata = bytearray(val)
                    temp, = struct.unpack('f', sensordata[:4])
                    humidity, = struct.unpack('f', sensordata[4:])
                    print('temp: ', temp, 'humidity: ', humidity)
                else:
                    string = '<s' + binascii.b2a_hex(val).decode('utf-8') + '>'
            else:
                string = ''
            print("\t%04x: %-59s %-12s %s" % (h, c, props, string))
def ExtPak(fs, dirname):
    fs.seek(0)
    magic, desc, minor_ver, major_ver, index_len, flags, index_entries, \
        data_offset, index_offset = unpack('16s32sHHIIIII', fs.read(72))
    if magic.rstrip(b'\0') != b'DataPack5':
        print("Format Error!")
        return 0
    fs.seek(index_offset)
    idx = bytearray(fs.read(index_len))
    if flags & 1:
        Decrypt(idx)
    idx = LzssUnc(bytes(idx), index_entries * 0x68)
    #int3()
    idxstm = io.BytesIO(idx)
    for i in range(index_entries):
        fname, offset, length = unpack('64sII32x', idxstm.read(0x68))
        fname = fname.rstrip(b'\0').decode('932')
        if fname == '':
            xxx = 0
        newf = open(os.path.join(dirname, fname), 'wb')
        fs.seek(offset + data_offset)
        buff = fs.read(length)
        newf.write(buff)
        newf.close()
def getInfo(dbfile):
    header_part = HEADER_SIZE / 4
    ftree = open(dbfile, "r")
    ftree.seek(header_part)
    buf = ftree.read(header_part)
    key_len = struct.unpack("!L", buf.decode('hex'))[0]
    buf = ftree.read(header_part)
    max_user_id = struct.unpack("!L", buf.decode('hex'))[0]
    buf = ftree.read(header_part)
    next_user_id = struct.unpack("!L", buf.decode('hex'))[0]

    info = DBInfo()
    info.key_len = key_len
    info.max_user_id = max_user_id
    info.min_user_id = max_user_id >> 1
    if next_user_id == info.min_user_id:
        info.current_user_id = 0  # no new user exists
    else:
        info.current_user_id = next_user_id - 1
    ftree.close()
    return info
def read_xbs_chunk(stream):
    """Read one chunk from the underlying xbstream file object

    :param stream: a file-like object
    :returns: XBSChunk instance
    """
    header = stream.read(XBS_HEADER_SIZE)
    if not header:
        # end of stream
        return None
    magic, flags, _type, pathlen = struct.unpack(b'<8sBcI', header)
    if magic != XBS_MAGIC:
        raise common.UnpackError("Incorrect magic '%s' in chunk" % magic)
    path = stream.read(pathlen)
    if _type == b'E':
        return XBSChunk(flags, _type, path, b'', None)
    elif _type != b'P':
        raise common.UnpackError("Unknown chunk type '%r'" % _type)
    payload_length, payload_offset = struct.unpack(b'<QQ', stream.read(16))
    checksum, = struct.unpack(b'<I', stream.read(4))
    payload = stream.read(payload_length)
    computed_checksum = zlib.crc32(payload) & 0xffffffff
    if checksum != computed_checksum:
        raise common.UnpackError("Invalid checksum(offset=%d path=%s)" %
                                 (payload_offset, path))
    return XBSChunk(flags, _type, path, payload, payload_offset)
def getUserConfig(dbfile, user_id):
    header_part = HEADER_SIZE / 4
    ftree = open(dbfile, "r")
    ftree.seek(header_part)
    buf = ftree.read(header_part)
    key_len = struct.unpack("!L", buf.decode('hex'))[0]
    buf = ftree.read(header_part)
    max_user_id = struct.unpack("!L", buf.decode('hex'))[0]

    if (user_id < max_user_id / 2) or (user_id > max_user_id - 1):
        print "user id ", user_id, " not found"
        return

    print "#user key configuration"
    print "user_id=", user_id
    print "key_len=", key_len
    while user_id != 0:
        ftree.seek(HEADER_SIZE + (user_id * key_len))
        key = ftree.read(key_len)
        print user_id, "=", key
        user_id = user_id >> 1
    ftree.close()
def read_binary_int(self, fname, fp, length):
    '''read a binary int value'''
    if length > 3:
        raise CFFormatError('Integer greater than 8 bytes: %s' % length)

    nbytes = 1 << length
    val = None
    buff = fp.read(nbytes)
    if length == 0:
        val = unpack('>B', buff)
        val = val[0]
    elif length == 1:
        val = unpack('>H', buff)
        val = val[0]
    elif length == 2:
        val = unpack('>L', buff)
        val = val[0]
    elif length == 3:
        (hiword, loword) = unpack('>LL', buff)
        if not (hiword & 0x80000000) == 0:
            # 8 byte integers are always signed, and are negative when bit
            # 63 is set. Decoding into either a Fixnum or Bignum is tricky,
            # however, because the size of a Fixnum varies among systems,
            # and Ruby doesn't consider the number to be negative, and
            # won't sign extend.
            val = -(2**63 - ((hiword & 0x7fffffff) << 32 | loword))
        else:
            val = hiword << 32 | loword

    return CFInteger(val)
def read(self, fp, length, endian, param):
    """
    Read data from fp.

    :param fp: file pointer
    :param length: length to be read
    :param endian: endian type in datafile
    :type param: list
    :param param: sampling rate, sample size, block time, channels
    :rtype: list of list
    :return: list of data
    """
    buff = fp.read(length)
    samplrate = param[0]
    numbyte = param[1]
    numchan = param[3].count(1)
    num = (samplrate / 10) * numbyte * numchan
    data = [[] for _ in range(numchan)]
    if length != num:
        raise EVTBadDataError("Bad data length")
    for j in range(samplrate / 10):
        for k in range(numchan):
            i = (j * numchan) + k
            if numbyte == 2:
                val = unpack(">i", buff[i * 2:(i * 2) + 2] + '\0\0')[0] >> 8
            elif numbyte == 3:
                val = unpack(">i", buff[i * 3:(i * 3) + 3] + '\0')[0] >> 8
            elif numbyte == 4:
                val = unpack(">i", buff[i * 4:(i * 4) + 4])[0]
            else:
                raise EVTBadDataError("Bad data format")
            data[k].append(val)
    return data
def gen_subkeys(K, cipher):
    """Generate subkeys of cipher"""
    from struct import pack, unpack

    L = cipher.encrypt("00000000000000000000000000000000".decode("hex"))
    LHigh = unpack(">Q", L[:8])[0]
    LLow = unpack(">Q", L[8:])[0]

    K1High = ((LHigh << 1) | (LLow >> 63)) & 0xFFFFFFFFFFFFFFFF
    K1Low = (LLow << 1) & 0xFFFFFFFFFFFFFFFF
    if (LHigh >> 63):
        K1Low ^= 0x87

    K2High = ((K1High << 1) | (K1Low >> 63)) & 0xFFFFFFFFFFFFFFFF
    K2Low = ((K1Low << 1)) & 0xFFFFFFFFFFFFFFFF
    if (K1High >> 63):
        K2Low ^= 0x87

    K1 = pack(">QQ", K1High, K1Low)
    K2 = pack(">QQ", K2High, K2Low)
    return K1, K2
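# Example use with PyCrypto's AES in ECB mode (an assumption about the cipher
# object; any object exposing a compatible .encrypt() would do). The key below
# is the RFC 4493 CMAC test vector key.
from Crypto.Cipher import AES

key = "2b7e151628aed2a6abf7158809cf4f3c".decode("hex")
cipher = AES.new(key, AES.MODE_ECB)
K1, K2 = gen_subkeys(key, cipher)
print K1.encode("hex"), K2.encode("hex")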
def init_cam():
    global PASSWORD
    try:
        bcv = httpopen('http://%s/bacpac/cv' % CAM_IP).read()
        print('bacpac CV:', repr(bcv))
        bacpac_version = struct.unpack('BBB', bcv[9:12])
        bacpac_version = '.'.join([str(x) for x in bacpac_version])
        print(' bacpac version:', bacpac_version)
        bacpac_mac = bcv[12:18]
        bacpac_mac = ':'.join(['%02x' % x for x in bacpac_mac])
        print('bacpac mac:', bacpac_mac)
        bacpac_name = bcv[19:].decode('utf-8')
        print('bacpac name:', bacpac_name)

        bsd = httpopen('http://%s/bacpac/sd' % CAM_IP).read()
        print('bacpac SD:', repr(bsd))
        PASSWORD = bsd[2:].decode('utf-8')
        print('bacpac password', PASSWORD)

        ccv = httpopen('http://%s/camera/cv' % CAM_IP).read()
        print('camera CV:', repr(ccv))
        # b'\x00\x00\x01\x13HD2.08.12.198.47.00\x05HERO2'
        dlen = struct.unpack('B', ccv[3:4])[0]
        camera_version = ccv[4:4 + dlen].decode('UTF-8')
        print('camera version', camera_version)
        ipos = 4 + dlen
        dlen = struct.unpack('B', ccv[ipos:ipos + 1])[0]
        ipos += 1
        camera_model = ccv[ipos:ipos + dlen].decode('UTF-8')
        print('camera_model', camera_model)  # FIXME this is the CN parameter
        return True
    except (urllib.error.HTTPError, urllib.error.URLError, socket.error):
        print('Error communicating with bacpac/camera')
        return False
def decode_pair(s, pos=0):
    """
    Decodes a name/value pair.

    The number of bytes decoded as well as the name/value pair
    are returned.
    """
    nameLength = ord(s[pos])
    if nameLength & 128:
        nameLength = struct.unpack('!L', s[pos:pos + 4])[0] & 0x7fffffff
        pos += 4
    else:
        pos += 1

    valueLength = ord(s[pos])
    if valueLength & 128:
        valueLength = struct.unpack('!L', s[pos:pos + 4])[0] & 0x7fffffff
        pos += 4
    else:
        pos += 1

    name = s[pos:pos + nameLength]
    pos += nameLength
    value = s[pos:pos + valueLength]
    pos += valueLength

    return (pos, (name, value))
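# Hypothetical inverse of decode_pair() for a round-trip check: lengths below
# 128 are encoded in one byte, longer ones in four bytes with the top bit set.
import struct

def encode_pair(name, value):
    def enc_len(n):
        return chr(n) if n < 128 else struct.pack('!L', n | 0x80000000)
    return enc_len(len(name)) + enc_len(len(value)) + name + value

buf = encode_pair('QUERY_STRING', 'a=1&b=2')
pos, (name, value) = decode_pair(buf)
assert (name, value) == ('QUERY_STRING', 'a=1&b=2') and pos == len(buf)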
def __getLong(self, begin, devider=10):
    return float(struct.unpack('!I', self.rawmsg[begin:begin + 4])[0]) / devider
def unpack_int32(n):
    try:
        return struct.unpack('B', n[0])[0] + (struct.unpack('B', n[1])[0] << 8) + \
            (struct.unpack('B', n[2])[0] << 16) + (struct.unpack('B', n[3])[0] << 24)
    except TypeError:
        return n[0] + (n[1] << 8) + (n[2] << 16) + (n[3] << 24)
def unpack_uint16(n):
    return struct.unpack('<H', n[0:2])[0]
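# Quick round-trip checks for the two helpers above (both assume the
# little-endian byte order used here).
import struct

assert unpack_uint16(struct.pack('<H', 0x1234)) == 0x1234
assert unpack_int32(struct.pack('<I', 0xDEADBEEF)) == 0xDEADBEEF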
def check_ip(ip, timeout, verbose, uninstall):
    global negotiate_protocol_request, session_setup_request, tree_connect_request, trans2_session_setup

    # Connect to socket
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout(float(timeout) if timeout else None)
    host = ip
    port = 445
    s.connect((host, port))

    # Send/receive negotiate protocol request
    if verbose:
        print_status(ip, "Sending negotiation protocol request")
    s.send(negotiate_protocol_request)
    s.recv(1024)

    # Send/receive session setup request
    if verbose:
        print_status(ip, "Sending session setup request")
    s.send(session_setup_request)
    session_setup_response = s.recv(1024)

    # Extract user ID from session setup response
    user_id = session_setup_response[32:34]
    if verbose:
        print_status(ip, "User ID = %s" % struct.unpack("<H", user_id)[0])

    # Replace user ID in tree connect request packet
    modified_tree_connect_request = list(tree_connect_request)
    modified_tree_connect_request[32] = user_id[0]
    modified_tree_connect_request[33] = user_id[1]
    modified_tree_connect_request = "".join(modified_tree_connect_request)

    # Send tree connect request
    if verbose:
        print_status(ip, "Sending tree connect")
    s.send(modified_tree_connect_request)
    tree_connect_response = s.recv(1024)

    # Extract tree ID from response
    tree_id = tree_connect_response[28:30]
    if verbose:
        print_status(ip, "Tree ID = %s" % struct.unpack("<H", tree_id)[0])

    # Replace tree ID and user ID in trans2 session setup packet
    modified_trans2_session_setup = list(trans2_session_setup)
    modified_trans2_session_setup[28] = tree_id[0]
    modified_trans2_session_setup[29] = tree_id[1]
    modified_trans2_session_setup[32] = user_id[0]
    modified_trans2_session_setup[33] = user_id[1]
    modified_trans2_session_setup = "".join(modified_trans2_session_setup)

    # Send trans2 sessions setup request
    if verbose:
        print_status(ip, "Sending trans2 session setup - ping command")
    s.send(modified_trans2_session_setup)
    final_response = s.recv(1024)

    # Check for 0x51 response to indicate DOUBLEPULSAR infection
    if final_response[34] == "\x51":
        signature = final_response[18:26]
        signature_long = struct.unpack('<Q', signature)[0]
        key = calculate_doublepulsar_xor_key(signature_long)
        arch = calculate_doublepulsar_arch(signature_long)
        with print_lock:
            print "[+] [%s] DOUBLEPULSAR SMB IMPLANT DETECTED!!! Arch: %s, XOR Key: %s" % (ip, arch, hex(key))

        if uninstall:
            # Update MID and op code via timeout
            modified_trans2_session_setup = list(modified_trans2_session_setup)
            modified_trans2_session_setup[34] = "\x42"
            modified_trans2_session_setup[49] = "\x0e"
            modified_trans2_session_setup[50] = "\x69"
            modified_trans2_session_setup[51] = "\x00"
            modified_trans2_session_setup[52] = "\x00"
            modified_trans2_session_setup = "".join(modified_trans2_session_setup)

            if verbose:
                print_status(ip, "Sending trans2 session setup - uninstall/burn command")
            s.send(modified_trans2_session_setup)
            uninstall_response = s.recv(1024)
            if uninstall_response[34] == "\x52":
                with print_lock:
                    print "[+] [%s] DOUBLEPULSAR uninstall successful" % ip

    else:
        with print_lock:
            print "[-] [%s] No presence of DOUBLEPULSAR SMB implant" % ip

    s.close()
def main(argv):
    inputfile = ''
    outputfile = ''
    try:
        opts, args = getopt.getopt(argv, "hi:o:b:", ["ifile=", "ofile=", "blocknum="])
    except getopt.GetoptError:
        print('checksumsimos18.py -i <inputfile> -o <outputfile>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('checksumsimos18.py -i <inputfile> -o <outputfile>')
            sys.exit()
        elif opt in ("-i", "--ifile"):
            inputfile = arg
        elif opt in ("-o", "--ofile"):
            outputfile = arg
        elif opt in ("-b", "--blocknum"):
            blocknum = int(arg)

    print("Checksumming " + inputfile + " to " + outputfile)
    f = open(inputfile, "rb")
    data_binary = f.read()
    checksum_location = checksum_block_location[blocknum]
    current_checksum = struct.unpack("<I", data_binary[checksum_location + 4:checksum_location + 8])[0]
    checksum_area_count = data_binary[checksum_location + 8]
    base_address = base_addresses[blocknum]

    addresses = []
    for i in range(0, checksum_area_count * 2):
        address = struct.unpack('<I', data_binary[checksum_location + 12 + (i * 4):checksum_location + 16 + (i * 4)])
        offset = address[0] - base_address
        addresses.append(offset)

    checksum_data = bytearray()
    for i in range(0, len(addresses), 2):
        start_address = int(addresses[i])
        end_address = int(addresses[i + 1])
        print("Adding " + hex(start_address) + ":" + hex(end_address))
        checksum_data += data_binary[start_address:end_address + 1]

    def crc32(data):
        poly = 0x4c11db7
        crc = 0x00000000
        for byte in data:
            for bit in range(7, -1, -1):  # MSB to LSB
                z32 = crc >> 31  # top bit
                crc = crc << 1
                if ((byte >> bit) & 1) ^ z32:
                    crc = crc ^ poly
                crc = crc & 0xffffffff
        return crc

    checksum = crc32(checksum_data)
    print("Checksum = " + hex(checksum))
    if checksum == current_checksum:
        print("File is valid!")
    else:
        print("File is invalid! File checksum: " + hex(current_checksum) + " does not match " + hex(checksum))
    if len(outputfile) > 0:
        with open(outputfile, 'wb') as fullDataFile:
            data_binary = bytearray(data_binary)
            data_binary[checksum_location + 4:checksum_location + 8] = struct.pack('<I', checksum)
            fullDataFile.write(data_binary)
            print("Fixed checksums and wrote to : " + outputfile)
    else:
        exit(1)
def record_audio(block_size, devices, use_yeelight_bulbs=False, fs=8000): # initialize the yeelight devices: bulbs = [] if use_yeelight_bulbs: for d in devices: bulbs.append(Bulb(d)) try: bulbs[-1].turn_on() except: bulbs = [] # initialize recording process mid_buf_size = int(fs * block_size) pa = pyaudio.PyAudio() stream = pa.open(format=FORMAT, channels=1, rate=fs, input=True, frames_per_buffer=mid_buf_size) mid_buf = [] count = 0 global all_data global outstr all_data = [] outstr = datetime.datetime.now().strftime("%Y_%m_%d_%I:%M%p") # load segment model [classifier, mu, std, class_names, mt_win, mt_step, st_win, st_step, _] = aT.load_model("model") [clf_energy, mu_energy, std_energy, class_names_energy, mt_win_en, mt_step_en, st_win_en, st_step_en, _] = \ aT.load_model("energy") [clf_valence, mu_valence, std_valence, class_names_valence, mt_win_va, mt_step_va, st_win_va, st_step_va, _] = \ aT.load_model("valence") while 1: block = stream.read(mid_buf_size) count_b = len(block) / 2 format = "%dh" % (count_b) shorts = struct.unpack(format, block) cur_win = list(shorts) mid_buf = mid_buf + cur_win del cur_win if len(mid_buf) >= 5 * fs: # data-driven time x = numpy.int16(mid_buf) seg_len = len(x) # extract features # We are using the signal length as mid term window and step, # in order to guarantee a mid-term feature sequence of len 1 [mt_f, _, _] = mF(x, fs, seg_len, seg_len, round(fs * st_win), round(fs * st_step)) fv = (mt_f[:, 0] - mu) / std # classify vector: [res, prob] = aT.classifier_wrapper(classifier, "svm_rbf", fv) win_class = class_names[int(res)] if win_class == "silence": soft_valence = 0 soft_energy = 0 print("Silence") else: # extract features for music mood [f_2, _, _] = mF(x, fs, round(fs * mt_win_en), round(fs * mt_step_en), round(fs * st_win_en), round(fs * st_step_en)) [f_3, _, _] = mF(x, fs, round(fs * mt_win_va), round(fs * mt_step_va), round(fs * st_win_va), round(fs * st_step_va)) # normalize feature vector fv_2 = (f_2[:, 0] - mu_energy) / std_energy fv_3 = (f_3[:, 0] - mu_valence) / std_valence [res_energy, p_en] = aT.classifier_wrapper(clf_energy, "svm_rbf", fv_2) win_class_energy = class_names_energy[int(res_energy)] [res_valence, p_val] = aT.classifier_wrapper(clf_valence, "svm_rbf", fv_3) win_class_valence = class_names_valence[int(res_valence)] soft_energy = p_en[class_names_energy.index("high")] - \ p_en[class_names_energy.index("low")] soft_valence = p_val[class_names_valence.index("positive")] - \ p_val[class_names_valence.index("negative")] print(win_class, win_class_energy, win_class_valence, soft_valence, soft_energy) all_data += mid_buf mid_buf = [] h, w, _ = img.shape y_center, x_center = int(h / 2), int(w / 2) x = x_center + int((w / 2) * soft_valence) y = y_center - int((h / 2) * soft_energy) radius = 20 emo_map_img_2 = emo_map_img.copy() color = numpy.median(emo_map[y - 2:y + 2, x - 2:x + 2], axis=0).mean(axis=0) emo_map_img_2 = cv2.circle( emo_map_img_2, (x, y), radius, (int(color[0]), int(color[1]), int(color[2])), -1) emo_map_img_2 = cv2.circle(emo_map_img_2, (x, y), radius, (255, 255, 255), 2) cv2.imshow('Emotion Color Map', emo_map_img_2) # set yeelight bulb colors if use_yeelight_bulbs: for b in bulbs: if b: # attention: color is in bgr so we need to invert: b.set_rgb(int(color[2]), int(color[1]), int(color[0])) cv2.waitKey(10) count += 1
def read_list_of_floats_from_file(file_path):
    with open(file_path, 'rb') as f:
        s = struct.unpack('d' * BOTTLENECK_TENSOR_SIZE, f.read())
        return list(s)
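# A plausible writer-side counterpart (hypothetical name), packing the same
# fixed count of doubles that the reader above expects.
def write_list_of_floats_to_file(list_of_floats, file_path):
    s = struct.pack('d' * BOTTLENECK_TENSOR_SIZE, *list_of_floats)
    with open(file_path, 'wb') as f:
        f.write(s)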
except socket.error:
    print("Error: failed to open UDP / datagram sockets, terminating programme.")

running = True
while running:
    print("Server Running... Waiting for request packet")
    read_sockets, _, _ = select.select([sock_eng, sock_mao, sock_ger], [], [], None)
    for s in read_sockets:
        data, addr = s.recvfrom(1024)  # buffer
        #if s == sock_eng
        print("received message:", addr)
        MagicNo, PacketType, RequestType = struct.unpack('>hhh', data)
        running = False
        # Performs the necessary checks to see whether the packet is a valid DT-Request packet
        if MagicNo == 0x497E and PacketType == 0x0001 and (RequestType == 0x0001 or RequestType == 0x0002):
            pass
        else:
            print("Error: request packet is not valid, terminating programme.")
            break
        port_number = s.getsockname()[1]  # the port number requested
        if port_number == first_port:  # Matching to appropriate language code
            languageCode = 0x0001
        if port_number == second_port:
            languageCode = 0x0002
        if port_number == third_port:
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with creddump. If not, see <http://www.gnu.org/licenses/>.

"""
@author:  Brendan Dolan-Gavitt
@license: GNU General Public License 2.0 or later
@contact: [email protected]
"""

from struct import unpack

from owade.fileAnalyze.creddump.newobj import Obj, Pointer

ROOT_INDEX = 0x20
LH_SIG = unpack("<H", "lh")[0]
LF_SIG = unpack("<H", "lf")[0]
RI_SIG = unpack("<H", "ri")[0]


def get_root(address_space):
    return Obj("_CM_KEY_NODE", ROOT_INDEX, address_space)


def open_key(root, key):
    if key == []:
        return root
    keyname = key.pop(0)
    for s in subkeys(root):
        if s.Name.upper() == keyname.upper():
def __getShort(self, begin, devider=10):
    num = struct.unpack('!H', self.rawmsg[begin:begin + 2])[0]
    if num == 65535:
        return -1
    else:
        return float(num) / devider
def bufreverse(in_buf):
    out_words = []
    for i in range(0, len(in_buf), 4):
        word = struct.unpack('@I', in_buf[i:i + 4])[0]
        out_words.append(struct.pack('@I', bytereverse(word)))
    return ''.join(out_words)
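# bytereverse() is defined elsewhere in the original module; a definition that
# matches its use here (swapping the byte order of a single 32-bit word) would be:
def bytereverse(x):
    return struct.unpack('<I', struct.pack('>I', x))[0]

# bufreverse() then byte-swaps every 4-byte word of a buffer:
assert bufreverse(struct.pack('@I', 0x11223344)) == struct.pack('@I', 0x44332211)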
def run(self): for idx, i in enumerate(self.instructions): op_tuple = fdb.tuple.unpack(i.value) op = op_tuple[0] # print("Stack is %r" % self.stack) # if op != "PUSH" and op != "SWAP": # print("%d. Instruction is %s" % (idx, op)) isDatabase = op.endswith(six.u('_DATABASE')) isSnapshot = op.endswith(six.u('_SNAPSHOT')) if isDatabase: op = op[:-9] obj = self.db elif isSnapshot: op = op[:-9] obj = self.current_transaction().snapshot else: obj = self.current_transaction() inst = Instruction(obj, self.stack, op, idx, isDatabase, isSnapshot) try: if inst.op == six.u("PUSH"): inst.push(op_tuple[1]) elif inst.op == six.u("DUP"): inst.stack.push(*self.stack[0]) elif inst.op == six.u("EMPTY_STACK"): self.stack = Stack() elif inst.op == six.u("SWAP"): idx = inst.pop() self.stack[0], self.stack[idx] = self.stack[ idx], self.stack[0] elif inst.op == six.u("POP"): inst.pop() elif inst.op == six.u("SUB"): a, b = inst.pop(2) inst.push(a - b) elif inst.op == six.u("CONCAT"): a, b = inst.pop(2) inst.push(a + b) elif inst.op == six.u("WAIT_FUTURE"): old_idx, item = inst.pop(with_idx=True) inst.stack.push(old_idx, item) elif inst.op == six.u("NEW_TRANSACTION"): self.new_transaction() elif inst.op == six.u("USE_TRANSACTION"): self.switch_transaction(inst.pop()) elif inst.op == six.u("ON_ERROR"): inst.push(inst.tr.on_error(inst.pop())) elif inst.op == six.u("GET"): key = inst.pop() num = random.randint(0, 2) if num == 0: f = obj[key] elif num == 1: f = obj.get(key) else: f = obj.__getitem__(key) if f == None: inst.push(b'RESULT_NOT_PRESENT') else: inst.push(f) elif inst.op == six.u("GET_KEY"): key, or_equal, offset, prefix = inst.pop(4) result = obj.get_key(fdb.KeySelector( key, or_equal, offset)) if result.startswith(prefix): inst.push(result) elif result < prefix: inst.push(prefix) else: inst.push(strinc(prefix)) elif inst.op == six.u("GET_RANGE"): begin, end, limit, reverse, mode = inst.pop(5) if limit == 0 and mode == -1 and random.random() < 0.5: if reverse: r = obj[begin:end:-1] else: r = obj[begin:end] else: r = obj.get_range(begin, end, limit, reverse, mode) self.push_range(inst, r) elif inst.op == six.u("GET_RANGE_STARTS_WITH"): prefix, limit, reverse, mode = inst.pop(4) self.push_range( inst, obj.get_range_startswith(prefix, limit, reverse, mode)) elif inst.op == six.u("GET_RANGE_SELECTOR"): begin_key, begin_or_equal, begin_offset, end_key, end_or_equal, end_offset, limit, reverse, mode, prefix = inst.pop( 10) beginSel = fdb.KeySelector(begin_key, begin_or_equal, begin_offset) endSel = fdb.KeySelector(end_key, end_or_equal, end_offset) if limit == 0 and mode == -1 and random.random() < 0.5: if reverse: r = obj[beginSel:endSel:-1] else: r = obj[beginSel:endSel] else: r = obj.get_range(beginSel, endSel, limit, reverse, mode) self.push_range(inst, r, prefix_filter=prefix) elif inst.op == six.u("GET_READ_VERSION"): self.last_version = obj.get_read_version().wait() inst.push(b"GOT_READ_VERSION") elif inst.op == six.u("SET"): key, value = inst.pop(2) if random.random() < 0.5: obj[key] = value else: obj.set(key, value) if obj == self.db: inst.push(b"RESULT_NOT_PRESENT") elif inst.op == six.u("LOG_STACK"): prefix = inst.pop() entries = {} while len(self.stack) > 0: stack_index = len(self.stack) - 1 entries[stack_index] = inst.pop(with_idx=True) if len(entries) == 100: self.log_stack(self.db, prefix, entries) entries = {} self.log_stack(self.db, prefix, entries) elif inst.op == six.u("ATOMIC_OP"): opType, key, value = inst.pop(3) getattr(obj, opType.lower())(key, value) if obj == self.db: 
inst.push(b"RESULT_NOT_PRESENT") elif inst.op == six.u("SET_READ_VERSION"): inst.tr.set_read_version(self.last_version) elif inst.op == six.u("CLEAR"): if random.random() < 0.5: del obj[inst.pop()] else: obj.clear(inst.pop()) if obj == self.db: inst.push(b"RESULT_NOT_PRESENT") elif inst.op == six.u("CLEAR_RANGE"): begin, end = inst.pop(2) num = random.randint(0, 2) if num == 0: del obj[begin:end] elif num == 1: obj.clear_range(begin, end) else: obj.__delitem__(slice(begin, end)) if obj == self.db: inst.push(b"RESULT_NOT_PRESENT") elif inst.op == six.u("CLEAR_RANGE_STARTS_WITH"): obj.clear_range_startswith(inst.pop()) if obj == self.db: inst.push(b"RESULT_NOT_PRESENT") elif inst.op == six.u("READ_CONFLICT_RANGE"): inst.tr.add_read_conflict_range(inst.pop(), inst.pop()) inst.push(b"SET_CONFLICT_RANGE") elif inst.op == six.u("WRITE_CONFLICT_RANGE"): inst.tr.add_write_conflict_range(inst.pop(), inst.pop()) inst.push(b"SET_CONFLICT_RANGE") elif inst.op == six.u("READ_CONFLICT_KEY"): inst.tr.add_read_conflict_key(inst.pop()) inst.push(b"SET_CONFLICT_KEY") elif inst.op == six.u("WRITE_CONFLICT_KEY"): inst.tr.add_write_conflict_key(inst.pop()) inst.push(b"SET_CONFLICT_KEY") elif inst.op == six.u("DISABLE_WRITE_CONFLICT"): inst.tr.options.set_next_write_no_write_conflict_range() elif inst.op == six.u("COMMIT"): inst.push(inst.tr.commit()) elif inst.op == six.u("RESET"): inst.tr.reset() elif inst.op == six.u("CANCEL"): inst.tr.cancel() elif inst.op == six.u("GET_COMMITTED_VERSION"): self.last_version = inst.tr.get_committed_version() inst.push(b"GOT_COMMITTED_VERSION") elif inst.op == six.u("GET_APPROXIMATE_SIZE"): approximate_size = inst.tr.get_approximate_size().wait() inst.push(b"GOT_APPROXIMATE_SIZE") elif inst.op == six.u("GET_VERSIONSTAMP"): inst.push(inst.tr.get_versionstamp()) elif inst.op == six.u("TUPLE_PACK"): count = inst.pop() items = inst.pop(count) inst.push(fdb.tuple.pack(tuple(items))) elif inst.op == six.u("TUPLE_PACK_WITH_VERSIONSTAMP"): prefix = inst.pop() count = inst.pop() items = inst.pop(count) if not fdb.tuple.has_incomplete_versionstamp( items) and random.random() < 0.5: inst.push(b"ERROR: NONE") else: try: packed = fdb.tuple.pack_with_versionstamp( tuple(items), prefix=prefix) inst.push(b"OK") inst.push(packed) except ValueError as e: if str(e).startswith("No incomplete"): inst.push(b"ERROR: NONE") else: inst.push(b"ERROR: MULTIPLE") elif inst.op == six.u("TUPLE_UNPACK"): for i in fdb.tuple.unpack(inst.pop()): inst.push(fdb.tuple.pack((i, ))) elif inst.op == six.u("TUPLE_SORT"): count = inst.pop() items = inst.pop(count) unpacked = map(fdb.tuple.unpack, items) if six.PY3: sorted_items = sorted(unpacked, key=fdb.tuple.pack) else: sorted_items = sorted(unpacked, cmp=fdb.tuple.compare) for item in sorted_items: inst.push(fdb.tuple.pack(item)) elif inst.op == six.u("TUPLE_RANGE"): count = inst.pop() items = inst.pop(count) r = fdb.tuple.range(tuple(items)) inst.push(r.start) inst.push(r.stop) elif inst.op == six.u("ENCODE_FLOAT"): f_bytes = inst.pop() f = struct.unpack(">f", f_bytes)[0] if not math.isnan(f) and not math.isinf( f) and not f == -0.0 and f == int(f): f = int(f) inst.push(fdb.tuple.SingleFloat(f)) elif inst.op == six.u("ENCODE_DOUBLE"): d_bytes = inst.pop() d = struct.unpack(">d", d_bytes)[0] inst.push(d) elif inst.op == six.u("DECODE_FLOAT"): f = inst.pop() f_bytes = struct.pack(">f", f.value) inst.push(f_bytes) elif inst.op == six.u("DECODE_DOUBLE"): d = inst.pop() d_bytes = struct.pack(">d", d) inst.push(d_bytes) elif inst.op == six.u("START_THREAD"): t = 
Tester(self.db, inst.pop()) thr = threading.Thread(target=t.run) thr.start() self.threads.append(thr) elif inst.op == six.u("WAIT_EMPTY"): prefix = inst.pop() Tester.wait_empty(self.db, prefix) inst.push(b"WAITED_FOR_EMPTY") elif inst.op == six.u("UNIT_TESTS"): try: test_db_options(db) test_options(db) test_watches(db) test_cancellation(db) test_retry_limits(db) test_db_retry_limits(db) test_timeouts(db) test_db_timeouts(db) test_combinations(db) test_locality(db) test_predicates() test_size_limit_option(db) test_get_approximate_size(db) except fdb.FDBError as e: print("Unit tests failed: %s" % e.description) traceback.print_exc() raise Exception("Unit tests failed: %s" % e.description) elif inst.op.startswith(six.u('DIRECTORY_')): self.directory_extension.process_instruction(inst) else: raise Exception("Unknown op %s" % inst.op) except fdb.FDBError as e: # print('ERROR: %r' % e) inst.stack.push( idx, fdb.tuple.pack( (b"ERROR", str(e.code).encode('ascii')))) # print(" to %s" % self.stack) # print() [thr.join() for thr in self.threads]
def decode_pkt(t, pkt): packet_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(t)) l1, flag1, src, dst, unk1, unk2, unk3 = struct.unpack( ">BBIIBBB", pkt[0:13]) packet_str += "len={0:02x} flag={1:02x} src={2:08x} dst={3:08x} {4:02x}{5:02x}{6:02x}".format( l1, flag1, src, dst, unk1, unk2, unk3) if (src & 0x80000000) or (dst == 0 and l1 >= 35): ts_h, ts_m, ts_s = decode_ts(pkt[13:16]) packet_str += "ts={0:02}:{1:02}:{2:06.3f}".format(ts_h, ts_m, ts_s) else: packet_str += ("rpt=" + to_hex(pkt[13:14]) + " " + to_hex(pkt[14:16])) if dst == 0 and l1 >= 35: # flood broadcast message unk4, unk5, hop, unk7, addr, unk8, l2 = struct.unpack( ">BBBBIIB", pkt[16:29]) packet_str += "{0:02x}{1:02x} hop={2:02x} {3:02x} addr={4:08x} {5:08x} len={6:02x}".format( unk4, unk5, hop, unk7, addr, unk8, l2) if l2 == 0: packet_str += to_hex(pkt[29:33]) unk9, l3 = struct.unpack(">BB", pkt[33:35]) packet_str += "{0:02x}".format(unk9) packet_str += ("next_" + str(l3) + "_days=" + to_hex(pkt[35:])) elif l2 == 6: packet_str += to_hex(pkt[29:33]) packet_str += "date=" + str(decode_date(pkt[33:35])) elif l2 == 0x27: packet_str += to_hex(pkt[29:33]) for x in range( 7 ): # 7 meter numbers (with first bit sometimes set to 1) followed by number 0x01-0x45 packet_str += to_hex(pkt[33 + 5 * x:37 + 5 * x]) packet_str += to_hex(pkt[37 + 5 * x:38 + 5 * x]) else: packet_str += to_hex(pkt[29:]) log.debug(packet_str) else: if src & 0x80000000: packet_str += "path=" + to_hex(pkt[16:24]) if ord(pkt[24]) == 0x40: packet_str += to_hex(pkt[24:28]) l4, unk12, cmd, cnt = struct.unpack(">BBBB", pkt[28:32]) packet_str += "len={0:02x} {1:02x} cmd={2:02x} cnt={3:02x}".format( l4, unk12, cmd, cnt) if cmd == 0xce: # fetch hourly usage data, every 6 hours unk13, hour = struct.unpack(">BH", pkt[32:]) packet_str += "{0:02x} first_hour={1:05}".format( unk13, hour) elif cmd == 0x22: packet_str += to_hex(pkt[32:]) elif cmd == 0x23: # path building stuff? 
every 6 hours unk14, unk15, unk16, your_id, parent_id, parent, unk17, n_children, unk19, level, unk21, unk22, unk23, unk24, unk25, unk26, unk27, unk28, unk29 = struct.unpack( ">BBBBBIBBBBBBBBBBHIB", pkt[32:58]) packet_str += "{0:02x} {1:02x} {2:02x} id={3:02x} par_id={4:02x} parent={5:08x} {6:02x} #child={7} {8:02x} lvl={9} {10:02x}{11:02x}{12:02x} {13:02x} {14:02x} {15:02x} {16:04x} {17:08x} {18:02x}".format( unk14, unk15, unk16, your_id, parent_id, parent, unk17, n_children, unk19, level, unk21, unk22, unk23, unk24, unk25, unk26, unk27, unk28, unk29) if l4 == 0x20: packet_str += "{0:02x}".format(ord(pkt[58])) packet_str += "date=" + str(decode_date(pkt[59:61])) # Prepare graph edges meter_parents[dst] = parent if level == 2: meter_parents[ parent] = src # Fill this in now, in case we don't hear from parent meter_gatekeepers[dst] = src if level >= 2: meter_gatekeepers[ parent] = src # Fill this in now, in case we don't hear from parent meter_levels[dst] = level meter_levels[parent] = level - 1 meter_levels[src] = 0 elif cmd == 0x28: packet_str += to_hex(pkt[32:]) elif cmd == 0x6a: packet_str += to_hex(pkt[32:]) else: # unknown command packet_str += to_hex(pkt[32:]) else: packet_str += to_hex(pkt[24:]) log.debug(packet_str) else: if len(pkt) > 16: l4 = ord(pkt[16]) if l4 == l1 - 17: # 1st byte of payload is a length if len(pkt) > 18: cmd = ord(pkt[18]) if cmd == 0xce: # hourly usage data, every 6 hours unk10, cmd, ctr, unk11, flag2, curr_hour, last_hour, n_hours = struct.unpack( ">BBBBBHHB", pkt[17:27]) packet_str += "len={0:02x} {1:02x} cmd={2:02x} ctr={3:02x} {4:02x} {5:02x} cur_hour={6:05} last_hour={7:05} n_hour={8:02}".format( l4, unk10, cmd, ctr, unk11, flag2, curr_hour, last_hour, n_hours) packet_str += to_hex(pkt[27:]) add_hourly( src, last_hour, struct.unpack(">" + "H" * n_hours, pkt[27:27 + 2 * n_hours])) # TODO: Get total meter reading elif cmd == 0x22: # just an acknowledgement unk10, cmd, ctr = struct.unpack(">BBB", pkt[17:20]) packet_str += "len={0:02x} {1:02x} cmd={2:02x} ctr={3:02x}".format( l4, unk10, cmd, ctr) packet_str += to_hex(pkt[20:]) elif cmd == 0x23: # path building stuff? every 6 hours unk10, cmd, ctr = struct.unpack(">BBB", pkt[17:20]) packet_str += "len={0:02x} {1:02x} cmd={2:02x} ctr={3:02x}".format( l4, unk10, cmd, ctr) packet_str += to_hex(pkt[20:]) # TODO: Parse the rest elif cmd == 0x28: # just an acknowledgement unk10, cmd, ctr = struct.unpack(">BBB", pkt[17:20]) packet_str += "len={0:02x} {1:02x} cmd={2:02x} ctr={3:02x}".format( l4, unk10, cmd, ctr) packet_str += to_hex(pkt[20:]) elif cmd == 0x6a: unk10, cmd, ctr = struct.unpack(">BBB", pkt[17:20]) packet_str += "len={0:02x} {1:02x} cmd={2:02x} ctr={3:02x}".format( l4, unk10, cmd, ctr) packet_str += to_hex(pkt[20:]) # TODO: Parse the rest else: todo_pkt = to_hex(pkt[16:]) log.warning('Unable to decode %s' % todo_pkt) packet_str += "todo=" + to_hex(pkt[16:]) # TODO: Investigate these else: packet_str += "len={0:02x}".format( l4) + " data=" + to_hex(pkt[17:]) else: packet_str += "weird=" + to_hex( pkt[16:]) # this happens from time to time log.debug(packet_str)
def get_blk_dt(blk_hdr):
    members = struct.unpack("<I", blk_hdr[68:68 + 4])
    nTime = members[0]
    dt = datetime.datetime.fromtimestamp(nTime)
    dt_ym = datetime.datetime(dt.year, dt.month, 1)
    return (dt_ym, nTime)
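# Minimal usage sketch: the 80-byte Bitcoin block header stores nTime as a
# little-endian uint32 at offset 68 (4-byte version + 32-byte prev hash +
# 32-byte merkle root come before it). 1231006505 is the genesis block time.
hdr = b'\x00' * 68 + struct.pack("<I", 1231006505) + b'\x00' * 8
dt_ym, ntime = get_blk_dt(hdr)
print(dt_ym, ntime)  # first of that month (local time), 1231006505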
def _recv_bytes(self, maxsize=None):
    buf = self._recv(4)
    size, = struct.unpack("!i", buf.getvalue())
    if maxsize is not None and size > maxsize:
        return None
    return self._recv(size)
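# The matching send side (a hypothetical sketch, not the original class): each
# message is framed as a 4-byte big-endian signed length followed by the payload.
def _send_bytes(self, buf):
    self._send(struct.pack("!i", len(buf)))
    self._send(buf)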
def test_gdal_grid_1():
    if gdal_grid is None:
        pytest.skip()

    shape_drv = ogr.GetDriverByName('ESRI Shapefile')
    outfiles.append('tmp/n43.tif')
    try:
        os.remove('tmp/n43.shp')
    except OSError:
        pass
    try:
        os.remove('tmp/n43.dbf')
    except OSError:
        pass
    try:
        os.remove('tmp/n43.shx')
    except OSError:
        pass
    try:
        os.remove('tmp/n43.qix')
    except OSError:
        pass

    # Create an OGR grid from the values of n43.dt0
    ds = gdal.Open('../gdrivers/data/n43.dt0')
    geotransform = ds.GetGeoTransform()

    shape_drv = ogr.GetDriverByName('ESRI Shapefile')
    shape_ds = shape_drv.CreateDataSource('tmp')
    shape_lyr = shape_ds.CreateLayer('n43')

    data = ds.ReadRaster(0, 0, 121, 121)
    array_val = struct.unpack('h' * 121 * 121, data)
    for j in range(121):
        for i in range(121):
            wkt = 'POINT(%f %f %s)' % (geotransform[0] + (i + .5) * geotransform[1],
                                       geotransform[3] + (j + .5) * geotransform[5],
                                       array_val[j * 121 + i])
            dst_feat = ogr.Feature(feature_def=shape_lyr.GetLayerDefn())
            dst_feat.SetGeometry(ogr.CreateGeometryFromWkt(wkt))
            shape_lyr.CreateFeature(dst_feat)
            dst_feat.Destroy()

    shape_ds.ExecuteSQL('CREATE SPATIAL INDEX ON n43')
    shape_ds.Destroy()

    # Create a GDAL dataset from the previously generated OGR grid
    (_, err) = gdaltest.runexternal_out_and_err(
        gdal_grid + ' -txe -80.0041667 -78.9958333 -tye 42.9958333 44.0041667 -outsize 121 121 -ot Int16 -a nearest:radius1=0.0:radius2=0.0:angle=0.0 -co TILED=YES -co BLOCKXSIZE=256 -co BLOCKYSIZE=256 tmp/n43.shp ' + outfiles[-1])
    assert (err is None or err == ''), 'got error/warning'

    # We should get the same values as in n43.dt0
    ds2 = gdal.Open(outfiles[-1])
    assert ds.GetRasterBand(1).Checksum() == ds2.GetRasterBand(1).Checksum(), \
        ('bad checksum : got %d, expected %d' %
         (ds.GetRasterBand(1).Checksum(), ds2.GetRasterBand(1).Checksum()))
    assert ds2.GetRasterBand(1).GetNoDataValue() is None, 'did not expect nodata value'

    ds = None
    ds2 = None
def loop(data):
    while data:
        t = data[0]
        length = unpack('!H', data[1:3])[0]
        v, data = data[3:length], data[length:]
        yield TLV(t, v)
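# Sketch of the TLV wire layout this generator expects (an assumption inferred
# from the slicing above): 1-byte type, then a 2-byte big-endian length that
# counts the 3 header bytes as well, then the value.
def make_tlv(t, v):
    return t + pack('!H', len(v) + 3) + v

for tlv in loop(make_tlv('\x01', 'hello') + make_tlv('\x02', 'world')):
    print(tlv)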
    sys.exit(1)

for filename in sys.argv[1:]:
    f = open(filename, "rb")
    magic = f.read(4)
    if magic == "\xa1\xb2\xc3\xd4":    # big endian
        endian = ">"
    elif magic == "\xd4\xc3\xb2\xa1":  # little endian
        endian = "<"
    else:
        raise Exception("Not a pcap capture file (bad magic)")
    hdr = f.read(20)
    if len(hdr) < 20:
        raise Exception("Invalid pcap file (too short)")
    vermaj, vermin, tz, sig, snaplen, linktype = struct.unpack(endian + "HHIIII", hdr)

    packets = {}
    while True:
        hdr = f.read(16)
        if len(hdr) < 16:
            break
        sec, usec, caplen, wirelen = struct.unpack(endian + "IIII", hdr)
        pkt = f.read(caplen)
        decode_pkt(sec + usec / 1000000., pkt)

for meter in sorted(meter_readings.keys()):
    log.info("Readings for LAN ID " + str(meter) + ":")
    if meter_first_hour[meter] > meter_last_hour[meter]:
        meter_last_hour[meter] += 65536
    meter_readings_str = ''
def ip_decimal_notation(ips, ip):
    try:
        packedip = socket.inet_aton(ip)
        ips.add(struct.unpack("!l", packedip)[0])
    except:
        pass
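# Example: inet_aton + "!l" yields the signed 32-bit integer form of the address.
ips = set()
ip_decimal_notation(ips, '10.0.0.1')
print(ips)  # 10.0.0.1 -> 167772161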
def unpack(cls, data, negotiated):
    if not negotiated.neighbor.aigp:
        # AIGP must only be accepted on configured sessions
        return None
    return cls(unpack('!Q', data[:8])[0] & 0x000000FFFFFFFFFF, data[:8])
def wrelf32(self, base_offset, filename):
    """Description: Write elf file to memory

    Parameters:
        base_offset (int): Write offset
        filename (str): Elf file name
    """
    print("----------------")
    f = open(filename, "rb")
    try:
        e_ident = f.read(16)
        e_ident = struct.unpack("BBBBBBBBBBBBBBBB", e_ident)
        if ((e_ident[0] != 0x7f) | (e_ident[1] != 0x45) |
                (e_ident[2] != 0x4c) | (e_ident[3] != 0x46)):
            raise Exception("Error: elf signature incorrect!")
        print("Loading elf file: ", filename)

        e_type = f.read(2)
        e_type = struct.unpack("H", e_type)
        if (e_type[0] != 0x02):
            raise Exception("Error: e_type is not executable!")
        print("-- e_type: ET_EXEC")

        e_machine = f.read(2)
        e_machine = struct.unpack("H", e_machine)
        if (e_machine[0] == 243):
            print("-- e_machine: RISC-V")
        else:
            print("-- e_machine: ", hex(e_machine[0]))

        e_version = f.read(4)
        e_version = struct.unpack("L", e_version)

        e_entry = f.read(4)
        e_entry = struct.unpack("L", e_entry)
        #print("-- e_entry: ", hex(e_entry[0]))

        e_phoff = f.read(4)
        e_phoff = struct.unpack("L", e_phoff)
        #print("-- e_phoff: ", hex(e_phoff[0]))

        e_shoff = f.read(4)
        e_shoff = struct.unpack("L", e_shoff)
        #print("-- e_shoff: ", hex(e_shoff[0]))

        e_flags = f.read(4)
        e_flags = struct.unpack("L", e_flags)
        #print("-- e_flags: ", hex(e_flags[0]))

        e_ehsize = f.read(2)
        e_ehsize = struct.unpack("H", e_ehsize)
        #print("-- e_ehsize: ", hex(e_ehsize[0]))

        e_phentsize = f.read(2)
        e_phentsize = struct.unpack("H", e_phentsize)
        #print("-- e_phentsize: ", hex(e_phentsize[0]))

        e_phnum = f.read(2)
        e_phnum = struct.unpack("H", e_phnum)
        #print("-- e_phnum: ", hex(e_phnum[0]))

        e_shentsize = f.read(2)
        e_shentsize = struct.unpack("H", e_shentsize)
        #print("-- e_shentsize: ", hex(e_shentsize[0]))

        e_shnum = f.read(2)
        e_shnum = struct.unpack("H", e_shnum)
        #print("-- e_shnum: ", hex(e_shnum[0]))

        e_shstrndx = f.read(2)
        e_shstrndx = struct.unpack("H", e_shstrndx)
        #print("-- e_shstrndx: ", hex(e_shstrndx[0]))

        prog_headers = []
        print("Program Headers:")
        print("-----------------------------------------------------------------------------------------------------------")
        print(" № | p_type | p_offset | p_vaddr | p_paddr | p_filesz | p_memsz | p_flags | p_align")
        phnum = 0
        for h in range(e_phnum[0]):
            prog_header = f.read(32)
            prog_header = struct.unpack("LLLLLLLL", prog_header)
            PT_LOAD = 1
            if prog_header[0] != PT_LOAD:
                raise Exception("Error: p_type incorrect: 0x%08x" % prog_header[0])
            print("%2d" % phnum, "| 0x%08x" % prog_header[0], "| 0x%08x" % prog_header[1],
                  "| 0x%08x" % prog_header[2], "| 0x%08x" % prog_header[3],
                  "| 0x%08x" % prog_header[4], "| 0x%08x" % prog_header[5],
                  "| 0x%08x" % prog_header[6], "| 0x%08x" % prog_header[7])
            prog_headers.append((prog_header[1], prog_header[2], prog_header[4]))
            phnum += 1
        print("-----------------------------------------------------------------------------------------------------------")

        for prog_header in prog_headers:
            offset = prog_header[0]
            vaddr = prog_header[1]
            size = prog_header[2]
            print("LOADING: file offset: 0x%08x" % offset, ", hw addr: 0x%08x" % vaddr, "size: 0x%08x" % size)
            f.seek(offset)
            dbs = f.read(size)
            dbs = struct.unpack('{}L'.format(len(dbs) >> 2), dbs)
            #print("dbs len: ", len(dbs))
            #print("dbs[0]: ", hex(dbs[0]))
            #print("dbs[1]: ", hex(dbs[1]))
            self.wrarr32((base_offset + vaddr), dbs)
    finally:
        f.close()
        print("----------------")
def get_enccounter(self):
    """Get the encoder count in the controller"""
    self._ser.write(bytes([0x0A, 0x04, 0x01, 0x00, self.dst, self.src]))
    rsp = self._ser.read(12)
    enccounter = struct.unpack('<i', rsp[-4:])
    return enccounter
def p0f_impersonate(pkt, osgenre=None, osdetails=None, signature=None, extrahops=0, mtu=1500, uptime=None): """Modifies pkt so that p0f will think it has been sent by a specific OS. If osdetails is None, then we randomly pick up a personality matching osgenre. If osgenre and signature are also None, we use a local signature (using p0f_getlocalsigs). If signature is specified (as a tuple), we use the signature. For now, only TCP Syn packets are supported. Some specifications of the p0f.fp file are not (yet) implemented.""" pkt = pkt.copy() # pkt = pkt.__class__(raw(pkt)) while pkt.haslayer(IP) and pkt.haslayer(TCP): pkt = pkt.getlayer(IP) if isinstance(pkt.payload, TCP): break pkt = pkt.payload if not isinstance(pkt, IP) or not isinstance(pkt.payload, TCP): raise TypeError("Not a TCP/IP packet") db = p0f_selectdb(pkt.payload.flags) if osgenre: pb = db.get_base() if pb is None: pb = [] pb = [x for x in pb if x[6] == osgenre] if osdetails: pb = [x for x in pb if x[7] == osdetails] elif signature: pb = [signature] else: pb = p0f_getlocalsigs()[db] if db == p0fr_kdb: # 'K' quirk <=> RST+ACK if pkt.payload.flags & 0x4 == 0x4: pb = [x for x in pb if 'K' in x[5]] else: pb = [x for x in pb if 'K' not in x[5]] if not pb: raise Scapy_Exception("No match in the p0f database") pers = pb[random.randint(0, len(pb) - 1)] # options (we start with options because of MSS) # Take the options already set as "hints" to use in the new packet if we # can. MSS, WScale and Timestamp can all be wildcarded in a signature, so # we'll use the already-set values if they're valid integers. orig_opts = dict(pkt.payload.options) int_only = lambda val: val if isinstance(val, six.integer_types) else None mss_hint = int_only(orig_opts.get('MSS')) wscale_hint = int_only(orig_opts.get('WScale')) ts_hint = [int_only(o) for o in orig_opts.get('Timestamp', (None, None))] options = [] if pers[4] != '.': for opt in pers[4].split(','): if opt[0] == 'M': # MSS might have a maximum size because of window size # specification if pers[0][0] == 'S': maxmss = (2**16 - 1) // int(pers[0][1:]) else: maxmss = (2**16 - 1) # disregard hint if out of range if mss_hint and not 0 <= mss_hint <= maxmss: mss_hint = None # If we have to randomly pick up a value, we cannot use # scapy RandXXX() functions, because the value has to be # set in case we need it for the window size value. That's # why we use random.randint() if opt[1:] == '*': if mss_hint is not None: options.append(('MSS', mss_hint)) else: options.append(('MSS', random.randint(1, maxmss))) elif opt[1] == '%': coef = int(opt[2:]) if mss_hint is not None and mss_hint % coef == 0: options.append(('MSS', mss_hint)) else: options.append( ('MSS', coef * random.randint(1, maxmss // coef))) else: options.append(('MSS', int(opt[1:]))) elif opt[0] == 'W': if wscale_hint and not 0 <= wscale_hint < 2**8: wscale_hint = None if opt[1:] == '*': if wscale_hint is not None: options.append(('WScale', wscale_hint)) else: options.append(('WScale', RandByte())) elif opt[1] == '%': coef = int(opt[2:]) if wscale_hint is not None and wscale_hint % coef == 0: options.append(('WScale', wscale_hint)) else: options.append(('WScale', coef * RandNum( min=1, max=(2**8 - 1) // coef))) # noqa: E501 else: options.append(('WScale', int(opt[1:]))) elif opt == 'T0': options.append(('Timestamp', (0, 0))) elif opt == 'T': # Determine first timestamp. 
if uptime is not None: ts_a = uptime elif ts_hint[0] and 0 < ts_hint[0] < 2**32: # Note: if first ts is 0, p0f registers it as "T0" not "T", # hence we don't want to use the hint if it was 0. ts_a = ts_hint[0] else: ts_a = random.randint(120, 100 * 60 * 60 * 24 * 365) # Determine second timestamp. if 'T' not in pers[5]: ts_b = 0 elif ts_hint[1] and 0 < ts_hint[1] < 2**32: ts_b = ts_hint[1] else: # FIXME: RandInt() here does not work (bug (?) in # TCPOptionsField.m2i often raises "OverflowError: # long int too large to convert to int" in: # oval = struct.pack(ofmt, *oval)" # Actually, this is enough to often raise the error: # struct.pack('I', RandInt()) ts_b = random.randint(1, 2**32 - 1) options.append(('Timestamp', (ts_a, ts_b))) elif opt == 'S': options.append(('SAckOK', '')) elif opt == 'N': options.append(('NOP', None)) elif opt == 'E': options.append(('EOL', None)) elif opt[0] == '?': if int(opt[1:]) in TCPOptions[0]: optname = TCPOptions[0][int(opt[1:])][0] optstruct = TCPOptions[0][int(opt[1:])][1] options.append( (optname, struct.unpack( optstruct, RandString(struct.calcsize(optstruct))._fix()) )) # noqa: E501 else: options.append((int(opt[1:]), '')) # FIXME: qqP not handled else: warning("unhandled TCP option " + opt) pkt.payload.options = options # window size if pers[0] == '*': pkt.payload.window = RandShort() elif pers[0].isdigit(): pkt.payload.window = int(pers[0]) elif pers[0][0] == '%': coef = int(pers[0][1:]) pkt.payload.window = coef * RandNum(min=1, max=(2**16 - 1) // coef) elif pers[0][0] == 'T': pkt.payload.window = mtu * int(pers[0][1:]) elif pers[0][0] == 'S': # needs MSS set mss = [x for x in options if x[0] == 'MSS'] if not mss: raise Scapy_Exception( "TCP window value requires MSS, and MSS option not set" ) # noqa: E501 pkt.payload.window = mss[0][1] * int(pers[0][1:]) else: raise Scapy_Exception('Unhandled window size specification') # ttl pkt.ttl = pers[1] - extrahops # DF flag pkt.flags |= (2 * pers[2]) # FIXME: ss (packet size) not handled (how ? may be with D quirk # if present) # Quirks if pers[5] != '.': for qq in pers[5]: # FIXME: not handled: P, I, X, ! # T handled with the Timestamp option if qq == 'Z': pkt.id = 0 elif qq == 'U': pkt.payload.urgptr = RandShort() elif qq == 'A': pkt.payload.ack = RandInt() elif qq == 'F': if db == p0fo_kdb: pkt.payload.flags |= 0x20 # U else: pkt.payload.flags |= random.choice([8, 32, 40]) # P/U/PU elif qq == 'D' and db != p0fo_kdb: pkt /= conf.raw_layer(load=RandString(random.randint( 1, 10))) # XXX p0fo.fp # noqa: E501 elif qq == 'Q': pkt.payload.seq = pkt.payload.ack # elif qq == '0': pkt.payload.seq = 0 # if db == p0fr_kdb: # '0' quirk is actually not only for p0fr.fp (see # packet2p0f()) if '0' in pers[5]: pkt.payload.seq = 0 elif pkt.payload.seq == 0: pkt.payload.seq = RandInt() while pkt.underlayer: pkt = pkt.underlayer return pkt
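A typical call, assuming Scapy and its p0f module are loaded alongside this function (the target address and OS genre below are placeholders):

from scapy.all import IP, TCP, send

# Craft a plain SYN, disguise it as a "Linux" personality from the p0f
# database, then send it. Raises Scapy_Exception if no signature matches.
base = IP(dst="198.51.100.10") / TCP(dport=80, flags="S")
disguised = p0f_impersonate(base, osgenre="Linux")
send(disguised)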
def __getbyte(self):
    # Read a single byte from the serial port and return it as an int.
    rdata = self.ser.read(1)
    rdata = struct.unpack("B", rdata)
    return rdata[0]
def parseOspfLsaRtr(lsa, verbose=1, level=0): if verbose > 1: print prtbin(level * INDENT, lsa[:OSPF_LSARTR_LEN]) ( veb, _, nlinks, ) = struct.unpack(OSPF_LSARTR, lsa[:OSPF_LSARTR_LEN]) v = (veb & 0x01) e = (veb & 0x02) >> 1 b = (veb & 0x04) >> 2 if verbose > 0: print level * INDENT + "nlinks:%s, rtr desc: %s %s %s" % ( nlinks, v * "VIRTUAL", e * "EXTERNAL", b * "BORDER") lsa = lsa[OSPF_LSARTR_LEN:] i = 0 links = {} while i < nlinks: i += 1 if verbose > 1: print prtbin((level + 1) * INDENT, lsa[:OSPF_LINK_LEN]) (lid, ldata, ltype, ntos, metric) = struct.unpack(OSPF_LINK, lsa[:OSPF_LINK_LEN]) if verbose > 0: print (level+1)*INDENT +\ "%s: link id:%s, link data:%s, link type:%s, ntos:%s, metric:%s" %( i, id2str(lid), id2str(ldata), RTR_LINK_TYPE[ltype], ntos, metric) lsa = lsa[OSPF_LINK_LEN:] j = 0 metrics = { 0: metric, } while j < ntos: j += 1 if verbose > 1: print prtbin((level + 2) * INDENT, lsa[:OSPF_METRIC_LEN]) (tos, _, metric) = struct.unpack(OSPF_METRIC, lsa[:OSPF_METRIC_LEN]) if verbose > 0: print (level+2)*INDENT +\ "%s: tos:%s, metric:%s" % (j, int2bin(tos), metric) metrics[tos] = metric lsa = lsa[OSPF_METRIC_LEN:] links[i] = { "ID": lid, "DATA": ldata, "T": ltype, "NTOS": ntos, "METRICS": metrics, } return { "VIRTUAL": v, "EXTERNAL": e, "BORDER": b, "NLINKS": nlinks, "LINKS": links, }
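The OSPF_LSARTR, OSPF_LINK and OSPF_METRIC format strings (and their *_LEN sizes) are defined elsewhere in the module; a plausible set of definitions, matching the field counts unpacked above and the router-LSA layout of RFC 2328, would be:

import struct

# Assumed layouts, not shown in the snippet above; the actual module may differ.
OSPF_LSARTR = ">BBH"    # V/E/B flag byte, zero pad, number of links  (4 bytes)
OSPF_LINK   = ">LLBBH"  # link ID, link data, type, #TOS, metric      (12 bytes)
OSPF_METRIC = ">BBH"    # TOS, zero pad, TOS-specific metric          (4 bytes)

OSPF_LSARTR_LEN = struct.calcsize(OSPF_LSARTR)  # 4
OSPF_LINK_LEN   = struct.calcsize(OSPF_LINK)    # 12
OSPF_METRIC_LEN = struct.calcsize(OSPF_METRIC)  # 4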
def __len__(self):
    '''Return the number of bases in this ``BCL``

    Returns:
        ``int``: The number of bases in this ``BCL``
    '''
    return unpack('I', self.data[:4])[0]
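A hypothetical round-trip showing what that header field means: the first four native-endian bytes of a BCL blob hold the cluster count, so len() on the object is just that count. The constructor below is assumed for the sketch, not taken from the original class:

from struct import pack, unpack

class BCL:
    def __init__(self, data):
        self.data = data          # raw file contents

    def __len__(self):
        return unpack('I', self.data[:4])[0]

blob = pack('I', 3) + bytes(3)    # 3 clusters, one base-call byte each
assert len(BCL(blob)) == 3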
while True: input_packet = f.read(188) if not input_packet: break if device == 'B' and prevpacket and input_packet.startswith(bytes("\x47\x03\x00", encoding="raw_unicode_escape")): bs = list(input_packet) hour = int.from_bytes(prevpacket[174:178], byteorder='little') minute = int.from_bytes(prevpacket[178:182], byteorder='little') second = int.from_bytes(prevpacket[182:186], byteorder='little') year = int.from_bytes(prevpacket[186:188] + input_packet[146:148], byteorder='little') month = int.from_bytes(input_packet[148:152], byteorder='little') day = int.from_bytes(input_packet[152:156], byteorder='little') active = chr(bs[156]) lathem = chr(bs[157]) lonhem = chr(bs[158]) lat = fix_coordinates(lathem,struct.unpack('<f', input_packet[160:164])) lon = fix_coordinates(lonhem,struct.unpack('<f', input_packet[164:168])) speed_knots, = struct.unpack('<f', input_packet[168:172]) speed = speed_knots * 1.6 / 3.6 bearing, = struct.unpack('<f', input_packet[172:176]) print ('20{0:02}-{1:02}-{2:02} {3:02}:{4:02}:{5:02}'.format(year,month,day,hour,minute,second),active,lathem,lonhem,lat,lon,speed,bearing, sep=';') if device == 'V' and input_packet.startswith(bytes("\x47\x43\x00", encoding="raw_unicode_escape")): bs = list(input_packet) hour = int.from_bytes(input_packet[10:14], byteorder='little') minute = int.from_bytes(input_packet[14:18], byteorder='little') second = int.from_bytes(input_packet[18:22], byteorder='little') year = int.from_bytes(input_packet[22:26], byteorder='little') month = int.from_bytes(input_packet[26:30], byteorder='little') day = int.from_bytes(input_packet[30:34], byteorder='little') active = chr(bs[34]) lathem = chr(bs[35])
def RMD160Transform(state, block): # uint32 state[5], uchar block[64] x = [0] * 16 if sys.byteorder == 'little': x = struct.unpack('<16L', bytes([x for x in block[0:64]])) else: raise Exception("Error!!") a = state[0] b = state[1] c = state[2] d = state[3] e = state[4] a, c = R(a, b, c, d, e, F0, K0, 11, 0, x) e, b = R(e, a, b, c, d, F0, K0, 14, 1, x) d, a = R(d, e, a, b, c, F0, K0, 15, 2, x) c, e = R(c, d, e, a, b, F0, K0, 12, 3, x) b, d = R(b, c, d, e, a, F0, K0, 5, 4, x) a, c = R(a, b, c, d, e, F0, K0, 8, 5, x) e, b = R(e, a, b, c, d, F0, K0, 7, 6, x) d, a = R(d, e, a, b, c, F0, K0, 9, 7, x) c, e = R(c, d, e, a, b, F0, K0, 11, 8, x) b, d = R(b, c, d, e, a, F0, K0, 13, 9, x) a, c = R(a, b, c, d, e, F0, K0, 14, 10, x) e, b = R(e, a, b, c, d, F0, K0, 15, 11, x) d, a = R(d, e, a, b, c, F0, K0, 6, 12, x) c, e = R(c, d, e, a, b, F0, K0, 7, 13, x) b, d = R(b, c, d, e, a, F0, K0, 9, 14, x) a, c = R(a, b, c, d, e, F0, K0, 8, 15, x) e, b = R(e, a, b, c, d, F1, K1, 7, 7, x) d, a = R(d, e, a, b, c, F1, K1, 6, 4, x) c, e = R(c, d, e, a, b, F1, K1, 8, 13, x) b, d = R(b, c, d, e, a, F1, K1, 13, 1, x) a, c = R(a, b, c, d, e, F1, K1, 11, 10, x) e, b = R(e, a, b, c, d, F1, K1, 9, 6, x) d, a = R(d, e, a, b, c, F1, K1, 7, 15, x) c, e = R(c, d, e, a, b, F1, K1, 15, 3, x) b, d = R(b, c, d, e, a, F1, K1, 7, 12, x) a, c = R(a, b, c, d, e, F1, K1, 12, 0, x) e, b = R(e, a, b, c, d, F1, K1, 15, 9, x) d, a = R(d, e, a, b, c, F1, K1, 9, 5, x) c, e = R(c, d, e, a, b, F1, K1, 11, 2, x) b, d = R(b, c, d, e, a, F1, K1, 7, 14, x) a, c = R(a, b, c, d, e, F1, K1, 13, 11, x) e, b = R(e, a, b, c, d, F1, K1, 12, 8, x) d, a = R(d, e, a, b, c, F2, K2, 11, 3, x) c, e = R(c, d, e, a, b, F2, K2, 13, 10, x) b, d = R(b, c, d, e, a, F2, K2, 6, 14, x) a, c = R(a, b, c, d, e, F2, K2, 7, 4, x) e, b = R(e, a, b, c, d, F2, K2, 14, 9, x) d, a = R(d, e, a, b, c, F2, K2, 9, 15, x) c, e = R(c, d, e, a, b, F2, K2, 13, 8, x) b, d = R(b, c, d, e, a, F2, K2, 15, 1, x) a, c = R(a, b, c, d, e, F2, K2, 14, 2, x) e, b = R(e, a, b, c, d, F2, K2, 8, 7, x) d, a = R(d, e, a, b, c, F2, K2, 13, 0, x) c, e = R(c, d, e, a, b, F2, K2, 6, 6, x) b, d = R(b, c, d, e, a, F2, K2, 5, 13, x) a, c = R(a, b, c, d, e, F2, K2, 12, 11, x) e, b = R(e, a, b, c, d, F2, K2, 7, 5, x) d, a = R(d, e, a, b, c, F2, K2, 5, 12, x) c, e = R(c, d, e, a, b, F3, K3, 11, 1, x) b, d = R(b, c, d, e, a, F3, K3, 12, 9, x) a, c = R(a, b, c, d, e, F3, K3, 14, 11, x) e, b = R(e, a, b, c, d, F3, K3, 15, 10, x) d, a = R(d, e, a, b, c, F3, K3, 14, 0, x) c, e = R(c, d, e, a, b, F3, K3, 15, 8, x) b, d = R(b, c, d, e, a, F3, K3, 9, 12, x) a, c = R(a, b, c, d, e, F3, K3, 8, 4, x) e, b = R(e, a, b, c, d, F3, K3, 9, 13, x) d, a = R(d, e, a, b, c, F3, K3, 14, 3, x) c, e = R(c, d, e, a, b, F3, K3, 5, 7, x) b, d = R(b, c, d, e, a, F3, K3, 6, 15, x) a, c = R(a, b, c, d, e, F3, K3, 8, 14, x) e, b = R(e, a, b, c, d, F3, K3, 6, 5, x) d, a = R(d, e, a, b, c, F3, K3, 5, 6, x) c, e = R(c, d, e, a, b, F3, K3, 12, 2, x) b, d = R(b, c, d, e, a, F4, K4, 9, 4, x) a, c = R(a, b, c, d, e, F4, K4, 15, 0, x) e, b = R(e, a, b, c, d, F4, K4, 5, 5, x) d, a = R(d, e, a, b, c, F4, K4, 11, 9, x) c, e = R(c, d, e, a, b, F4, K4, 6, 7, x) b, d = R(b, c, d, e, a, F4, K4, 8, 12, x) a, c = R(a, b, c, d, e, F4, K4, 13, 2, x) e, b = R(e, a, b, c, d, F4, K4, 12, 10, x) d, a = R(d, e, a, b, c, F4, K4, 5, 14, x) c, e = R(c, d, e, a, b, F4, K4, 12, 1, x) b, d = R(b, c, d, e, a, F4, K4, 13, 3, x) a, c = R(a, b, c, d, e, F4, K4, 14, 8, x) e, b = R(e, a, b, c, d, F4, K4, 11, 11, x) d, a = R(d, e, a, b, c, F4, K4, 8, 6, x) c, e = R(c, d, e, 
a, b, F4, K4, 5, 15, x) b, d = R(b, c, d, e, a, F4, K4, 6, 13, x) aa = a bb = b cc = c dd = d ee = e a = state[0] b = state[1] c = state[2] d = state[3] e = state[4] a, c = R(a, b, c, d, e, F4, KK0, 8, 5, x) e, b = R(e, a, b, c, d, F4, KK0, 9, 14, x) d, a = R(d, e, a, b, c, F4, KK0, 9, 7, x) c, e = R(c, d, e, a, b, F4, KK0, 11, 0, x) b, d = R(b, c, d, e, a, F4, KK0, 13, 9, x) a, c = R(a, b, c, d, e, F4, KK0, 15, 2, x) e, b = R(e, a, b, c, d, F4, KK0, 15, 11, x) d, a = R(d, e, a, b, c, F4, KK0, 5, 4, x) c, e = R(c, d, e, a, b, F4, KK0, 7, 13, x) b, d = R(b, c, d, e, a, F4, KK0, 7, 6, x) a, c = R(a, b, c, d, e, F4, KK0, 8, 15, x) e, b = R(e, a, b, c, d, F4, KK0, 11, 8, x) d, a = R(d, e, a, b, c, F4, KK0, 14, 1, x) c, e = R(c, d, e, a, b, F4, KK0, 14, 10, x) b, d = R(b, c, d, e, a, F4, KK0, 12, 3, x) a, c = R(a, b, c, d, e, F4, KK0, 6, 12, x) e, b = R(e, a, b, c, d, F3, KK1, 9, 6, x) d, a = R(d, e, a, b, c, F3, KK1, 13, 11, x) c, e = R(c, d, e, a, b, F3, KK1, 15, 3, x) b, d = R(b, c, d, e, a, F3, KK1, 7, 7, x) a, c = R(a, b, c, d, e, F3, KK1, 12, 0, x) e, b = R(e, a, b, c, d, F3, KK1, 8, 13, x) d, a = R(d, e, a, b, c, F3, KK1, 9, 5, x) c, e = R(c, d, e, a, b, F3, KK1, 11, 10, x) b, d = R(b, c, d, e, a, F3, KK1, 7, 14, x) a, c = R(a, b, c, d, e, F3, KK1, 7, 15, x) e, b = R(e, a, b, c, d, F3, KK1, 12, 8, x) d, a = R(d, e, a, b, c, F3, KK1, 7, 12, x) c, e = R(c, d, e, a, b, F3, KK1, 6, 4, x) b, d = R(b, c, d, e, a, F3, KK1, 15, 9, x) a, c = R(a, b, c, d, e, F3, KK1, 13, 1, x) e, b = R(e, a, b, c, d, F3, KK1, 11, 2, x) d, a = R(d, e, a, b, c, F2, KK2, 9, 15, x) c, e = R(c, d, e, a, b, F2, KK2, 7, 5, x) b, d = R(b, c, d, e, a, F2, KK2, 15, 1, x) a, c = R(a, b, c, d, e, F2, KK2, 11, 3, x) e, b = R(e, a, b, c, d, F2, KK2, 8, 7, x) d, a = R(d, e, a, b, c, F2, KK2, 6, 14, x) c, e = R(c, d, e, a, b, F2, KK2, 6, 6, x) b, d = R(b, c, d, e, a, F2, KK2, 14, 9, x) a, c = R(a, b, c, d, e, F2, KK2, 12, 11, x) e, b = R(e, a, b, c, d, F2, KK2, 13, 8, x) d, a = R(d, e, a, b, c, F2, KK2, 5, 12, x) c, e = R(c, d, e, a, b, F2, KK2, 14, 2, x) b, d = R(b, c, d, e, a, F2, KK2, 13, 10, x) a, c = R(a, b, c, d, e, F2, KK2, 13, 0, x) e, b = R(e, a, b, c, d, F2, KK2, 7, 4, x) d, a = R(d, e, a, b, c, F2, KK2, 5, 13, x) c, e = R(c, d, e, a, b, F1, KK3, 15, 8, x) b, d = R(b, c, d, e, a, F1, KK3, 5, 6, x) a, c = R(a, b, c, d, e, F1, KK3, 8, 4, x) e, b = R(e, a, b, c, d, F1, KK3, 11, 1, x) d, a = R(d, e, a, b, c, F1, KK3, 14, 3, x) c, e = R(c, d, e, a, b, F1, KK3, 14, 11, x) b, d = R(b, c, d, e, a, F1, KK3, 6, 15, x) a, c = R(a, b, c, d, e, F1, KK3, 14, 0, x) e, b = R(e, a, b, c, d, F1, KK3, 6, 5, x) d, a = R(d, e, a, b, c, F1, KK3, 9, 12, x) c, e = R(c, d, e, a, b, F1, KK3, 12, 2, x) b, d = R(b, c, d, e, a, F1, KK3, 9, 13, x) a, c = R(a, b, c, d, e, F1, KK3, 12, 9, x) e, b = R(e, a, b, c, d, F1, KK3, 5, 7, x) d, a = R(d, e, a, b, c, F1, KK3, 15, 10, x) c, e = R(c, d, e, a, b, F1, KK3, 8, 14, x) b, d = R(b, c, d, e, a, F0, KK4, 8, 12, x) a, c = R(a, b, c, d, e, F0, KK4, 5, 15, x) e, b = R(e, a, b, c, d, F0, KK4, 12, 10, x) d, a = R(d, e, a, b, c, F0, KK4, 9, 4, x) c, e = R(c, d, e, a, b, F0, KK4, 12, 1, x) b, d = R(b, c, d, e, a, F0, KK4, 5, 5, x) a, c = R(a, b, c, d, e, F0, KK4, 14, 8, x) e, b = R(e, a, b, c, d, F0, KK4, 6, 7, x) d, a = R(d, e, a, b, c, F0, KK4, 8, 6, x) c, e = R(c, d, e, a, b, F0, KK4, 13, 2, x) b, d = R(b, c, d, e, a, F0, KK4, 6, 13, x) a, c = R(a, b, c, d, e, F0, KK4, 5, 14, x) e, b = R(e, a, b, c, d, F0, KK4, 15, 0, x) d, a = R(d, e, a, b, c, F0, KK4, 13, 3, x) c, e = R(c, d, e, a, b, F0, KK4, 11, 9, x) b, 
d = R(b, c, d, e, a, F0, KK4, 11, 11, x) t = (state[1] + cc + d) % 0x100000000 state[1] = (state[2] + dd + e) % 0x100000000 state[2] = (state[3] + ee + a) % 0x100000000 state[3] = (state[4] + aa + b) % 0x100000000 state[4] = (state[0] + bb + c) % 0x100000000 state[0] = t % 0x100000000 pass
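The round helper R, the rotate ROL, the boolean functions F0-F4 and the constants K0-K4 / KK0-KK4 are not shown here. A sketch of their conventional RIPEMD-160 definitions, which the call pattern above appears to assume, though the actual module may differ in detail:

# 32-bit left rotate.
def ROL(n, x):
    return ((x << n) & 0xffffffff) | (x >> (32 - n))

# Round boolean functions from the RIPEMD-160 specification; the
# "% 0x100000000" masks keep Python's signed ~ operator within 32 bits.
def F0(x, y, z): return x ^ y ^ z
def F1(x, y, z): return (x & y) | (((~x) % 0x100000000) & z)
def F2(x, y, z): return (x | ((~y) % 0x100000000)) ^ z
def F3(x, y, z): return (x & z) | (((~z) % 0x100000000) & y)
def F4(x, y, z): return x ^ (y | ((~z) % 0x100000000))

# Left-line and right-line additive constants.
K0, K1, K2, K3, K4      = 0x00000000, 0x5A827999, 0x6ED9EBA1, 0x8F1BBCDC, 0xA953FD4E
KK0, KK1, KK2, KK3, KK4 = 0x50A28BE6, 0x5C4DD124, 0x6D703EF3, 0x7A6D76E9, 0x00000000

# One round step: rotate-and-add on a, fixed 10-bit rotate on c.
def R(a, b, c, d, e, Fj, Kj, sj, rj, X):
    a = ROL(sj, (a + Fj(b, c, d) + X[rj] + Kj) % 0x100000000) + e
    c = ROL(10, c)
    return a % 0x100000000, c % 0x100000000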
def packed_bdaddr_to_string(bdaddr_packed):
    # A packed BD_ADDR arrives least-significant byte first; reverse it before
    # formatting the familiar colon-separated hex string.
    return ':'.join('%02x' % i for i in struct.unpack('BBBBBB', bdaddr_packed[::-1]))
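For example, HCI delivers the address low byte first, so the packed form of 00:1a:7d:da:71:13 is the reversed byte string below (sample value chosen purely for illustration):

packed = bytes([0x13, 0x71, 0xDA, 0x7D, 0x1A, 0x00])
print(packed_bdaddr_to_string(packed))   # -> 00:1a:7d:da:71:13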
def _handle_openflow_PacketIn (self, event): """ Receive and process LLDP packets """ packet = event.parsed if (packet.effective_ethertype != pkt.ethernet.LLDP_TYPE or packet.dst != pkt.ETHERNET.NDP_MULTICAST): if not self._eat_early_packets: return if not event.connection.connect_time: return enable_time = time.time() - self.send_cycle_time - 1 if event.connection.connect_time > enable_time: return EventHalt return if self._explicit_drop: if event.ofp.buffer_id is not None: log.debug("Dropping LLDP packet %i", event.ofp.buffer_id) msg = of.ofp_packet_out() msg.buffer_id = event.ofp.buffer_id msg.in_port = event.port event.connection.send(msg) lldph = packet.find(pkt.lldp) if lldph is None or not lldph.parsed: log.error("LLDP packet could not be parsed") return EventHalt if len(lldph.tlvs) < 3: log.error("LLDP packet without required three TLVs") return EventHalt if lldph.tlvs[0].tlv_type != pkt.lldp.CHASSIS_ID_TLV: log.error("LLDP packet TLV 1 not CHASSIS_ID") return EventHalt if lldph.tlvs[1].tlv_type != pkt.lldp.PORT_ID_TLV: log.error("LLDP packet TLV 2 not PORT_ID") return EventHalt if lldph.tlvs[2].tlv_type != pkt.lldp.TTL_TLV: log.error("LLDP packet TLV 3 not TTL") return EventHalt def lookInSysDesc (): r = None for t in lldph.tlvs[3:]: if t.tlv_type == pkt.lldp.SYSTEM_DESC_TLV: # This is our favored way... for line in t.payload.split('\n'): if line.startswith('dpid:'): try: return int(line[5:], 16) except: pass if len(t.payload) == 8: # Maybe it's a FlowVisor LLDP... # Do these still exist? try: return struct.unpack("!Q", t.payload)[0] except: pass return None originatorDPID = lookInSysDesc() if originatorDPID == None: # We'll look in the CHASSIS ID if lldph.tlvs[0].subtype == pkt.chassis_id.SUB_LOCAL: if lldph.tlvs[0].id.startswith('dpid:'): # This is how NOX does it at the time of writing try: originatorDPID = int(lldph.tlvs[0].id[5:], 16) except: pass if originatorDPID == None: if lldph.tlvs[0].subtype == pkt.chassis_id.SUB_MAC: # Last ditch effort -- we'll hope the DPID was small enough # to fit into an ethernet address if len(lldph.tlvs[0].id) == 6: try: s = lldph.tlvs[0].id originatorDPID = struct.unpack("!Q",'\x00\x00' + s)[0] except: pass if originatorDPID == None: log.warning("Couldn't find a DPID in the LLDP packet") return EventHalt if originatorDPID not in core.openflow.connections: log.info('Received LLDP packet from unknown switch') return EventHalt # Get port number from port TLV if lldph.tlvs[1].subtype != pkt.port_id.SUB_PORT: log.warning("Thought we found a DPID, but packet didn't have a port") return EventHalt originatorPort = None if lldph.tlvs[1].id.isdigit(): # We expect it to be a decimal value originatorPort = int(lldph.tlvs[1].id) elif len(lldph.tlvs[1].id) == 2: # Maybe it's a 16 bit port number... try: originatorPort = struct.unpack("!H", lldph.tlvs[1].id)[0] except: pass if originatorPort is None: log.warning("Thought we found a DPID, but port number didn't " + "make sense") return EventHalt if (event.dpid, event.port) == (originatorDPID, originatorPort): log.warning("Port received its own LLDP packet; ignoring") return EventHalt link = Discovery.Link(originatorDPID, originatorPort, event.dpid, event.port) if link not in self.adjacency: self.adjacency[link] = time.time() log.info('link detected: %s', link) self.raiseEventNoErrors(LinkEvent, True, link) else: # Just update timestamp self.adjacency[link] = time.time() return EventHalt # Probably nobody else needs this event
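The DPID recovery above accepts two encodings in the LLDP TLVs; a small sketch of how both forms round-trip (the sample DPID value is arbitrary):

import struct

dpid = 0x0000000000000042

# NOX-style text form carried in the system-description or chassis-ID TLV.
text_form = 'dpid:%x' % dpid
assert int(text_form[5:], 16) == dpid

# FlowVisor-style 8-byte big-endian payload.
packed_form = struct.pack("!Q", dpid)
assert struct.unpack("!Q", packed_form)[0] == dpid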