def __decodeCustomType(self, itemType):
    """Decode one custom-type object from the current input stream.

    Reads a varint presence bitmask, then the values of the fields whose
    bits are set, in field-declaration order, and assigns them onto a
    fresh instance of the registered class.
    """
    cls = self.customTypes.get(itemType, None)
    obj = cls()
    stream = self.__stream
    # Bitmask: bit i set -> the i-th declared field was serialized.
    presence = varint.decode_stream(stream)
    bit = 1
    for name, field in cls.fields.iteritems():
        if presence & bit:
            ftype = field.type
            if ftype == FieldTypes.VARINT:
                value = varint.decode_stream(stream)
            elif ftype == FieldTypes.APPLY_AREA_ENUM:
                value = varint.decode_stream(stream)
            elif ftype & FieldTypes.TYPED_ARRAY:
                # Strip the array flag to recover the element type.
                value = self.__decodeArray(ftype ^ FieldTypes.TYPED_ARRAY)
            elif ftype >= FieldTypes.CUSTOM_TYPE_OFFSET:
                value = self.__decodeCustomType(
                    ftype / FieldTypes.CUSTOM_TYPE_OFFSET)
            else:
                raise SerializationException('Unsupported field type index')
            # Deprecated fields are only kept when the class still
            # declares the attribute.
            if not field.deprecated or hasattr(obj, name):
                setattr(obj, name, value)
        bit <<= 1
    return obj
def __decodeCustomType(self, itemType, path=None, wanted=None):
    """Decode one custom-type object, optionally searching for a value.

    When ``wanted`` is None this behaves as a plain decoder and returns a
    populated instance of the registered class.  When ``wanted`` is given,
    no object is built; instead the stream is walked and
    FoundItemException is raised as soon as ``wanted`` is seen at the
    field path described by ``path``.

    NOTE(review): ``path`` is indexed as ``path[0]``/``path[1]``, so it is
    assumed to be a sequence of at least two elements (field name, then
    sub-path or None) — confirm against callers such as hasItem().
    """
    cls = self.customTypes.get(itemType, None)
    if wanted is None:
        obj = cls()
    else:
        # Search mode: we never materialize the decoded object.
        obj = None
    fields = cls.fields
    io = self.__stream
    # Bitmask: bit i set -> the i-th declared field was serialized.
    valueMap = varint.decode_stream(io)
    offset = 1
    for k, t in fields.iteritems():
        # xmlOnly fields are skipped before the bit shift, i.e. they are
        # assumed to occupy no bit in the serialized presence mask.
        if t.xmlOnly:
            continue
        # Sub-path to pass down when this field matches the search path.
        next = None if not path or path[0] != k else path[1]
        if valueMap & offset:
            ftype = t.type
            if ftype == FieldTypes.VARINT:
                value = varint.decode_stream(io)
            elif ftype == FieldTypes.APPLY_AREA_ENUM:
                value = varint.decode_stream(io)
            elif ftype & FieldTypes.TYPED_ARRAY:
                # Strip the array flag to recover the element type.
                value = self.__decodeArray(ftype ^ FieldTypes.TYPED_ARRAY, k, path, next, wanted)
            elif ftype >= FieldTypes.CUSTOM_TYPE_OFFSET:
                value = self.__decodeCustomType(ftype / FieldTypes.CUSTOM_TYPE_OFFSET, next, wanted)
            else:
                raise SerializationException('Unsupported field type index')
            # In decode mode, skip deprecated fields the class no longer
            # declares; in search mode (obj is None) always fall through.
            if not t.deprecated or hasattr(obj, k) or obj is None:
                if wanted is None:
                    setattr(obj, k, value)
                elif path and path[1] is None and path[0] == k and value == wanted:
                    # Leaf of the search path holds the wanted value.
                    raise FoundItemException()
        offset <<= 1
    return obj
def __decodeArray(self, itemType):
    """Decode a length-prefixed array of elements of ``itemType``."""
    count = varint.decode_stream(self.__stream)
    if itemType == FieldTypes.VARINT:
        values = []
        for _ in xrange(count):
            values.append(varint.decode_stream(self.__stream))
        return values
    if itemType >= FieldTypes.CUSTOM_TYPE_OFFSET:
        elementType = itemType / FieldTypes.CUSTOM_TYPE_OFFSET
        return [self.__decodeCustomType(elementType) for _ in xrange(count)]
    raise SerializationException('Unsupported item type')
def ParseLogFile(fLog):
    """Parse a LevelDB write-ahead log (.log) file and dump its records.

    Each block starts with a 7-byte header: 4-byte CRC (not verified
    here), a 2-byte little-endian payload size, and a 1-byte block type.
    The payload begins with an 8-byte sequence number and a 4-byte record
    count, followed by ``count`` key/value records.

    Returns a dict mapping key bytes to ``[state, sequence, value-bytes]``,
    or None when the file cannot be read.
    """
    try:
        with open(fLog, "rb") as f:
            logBytes = f.read()
    except Exception as e:
        print("[E] Can't read file " + fLog + " (" + str(e) + ")")
        return None
    print('Dump file: ' + fLog)
    print('')
    idx = 0
    kvPair = dict()
    while idx < len(logBytes):
        # 2-byte little-endian payload size at offsets idx+4..idx+5.
        size = (logBytes[idx + 5] << 8) + logBytes[idx + 4]
        # BUG FIX: the original read logBytes[6] — the first block's type
        # byte — for every block; the type byte lives at idx + 6.
        blockType = logBytes[idx + 6]
        blockBytes = logBytes[idx + 7:idx + 7 + size]
        seq = int.from_bytes(blockBytes[:8], 'little')
        count = int.from_bytes(blockBytes[8:(8 + 4)], 'little')
        print('Log Block at ' + str(idx))
        print(' TYPE = ' + str(blockType) + ', SEQ = ' + str(seq) + ', COUNT = ' + str(count))
        stream = io.BytesIO(blockBytes[12:])
        for _ in range(count):
            # Record state byte: 1 means a live key with a value,
            # anything else is treated as a deletion (no value follows).
            st = stream.read(1)[0]
            keyLen = varint.decode_stream(stream)
            keyBytes = stream.read(keyLen)
            if st == 1:
                valLen = varint.decode_stream(stream)
                valBytes = stream.read(valLen)
            else:
                valBytes = b''
            kvPair[keyBytes] = [st, seq, valBytes]
            # Each record consumes one sequence number.
            seq = seq + 1
            if st == 1:
                print(' [O] KEY = ' + keyBytes.decode('utf-8'))
            else:
                print(' [X] KEY = ' + keyBytes.decode('utf-8'))
            if len(valBytes) > 0:
                # Leading 0x01 marks a UTF-8 value; otherwise dump hex.
                if valBytes[0] == 1:
                    print(' VAL = ' + valBytes[1:].decode('utf-8'))
                else:
                    print(' VAL = ' + ''.join('{:02x}'.format(x) for x in valBytes))
            else:
                print(' VAL = <None>')
        idx = idx + 7 + size
        print('')
    return kvPair
def __decodeArray(self, itemType, k, path, next, wanted):
    """Decode a length-prefixed array, optionally searching for ``wanted``.

    ``k`` is the field name this array belongs to, ``path``/``next`` the
    current and remaining search path, and ``wanted`` the value searched
    for (None means plain decoding).  Raises FoundItemException when the
    wanted value is found in a varint array at the end of the path.
    """
    n = varint.decode_stream(self.__stream)
    if itemType == FieldTypes.VARINT:
        array = [varint.decode_stream(self.__stream) for _ in xrange(n)]
        # Search mode: the path terminates at this field and the array
        # holds the wanted scalar.
        if path and path[1] is None and path[0] == k and wanted in array:
            raise FoundItemException()
        return array
    elif itemType >= FieldTypes.CUSTOM_TYPE_OFFSET:
        customType = itemType / FieldTypes.CUSTOM_TYPE_OFFSET
        return [self.__decodeCustomType(customType, next, wanted) for _ in xrange(n)]
    else:
        raise SerializationException('Unsupported item type')
    # Fix: removed the unreachable bare ``return`` that followed the raise.
def varint_decoder(buffer: bytes) -> List[int]:
    """Decode every varint packed back-to-back in *buffer*.

    Raises TypeError when *buffer* is not a bytes object.
    """
    if not isinstance(buffer, bytes):
        raise TypeError('buffer must be a bytes object, not {}'.format(
            type(buffer)))
    stream = io.BytesIO(buffer)
    total = len(buffer)
    values = []
    # decode_stream advances the cursor one varint at a time; stop once
    # every byte has been consumed.
    while stream.tell() < total:
        values.append(varint.decode_stream(stream))
    return values
def decode(self, data):
    """Deserialize *data* into an object of its embedded custom type.

    Raises SerializationException when the stream ends prematurely.
    """
    self.__stream = StringIO(data)
    try:
        # The payload starts with the varint code of the root type.
        typeCode = varint.decode_stream(self.__stream)
        result = self.__decodeCustomType(typeCode)
    except EOFError:
        raise SerializationException('Cannot parse given stream')
    return result
def read(self):
    """Read one length-prefixed record from ``self.stream``.

    Returns the payload bytes, or None when reading the payload fails
    (the error is reported to stderr).
    """
    length = varint.decode_stream(self.stream)
    data = None  # Fix: was unbound (NameError) when read() raised.
    try:
        data = self.stream.read(length)
    except Exception as e:
        # Fix: the original called print(e, sys.stderr), which printed the
        # stderr object itself to stdout instead of routing the message.
        print(e, file=sys.stderr)
    return data
def DumpBlock(title, subtitle, kvp, ldbBytes):
    """Print an index block's entries, then parse and dump each block it points to.

    ``kvp`` maps key bytes to ``[state, sequence, value-bytes]`` entries
    (as produced by ParseBlock); each value encodes a varint location and
    size of a child block inside ``ldbBytes``.  Returns a dict merging the
    key/value pairs of all child blocks.

    NOTE(review): IDX_VALUE / IDX_KEY_ST / IDX_KEY_SEQ are module-level
    constants defined elsewhere in this file — assumed to be 2, 0, 1
    respectively; confirm.
    """
    print(title + ':')
    kvPair = dict()
    # First pass: list the index entries and the block handles they carry.
    for k in kvp:
        streamVal = io.BytesIO(kvp[k][IDX_VALUE])
        # Block handle = varint location followed by varint size.
        loc = varint.decode_stream(streamVal)
        size = varint.decode_stream(streamVal)
        if kvp[k][IDX_KEY_ST] == 1:
            print(" [O] KEY = " + k.decode('utf-8'))
        else:
            print(" [X] KEY = " + k.decode('utf-8'))
        print(" LOC = " + str(loc) + ", SIZE = " + str(size) + ", SEQ=" + str(kvp[k][IDX_KEY_SEQ]))
    print('')
    # Second pass: re-decode each handle, parse the referenced block and
    # dump its contents.
    for k in kvp:
        streamVal = io.BytesIO(kvp[k][IDX_VALUE])
        loc = varint.decode_stream(streamVal)
        size = varint.decode_stream(streamVal)
        print(subtitle + ' at ' + str(loc))
        # Byte right after the block is its compression flag; the next
        # four bytes are its checksum.
        kvpBlock = ParseBlock(ldbBytes[loc:(loc + size)], ldbBytes[loc + size], ldbBytes[loc + size:loc + size + 4])
        for blkKey in kvpBlock:
            kvPair[blkKey] = kvpBlock[blkKey]
            if kvpBlock[blkKey][IDX_KEY_ST] == 1:
                print(' [O] KEY = ' + blkKey.decode('utf-8'))
            else:
                print(' [X] KEY = ' + blkKey.decode('utf-8'))
            if len(kvpBlock[blkKey][IDX_VALUE]) > 0:
                # Leading 0x01 marks a UTF-8 value; otherwise dump hex.
                if kvpBlock[blkKey][IDX_VALUE][0] == 1:
                    print(' VAL = ' + kvpBlock[blkKey][IDX_VALUE][1:].decode('utf-8'))
                else:
                    print(' VAL = ' + ''.join('{:02x}'.format(x) for x in kvpBlock[blkKey][IDX_VALUE]))
            else:
                print(' VAL = <None>')
            print(' SEQ = ' + str(kvpBlock[blkKey][IDX_KEY_SEQ]))
        print('')
    return kvPair
def decode(multihash):
    """
    Decode a hash from the given multihash

    :param bytes multihash: multihash
    :return: decoded :py:class:`multihash.Multihash` object
    :rtype: :py:class:`multihash.Multihash`
    :raises TypeError: if `multihash` is not of type `bytes`
    :raises ValueError: if the length of multihash is less than 3 characters
    :raises ValueError: if the code is invalid
    :raises ValueError: if the length is invalid
    :raises ValueError: if the length is not same as the digest
    """
    if not isinstance(multihash, bytes):
        # Fix: the original passed the format string and the type as two
        # positional TypeError args without calling .format(), producing a
        # message that literally contained '{}'.
        raise TypeError(
            'multihash should be bytes, not {}'.format(type(multihash)))
    if len(multihash) < 3:
        raise ValueError('multihash must be greater than 3 bytes.')
    buffer = BytesIO(multihash)
    try:
        code = varint.decode_stream(buffer)
    except TypeError:
        raise ValueError('Invalid varint provided')
    if not is_valid_code(code):
        raise ValueError('Unsupported hash code {}'.format(code))
    try:
        length = varint.decode_stream(buffer)
    except TypeError:
        raise ValueError('Invalid length provided')
    buf = buffer.read()
    if len(buf) != length:
        raise ValueError('Inconsistent multihash length {} != {}'.format(
            len(buf), length))
    return Multihash(code=code, name=constants.CODE_HASHES.get(code, code),
                     length=length, digest=buf)
def hasItem(self, data, path, value):
    """Return True when *value* occurs at field path *path* inside the
    serialized blob *data*, without fully materializing the object.

    Raises SerializationException when the stream cannot be parsed.
    """
    self.__stream = StringIO(data)
    try:
        typeCode = varint.decode_stream(self.__stream)
        # Decoding in search mode signals a hit via FoundItemException.
        self.__decodeCustomType(typeCode, path, value)
    except EOFError:
        raise SerializationException('Cannot parse given stream')
    except FoundItemException:
        return True
    return False
def decode(self, data):
    """Decompress one length-prefixed message and return it as text.

    *data* starts with a varint giving the expected decompressed size;
    the remainder is compressed payload that is appended to any tail left
    over from the previous call before being fed to the decompressor.
    """
    # Decode the varuint prefix off the data first, then smash the remaining
    # data into the decode buffer and reset it to read any previous tail.
    prefix_stream = io.BytesIO(data)
    decoded_bytes = varint.decode_stream(prefix_stream)
    # Append everything after the prefix, then rewind so decompress()
    # consumes tail + new data from the start.
    self._decoder_buffer.write(data[prefix_stream.tell():])
    self._decoder_buffer.seek(0)
    # NOTE(review): decompress(buf, max_length) / unconsumed_tail matches
    # the zlib decompressobj API — confirm self._decoder is one.
    decoded_data = self._decoder.decompress(
        self._decoder_buffer.getbuffer(), decoded_bytes)
    # Keep only the unconsumed remainder for the next call.
    reset_buffer(self._decoder_buffer, self._decoder.unconsumed_tail)
    return decoded_data.decode('utf-8')
def ParseBlock(blockBytes, compressed, crcBytes):
    """Parse one LevelDB table block into a dict of key -> [state, seq, value].

    ``compressed`` == 1 means the block is snappy-compressed.  ``crcBytes``
    is accepted for symmetry with the callers but is not verified here.
    Parsing is best-effort: on any error the entries decoded so far are
    returned.
    """
    if compressed == 1:
        blockBytes = snappy.uncompress(blockBytes)
    kvPair = dict()
    try:
        # The block ends with a restart array: N 4-byte offsets followed
        # by a 1-byte count; strip it before reading records.
        numRestarts = blockBytes[-1]
        stream2 = io.BytesIO(blockBytes[:-1 * (1 + 4 * numRestarts)])
        bContinue = True
        curKey = ''
        while (bContinue):
            # Record header: shared key prefix length, inline (unshared)
            # key length, value length — all varints.
            sharedKeyLen = varint.decode_stream(stream2)
            inlineKeyLen = varint.decode_stream(stream2)
            valueLen = varint.decode_stream(stream2)
            inlineKey = stream2.read(inlineKeyLen)
            valData = stream2.read(valueLen)
            if len(inlineKey) >= 8:
                # The internal key carries an 8-byte trailer: 1 state/type
                # byte followed by a 7-byte little-endian sequence number.
                keyName = inlineKey[:-8]
                keySequence = int.from_bytes(inlineKey[-7:], 'little')
                keySt = inlineKey[-8]
                if sharedKeyLen != 0:
                    # Rebuild the full key from the previous key's prefix.
                    curKey = curKey[:sharedKeyLen] + keyName
                else:
                    curKey = keyName
                kvPair[curKey] = [keySt, keySequence, valData]
                # Max 56-bit sequence number marks the end sentinel.
                if (keySequence == 0xffffffffffffff):
                    bContinue = False
            # An empty record means we ran off the end of the data.
            if inlineKeyLen == 0 and valueLen == 0:
                bContinue = False
    except Exception as e:
        # Best-effort: report and return whatever was decoded.
        print("ParseBlock exception: " + str(e))
    return kvPair
def ParseLdbFile(fLdb):
    """Parse a LevelDB .ldb (SSTable) file and dump its index and data blocks.

    Returns the merged key/value dict from the data blocks, or None when
    the file cannot be read or lacks the LevelDB footer magic.
    """
    try:
        with open(fLdb, "rb") as f:
            raw = f.read()
    except Exception as e:
        print("[E] Can't read file " + fLdb + " (" + str(e) + ")")
        return None
    # The last 8 bytes of a valid table file are the footer magic.
    if raw[-8:] != LDB_FOOTER_BYTES:
        print("[E] Not a valid LDB file: can't find footer!")
        return None
    print('Dump file: ' + fLdb)
    print('')
    # The footer holds two varint block handles (location, size) for the
    # meta-index block and the index block.
    footer = io.BytesIO(raw[-48:])
    metaLoc = varint.decode_stream(footer)
    metaSize = varint.decode_stream(footer)
    idxLoc = varint.decode_stream(footer)
    idxSize = varint.decode_stream(footer)
    metaKvp = ParseBlock(
        raw[metaLoc:metaLoc + metaSize],
        raw[metaLoc + metaSize],
        raw[metaLoc + metaSize:metaLoc + metaSize + 4])
    DumpBlock('Meta Index Block', 'Meta Block', metaKvp, raw)
    idxKvp = ParseBlock(
        raw[idxLoc:idxLoc + idxSize],
        raw[idxLoc + idxSize],
        raw[idxLoc + idxSize:idxLoc + idxSize + 4])
    return DumpBlock('Index Block', 'Data Block', idxKvp, raw)
def bytes_iter(buf):
    """Yield ``(offset, protocol, codec, addr_bytes)`` for each component
    packed in the binary multiaddr *buf*.

    Raises BinaryParseError when a component's protocol code is unknown
    or its codec cannot be loaded.
    """
    stream = io.BytesIO(buf)
    total = len(buf)
    while stream.tell() < total:
        start = stream.tell()
        code = varint.decode_stream(stream)
        proto = None
        try:
            proto = protocol_with_code(code)
            codec = codec_by_name(proto.codec)
        except (ImportError, exceptions.ProtocolNotFoundError) as exc:
            raise exceptions.BinaryParseError(
                "Unknown Protocol",
                buf,
                proto.name if proto else code,
            ) from exc
        payload = size_for_addr(codec, stream)
        yield start, proto, codec, stream.read(payload)
# Top-level probe script: open a TLS connection to an MCS endpoint, send a
# login payload ``x`` and print the protobuf frames the server returns.
# NOTE(review): ssl.wrap_socket() validates no certificates and was removed
# in Python 3.12 — consider ssl.create_default_context().wrap_socket();
# confirm before relying on this for anything security-sensitive.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s = ssl.wrap_socket(s)
s.connect((HOST, PORT))
# Handshake prefix: presumably protocol version (41) and login tag (2) —
# TODO confirm against the MCS wire format.
s.send(bytes([41]))
s.send(bytes([2]))
# The payload is varint-length-prefixed.
s.send(varint.encode(len(x)))
s.send(x)
print("reading")
version = s.recv(1)
while True:
    responseTag = s.recv(1)
    # Only handle LoginResponse (3), IqStanza (7) and DataMessageStanza (8)
    # frames; everything else is ignored.
    if responseTag in [b'\x03', b'\x07', b'\x08']:
        # decode_stream reads from the SSL socket directly (SSLSocket
        # exposes a read() method).
        length = varint.decode_stream(s)
        msg = s.recv(length)
        if responseTag == b'\x03':
            lresp = mcs_pb2.LoginResponse()
            lresp.ParseFromString(msg)
            print("RECV LOGIN RESP")
            print(lresp)
        elif responseTag == b'\x07':
            iqs = mcs_pb2.IqStanza()
            iqs.ParseFromString(msg)
            print("RECV IQ")
            print(iqs)
        elif responseTag == b'\x08':
            dms = mcs_pb2.DataMessageStanza()
            dms.ParseFromString(msg)
def read_varint(self):
    """Decode and return one varint read from this object.

    NOTE(review): passes ``self`` as the stream, so the enclosing class is
    assumed to expose a file-like ``read()`` method — confirm.
    """
    return varint.decode_stream(self)
def receive_verification_from_gcm(self, retry=True):
    """Block on the GCM socket and return the last verification payload.

    Loops reading MCS frames until the socket times out, remembering the
    most recent matching verification payload, then parses it as JSON.
    On any other socket error the connection is torn down and, when
    *retry* is True, one reconnect attempt is made.

    :raises GcmException: when no verification payload was received.
    """
    # Return the last verification_code that we receive.
    # Note: We cannot return on the first verification_code because the server
    # sometimes sends the same code twice.
    self._establish_connection()
    verification_data = None
    try:
        while True:
            # Sometimes the server sends a response_tag and length but doesn't
            # send the actual content, so we need to remember them and read
            # just the content on the next call.
            if not self.responseTag:
                self.responseTag = ord(self._rcv_exact(1))
                self.length = varint.decode_stream(self.sock)
            msg = self._rcv_exact(self.length)
            self.counter += 1
            if self.responseTag == 3:
                pass  # login response — nothing to do
            elif self.responseTag == 4:
                raise Exception("socket closed by server")
            elif self.responseTag == 8:
                # DataMessageStanza: scan app_data for the message type and
                # payload key/value pairs.
                dms = mcs_pb2.DataMessageStanza()
                dms.ParseFromString(msg)
                message_type, data = "", None
                for app_data in dms.app_data:
                    if app_data.key == "message_type_id":
                        message_type = app_data.value
                    elif app_data.key == "payload":
                        data = app_data.value
                # Only messages from this app with type "16" carry the
                # verification payload.
                if dms.category == "com.tellm.android.app" and message_type == "16":
                    verification_data = data
            # Frame fully consumed; clear the remembered header.
            self.responseTag, self.length = 0, 0
    except socket.timeout:
        # Normal exit path: no more frames arriving; keep the connection
        # alive for the next call.
        self._gcm_send_heartbeat()
    except Exception:
        # maybe the socket was closed because we timed out in between calls or
        # the connection was interrupted. We close the socket and try to reopen.
        try:
            self.sock.close()
        except:
            pass
        self.sock = None
        if retry:
            return self.receive_verification_from_gcm(False)
        else:
            raise
    try:
        d = json.loads(verification_data)
        return d
    except Exception as e:
        raise_from(GcmException("No verification_code received"), None)
def size_for_addr(codec, buf_io):
    """Return the byte length of *codec*'s address payload.

    Fixed-size codecs declare their size in bits via ``SIZE``; a negative
    ``SIZE`` means the length is varint-prefixed in the stream instead.
    """
    if codec.SIZE < 0:
        return varint.decode_stream(buf_io)
    return codec.SIZE // 8
def decode(self, data):
    """Deserialize *data* into an object of its embedded custom type.

    Raises SerializationException when the stream ends prematurely —
    made consistent with the other decode()/hasItem() implementations in
    this file, which already translate EOFError; previously a truncated
    payload leaked a raw EOFError to callers.
    """
    self.__stream = StringIO(data)
    try:
        # The payload starts with the varint code of the root type.
        code = varint.decode_stream(self.__stream)
        obj = self.__decodeCustomType(code)
    except EOFError:
        raise SerializationException('Cannot parse given stream')
    return obj