def decompress(data, level):
    """ Inflate a compressed payload, picking the codec from the flag bits in *level*.

        LZ4 and LZO are only used when the codec is both available and enabled,
        otherwise InvalidCompressionException is raised; any other level falls
        through to zlib. LZ4 payloads carry their uncompressed size in the
        header, which is checked against MAX_SIZE before inflating.
    """
    #log.info("decompress(%s bytes, %s) type=%s", len(data), get_compression_type(level))
    if level & LZ4_FLAG:
        if not has_lz4:
            raise InvalidCompressionException("lz4 is not available")
        if not use_lz4:
            raise InvalidCompressionException("lz4 is not enabled")
        #the first 4 bytes of the lz4 stream hold the uncompressed size:
        uncompressed_size = LZ4_HEADER.unpack_from(data[:4])[0]
        #TODO: it would be better to use the max_size we have in protocol,
        #but this hardcoded value will do for now
        if uncompressed_size > MAX_SIZE:
            raise Exception("uncompressed data is too large: %iMB, limit is %iMB" % (uncompressed_size // 1024 // 1024, MAX_SIZE // 1024 // 1024))
        return LZ4_uncompress(data)
    if level & LZO_FLAG:
        if not has_lzo:
            raise InvalidCompressionException("lzo is not available")
        if not use_lzo:
            raise InvalidCompressionException("lzo is not enabled")
        return LZO_decompress(data)
    #default codec: zlib
    if not use_zlib:
        raise InvalidCompressionException("zlib is not enabled")
    if isinstance(data, memoryview):
        #flatten the view to bytes before handing it to zlib:
        data = data.tobytes()
    return zlib.decompress(data)
def do_test_rountrip_method(compress, i_data, c_data=None):
    """ Round-trip check for an lz4 compression method.

        Compresses *i_data* with the given *compress* callable, optionally
        verifies the compressed form matches *c_data*, then decompresses it
        with LZ4_uncompress and asserts the original data comes back.
    """
    from lz4 import LZ4_uncompress          #@UnresolvedImport
    compressed = compress(i_data)
    if c_data is not None:
        assert c_data == compressed, "expected compressed data to look like %s, but got %s" % (hl(c_data), hl(compressed))
    restored = LZ4_uncompress(compressed)
    assert restored == i_data, "expected decompressed data to look like original %s, but got %s" % (hl(i_data), hl(restored))
def decompress(data, level):
    """ Inflate *data* with the codec selected by the flag bits in *level*.

        Uses lz4 or lzo when the matching flag is set and the codec is both
        available and enabled (raising InvalidCompressionException otherwise),
        and falls back to zlib for everything else.
    """
    #log.info("decompress(%s bytes, %s) type=%s", len(data), get_compression_type(level))
    if level & LZ4_FLAG:
        if not has_lz4:
            raise InvalidCompressionException("lz4 is not available")
        if not use_lz4:
            raise InvalidCompressionException("lz4 is not enabled")
        return LZ4_uncompress(data)
    if level & LZO_FLAG:
        if not has_lzo:
            raise InvalidCompressionException("lzo is not available")
        if not use_lzo:
            raise InvalidCompressionException("lzo is not enabled")
        return LZO_decompress(data)
    #default codec: zlib
    if not use_zlib:
        raise InvalidCompressionException("zlib is not enabled")
    return zlib.decompress(data)
def decompress(data, level):
    """ Inflate *data* with the codec selected by the flag bits in *level*.

        LZ4 and LZO payloads require the codec to be available and enabled,
        otherwise InvalidCompressionException is raised; anything else falls
        through to zlib. The LZ4 header's uncompressed size is validated
        against a hard 256MB limit before inflating.
    """
    #log.info("decompress(%s bytes, %s) type=%s", len(data), get_compression_type(level))
    if level & LZ4_FLAG:
        if not has_lz4:
            raise InvalidCompressionException("lz4 is not available")
        if not use_lz4:
            raise InvalidCompressionException("lz4 is not enabled")
        size = LZ4_HEADER.unpack_from(data[:4])[0]
        #TODO: it would be better to use the max_size we have in protocol,
        #but this hardcoded value will do for now
        #use an explicit raise rather than assert so the size limit
        #is still enforced when running with "python -O":
        if size > 256 * 1024 * 1024:
            raise Exception("uncompressed data is too large: %iMB, limit is 256MB" % (size // 1024 // 1024))
        return LZ4_uncompress(data)
    elif level & LZO_FLAG:
        if not has_lzo:
            raise InvalidCompressionException("lzo is not available")
        if not use_lzo:
            raise InvalidCompressionException("lzo is not enabled")
        return LZO_decompress(data)
    else:
        if not use_zlib:
            raise InvalidCompressionException("zlib is not enabled")
        return zlib.decompress(data)
def do_read_parse_thread_loop(self): """ Process the individual network packets placed in _read_queue. Concatenate the raw packet data, then try to parse it. Extract the individual packets from the potentially large buffer, saving the rest of the buffer for later, and optionally decompress this data and re-construct the one python-object-packet from potentially multiple packets (see packet_index). The 8 bytes packet header gives us information on the packet index, packet size and compression. The actual processing of the packet is done via the callback process_packet_cb, this will be called from this parsing thread so any calls that need to be made from the UI thread will need to use a callback (usually via 'idle_add') """ read_buffer = None payload_size = -1 padding = None packet_index = 0 compression_level = False raw_packets = {} while not self._closed: buf = self._read_queue.get() if not buf: log("read thread: empty marker, exiting") self.idle_add(self.close) return if read_buffer: read_buffer = read_buffer + buf else: read_buffer = buf bl = len(read_buffer) while not self._closed: bl = len(read_buffer) if bl <= 0: break if payload_size < 0: head = read_buffer[:8] if read_buffer[0] not in ("P", ord("P")): self._invalid_header(read_buffer) return if bl < 8: break #packet still too small #packet format: struct.pack('cBBBL', ...) - 8 bytes _, protocol_flags, compression_level, packet_index, data_size = unpack_header( head) #sanity check size (will often fail if not an xpra client): if data_size > self.abs_max_packet_size: self._invalid_header(read_buffer) return bl = len(read_buffer) - 8 if protocol_flags & Protocol.FLAGS_CIPHER: if self.cipher_in_block_size == 0 or not self.cipher_in_name: log.warn( "received cipher block but we don't have a cipher to decrypt it with, not an xpra client?" 
) self._invalid_header(read_buffer) return padding = (self.cipher_in_block_size - data_size % self.cipher_in_block_size) * " " payload_size = data_size + len(padding) else: #no cipher, no padding: padding = None payload_size = data_size assert payload_size > 0 read_buffer = read_buffer[8:] if payload_size > self.max_packet_size: #this packet is seemingly too big, but check again from the main UI thread #this gives 'set_max_packet_size' a chance to run from "hello" def check_packet_size(size_to_check, packet_header): if not self._closed: log("check_packet_size(%s, 0x%s) limit is %s", size_to_check, repr_ellipsized(packet_header), self.max_packet_size) if size_to_check > self.max_packet_size: self._call_connection_lost( "invalid packet: size requested is %s (maximum allowed is %s - packet header: 0x%s), dropping this connection!" % (size_to_check, self.max_packet_size, repr_ellipsized(packet_header))) return False self.timeout_add(1000, check_packet_size, payload_size, read_buffer[:32]) if bl < payload_size: # incomplete packet, wait for the rest to arrive break #chop this packet from the buffer: if len(read_buffer) == payload_size: raw_string = read_buffer read_buffer = '' else: raw_string = read_buffer[:payload_size] read_buffer = read_buffer[payload_size:] #decrypt if needed: data = raw_string if self.cipher_in and protocol_flags & Protocol.FLAGS_CIPHER: log("received %s encrypted bytes with %s padding", payload_size, len(padding)) data = self.cipher_in.decrypt(raw_string) if padding: def debug_str(s): try: return list(bytearray(s)) except: return list(str(s)) if not data.endswith(padding): log( "decryption failed: string does not end with '%s': %s (%s) -> %s (%s)", padding, debug_str(raw_string), type(raw_string), debug_str(data), type(data)) self._connection_lost( "encryption error (wrong key?)") return data = data[:-len(padding)] #uncompress if needed: if compression_level > 0: try: if compression_level & LZ4_FLAG: assert use_lz4 data = LZ4_uncompress(data) 
else: data = decompress(data) except Exception, e: if self.cipher_in: return self._call_connection_lost( "decompression failed (invalid encryption key?): %s" % e) return self._call_connection_lost( "decompression failed: %s" % e) if self.cipher_in and not (protocol_flags & Protocol.FLAGS_CIPHER): return self._call_connection_lost( "unencrypted packet dropped: %s" % repr_ellipsized(data)) if self._closed: return if packet_index > 0: #raw packet, store it and continue: raw_packets[packet_index] = data payload_size = -1 packet_index = 0 if len(raw_packets) >= 4: return self._call_connection_lost( "too many raw packets: %s" % len(raw_packets)) continue #final packet (packet_index==0), decode it: try: if protocol_flags & Protocol.FLAGS_RENCODE: assert has_rencode, "we don't support rencode mode but the other end sent us a rencoded packet! not an xpra client?" packet = list(rencode_loads(data)) else: #if sys.version>='3': # data = data.decode("latin1") packet, l = bdecode(data) assert l == len(data) except ValueError, e: log.error("value error reading packet: %s", e, exc_info=True) if self._closed: return log("failed to parse packet: %s", binascii.hexlify(data)) msg = "gibberish received: %s, packet index=%s, packet size=%s, buffer size=%s, error=%s" % ( repr_ellipsized(data), packet_index, payload_size, bl, e) self.gibberish(msg, data) return if self._closed: return payload_size = -1 padding = None #add any raw packets back into it: if raw_packets: for index, raw_data in raw_packets.items(): #replace placeholder with the raw_data packet data: packet[index] = raw_data raw_packets = {} packet_type = packet[0] if self.receive_aliases and type( packet_type ) == int and packet_type in self.receive_aliases: packet_type = self.receive_aliases.get(packet_type) packet[0] = packet_type self.input_stats[packet_type] = self.output_stats.get( packet_type, 0) + 1 self.input_packetcount += 1 log("processing packet %s", packet_type) self._process_packet_cb(self, packet)