def test_roundtrip(self):
    for encode_flag in (FLAGS_BENCODE, FLAGS_RENCODE, FLAGS_YAML):
        for comp_flag in (ZLIB_FLAG, LZ4_FLAG, LZO_FLAG, BROTLI_FLAG):
            for cipher in (0, FLAGS_CIPHER):
                for level in (0, 1, 10):
                    for index in (0, 1, 255):
                        for size in (0, 1, 2**8, 2**16, 2**24):
                            proto_flags = encode_flag | comp_flag | cipher
                            header = pack_header(proto_flags, level, index, size)
                            assert header
                            uproto_flags, ulevel, uindex, usize = unpack_header(header)[1:]
                            assert uproto_flags == proto_flags
                            assert ulevel == level
                            assert uindex == index
                            assert usize == size
                            #truncated headers must fail to unpack:
                            for i in range(0, len(header) - 1):
                                try:
                                    unpack_header(header[:i])
                                except Exception:
                                    pass
                                else:
                                    raise Exception("header unpacking should have failed for size %i" % i)
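# The roundtrip test above exercises pack_header/unpack_header from xpra.net.header.
# For reference, here is a minimal sketch of the 8-byte header layout described in the
# parser docstrings below ('P' magic, protocol flags, compression level, packet index,
# payload size). The '!cBBBL' network-byte-order format is an assumption; the real
# implementation lives in xpra.net.header.
import struct

def sketch_pack_header(proto_flags, level, index, size):
    # 1 magic byte + 3 single-byte fields + 4-byte big-endian payload size = 8 bytes
    return struct.pack(b"!cBBBL", b"P", proto_flags, level, index, size)

def sketch_unpack_header(header):
    # returns (magic, proto_flags, level, index, size); raises struct.error if truncated
    return struct.unpack(b"!cBBBL", header)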
def guess_packet_type(data):
    if not data:
        return None
    if data[0] == ord("P"):
        from xpra.net.header import (
            unpack_header, HEADER_SIZE,
            FLAGS_RENCODE, FLAGS_YAML,
            LZ4_FLAG, BROTLI_FLAG,
            )
        header = data.ljust(HEADER_SIZE, b"\0")
        _, protocol_flags, compression_level, packet_index, data_size = unpack_header(header)
        #this is normally used on the first packet, so the packet index should be 0,
        #and I don't think we can make packets smaller than 8 bytes,
        #even with packet name aliases and rencode
        #(and aliases should not be defined for the initial packet anyway)
        if packet_index == 0 and 8 < data_size < 256 * 1024 * 1024:
            rencode = bool(protocol_flags & FLAGS_RENCODE)
            yaml = bool(protocol_flags & FLAGS_YAML)
            lz4 = bool(protocol_flags & LZ4_FLAG)
            brotli = bool(protocol_flags & BROTLI_FLAG)
            def is_xpra():
                compressors = sum((lz4, brotli))
                #only one compressor can be enabled:
                if compressors > 1:
                    return False
                if compressors == 1 and compression_level <= 0:
                    #if compression is enabled, the compression level must be set:
                    return False
                if rencode and yaml:
                    #rencode and yaml are mutually exclusive:
                    return False
                return True
            if is_xpra():
                return "xpra"
    if data[:4] == b"SSH-":
        return "ssh"
    if data[0] == 0x16:
        return "ssl"
    if data[:4] == b"RFB ":
        return "vnc"
    line1 = data.splitlines()[0]
    if line1.find(b"HTTP/") > 0 or line1.split(b" ")[0] in (b"GET", b"POST"):
        return "http"
    if line1.lower().startswith(b"<!doctype html") or line1.lower().startswith(b"<html"):
        return "http"
    return None
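# Hypothetical sanity checks for guess_packet_type() above, using well-known
# protocol banners; only the first few bytes of each stream are needed:
assert guess_packet_type(b"SSH-2.0-OpenSSH_8.9") == "ssh"
assert guess_packet_type(b"RFB 003.008\n") == "vnc"
assert guess_packet_type(b"\x16\x03\x01\x00\xc8") == "ssl"      #TLS record starts with 0x16
assert guess_packet_type(b"GET / HTTP/1.1\r\nHost: localhost\r\n") == "http"
assert guess_packet_type(b"") is None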
def do_read_parse_thread_loop(self):
    """
    Process the individual network packets placed in _read_queue.

    Concatenate the raw packet data, then try to parse it.
    Extract the individual packets from the potentially large buffer,
    saving the rest of the buffer for later, and optionally decompress this data
    and re-construct the one python-object-packet from potentially multiple packets (see packet_index).
    The 8-byte packet header gives us information on the packet index,
    packet size and compression.
    The actual processing of the packet is done via the callback process_packet_cb;
    this will be called from this parsing thread, so any calls that need to be made
    from the UI thread will need to use a callback (usually via 'idle_add')
    """
    read_buffer = None
    payload_size = -1
    padding_size = 0
    packet_index = 0
    compression_level = False
    packet = None
    raw_packets = {}
    while not self._closed:
        buf = self._read_queue.get()
        if not buf:
            log("parse thread: empty marker, exiting")
            self.idle_add(self.close)
            return
        if read_buffer:
            read_buffer = read_buffer + buf
        else:
            read_buffer = buf
        bl = len(read_buffer)
        while not self._closed:
            packet = None
            bl = len(read_buffer)
            if bl <= 0:
                break
            if payload_size < 0:
                if read_buffer[0] not in ("P", ord("P")):
                    self._invalid_header(read_buffer)
                    return
                if bl < 8:
                    break   #packet still too small
                #packet format: struct.pack('cBBBL', ...) - 8 bytes
                _, protocol_flags, compression_level, packet_index, data_size = unpack_header(read_buffer[:8])
                #sanity check size (will often fail if not an xpra client):
                if data_size > self.abs_max_packet_size:
                    self._invalid_header(read_buffer)
                    return
                bl = len(read_buffer) - 8
                if protocol_flags & FLAGS_CIPHER:
                    if self.cipher_in_block_size == 0 or not self.cipher_in_name:
                        cryptolog.warn("received cipher block but we don't have a cipher to decrypt it with, not an xpra client?")
                        self._invalid_header(read_buffer)
                        return
                    padding_size = self.cipher_in_block_size - (data_size % self.cipher_in_block_size)
                    payload_size = data_size + padding_size
                else:
                    #no cipher, no padding:
                    padding_size = 0
                    payload_size = data_size
                assert payload_size > 0, "invalid payload size: %i" % payload_size
                read_buffer = read_buffer[8:]
                if payload_size > self.max_packet_size:
                    #this packet is seemingly too big, but check again from the main UI thread
                    #this gives 'set_max_packet_size' a chance to run from "hello"
                    def check_packet_size(size_to_check, packet_header):
                        if self._closed:
                            return False
                        log("check_packet_size(%s, 0x%s) limit is %s",
                            size_to_check, repr_ellipsized(packet_header), self.max_packet_size)
                        if size_to_check > self.max_packet_size:
                            msg = "packet size requested is %s but maximum allowed is %s" % \
                                  (size_to_check, self.max_packet_size)
                            self.invalid(msg, packet_header)
                        return False
                    self.timeout_add(1000, check_packet_size, payload_size, read_buffer[:32])

            if bl < payload_size:
                # incomplete packet, wait for the rest to arrive
                break

            #chop this packet from the buffer:
            if len(read_buffer) == payload_size:
                raw_string = read_buffer
                read_buffer = ''
            else:
                raw_string = read_buffer[:payload_size]
                read_buffer = read_buffer[payload_size:]
            #decrypt if needed:
            data = raw_string
            if self.cipher_in and protocol_flags & FLAGS_CIPHER:
                cryptolog("received %i %s encrypted bytes with %s padding",
                          payload_size, self.cipher_in_name, padding_size)
                data = self.cipher_in.decrypt(raw_string)
                if padding_size > 0:
                    def debug_str(s):
                        try:
                            return binascii.hexlify(bytearray(s))
                        except:
                            return csv(list(str(s)))
                    # pad byte value is number of padding bytes added
                    padtext = pad(self.cipher_in_padding, padding_size)
                    if data.endswith(padtext):
                        cryptolog("found %s %s padding", self.cipher_in_padding, self.cipher_in_name)
                    else:
                        actual_padding = data[-padding_size:]
                        cryptolog.warn("Warning: %s decryption failed: invalid padding", self.cipher_in_name)
                        cryptolog(" data does not end with %s padding bytes %s",
                                  self.cipher_in_padding, debug_str(padtext))
                        cryptolog(" but with %s (%s)", debug_str(actual_padding), type(data))
                        cryptolog(" decrypted data: %s", debug_str(data[:128]))
                        return self._internal_error("%s encryption padding error - wrong key?" % self.cipher_in_name)
                    data = data[:-padding_size]
            #uncompress if needed:
            if compression_level > 0:
                try:
                    data = decompress(data, compression_level)
                except InvalidCompressionException as e:
                    self.invalid("invalid compression: %s" % e, data)
                    return
                except Exception as e:
                    ctype = compression.get_compression_type(compression_level)
                    log("%s packet decompression failed", ctype, exc_info=True)
                    msg = "%s packet decompression failed" % ctype
                    if self.cipher_in:
                        msg += " (invalid encryption key?)"
                    else:
                        #only include the exception text when not using encryption
                        #as this may leak crypto information:
                        msg += " %s" % e
                    return self.gibberish(msg, data)

            if self.cipher_in and not (protocol_flags & FLAGS_CIPHER):
                self.invalid("unencrypted packet dropped", data)
                return

            if self._closed:
                return

            if packet_index > 0:
                #raw packet, store it and continue:
                raw_packets[packet_index] = data
                payload_size = -1
                packet_index = 0
                if len(raw_packets) >= 4:
                    self.invalid("too many raw packets: %s" % len(raw_packets), data)
                    return
                continue
            #final packet (packet_index==0), decode it:
            try:
                packet = decode(data, protocol_flags)
            except InvalidPacketEncodingException as e:
                self.invalid("invalid packet encoding: %s" % e, data)
                return
            except ValueError as e:
                etype = packet_encoding.get_packet_encoding_type(protocol_flags)
                log.error("Error parsing %s packet:", etype)
                log.error(" %s", e)
                if self._closed:
                    return
                log("failed to parse %s packet: %s", etype, binascii.hexlify(data[:128]))
                log(" %s", e)
                log(" data: %s", repr_ellipsized(data))
                log(" packet index=%i, packet size=%i, buffer size=%s", packet_index, payload_size, bl)
                self.gibberish("failed to parse %s packet" % etype, data)
                return

            if self._closed:
                return
            payload_size = -1
            padding_size = 0
            #add any raw packets back into it:
            if raw_packets:
                for index, raw_data in raw_packets.items():
                    #replace placeholder with the raw_data packet data:
                    packet[index] = raw_data
                raw_packets = {}

            packet_type = packet[0]
            if self.receive_aliases and type(packet_type) == int and packet_type in self.receive_aliases:
                packet_type = self.receive_aliases.get(packet_type)
                packet[0] = packet_type
            self.input_stats[packet_type] = self.input_stats.get(packet_type, 0) + 1
            self.input_packetcount += 1
            log("processing packet %s", packet_type)
            self._process_packet_cb(self, packet)
            packet = None
            INJECT_FAULT(self)
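# A minimal, self-contained sketch of the raw-packet reassembly performed by
# do_read_parse_thread_loop() above: chunks received with packet_index > 0 are
# kept verbatim in raw_packets, and once the final chunk (packet_index == 0)
# has been decoded, each stored chunk replaces the placeholder at its index.
# The names and the sample packet below are illustrative only.
def sketch_merge_raw_packets(decoded_packet, raw_packets):
    merged = list(decoded_packet)
    for index, raw_data in raw_packets.items():
        merged[index] = raw_data
    return merged

# e.g. a hypothetical "draw" packet whose pixel data was sent as a separate raw chunk:
# sketch_merge_raw_packets(["draw", 10, 0, 0, 640, 480, "rgb24", None], {7: b"\x00" * 640 * 480 * 3})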
def do_read_parse_thread_loop(self):
    """
    Process the individual network packets placed in _read_queue.

    Concatenate the raw packet data, then try to parse it.
    Extract the individual packets from the potentially large buffer,
    saving the rest of the buffer for later, and optionally decompress this data
    and re-construct the one python-object-packet from potentially multiple packets (see packet_index).
    The 8-byte packet header gives us information on the packet index,
    packet size and compression.
    The actual processing of the packet is done via the callback process_packet_cb;
    this will be called from this parsing thread, so any calls that need to be made
    from the UI thread will need to use a callback (usually via 'idle_add')
    """
    read_buffer = None
    payload_size = -1
    padding = None
    packet_index = 0
    compression_level = False
    packet = None
    raw_packets = {}
    while not self._closed:
        buf = self._read_queue.get()
        if not buf:
            log("read thread: empty marker, exiting")
            self.idle_add(self.close)
            return
        if read_buffer:
            read_buffer = read_buffer + buf
        else:
            read_buffer = buf
        bl = len(read_buffer)
        while not self._closed:
            packet = None
            bl = len(read_buffer)
            if bl <= 0:
                break
            if payload_size < 0:
                head = read_buffer[:8]
                if read_buffer[0] not in ("P", ord("P")):
                    self._invalid_header(read_buffer)
                    return
                if bl < 8:
                    break   #packet still too small
                #packet format: struct.pack('cBBBL', ...) - 8 bytes
                _, protocol_flags, compression_level, packet_index, data_size = unpack_header(head)
                #sanity check size (will often fail if not an xpra client):
                if data_size > self.abs_max_packet_size:
                    self._invalid_header(read_buffer)
                    return
                bl = len(read_buffer) - 8
                if protocol_flags & FLAGS_CIPHER:
                    if self.cipher_in_block_size == 0 or not self.cipher_in_name:
                        log.warn("received cipher block but we don't have a cipher to decrypt it with, not an xpra client?")
                        self._invalid_header(read_buffer)
                        return
                    padding = (self.cipher_in_block_size - data_size % self.cipher_in_block_size) * " "
                    payload_size = data_size + len(padding)
                else:
                    #no cipher, no padding:
                    padding = None
                    payload_size = data_size
                assert payload_size > 0
                read_buffer = read_buffer[8:]
                if payload_size > self.max_packet_size:
                    #this packet is seemingly too big, but check again from the main UI thread
                    #this gives 'set_max_packet_size' a chance to run from "hello"
                    def check_packet_size(size_to_check, packet_header):
                        if self._closed:
                            return False
                        log("check_packet_size(%s, 0x%s) limit is %s",
                            size_to_check, repr_ellipsized(packet_header), self.max_packet_size)
                        if size_to_check > self.max_packet_size:
                            msg = "packet size requested is %s but maximum allowed is %s" % \
                                  (size_to_check, self.max_packet_size)
                            self.invalid(msg, packet_header)
                        return False
                    self.timeout_add(1000, check_packet_size, payload_size, read_buffer[:32])

            if bl < payload_size:
                # incomplete packet, wait for the rest to arrive
                break

            #chop this packet from the buffer:
            if len(read_buffer) == payload_size:
                raw_string = read_buffer
                read_buffer = ''
            else:
                raw_string = read_buffer[:payload_size]
                read_buffer = read_buffer[payload_size:]
            #decrypt if needed:
            data = raw_string
            if self.cipher_in and protocol_flags & FLAGS_CIPHER:
                log("received %s encrypted bytes with %s padding", payload_size, len(padding))
                data = self.cipher_in.decrypt(raw_string)
                if padding:
                    def debug_str(s):
                        try:
                            return list(bytearray(s))
                        except:
                            return list(str(s))
                    if not data.endswith(padding):
                        log("decryption failed: string does not end with '%s': %s (%s) -> %s (%s)",
                            padding, debug_str(raw_string), type(raw_string), debug_str(data), type(data))
                        self._internal_error("encryption error (wrong key?)")
                        return
                    data = data[:-len(padding)]
            #uncompress if needed:
            if compression_level > 0:
                try:
                    data = decompress(data, compression_level)
                except InvalidCompressionException as e:
                    self.invalid("invalid compression: %s" % e, data)
                    return
                except Exception as e:
                    ctype = compression.get_compression_type(compression_level)
                    log("%s packet decompression failed", ctype, exc_info=True)
                    msg = "%s packet decompression failed" % ctype
                    if self.cipher_in:
                        msg += " (invalid encryption key?)"
                    else:
                        msg += " %s" % e
                    return self.gibberish(msg, data)

            if self.cipher_in and not (protocol_flags & FLAGS_CIPHER):
                self.invalid("unencrypted packet dropped", data)
                return

            if self._closed:
                return

            if packet_index > 0:
                #raw packet, store it and continue:
                raw_packets[packet_index] = data
                payload_size = -1
                packet_index = 0
                if len(raw_packets) >= 4:
                    self.invalid("too many raw packets: %s" % len(raw_packets), data)
                    return
                continue
            #final packet (packet_index==0), decode it:
            try:
                packet = decode(data, protocol_flags)
            except InvalidPacketEncodingException as e:
                self.invalid("invalid packet encoding: %s" % e, data)
                return
def do_read_parse_thread_loop(self):
    """
    Process the individual network packets placed in _read_queue.

    Concatenate the raw packet data, then try to parse it.
    Extract the individual packets from the potentially large buffer,
    saving the rest of the buffer for later, and optionally decompress this data
    and re-construct the one python-object-packet from potentially multiple packets (see packet_index).
    The 8-byte packet header gives us information on the packet index,
    packet size and compression.
    The actual processing of the packet is done via the callback process_packet_cb;
    this will be called from this parsing thread, so any calls that need to be made
    from the UI thread will need to use a callback (usually via 'idle_add')
    """
    read_buffer = None
    payload_size = -1
    padding = None
    packet_index = 0
    compression_level = False
    packet = None
    raw_packets = {}
    while not self._closed:
        buf = self._read_queue.get()
        if not buf:
            log("read thread: empty marker, exiting")
            self.idle_add(self.close)
            return
        if read_buffer:
            read_buffer = read_buffer + buf
        else:
            read_buffer = buf
        bl = len(read_buffer)
        while not self._closed:
            packet = None
            bl = len(read_buffer)
            if bl <= 0:
                break
            if payload_size < 0:
                if read_buffer[0] not in ("P", ord("P")):
                    self._invalid_header(read_buffer)
                    return
                if bl < 8:
                    break  # packet still too small
                # packet format: struct.pack('cBBBL', ...) - 8 bytes
                _, protocol_flags, compression_level, packet_index, data_size = unpack_header(read_buffer[:8])
                # sanity check size (will often fail if not an xpra client):
                if data_size > self.abs_max_packet_size:
                    self._invalid_header(read_buffer)
                    return
                bl = len(read_buffer) - 8
                if protocol_flags & FLAGS_CIPHER:
                    if self.cipher_in_block_size == 0 or not self.cipher_in_name:
                        log.warn(
                            "received cipher block but we don't have a cipher to decrypt it with, not an xpra client?"
                        )
                        self._invalid_header(read_buffer)
                        return
                    padding = (self.cipher_in_block_size - data_size % self.cipher_in_block_size) * " "
                    payload_size = data_size + len(padding)
                else:
                    # no cipher, no padding:
                    padding = None
                    payload_size = data_size
                assert payload_size > 0
                read_buffer = read_buffer[8:]
                if payload_size > self.max_packet_size:
                    # this packet is seemingly too big, but check again from the main UI thread
                    # this gives 'set_max_packet_size' a chance to run from "hello"
                    def check_packet_size(size_to_check, packet_header):
                        if self._closed:
                            return False
                        log(
                            "check_packet_size(%s, 0x%s) limit is %s",
                            size_to_check,
                            repr_ellipsized(packet_header),
                            self.max_packet_size,
                        )
                        if size_to_check > self.max_packet_size:
                            msg = "packet size requested is %s but maximum allowed is %s" % (
                                size_to_check,
                                self.max_packet_size,
                            )
                            self.invalid(msg, packet_header)
                        return False

                    self.timeout_add(1000, check_packet_size, payload_size, read_buffer[:32])

            if bl < payload_size:
                # incomplete packet, wait for the rest to arrive
                break

            # chop this packet from the buffer:
            if len(read_buffer) == payload_size:
                raw_string = read_buffer
                read_buffer = ""
            else:
                raw_string = read_buffer[:payload_size]
                read_buffer = read_buffer[payload_size:]
            # decrypt if needed:
            data = raw_string
            if self.cipher_in and protocol_flags & FLAGS_CIPHER:
                log("received %s encrypted bytes with %s padding", payload_size, len(padding))
                data = self.cipher_in.decrypt(raw_string)
                if padding:
                    def debug_str(s):
                        try:
                            return list(bytearray(s))
                        except:
                            return list(str(s))

                    if not data.endswith(padding):
                        log(
                            "decryption failed: string does not end with '%s': %s (%s) -> %s (%s)",
                            padding,
                            debug_str(raw_string),
                            type(raw_string),
                            debug_str(data),
                            type(data),
                        )
                        self._internal_error("encryption error (wrong key?)")
                        return
                    data = data[: -len(padding)]
            # uncompress if needed:
            if compression_level > 0:
                try:
                    data = decompress(data, compression_level)
                except InvalidCompressionException as e:
                    self.invalid("invalid compression: %s" % e, data)
                    return
                except Exception as e:
                    ctype = compression.get_compression_type(compression_level)
                    log("%s packet decompression failed", ctype, exc_info=True)
                    msg = "%s packet decompression failed" % ctype
                    if self.cipher_in:
                        msg += " (invalid encryption key?)"
                    else:
                        msg += " %s" % e
                    return self.gibberish(msg, data)

            if self.cipher_in and not (protocol_flags & FLAGS_CIPHER):
                self.invalid("unencrypted packet dropped", data)
                return

            if self._closed:
                return

            if packet_index > 0:
                # raw packet, store it and continue:
                raw_packets[packet_index] = data
                payload_size = -1
                packet_index = 0
                if len(raw_packets) >= 4:
                    self.invalid("too many raw packets: %s" % len(raw_packets), data)
                    return
                continue
            # final packet (packet_index==0), decode it:
            try:
                packet = decode(data, protocol_flags)
            except InvalidPacketEncodingException as e:
                self.invalid("invalid packet encoding: %s" % e, data)
                return
            except ValueError as e:
                etype = packet_encoding.get_packet_encoding_type(protocol_flags)
                log.error("failed to parse %s packet: %s", etype, e, exc_info=not self._closed)
                if self._closed:
                    return
                log("failed to parse %s packet: %s", etype, binascii.hexlify(data))
                msg = "packet index=%s, packet size=%s, buffer size=%s, error=%s" % (
                    packet_index,
                    payload_size,
                    bl,
                    e,
                )
                self.gibberish("failed to parse %s packet" % etype, data)
                return

            if self._closed:
                return
            payload_size = -1
            padding = None
            # add any raw packets back into it:
            if raw_packets:
                for index, raw_data in raw_packets.items():
                    # replace placeholder with the raw_data packet data:
                    packet[index] = raw_data
                raw_packets = {}

            packet_type = packet[0]
            if self.receive_aliases and type(packet_type) == int and packet_type in self.receive_aliases:
                packet_type = self.receive_aliases.get(packet_type)
                packet[0] = packet_type
            self.input_stats[packet_type] = self.input_stats.get(packet_type, 0) + 1
            self.input_packetcount += 1
            log("processing packet %s", packet_type)
            self._process_packet_cb(self, packet)
            packet = None
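# An illustrative sketch of the receive_aliases lookup done at the end of the
# loop above: aliases let the peer send a small integer in place of the packet
# type string, and the receiver maps it back before dispatching.
# The mapping below is made up for the example.
receive_aliases = {1: "ping", 2: "ping_echo"}
packet = [2, 1234]
if isinstance(packet[0], int) and packet[0] in receive_aliases:
    packet[0] = receive_aliases[packet[0]]
assert packet[0] == "ping_echo"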