def do_read_parse_thread_loop(self):
    """
    Process the individual network packets placed in _read_queue.

    We concatenate them, then try to parse them.
    We extract the individual packets from the potentially large buffer,
    saving the rest of the buffer for later, and optionally decompress this data
    and re-construct the one python-object-packet from potentially multiple packets
    (see packet_index).
    The 8 bytes packet header gives us information on the packet index,
    packet size and compression.
    The actual processing of the packet is done in the main thread
    via gobject.idle_add
    """
    read_buffer = None          #unparsed bytes carried over between queue reads
    payload_size = -1           #<0 means: we still need to parse a packet header
    padding = None              #cipher padding expected at the end of the payload
    packet_index = 0
    compression_level = False
    raw_packets = {}            #raw chunks indexed by packet_index, waiting for the main packet
    while not self._closed:
        buf = self._read_queue.get()
        if not buf:
            log("read thread: empty marker, exiting")
            gobject.idle_add(self.close)
            return
        if read_buffer:
            read_buffer = read_buffer + buf
        else:
            read_buffer = buf
        bl = len(read_buffer)
        while not self._closed:
            bl = len(read_buffer)
            if bl<=0:
                break
            if payload_size<0:
                #we need to parse a new header, which must start with "P":
                if read_buffer[0] not in ("P", ord("P")):
                    return self._call_connection_lost("invalid packet header byte: ('%s...'), not an xpra client?" % read_buffer[:32])
                if bl<8:
                    break   #packet still too small
                #packet format: struct.pack('cBBBL', ...) - 8 bytes
                try:
                    _, protocol_flags, compression_level, packet_index, data_size = unpack_header(read_buffer[:8])
                except Exception as e:
                    #NOTE: was Exception("...%s" % ..., e): two constructor args,
                    #so the cause was a detached tuple element - fold it into one message
                    raise Exception("failed to parse packet header: %s: %s" % (list(read_buffer[:8]), e))
                read_buffer = read_buffer[8:]
                bl = len(read_buffer)
                if protocol_flags & Protocol.FLAGS_CIPHER:
                    assert self.cipher_in_block_size>0, "received cipher block but we don't have a cipher do decrypt it with"
                    #the sender pads the payload up to a multiple of the cipher block size:
                    padding = (self.cipher_in_block_size - data_size % self.cipher_in_block_size) * " "
                    payload_size = data_size + len(padding)
                else:
                    #no cipher, no padding:
                    padding = None
                    payload_size = data_size
                assert payload_size>0
                if payload_size>self.max_packet_size:
                    #this packet is seemingly too big, but check again from the main UI thread
                    #this gives 'set_max_packet_size' a chance to run from "hello"
                    def check_packet_size(size_to_check, packet_header):
                        log("check_packet_size(%s, %s) limit is %s", size_to_check, packet_header, self.max_packet_size)
                        if size_to_check>self.max_packet_size:
                            return self._call_connection_lost("invalid packet: size requested is %s (maximum allowed is %s - packet header: '%s'), dropping this connection!" % (size_to_check, self.max_packet_size, packet_header))
                    gobject.timeout_add(1000, check_packet_size, payload_size, read_buffer[:32])
            if bl<payload_size:
                # incomplete packet, wait for the rest to arrive
                break
            #chop this packet from the buffer:
            if len(read_buffer)==payload_size:
                raw_string = read_buffer
                read_buffer = ''
            else:
                raw_string = read_buffer[:payload_size]
                read_buffer = read_buffer[payload_size:]
            #decrypt if needed:
            data = raw_string
            if self.cipher_in and protocol_flags & Protocol.FLAGS_CIPHER:
                log("received %s encrypted bytes with %s padding", payload_size, len(padding))
                data = self.cipher_in.decrypt(raw_string)
                if padding:
                    assert data.endswith(padding), "decryption failed: string does not end with '%s': %s (%s) -> %s (%s)" % (padding, list(bytearray(raw_string)), type(raw_string), list(bytearray(data)), type(data))
                    data = data[:-len(padding)]
            #uncompress if needed:
            if compression_level>0:
                if self.chunked_compression:
                    data = decompress(data)
                else:
                    data = self._decompressor.decompress(data)
            if sys.version>='3':
                data = data.decode("latin1")
            if self.cipher_in and not (protocol_flags & Protocol.FLAGS_CIPHER):
                #we have a cipher but this packet came in clear: refuse it
                return self._call_connection_lost("unencrypted packet dropped: %s" % repr_ellipsized(data))
            if self._closed:
                return
            if packet_index>0:
                #raw packet, store it and continue:
                raw_packets[packet_index] = data
                payload_size = -1
                packet_index = 0
                if len(raw_packets)>=4:
                    return self._call_connection_lost("too many raw packets: %s" % len(raw_packets))
                continue
            #final packet (packet_index==0), decode it:
            try:
                if protocol_flags & Protocol.FLAGS_RENCODE:
                    assert has_rencode, "we don't support rencode mode but the other end sent us a rencoded packet!"
                    packet = list(rencode_loads(data))
                else:
                    packet, l = bdecode(data)
                    assert l==len(data)
            except ValueError as e:
                import traceback
                traceback.print_exc()
                log.error("value error reading packet: %s", e)
                if self._closed:
                    return
                #bind the exception to a plain local: on python 3 the
                #"except .. as e" name is cleared when this block exits,
                #but the closure below only runs later via idle_add
                error = e
                def gibberish(buf):
                    # Peek at the data we got, in case we can make sense of it:
                    self._process_packet_cb(self, [Protocol.GIBBERISH, buf])
                    # Then hang up:
                    return self._connection_lost("gibberish received: %s, packet index=%s, packet size=%s, buffer size=%s, error=%s" % (repr_ellipsized(data), packet_index, payload_size, bl, error))
                gobject.idle_add(gibberish, data)
                return
            if self._closed:
                return
            payload_size = -1
            padding = None
            #add any raw packets back into it:
            if raw_packets:
                for index,raw_data in raw_packets.items():
                    #replace placeholder with the raw_data packet data:
                    packet[index] = raw_data
                raw_packets = {}
            try:
                self._process_packet_cb(self, packet)
                self.input_packetcount += 1
            except KeyboardInterrupt:
                raise
            except:
                log.warn("Unhandled error while processing a '%s' packet from peer", packet[0], exc_info=True)
def do_read_parse_thread_loop(self):
    """
    Process the individual network packets placed in _read_queue.

    We concatenate them, then try to parse them.
    We extract the individual packets from the potentially large buffer,
    saving the rest of the buffer for later, and optionally decompress this data
    and re-construct the one python-object-packet from potentially multiple packets
    (see packet_index).
    The 8 bytes packet header gives us information on the packet index,
    packet size and compression.
    The actual processing of the packet is done in the main thread
    via gobject.idle_add
    """
    read_buffer = None          #bytes left over from previous iterations
    payload_size = -1           #negative: the next bytes are a packet header
    padding = None              #expected cipher padding for the current payload
    packet_index = 0
    compression_level = False
    raw_packets = {}            #out-of-band chunks keyed by packet_index
    while not self._closed:
        buf = self._read_queue.get()
        if not buf:
            log("read thread: empty marker, exiting")
            gobject.idle_add(self.close)
            return
        if read_buffer:
            read_buffer = read_buffer + buf
        else:
            read_buffer = buf
        bl = len(read_buffer)
        while not self._closed:
            bl = len(read_buffer)
            if bl <= 0:
                break
            if payload_size < 0:
                #expecting a header next, and headers start with "P":
                if read_buffer[0] not in ("P", ord("P")):
                    return self._call_connection_lost(
                        "invalid packet header byte: ('%s...'), not an xpra client?" % read_buffer[:32])
                if bl < 8:
                    break  #packet still too small
                #packet format: struct.pack('cBBBL', ...) - 8 bytes
                try:
                    _, protocol_flags, compression_level, packet_index, data_size = unpack_header(
                        read_buffer[:8])
                except Exception as e:
                    #fixed: Exception() was given two arguments, leaving the
                    #"%s" message and the cause disjoint - format one message
                    raise Exception(
                        "failed to parse packet header: %s: %s" % (list(read_buffer[:8]), e))
                read_buffer = read_buffer[8:]
                bl = len(read_buffer)
                if protocol_flags & Protocol.FLAGS_CIPHER:
                    assert self.cipher_in_block_size > 0, "received cipher block but we don't have a cipher do decrypt it with"
                    #payload is padded to a multiple of the cipher block size:
                    padding = (self.cipher_in_block_size -
                               data_size % self.cipher_in_block_size) * " "
                    payload_size = data_size + len(padding)
                else:
                    #no cipher, no padding:
                    padding = None
                    payload_size = data_size
                assert payload_size > 0
                if payload_size > self.max_packet_size:
                    #this packet is seemingly too big, but check again from the main UI thread
                    #this gives 'set_max_packet_size' a chance to run from "hello"
                    def check_packet_size(size_to_check, packet_header):
                        log("check_packet_size(%s, %s) limit is %s", size_to_check,
                            packet_header, self.max_packet_size)
                        if size_to_check > self.max_packet_size:
                            return self._call_connection_lost(
                                "invalid packet: size requested is %s (maximum allowed is %s - packet header: '%s'), dropping this connection!"
                                % (size_to_check, self.max_packet_size, packet_header))
                    gobject.timeout_add(1000, check_packet_size, payload_size,
                                        read_buffer[:32])
            if bl < payload_size:
                # incomplete packet, wait for the rest to arrive
                break
            #chop this packet from the buffer:
            if len(read_buffer) == payload_size:
                raw_string = read_buffer
                read_buffer = ''
            else:
                raw_string = read_buffer[:payload_size]
                read_buffer = read_buffer[payload_size:]
            #decrypt if needed:
            data = raw_string
            if self.cipher_in and protocol_flags & Protocol.FLAGS_CIPHER:
                log("received %s encrypted bytes with %s padding", payload_size,
                    len(padding))
                data = self.cipher_in.decrypt(raw_string)
                if padding:
                    assert data.endswith(
                        padding
                    ), "decryption failed: string does not end with '%s': %s (%s) -> %s (%s)" % (
                        padding, list(bytearray(raw_string)), type(raw_string), list(
                            bytearray(data)), type(data))
                    data = data[:-len(padding)]
            #uncompress if needed:
            if compression_level > 0:
                if self.chunked_compression:
                    data = decompress(data)
                else:
                    data = self._decompressor.decompress(data)
            if sys.version >= '3':
                data = data.decode("latin1")
            if self.cipher_in and not (protocol_flags & Protocol.FLAGS_CIPHER):
                #a cipher is configured but this packet arrived unencrypted:
                return self._call_connection_lost(
                    "unencrypted packet dropped: %s" % repr_ellipsized(data))
            if self._closed:
                return
            if packet_index > 0:
                #raw packet, store it and continue:
                raw_packets[packet_index] = data
                payload_size = -1
                packet_index = 0
                if len(raw_packets) >= 4:
                    return self._call_connection_lost(
                        "too many raw packets: %s" % len(raw_packets))
                continue
            #final packet (packet_index==0), decode it:
            try:
                if protocol_flags & Protocol.FLAGS_RENCODE:
                    assert has_rencode, "we don't support rencode mode but the other end sent us a rencoded packet!"
                    packet = list(rencode_loads(data))
                else:
                    packet, l = bdecode(data)
                    assert l == len(data)
            except ValueError as e:
                import traceback
                traceback.print_exc()
                log.error("value error reading packet: %s", e)
                if self._closed:
                    return
                #keep a reference: python 3 clears the "as e" name when the
                #except block exits, but gibberish() runs later via idle_add
                error = e
                def gibberish(buf):
                    # Peek at the data we got, in case we can make sense of it:
                    self._process_packet_cb(self, [Protocol.GIBBERISH, buf])
                    # Then hang up:
                    return self._connection_lost(
                        "gibberish received: %s, packet index=%s, packet size=%s, buffer size=%s, error=%s"
                        % (repr_ellipsized(data), packet_index, payload_size, bl, error))
                gobject.idle_add(gibberish, data)
                return
            if self._closed:
                return
            payload_size = -1
            padding = None
            #add any raw packets back into it:
            if raw_packets:
                for index, raw_data in raw_packets.items():
                    #replace placeholder with the raw_data packet data:
                    packet[index] = raw_data
                raw_packets = {}
            try:
                self._process_packet_cb(self, packet)
                self.input_packetcount += 1
            except KeyboardInterrupt:
                raise
            except:
                log.warn(
                    "Unhandled error while processing a '%s' packet from peer",
                    packet[0],
                    exc_info=True)
def do_read_parse_thread_loop(self):
    """
    Process the individual network packets placed in _read_queue.

    We concatenate them, then try to parse them.
    We extract the individual packets from the potentially large buffer,
    saving the rest of the buffer for later, and optionally decompress this data
    and re-construct the one python-object-packet from potentially multiple packets
    (see packet_index).
    The 8 bytes packet header gives us information on the packet index,
    packet size and compression.
    The actual processing of the packet is done in the main thread
    via gobject.idle_add
    """
    read_buffer = None          #bytes accumulated but not yet parsed
    current_packet_size = -1    #<0 means: a header must be parsed next
    packet_index = 0
    compression_level = False
    raw_packets = {}            #raw chunks keyed by packet_index
    while not self._closed:
        buf = self._read_queue.get()
        if not buf:
            log("read thread: empty marker, exiting")
            gobject.idle_add(self.close)
            return
        if read_buffer:
            read_buffer = read_buffer + buf
        else:
            read_buffer = buf
        bl = len(read_buffer)
        while not self._closed:
            bl = len(read_buffer)
            if bl<=0:
                break
            if current_packet_size<0:
                #need a header: both old and new formats start with "P"
                if read_buffer[0] not in ["P", ord("P")]:
                    return self._call_connection_lost("invalid packet header: ('%s...'), not an xpra client?" % read_buffer[:32])
                if bl<8:
                    break   #packet still too small
                if read_buffer[1] in ["S", ord("S")]:
                    #old packet format: "PS%02d%012d" - 16 bytes
                    #can be dropped when we drop compatibility with clients older than 0.5
                    #0.3 and 0.4 still send the initial "hello" packet using this old format..
                    if bl<16:
                        break
                    current_packet_size = int(read_buffer[2:16])
                    packet_index = 0
                    compression_level = 0
                    protocol_version = 0    #only bencode supported with old protocol
                    read_buffer = read_buffer[16:]
                else:
                    #packet format: struct.pack('cBBBL', ...) - 8 bytes
                    try:
                        (_, protocol_version, compression_level, packet_index, current_packet_size) = struct.unpack_from('!cBBBL', read_buffer)
                    except Exception as e:
                        #fixed: the "%s" was never substituted (the cause was
                        #passed as a second Exception argument)
                        raise Exception("invalid packet format: %s" % e)
                    read_buffer = read_buffer[8:]
                bl = len(read_buffer)
                if current_packet_size>self.max_packet_size:
                    #this packet is seemingly too big, but check again from the main UI thread
                    #this gives 'set_max_packet_size' a chance to run
                    def check_packet_size(size_to_check, packet_header):
                        log("check_packet_size(%s, %s) limit is %s", size_to_check, packet_header, self.max_packet_size)
                        if size_to_check>self.max_packet_size:
                            return self._call_connection_lost("invalid packet: size requested is %s (maximum allowed is %s - packet header: '%s'), dropping this connection!" % (size_to_check, self.max_packet_size, packet_header))
                    gobject.timeout_add(1000, check_packet_size, current_packet_size, read_buffer[:32])
            if current_packet_size>0 and bl<current_packet_size:
                # incomplete packet, wait for the rest to arrive
                break
            #chop this packet from the buffer:
            if len(read_buffer)==current_packet_size:
                raw_string = read_buffer
                read_buffer = ''
            else:
                raw_string = read_buffer[:current_packet_size]
                read_buffer = read_buffer[current_packet_size:]
            if compression_level>0:
                if self.chunked_compression:
                    raw_string = zlib.decompress(raw_string)
                else:
                    raw_string = self._decompressor.decompress(raw_string)
            if sys.version>='3':
                raw_string = raw_string.decode("latin1")
            if self._closed:
                return
            if packet_index>0:
                #raw packet, store it and continue:
                raw_packets[packet_index] = raw_string
                current_packet_size = -1
                packet_index = 0
                continue
            result = None
            try:
                #final packet (packet_index==0), decode it:
                if protocol_version==0:
                    result = bdecode(raw_string)
                    if result is None:
                        break
                    packet, l = result
                    assert l==len(raw_string)
                elif protocol_version==1:
                    assert has_rencode, "we don't support rencode mode but the other end sent us a rencoded packet!"
                    packet = list(rencode_loads(raw_string))
                else:
                    raise Exception("unsupported protocol version: %s" % protocol_version)
            except ValueError as e:
                import traceback
                traceback.print_exc()
                log.error("value error reading packet: %s", e)
                if self._closed:
                    return
                #keep a reference: python 3 deletes the "as e" binding when the
                #except block exits, but the closure below runs via idle_add
                error = e
                def gibberish(buf):
                    # Peek at the data we got, in case we can make sense of it:
                    self._process_packet([Protocol.GIBBERISH, buf])
                    # Then hang up:
                    return self._connection_lost("gibberish received: %s, packet index=%s, packet size=%s, buffer size=%s, error=%s" % (repr_ellipsized(raw_string), packet_index, current_packet_size, bl, error))
                gobject.idle_add(gibberish, raw_string)
                return
            if self._closed:
                return
            current_packet_size = -1
            #add any raw packets back into it:
            if raw_packets:
                for index,raw_data in raw_packets.items():
                    #replace placeholder with the raw_data packet data:
                    packet[index] = raw_data
                raw_packets = {}
            gobject.idle_add(self._process_packet, packet)
def do_read_parse_thread_loop(self):
    """
    Process the individual network packets placed in _read_queue.

    We concatenate them, then try to parse them.
    We extract the individual packets from the potentially large buffer,
    saving the rest of the buffer for later, and optionally decompress this data
    and re-construct the one python-object-packet from potentially multiple packets
    (see packet_index).
    The 8 bytes packet header gives us information on the packet index,
    packet size and compression.
    The actual processing of the packet is done in the main thread
    via gobject.idle_add
    """
    read_buffer = None          #unconsumed bytes from previous reads
    current_packet_size = -1    #negative until a header has been parsed
    packet_index = 0
    compression_level = False
    raw_packets = {}            #raw chunks keyed by packet_index
    while not self._closed:
        buf = self._read_queue.get()
        if not buf:
            log("read thread: empty marker, exiting")
            gobject.idle_add(self.close)
            return
        if read_buffer:
            read_buffer = read_buffer + buf
        else:
            read_buffer = buf
        bl = len(read_buffer)
        while not self._closed:
            bl = len(read_buffer)
            if bl <= 0:
                break
            if current_packet_size < 0:
                #expecting a header, which must start with "P":
                if read_buffer[0] not in ["P", ord("P")]:
                    return self._call_connection_lost(
                        "invalid packet header: ('%s...'), not an xpra client?" % read_buffer[:32])
                if bl < 8:
                    break  #packet still too small
                #packet format: struct.pack('cBBBL', ...) - 8 bytes
                try:
                    (_, protocol_version, compression_level, packet_index,
                     current_packet_size) = struct.unpack_from('!cBBBL', read_buffer)
                except Exception as e:
                    #fixed: Exception() was given two arguments so the cause
                    #was never part of the formatted message
                    raise Exception(
                        "invalid packet header: %s: %s" % (list(read_buffer[:8]), e))
                read_buffer = read_buffer[8:]
                bl = len(read_buffer)
                if current_packet_size > self.max_packet_size:
                    #this packet is seemingly too big, but check again from the main UI thread
                    #this gives 'set_max_packet_size' a chance to run
                    def check_packet_size(size_to_check, packet_header):
                        log("check_packet_size(%s, %s) limit is %s", size_to_check,
                            packet_header, self.max_packet_size)
                        if size_to_check > self.max_packet_size:
                            return self._call_connection_lost(
                                "invalid packet: size requested is %s (maximum allowed is %s - packet header: '%s'), dropping this connection!"
                                % (size_to_check, self.max_packet_size, packet_header))
                    gobject.timeout_add(1000, check_packet_size,
                                        current_packet_size, read_buffer[:32])
            if current_packet_size > 0 and bl < current_packet_size:
                # incomplete packet, wait for the rest to arrive
                break
            #chop this packet from the buffer:
            if len(read_buffer) == current_packet_size:
                raw_string = read_buffer
                read_buffer = ''
            else:
                raw_string = read_buffer[:current_packet_size]
                read_buffer = read_buffer[current_packet_size:]
            if compression_level > 0:
                if self.chunked_compression:
                    raw_string = zlib.decompress(raw_string)
                else:
                    raw_string = self._decompressor.decompress(raw_string)
            if sys.version >= '3':
                raw_string = raw_string.decode("latin1")
            if self._closed:
                return
            if packet_index > 0:
                #raw packet, store it and continue:
                raw_packets[packet_index] = raw_string
                current_packet_size = -1
                packet_index = 0
                continue
            result = None
            try:
                #final packet (packet_index==0), decode it:
                if protocol_version == 0:
                    result = bdecode(raw_string)
                    if result is None:
                        break
                    packet, l = result
                    assert l == len(raw_string)
                elif protocol_version == 1:
                    assert has_rencode, "we don't support rencode mode but the other end sent us a rencoded packet!"
                    packet = list(rencode_loads(raw_string))
                else:
                    raise Exception("unsupported protocol version: %s" % protocol_version)
            except ValueError as e:
                import traceback
                traceback.print_exc()
                log.error("value error reading packet: %s", e)
                if self._closed:
                    return
                #keep a reference: python 3 clears the "as e" name when this
                #except block exits, but gibberish() runs later via idle_add
                error = e
                def gibberish(buf):
                    # Peek at the data we got, in case we can make sense of it:
                    self._process_packet_cb(self, [Protocol.GIBBERISH, buf])
                    # Then hang up:
                    return self._connection_lost(
                        "gibberish received: %s, packet index=%s, packet size=%s, buffer size=%s, error=%s"
                        % (repr_ellipsized(raw_string), packet_index,
                           current_packet_size, bl, error))
                gobject.idle_add(gibberish, raw_string)
                return
            if self._closed:
                return
            current_packet_size = -1
            #add any raw packets back into it:
            if raw_packets:
                for index, raw_data in raw_packets.items():
                    #replace placeholder with the raw_data packet data:
                    packet[index] = raw_data
                raw_packets = {}
            try:
                self._process_packet_cb(self, packet)
                self.input_packetcount += 1
            except KeyboardInterrupt:
                raise
            except:
                log.warn(
                    "Unhandled error while processing packet from peer",
                    exc_info=True)
            #yield the GIL so other threads can run
            #NOTE(review): placement reconstructed from collapsed source -
            #assumed to run once per packet at the end of the inner loop
            NOYIELD or time.sleep(0)