def te(s, exc):
    """Verify that bdecode(s) raises the exception type `exc`."""
    print(s)
    raised = False
    try:
        bdecode(s)
    except exc:
        raised = True
    assert raised, "didn't raise exception"
def t(s, value, remainder):
    """Check that bdecode(s) yields `value` and leaves `remainder` unconsumed.

    The decode is repeated with trailing gibberish appended to verify that
    the decoder stops at the end of the valid data.
    """
    print(s)
    # Test "one-shot":
    rv, rr = bdecode(s)
    # BUGFIX: expected/actual were swapped in the failure message (was (rv, value)):
    assert rv == value, "expected value %s but got %s" % (value, rv)
    rrstr = s[rr:]
    assert rrstr == remainder, "expected remainder value %s but got %s" % (remainder, rrstr)
    # With gibberish added:
    g_str = s + "asdf"
    rv, rr = bdecode(g_str)
    # BUGFIX: same swapped-argument message here:
    assert rv == value, "expected value %s but got %s" % (value, rv)
    rrstr = g_str[rr:]
    assert rrstr.endswith("asdf")
def t(v, encstr=None):
    """Round-trip v (a [item, dict] pair) through bencode/bdecode.

    If `encstr` is given, also assert the exact encoded form.
    Prints diagnostics on mismatches and returns the decoded top-level list.
    """
    be = bencode(v)
    print("bencode(%s)=%s" % (v, be))
    if encstr:
        assert be == encstr
    restored = bdecode(be)
    print("decode(%s)=%s" % (be, restored))
    # FIX: do not shadow the builtin `list` (was `list = restored[0]`):
    decoded = restored[0]
    if len(decoded) != len(v):
        print("MISMATCH!")
        print("v=%s" % v)
        print("l=%s" % decoded)
    assert len(decoded) == 2
    assert decoded[0] == v[0]
    # FIX: hoisted out of the loop - it is loop-invariant:
    d = decoded[1]
    for ok, ov in v[1].items():
        if ok not in d:
            print("restored dict is missing %s" % ok)
            return decoded
        rv = d.get(ok)
        if rv != ov:
            print("value for %s does not match: %s vs %s" % (ok, ov, rv))
            return decoded
    return decoded
def test_large_dict():
    """Round-trip a large real-world dict (the X11 keycode mappings)
    through bencode/bdecode, printing both forms for inspection."""
    import gtk.gdk
    from wimpiggy.lowlevel import get_keycode_mappings #@UnresolvedImport
    mappings = get_keycode_mappings(gtk.gdk.get_default_root_window())
    b = bencode(mappings)
    print("bencode(%s)=%s" % (mappings, b))
    d = bdecode(b)
    print("bdecode(%s)=%s" % (b, d))
def _read_parse_thread_loop(self):
    """Read-thread main loop: drain self._read_queue, optionally inflate the
    stream, then bdecode complete packets out of the accumulated buffer and
    hand each one to the main thread via gobject.idle_add.

    An empty item in the read queue is the shutdown marker.
    "set_deflate" packets are handled here (not in the main thread) so the
    decompressor state changes synchronously with the stream position.
    """
    try:
        read_buffer = None
        current_packet_size = -1        #-1 means: no size header parsed yet
        while not self._closed:
            buf = self._read_queue.get()
            if not buf:
                return self._call_connection_lost("empty marker in read queue")
            if self._decompressor is not None:
                #stream-level deflate is enabled: inflate as data arrives
                buf = self._decompressor.decompress(buf)
            if read_buffer:
                read_buffer = read_buffer + buf
            else:
                read_buffer = buf
            bl = len(read_buffer)
            if self.max_packet_size > 0 and bl > self.max_packet_size:
                return self._call_connection_lost("read buffer too big: %s (maximum is %s), dropping this connection!" % (bl, self.max_packet_size))
            #extract as many complete packets as the buffer contains:
            while not self._closed:
                bl = len(read_buffer)
                if bl <= 0:
                    break
                try:
                    if current_packet_size < 0 and bl > 0 and read_buffer[0] in ["P", ord("P")]:
                        #spotted packet size header
                        if bl < 16:
                            break       #incomplete
                        current_packet_size = int(read_buffer[2:16])
                        read_buffer = read_buffer[16:]
                        bl = len(read_buffer)
                    if current_packet_size > 0 and bl < current_packet_size:
                        log.debug("incomplete packet: only %s of %s bytes received", bl, current_packet_size)
                        break
                    if sys.version >= '3':
                        #bdecode expects str on python3:
                        result = bdecode(read_buffer.decode("latin1"))
                    else:
                        result = bdecode(read_buffer)
                except ValueError, e:
                    import traceback
                    traceback.print_exc()
                    log.error("value error reading packet: %s", e)
                    #could be a partial packet (without size header)
                    #or could be just a broken packet...
                    def packet_error(buf, packet_size, data_size, value_error):
                        if self._closed:
                            return
                        # Peek at the data we got, in case we can make sense of it:
                        self._process_packet([Protocol.GIBBERISH, buf])
                        # Then hang up:
                        return self._connection_lost("gibberish received: %s, packet size=%s, buffer size=%s, error=%s" % (repr_ellipsized(buf), packet_size, data_size, value_error))
                    if current_packet_size > 0:
                        #we had the size, so the packet should have been valid!
                        packet_error(read_buffer, current_packet_size, bl, e)
                        return
                    else:
                        #wait a little before deciding
                        #unsized packets are either old clients (don't really care about them)
                        #or hello packets (small-ish)
                        def check_error_state(old_buffer, packet_size, data_size, value_error):
                            log.info("check_error_state old_buffer=%s, read buffer=%s", old_buffer, read_buffer)
                            if old_buffer == read_buffer:
                                #nothing new arrived in the meantime: give up
                                packet_error(read_buffer, packet_size, data_size, value_error)
                        log.info("error parsing packet without a size header: %s, current_packet_size=%s, will check again in 1 second", e, current_packet_size)
                        gobject.timeout_add(1000, check_error_state, read_buffer, current_packet_size, bl, e)
                    break
                current_packet_size = -1
                if result is None or self._closed:
                    break
                packet, l = result
                gobject.idle_add(self._process_packet, packet)
                unprocessed = read_buffer[l:]
                if packet[0] == "set_deflate":
                    #trap compression changes here so they apply synchronously:
                    had_deflate = (self._decompressor is not None)
                    level = packet[1]
                    log("set_deflate packet, changing decompressor to level=%s", level)
                    if level == 0:
                        self._decompressor = None
                    else:
                        self._decompressor = zlib.decompressobj()
                    if not had_deflate and (self._decompressor is not None):
                        # deflate was just enabled: so decompress the unprocessed
                        # data
                        unprocessed = self._decompressor.decompress(unprocessed)
                read_buffer = unprocessed
    finally:
        log("read parse thread: ended")
def _read_parse_thread_loop(self):
    """Read-thread main loop (variant keeping the buffer on self._read_buffer):
    drain self._read_queue, optionally inflate, then bdecode complete packets
    and dispatch each to the main thread via gobject.idle_add.

    An empty item in the read queue is the shutdown marker.
    """
    try:
        current_packet_size = -1        #-1 means: no size header parsed yet
        while not self._closed:
            buf = self._read_queue.get()
            if not buf:
                return self._call_connection_lost("empty marker in read queue")
            if self._decompressor is not None:
                #stream-level deflate is enabled: inflate as data arrives
                buf = self._decompressor.decompress(buf)
            if self._read_buffer:
                self._read_buffer = self._read_buffer + buf
            else:
                self._read_buffer = buf
            while not self._closed and len(self._read_buffer) > 0:
                had_deflate = (self._decompressor is not None)
                try:
                    if current_packet_size < 0 and len(self._read_buffer) > 0 and self._read_buffer[0] == "P":
                        #spotted packet size header
                        if len(self._read_buffer) < 16:
                            break       #incomplete
                        current_packet_size = int(self._read_buffer[2:16])
                        self._read_buffer = self._read_buffer[16:]
                    if current_packet_size > 0 and len(self._read_buffer) < current_packet_size:
                        log.debug("incomplete packet: only %s of %s bytes received", len(self._read_buffer), current_packet_size)
                        break
                    result = bdecode(self._read_buffer)
                except ValueError:
                    #could be a partial packet (without size header)
                    #or could be just a broken packet...
                    def packet_error(buf):
                        if self._closed:
                            return
                        # Peek at the data we got, in case we can make sense of it:
                        self._process_packet([Protocol.GIBBERISH, buf])
                        # Then hang up:
                        return self._connection_lost("gibberish received: %s" % repr_ellipsized(buf))
                    if current_packet_size > 0:
                        #we had the size, so the packet should have been valid!
                        packet_error(self._read_buffer)
                        return
                    else:
                        #wait a little before deciding
                        #unsized packets are either old clients (don't really care about them)
                        #or hello packets (small-ish)
                        def check_error_state(old_buffer):
                            if old_buffer == self._read_buffer:
                                #nothing new arrived in the meantime: give up
                                packet_error(self._read_buffer)
                        #NOTE(review): timeout_add_seconds(1000, ...) waits 1000 *seconds*;
                        #the intent above ("wait a little") suggests 1 second was meant - confirm.
                        gobject.timeout_add_seconds(1000, check_error_state, self._read_buffer)
                    break
                current_packet_size = -1
                if result is None or self._closed:
                    break
                packet, l = result
                gobject.idle_add(self._process_packet, packet)
                unprocessed = self._read_buffer[l:]
                if packet[0] == "set_deflate":
                    #trap compression changes here so they apply synchronously:
                    level = packet[1]
                    log("set_deflate packet, changing decompressor to level=%s", level)
                    if level == 0:
                        self._decompressor = None
                    else:
                        self._decompressor = zlib.decompressobj()
                    if not had_deflate and (self._decompressor is not None):
                        # deflate was just enabled: so decompress the unprocessed
                        # data
                        unprocessed = self._decompressor.decompress(unprocessed)
                self._read_buffer = unprocessed
    finally:
        log("read parse thread: ended")
def do_read_parse_thread_loop(self):
    """
    Process the individual network packets placed in _read_queue.
    We concatenate them, then try to parse them.
    We extract the individual packets from the potentially large buffer,
    saving the rest of the buffer for later, and optionally decompress this
    data and re-construct the one python-object-packet from potentially
    multiple packets (see packet_index).
    The 8 bytes packet header gives us information on the packet index,
    packet size and compression.
    The actual processing of the packet is done in the main thread
    via gobject.idle_add
    """
    read_buffer = None
    current_packet_size = -1        #-1 means: waiting for a packet header
    packet_index = 0
    compression_level = False
    raw_packets = {}                #raw chunks received so far, keyed by packet_index
    while not self._closed:
        buf = self._read_queue.get()
        if not buf:
            log("read thread: empty marker, exiting")
            gobject.idle_add(self.close)
            return
        if read_buffer:
            read_buffer = read_buffer + buf
        else:
            read_buffer = buf
        bl = len(read_buffer)
        #extract as many complete packets as the buffer contains:
        while not self._closed:
            bl = len(read_buffer)
            if bl <= 0:
                break
            if current_packet_size < 0:
                if read_buffer[0] not in ["P", ord("P")]:
                    return self._call_connection_lost("invalid packet header: ('%s...'), not an xpra client?" % read_buffer[:32])
                if bl < 8:
                    break       #packet still too small
                if read_buffer[1] in ["S", ord("S")]:
                    #old packet format: "PS%02d%012d" - 16 bytes
                    #can be dropped when we drop compatibility with clients older than 0.5
                    #0.3 and 0.4 still send the initial "hello" packet using this old format..
                    if bl < 16:
                        break
                    current_packet_size = int(read_buffer[2:16])
                    packet_index = 0
                    compression_level = 0
                    protocol_version = 0        #only bencode supported with old protocol
                    read_buffer = read_buffer[16:]
                else:
                    #packet format: struct.pack('cBBBL', ...) - 8 bytes
                    try:
                        (_, protocol_version, compression_level, packet_index, current_packet_size) = struct.unpack_from('!cBBBL', read_buffer)
                    except Exception, e:
                        #NOTE(review): the "%s" is never applied here (tuple message,
                        #not %-formatted) - probably meant "... %s" % e. Confirm.
                        raise Exception("invalid packet format: %s", e)
                    read_buffer = read_buffer[8:]
                bl = len(read_buffer)
                if current_packet_size > self.max_packet_size:
                    #this packet is seemingly too big, but check again from the main UI thread
                    #this gives 'set_max_packet_size' a chance to run
                    def check_packet_size(size_to_check, packet_header):
                        log("check_packet_size(%s, %s) limit is %s", size_to_check, packet_header, self.max_packet_size)
                        if size_to_check > self.max_packet_size:
                            return self._call_connection_lost("invalid packet: size requested is %s (maximum allowed is %s - packet header: '%s'), dropping this connection!" % (size_to_check, self.max_packet_size, packet_header))
                    gobject.timeout_add(1000, check_packet_size, current_packet_size, read_buffer[:32])
            if current_packet_size > 0 and bl < current_packet_size:
                # incomplete packet, wait for the rest to arrive
                break
            #chop this packet from the buffer:
            if len(read_buffer) == current_packet_size:
                raw_string = read_buffer
                read_buffer = ''
            else:
                raw_string = read_buffer[:current_packet_size]
                read_buffer = read_buffer[current_packet_size:]
            if compression_level > 0:
                if self.chunked_compression:
                    raw_string = zlib.decompress(raw_string)
                else:
                    raw_string = self._decompressor.decompress(raw_string)
            if sys.version >= '3':
                raw_string = raw_string.decode("latin1")
            if self._closed:
                return
            if packet_index > 0:
                #raw packet, store it and continue:
                raw_packets[packet_index] = raw_string
                current_packet_size = -1
                packet_index = 0
                continue
            result = None
            try:
                #final packet (packet_index==0), decode it:
                if protocol_version == 0:
                    result = bdecode(raw_string)
                    if result is None:
                        break
                    packet, l = result
                    assert l == len(raw_string)
                elif protocol_version == 1:
                    assert has_rencode, "we don't support rencode mode but the other end sent us a rencoded packet!"
                    packet = list(rencode_loads(raw_string))
                else:
                    raise Exception("unsupported protocol version: %s" % protocol_version)
            except ValueError, e:
                import traceback
                traceback.print_exc()
                log.error("value error reading packet: %s", e)
                if self._closed:
                    return
                def gibberish(buf):
                    # Peek at the data we got, in case we can make sense of it:
                    self._process_packet([Protocol.GIBBERISH, buf])
                    # Then hang up:
                    return self._connection_lost("gibberish received: %s, packet index=%s, packet size=%s, buffer size=%s, error=%s" % (repr_ellipsized(raw_string), packet_index, current_packet_size, bl, e))
                gobject.idle_add(gibberish, raw_string)
                return
            if self._closed:
                return
            current_packet_size = -1
            #add any raw packets back into it:
            if raw_packets:
                for index, raw_data in raw_packets.items():
                    #replace placeholder with the raw_data packet data:
                    packet[index] = raw_data
                raw_packets = {}
            gobject.idle_add(self._process_packet, packet)
def do_read_parse_thread_loop(self):
    """
    Process the individual network packets placed in _read_queue.
    We concatenate them, then try to parse them.
    We extract the individual packets from the potentially large buffer,
    saving the rest of the buffer for later, and optionally decompress this
    data and re-construct the one python-object-packet from potentially
    multiple packets (see packet_index).
    The 8 bytes packet header gives us information on the packet index,
    packet size and compression.
    The actual processing of the packet is done in the main thread
    via gobject.idle_add
    """
    read_buffer = None
    payload_size = -1       #-1: waiting for a header; includes cipher padding when set
    padding = None
    packet_index = 0
    compression_level = False
    raw_packets = {}        #raw chunks received so far, keyed by packet_index
    while not self._closed:
        buf = self._read_queue.get()
        if not buf:
            log("read thread: empty marker, exiting")
            gobject.idle_add(self.close)
            return
        if read_buffer:
            read_buffer = read_buffer + buf
        else:
            read_buffer = buf
        bl = len(read_buffer)
        #extract as many complete packets as the buffer contains:
        while not self._closed:
            bl = len(read_buffer)
            if bl <= 0:
                break
            if payload_size < 0:
                if read_buffer[0] not in ("P", ord("P")):
                    return self._call_connection_lost("invalid packet header byte: ('%s...'), not an xpra client?" % read_buffer[:32])
                if bl < 8:
                    break       #packet still too small
                #packet format: struct.pack('cBBBL', ...) - 8 bytes
                try:
                    _, protocol_flags, compression_level, packet_index, data_size = unpack_header(read_buffer[:8])
                except Exception, e:
                    raise Exception("failed to parse packet header: %s" % list(read_buffer[:8]), e)
                read_buffer = read_buffer[8:]
                bl = len(read_buffer)
                if protocol_flags & Protocol.FLAGS_CIPHER:
                    assert self.cipher_in_block_size > 0, "received cipher block but we don't have a cipher do decrypt it with"
                    #the sender pads the payload up to the cipher block size:
                    padding = (self.cipher_in_block_size - data_size % self.cipher_in_block_size) * " "
                    payload_size = data_size + len(padding)
                else:
                    #no cipher, no padding:
                    padding = None
                    payload_size = data_size
                assert payload_size > 0
                if payload_size > self.max_packet_size:
                    #this packet is seemingly too big, but check again from the main UI thread
                    #this gives 'set_max_packet_size' a chance to run from "hello"
                    def check_packet_size(size_to_check, packet_header):
                        log("check_packet_size(%s, %s) limit is %s", size_to_check, packet_header, self.max_packet_size)
                        if size_to_check > self.max_packet_size:
                            return self._call_connection_lost("invalid packet: size requested is %s (maximum allowed is %s - packet header: '%s'), dropping this connection!" % (size_to_check, self.max_packet_size, packet_header))
                    gobject.timeout_add(1000, check_packet_size, payload_size, read_buffer[:32])
            if bl < payload_size:
                # incomplete packet, wait for the rest to arrive
                break
            #chop this packet from the buffer:
            if len(read_buffer) == payload_size:
                raw_string = read_buffer
                read_buffer = ''
            else:
                raw_string = read_buffer[:payload_size]
                read_buffer = read_buffer[payload_size:]
            #decrypt if needed:
            data = raw_string
            if self.cipher_in and protocol_flags & Protocol.FLAGS_CIPHER:
                log("received %s encrypted bytes with %s padding", payload_size, len(padding))
                data = self.cipher_in.decrypt(raw_string)
                if padding:
                    assert data.endswith(padding), "decryption failed: string does not end with '%s': %s (%s) -> %s (%s)" % (padding, list(bytearray(raw_string)), type(raw_string), list(bytearray(data)), type(data))
                    data = data[:-len(padding)]
            #uncompress if needed:
            if compression_level > 0:
                if self.chunked_compression:
                    data = decompress(data)
                else:
                    data = self._decompressor.decompress(data)
            if sys.version >= '3':
                data = data.decode("latin1")
            if self.cipher_in and not (protocol_flags & Protocol.FLAGS_CIPHER):
                #a cipher is configured: refuse plaintext packets
                return self._call_connection_lost("unencrypted packet dropped: %s" % repr_ellipsized(data))
            if self._closed:
                return
            if packet_index > 0:
                #raw packet, store it and continue:
                raw_packets[packet_index] = data
                payload_size = -1
                packet_index = 0
                if len(raw_packets) >= 4:
                    return self._call_connection_lost("too many raw packets: %s" % len(raw_packets))
                continue
            #final packet (packet_index==0), decode it:
            try:
                if protocol_flags & Protocol.FLAGS_RENCODE:
                    assert has_rencode, "we don't support rencode mode but the other end sent us a rencoded packet!"
                    packet = list(rencode_loads(data))
                else:
                    packet, l = bdecode(data)
                    assert l == len(data)
            except ValueError, e:
                import traceback
                traceback.print_exc()
                log.error("value error reading packet: %s", e)
                if self._closed:
                    return
                def gibberish(buf):
                    # Peek at the data we got, in case we can make sense of it:
                    self._process_packet_cb(self, [Protocol.GIBBERISH, buf])
                    # Then hang up:
                    return self._connection_lost("gibberish received: %s, packet index=%s, packet size=%s, buffer size=%s, error=%s" % (repr_ellipsized(data), packet_index, payload_size, bl, e))
                gobject.idle_add(gibberish, data)
                return
            if self._closed:
                return
            payload_size = -1
            padding = None
            #add any raw packets back into it:
            if raw_packets:
                for index, raw_data in raw_packets.items():
                    #replace placeholder with the raw_data packet data:
                    packet[index] = raw_data
                raw_packets = {}
            try:
                self._process_packet_cb(self, packet)
                self.input_packetcount += 1
            except KeyboardInterrupt:
                raise
            except:
                log.warn("Unhandled error while processing a '%s' packet from peer", packet[0], exc_info=True)
def _read_parse_thread_loop(self):
    """Read-thread main loop: drain self._read_queue, optionally inflate,
    then bdecode complete packets out of self._read_buffer and dispatch
    each to the main thread via gobject.idle_add.

    An empty item in the read queue is the shutdown marker.
    """
    try:
        current_packet_size = -1        #-1 means: no size header parsed yet
        while not self._closed:
            buf = self._read_queue.get()
            if not buf:
                return self._call_connection_lost("empty marker in read queue")
            if self._decompressor is not None:
                #stream-level deflate is enabled: inflate as data arrives
                buf = self._decompressor.decompress(buf)
            if self._read_buffer:
                self._read_buffer = self._read_buffer + buf
            else:
                self._read_buffer = buf
            while not self._closed and len(self._read_buffer) > 0:
                had_deflate = (self._decompressor is not None)
                try:
                    if current_packet_size < 0 and len(self._read_buffer) > 0 and self._read_buffer[0] == "P":
                        #spotted packet size header
                        if len(self._read_buffer) < 16:
                            break       #incomplete
                        current_packet_size = int(self._read_buffer[2:16])
                        self._read_buffer = self._read_buffer[16:]
                    if current_packet_size > 0 and len(self._read_buffer) < current_packet_size:
                        log.debug("incomplete packet: only %s of %s bytes received", len(self._read_buffer), current_packet_size)
                        break
                    result = bdecode(self._read_buffer)
                except ValueError:
                    #could be a partial packet (without size header)
                    #or could be just a broken packet...
                    def packet_error(buf):
                        if self._closed:
                            return
                        # Peek at the data we got, in case we can make sense of it:
                        self._process_packet([Protocol.GIBBERISH, buf])
                        # Then hang up:
                        return self._connection_lost("gibberish received: %s" % repr_ellipsized(buf))
                    if current_packet_size > 0:
                        #we had the size, so the packet should have been valid!
                        packet_error(self._read_buffer)
                        return
                    else:
                        #wait a little before deciding
                        #unsized packets are either old clients (don't really care about them)
                        #or hello packets (small-ish)
                        def check_error_state(old_buffer):
                            if old_buffer == self._read_buffer:
                                #nothing new arrived in the meantime: give up
                                packet_error(self._read_buffer)
                        #NOTE(review): timeout_add_seconds(1000, ...) waits 1000 *seconds*;
                        #the intent above ("wait a little") suggests 1 second was meant - confirm.
                        gobject.timeout_add_seconds(1000, check_error_state, self._read_buffer)
                    break
                current_packet_size = -1
                if result is None or self._closed:
                    break
                packet, l = result
                gobject.idle_add(self._process_packet, packet)
                unprocessed = self._read_buffer[l:]
                if packet[0] == "set_deflate":
                    #trap compression changes here so they apply synchronously:
                    level = packet[1]
                    log("set_deflate packet, changing decompressor to level=%s", level)
                    if level == 0:
                        self._decompressor = None
                    else:
                        self._decompressor = zlib.decompressobj()
                    if not had_deflate and (self._decompressor is not None):
                        # deflate was just enabled: so decompress the unprocessed
                        # data
                        unprocessed = self._decompressor.decompress(unprocessed)
                self._read_buffer = unprocessed
    finally:
        log("read parse thread: ended")
def do_read_parse_thread_loop(self):
    """
    Process the individual network packets placed in _read_queue.
    We concatenate them, then try to parse them.
    We extract the individual packets from the potentially large buffer,
    saving the rest of the buffer for later, and optionally decompress this
    data and re-construct the one python-object-packet from potentially
    multiple packets (see packet_index).
    The 8 bytes packet header gives us information on the packet index,
    packet size and compression.
    The actual processing of the packet is done in the main thread
    via gobject.idle_add
    """
    read_buffer = None
    payload_size = -1       #-1: waiting for a header; includes cipher padding when set
    padding = None
    packet_index = 0
    compression_level = False
    raw_packets = {}        #raw chunks received so far, keyed by packet_index
    while not self._closed:
        buf = self._read_queue.get()
        if not buf:
            log("read thread: empty marker, exiting")
            gobject.idle_add(self.close)
            return
        if read_buffer:
            read_buffer = read_buffer + buf
        else:
            read_buffer = buf
        bl = len(read_buffer)
        #extract as many complete packets as the buffer contains:
        while not self._closed:
            bl = len(read_buffer)
            if bl <= 0:
                break
            if payload_size < 0:
                if read_buffer[0] not in ("P", ord("P")):
                    return self._call_connection_lost("invalid packet header byte: ('%s...'), not an xpra client?" % read_buffer[:32])
                if bl < 8:
                    break       #packet still too small
                #packet format: struct.pack('cBBBL', ...) - 8 bytes
                try:
                    _, protocol_flags, compression_level, packet_index, data_size = unpack_header(read_buffer[:8])
                except Exception, e:
                    raise Exception("failed to parse packet header: %s" % list(read_buffer[:8]), e)
                read_buffer = read_buffer[8:]
                bl = len(read_buffer)
                if protocol_flags & Protocol.FLAGS_CIPHER:
                    assert self.cipher_in_block_size > 0, "received cipher block but we don't have a cipher do decrypt it with"
                    #the sender pads the payload up to the cipher block size:
                    padding = (self.cipher_in_block_size - data_size % self.cipher_in_block_size) * " "
                    payload_size = data_size + len(padding)
                else:
                    #no cipher, no padding:
                    padding = None
                    payload_size = data_size
                assert payload_size > 0
                if payload_size > self.max_packet_size:
                    #this packet is seemingly too big, but check again from the main UI thread
                    #this gives 'set_max_packet_size' a chance to run from "hello"
                    def check_packet_size(size_to_check, packet_header):
                        log("check_packet_size(%s, %s) limit is %s", size_to_check, packet_header, self.max_packet_size)
                        if size_to_check > self.max_packet_size:
                            return self._call_connection_lost("invalid packet: size requested is %s (maximum allowed is %s - packet header: '%s'), dropping this connection!" % (size_to_check, self.max_packet_size, packet_header))
                    gobject.timeout_add(1000, check_packet_size, payload_size, read_buffer[:32])
            if bl < payload_size:
                # incomplete packet, wait for the rest to arrive
                break
            #chop this packet from the buffer:
            if len(read_buffer) == payload_size:
                raw_string = read_buffer
                read_buffer = ''
            else:
                raw_string = read_buffer[:payload_size]
                read_buffer = read_buffer[payload_size:]
            #decrypt if needed:
            data = raw_string
            if self.cipher_in and protocol_flags & Protocol.FLAGS_CIPHER:
                log("received %s encrypted bytes with %s padding", payload_size, len(padding))
                data = self.cipher_in.decrypt(raw_string)
                if padding:
                    assert data.endswith(padding), "decryption failed: string does not end with '%s': %s (%s) -> %s (%s)" % (padding, list(bytearray(raw_string)), type(raw_string), list(bytearray(data)), type(data))
                    data = data[:-len(padding)]
            #uncompress if needed:
            if compression_level > 0:
                if self.chunked_compression:
                    data = decompress(data)
                else:
                    data = self._decompressor.decompress(data)
            if sys.version >= '3':
                data = data.decode("latin1")
            if self.cipher_in and not (protocol_flags & Protocol.FLAGS_CIPHER):
                #a cipher is configured: refuse plaintext packets
                return self._call_connection_lost("unencrypted packet dropped: %s" % repr_ellipsized(data))
            if self._closed:
                return
            if packet_index > 0:
                #raw packet, store it and continue:
                raw_packets[packet_index] = data
                payload_size = -1
                packet_index = 0
                if len(raw_packets) >= 4:
                    return self._call_connection_lost("too many raw packets: %s" % len(raw_packets))
                continue
            #final packet (packet_index==0), decode it:
            try:
                if protocol_flags & Protocol.FLAGS_RENCODE:
                    assert has_rencode, "we don't support rencode mode but the other end sent us a rencoded packet!"
                    packet = list(rencode_loads(data))
                else:
                    packet, l = bdecode(data)
                    assert l == len(data)
            except ValueError, e:
                import traceback
                traceback.print_exc()
                log.error("value error reading packet: %s", e)
                if self._closed:
                    return
                def gibberish(buf):
                    # Peek at the data we got, in case we can make sense of it:
                    self._process_packet_cb(self, [Protocol.GIBBERISH, buf])
                    # Then hang up:
                    return self._connection_lost("gibberish received: %s, packet index=%s, packet size=%s, buffer size=%s, error=%s" % (repr_ellipsized(data), packet_index, payload_size, bl, e))
                gobject.idle_add(gibberish, data)
                return
            if self._closed:
                return
            payload_size = -1
            padding = None
            #add any raw packets back into it:
            if raw_packets:
                for index, raw_data in raw_packets.items():
                    #replace placeholder with the raw_data packet data:
                    packet[index] = raw_data
                raw_packets = {}
            try:
                self._process_packet_cb(self, packet)
                self.input_packetcount += 1
            except KeyboardInterrupt:
                raise
            except:
                log.warn("Unhandled error while processing a '%s' packet from peer", packet[0], exc_info=True)
def do_read_parse_thread_loop(self):
    """
    Process the individual network packets placed in _read_queue.
    We concatenate them, then try to parse them.
    We extract the individual packets from the potentially large buffer,
    saving the rest of the buffer for later, and optionally decompress this
    data and re-construct the one python-object-packet from potentially
    multiple packets (see packet_index).
    The 8 bytes packet header gives us information on the packet index,
    packet size and compression.
    The actual processing of the packet is done in the main thread
    via gobject.idle_add
    """
    read_buffer = None
    current_packet_size = -1        #-1 means: waiting for a packet header
    packet_index = 0
    compression_level = False
    raw_packets = {}                #raw chunks received so far, keyed by packet_index
    while not self._closed:
        buf = self._read_queue.get()
        if not buf:
            log("read thread: empty marker, exiting")
            gobject.idle_add(self.close)
            return
        if read_buffer:
            read_buffer = read_buffer + buf
        else:
            read_buffer = buf
        bl = len(read_buffer)
        #extract as many complete packets as the buffer contains:
        while not self._closed:
            bl = len(read_buffer)
            if bl <= 0:
                break
            if current_packet_size < 0:
                if read_buffer[0] not in ["P", ord("P")]:
                    return self._call_connection_lost("invalid packet header: ('%s...'), not an xpra client?" % read_buffer[:32])
                if bl < 8:
                    break       #packet still too small
                #packet format: struct.pack('cBBBL', ...) - 8 bytes
                try:
                    (_, protocol_version, compression_level, packet_index, current_packet_size) = struct.unpack_from('!cBBBL', read_buffer)
                except Exception, e:
                    raise Exception("invalid packet header: %s" % list(read_buffer[:8]), e)
                read_buffer = read_buffer[8:]
                bl = len(read_buffer)
                if current_packet_size > self.max_packet_size:
                    #this packet is seemingly too big, but check again from the main UI thread
                    #this gives 'set_max_packet_size' a chance to run
                    def check_packet_size(size_to_check, packet_header):
                        log("check_packet_size(%s, %s) limit is %s", size_to_check, packet_header, self.max_packet_size)
                        if size_to_check > self.max_packet_size:
                            return self._call_connection_lost("invalid packet: size requested is %s (maximum allowed is %s - packet header: '%s'), dropping this connection!" % (size_to_check, self.max_packet_size, packet_header))
                    gobject.timeout_add(1000, check_packet_size, current_packet_size, read_buffer[:32])
            if current_packet_size > 0 and bl < current_packet_size:
                # incomplete packet, wait for the rest to arrive
                break
            #chop this packet from the buffer:
            if len(read_buffer) == current_packet_size:
                raw_string = read_buffer
                read_buffer = ''
            else:
                raw_string = read_buffer[:current_packet_size]
                read_buffer = read_buffer[current_packet_size:]
            if compression_level > 0:
                if self.chunked_compression:
                    raw_string = zlib.decompress(raw_string)
                else:
                    raw_string = self._decompressor.decompress(raw_string)
            if sys.version >= '3':
                raw_string = raw_string.decode("latin1")
            if self._closed:
                return
            if packet_index > 0:
                #raw packet, store it and continue:
                raw_packets[packet_index] = raw_string
                current_packet_size = -1
                packet_index = 0
                continue
            result = None
            try:
                #final packet (packet_index==0), decode it:
                if protocol_version == 0:
                    result = bdecode(raw_string)
                    if result is None:
                        break
                    packet, l = result
                    assert l == len(raw_string)
                elif protocol_version == 1:
                    assert has_rencode, "we don't support rencode mode but the other end sent us a rencoded packet!"
                    packet = list(rencode_loads(raw_string))
                else:
                    raise Exception("unsupported protocol version: %s" % protocol_version)
            except ValueError, e:
                import traceback
                traceback.print_exc()
                log.error("value error reading packet: %s", e)
                if self._closed:
                    return
                def gibberish(buf):
                    # Peek at the data we got, in case we can make sense of it:
                    self._process_packet_cb(self, [Protocol.GIBBERISH, buf])
                    # Then hang up:
                    return self._connection_lost("gibberish received: %s, packet index=%s, packet size=%s, buffer size=%s, error=%s" % (repr_ellipsized(raw_string), packet_index, current_packet_size, bl, e))
                gobject.idle_add(gibberish, raw_string)
                return
            if self._closed:
                return
            current_packet_size = -1
            #add any raw packets back into it:
            if raw_packets:
                for index, raw_data in raw_packets.items():
                    #replace placeholder with the raw_data packet data:
                    packet[index] = raw_data
                raw_packets = {}
            try:
                self._process_packet_cb(self, packet)
                self.input_packetcount += 1
            except KeyboardInterrupt:
                raise
            except:
                log.warn("Unhandled error while processing packet from peer", exc_info=True)
            #optionally yield to other threads between packets:
            NOYIELD or time.sleep(0)
def do_read_parse_thread_loop(self): """ Process the individual network packets placed in _read_queue. We concatenate them, then decompress them (old protocol only), then try to parse them. Either the old (16 bytes) header, or the new (8 bytes) header with extra attributes. We extract the individual packet from the potentially large buffer, saving the rest of the buffer for later, and optionally decompress this data (new protocol only) and re-construct the one python-object-packet from potentially multiple packets (see raw_packets and packet_index - new protocol only). The actual processing of the packet is done in the main thread via gobject.idle_add The "set_deflate" are a special case (old protocol) that we trap here in order to ensure we enable compression synchronously within the thread. (this due for removal when we drop old protocol support) """ try: read_buffer = None current_packet_size = -1 packet_index = 0 compression_level = 0 raw_packets = {} while not self._closed: buf = self._read_queue.get() if not buf: log("read thread: empty marker, exiting") gobject.idle_add(self.close) return if read_buffer: read_buffer = read_buffer + buf else: read_buffer = buf bl = len(read_buffer) while not self._closed: bl = len(read_buffer) if bl <= 0: break if current_packet_size < 0: if read_buffer[0] not in ["P", ord("P")]: return self._call_connection_lost( "invalid packet header, not an xpra client?") if bl < 2: break if read_buffer[1] in ["S", ord("S")]: #old packet format: "PS%02d%012d" - 16 bytes if bl < 16: break current_packet_size = int(read_buffer[2:16]) packet_index = 0 compression_level = 0 read_buffer = read_buffer[16:] else: #new packet format: struct.pack('cBBBL', ...) 
- 8 bytes if bl < 8: break try: (_, _, compression_level, packet_index, current_packet_size) = struct.unpack_from( '!cBBBL', read_buffer) except Exception, e: raise Exception("invalid packet format: %s", e) read_buffer = read_buffer[8:] bl = len(read_buffer) if current_packet_size > self.max_packet_size: #this packet is seemingly too big, but check again from the main UI thread #this gives 'set_max_packet_size' a chance to run def check_packet_size(size_to_check, packet_header): log("check_packet_size(%s, %s) limit is %s", size_to_check, packet_header, self.max_packet_size) if size_to_check > self.max_packet_size: return self._call_connection_lost( "invalid packet: size requested is %s (maximum allowed is %s - packet header: '%s'), dropping this connection!" % (size_to_check, self.max_packet_size, packet_header)) gobject.timeout_add(1000, check_packet_size, current_packet_size, read_buffer[:32]) if current_packet_size > 0 and bl < current_packet_size: # incomplete packet break #chop this packet from the buffer: if len(read_buffer) == current_packet_size: raw_string = read_buffer read_buffer = '' else: raw_string = read_buffer[:current_packet_size] read_buffer = read_buffer[current_packet_size:] if compression_level > 0: if self.chunked_compression: raw_string = zlib.decompress(raw_string) else: raw_string = self._decompressor.decompress( raw_string) if sys.version >= '3': raw_string = raw_string.decode("latin1") if packet_index > 0: #raw packet, store it and continue: raw_packets[packet_index] = raw_string current_packet_size = -1 packet_index = 0 continue result = None try: #final packet (packet_index==0), decode it: result = bdecode(raw_string) except ValueError, e: import traceback traceback.print_exc() log.error("value error reading packet: %s", e) if self._closed: return def gibberish(buf): # Peek at the data we got, in case we can make sense of it: self._process_packet([Protocol.GIBBERISH, buf]) # Then hang up: return self._connection_lost( "gibberish 
received: %s, packet index=%s, packet size=%s, buffer size=%s, error=%s" % (repr_ellipsized(raw_string), packet_index, current_packet_size, bl, e)) gobject.idle_add(gibberish, raw_string) return current_packet_size = -1 if result is None or self._closed: break packet, l = result #add any raw packets back into it: if raw_packets: for index, raw_data in raw_packets.items(): #replace placeholder with the raw_data packet data: packet[index] = raw_data raw_packets = {} gobject.idle_add(self._process_packet, packet) assert l == len(raw_string) if packet[0] == "set_deflate": level = packet[1] if level != self._compression_level: log( "set_deflate packet, changing compressor to level=%s", level) previous_level = self._compression_level self._compression_level = level if level > 0: if previous_level == 0 and not self.raw_packets: # deflate was just enabled: so decompress the unprocessed data: read_buffer = self._decompressor.decompress( read_buffer)
def do_read_parse_thread_loop(self):
    """ Process the individual network packets placed in _read_queue.

        We concatenate them, then decompress them (old protocol only), then try to parse them.
        Either the old (16 bytes) header, or the new (8 bytes) header with extra attributes.
        We extract the individual packet from the potentially large buffer,
        saving the rest of the buffer for later, and optionally decompress this data (new protocol only)
        and re-construct the one python-object-packet from potentially multiple packets
        (see raw_packets and packet_index - new protocol only).
        The actual processing of the packet is done in the main thread via gobject.idle_add
        The "set_deflate" are a special case (old protocol) that we trap here in order to ensure
        we enable compression synchronously within the thread.
        (this due for removal when we drop old protocol support)

        Runs until self._closed is set or an empty marker is pulled from the queue.
        Parser state carried across queue reads:
          read_buffer         - unconsumed bytes accumulated from the queue
          current_packet_size - payload size from the last header, -1 when no header pending
          packet_index        - >0 marks a raw sub-packet (new protocol), 0 is the bencoded packet
          compression_level   - per-packet compression flag from the new header
          raw_packets         - index -> raw data, spliced back into the final decoded packet
    """
    read_buffer = None
    current_packet_size = -1
    packet_index = 0
    compression_level = 0
    raw_packets = {}
    # outer loop: pull chunks from the read queue until closed
    while not self._closed:
        buf = self._read_queue.get()
        if not buf:
            # empty marker is the read thread's shutdown signal
            log("read thread: empty marker, exiting")
            gobject.idle_add(self.close)
            return
        if read_buffer:
            read_buffer = read_buffer + buf
        else:
            read_buffer = buf
        bl = len(read_buffer)
        # inner loop: extract as many complete packets as the buffer holds
        while not self._closed:
            bl = len(read_buffer)
            if bl<=0:
                break
            if current_packet_size<0:
                # no header parsed yet: both formats start with "P"
                # (the ["P", ord("P")] membership test covers py2 str and py3 bytes indexing)
                if read_buffer[0] not in ["P", ord("P")]:
                    return self._call_connection_lost("invalid packet header: ('%s...'), not an xpra client?" % read_buffer[:32])
                if bl<2:
                    break               #need the second byte to tell the formats apart
                if read_buffer[1] in ["S", ord("S")]:
                    #old packet format: "PS%02d%012d" - 16 bytes
                    if bl<16:
                        break           #incomplete header
                    current_packet_size = int(read_buffer[2:16])
                    # old protocol has no per-packet attributes:
                    packet_index = 0
                    compression_level = 0
                    read_buffer = read_buffer[16:]
                else:
                    #new packet format: struct.pack('cBBBL', ...) - 8 bytes
                    if bl<8:
                        break           #incomplete header
                    try:
                        (_, _, compression_level, packet_index, current_packet_size) = struct.unpack_from('!cBBBL', read_buffer)
                    except Exception, e:
                        # NOTE(review): the "%s" here is never %-formatted - Exception
                        # receives two args; message will render as a tuple. Confirm intended.
                        raise Exception("invalid packet format: %s", e)
                    read_buffer = read_buffer[8:]
                bl = len(read_buffer)
                if current_packet_size>self.max_packet_size:
                    #this packet is seemingly too big, but check again from the main UI thread
                    #this gives 'set_max_packet_size' a chance to run
                    # (size and header are passed as arguments to avoid late-binding capture)
                    def check_packet_size(size_to_check, packet_header):
                        log("check_packet_size(%s, %s) limit is %s", size_to_check, packet_header, self.max_packet_size)
                        if size_to_check>self.max_packet_size:
                            return self._call_connection_lost("invalid packet: size requested is %s (maximum allowed is %s - packet header: '%s'), dropping this connection!" % (size_to_check, self.max_packet_size, packet_header))
                    gobject.timeout_add(1000, check_packet_size, current_packet_size, read_buffer[:32])
            if current_packet_size>0 and bl<current_packet_size:
                # incomplete packet, wait for the rest to arrive
                break
            #chop this packet from the buffer:
            if len(read_buffer)==current_packet_size:
                raw_string = read_buffer
                read_buffer = ''
            else:
                raw_string = read_buffer[:current_packet_size]
                read_buffer = read_buffer[current_packet_size:]
            if compression_level>0:
                if self.chunked_compression:
                    # new protocol: each packet is a self-contained zlib stream
                    raw_string = zlib.decompress(raw_string)
                else:
                    # old protocol: one continuous stream shared across packets
                    raw_string = self._decompressor.decompress(raw_string)
            if sys.version>='3':
                # bdecode works on str; latin1 maps bytes 1:1 onto code points
                raw_string = raw_string.decode("latin1")
            if self._closed:
                return
            if packet_index>0:
                #raw packet, store it and continue:
                raw_packets[packet_index] = raw_string
                current_packet_size = -1
                packet_index = 0
                continue
            result = None
            try:
                #final packet (packet_index==0), decode it:
                result = bdecode(raw_string)
            except ValueError, e:
                import traceback
                traceback.print_exc()
                log.error("value error reading packet: %s", e)
                if self._closed:
                    return
                # report the bad data from the main thread, then drop the connection:
                def gibberish(buf):
                    # Peek at the data we got, in case we can make sense of it:
                    self._process_packet([Protocol.GIBBERISH, buf])
                    # Then hang up:
                    return self._connection_lost("gibberish received: %s, packet index=%s, packet size=%s, buffer size=%s, error=%s" % (repr_ellipsized(raw_string), packet_index, current_packet_size, bl, e))
                gobject.idle_add(gibberish, raw_string)
                return
            if self._closed:
                return
            current_packet_size = -1
            if result is None:
                break
            # result is (decoded packet list, number of chars consumed)
            packet, l = result
            #add any raw packets back into it:
            if raw_packets:
                for index,raw_data in raw_packets.items():
                    #replace placeholder with the raw_data packet data:
                    packet[index] = raw_data
                raw_packets = {}
            gobject.idle_add(self._process_packet, packet)
            # sanity check: bdecode must have consumed the whole payload
            assert l==len(raw_string)
            #special case: we can't wait for idle_add to make the call...
            #(this will be removed in 0.5 in favour of the per-packet compression header)
            if packet[0]=="set_deflate":
                level = packet[1]
                if level!=self._compression_level:
                    log("set_deflate packet, changing compressor to level=%s", level)
                    previous_level = self._compression_level
                    self._compression_level = level
                    if level>0:
                        if previous_level==0 and not self.raw_packets:
                            # deflate was just enabled: so decompress the unprocessed data:
                            read_buffer = self._decompressor.decompress(read_buffer)