def te(s, exc):
    print(s)
    try:
        bdecode(s)
    except exc:
        pass
    else:
        assert False, "didn't raise exception"
def t(s, value, remainder):
    print(s)
    # Test "one-shot":
    rv, rr = bdecode(s)
    assert rv == value, "expected value %s but got %s" % (value, rv)
    rrstr = s[rr:]
    assert rrstr == remainder, "expected remainder value %s but got %s" % (remainder, rrstr)
    # With gibberish added:
    g_str = s + "asdf"
    rv, rr = bdecode(g_str)
    assert rv == value, "expected value %s but got %s" % (value, rv)
    rrstr = g_str[rr:]
    assert rrstr.endswith("asdf")
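# Hypothetical invocations of the helpers above (a minimal sketch, not from the
# original source; the exact exception raised for malformed input depends on this
# particular bdecode implementation):
def test_examples():
    t("li1ei2ei3ee", [1, 2, 3], "")     # a bencoded list of three integers
    t("3:foo", "foo", "")               # a bencoded string
    te("i123", ValueError)              # unterminated integer: assumed to raise ValueError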
def test_large_dict():
    try:
        from xpra.x11.gtk_x11 import gdk_display_source                        #@UnusedImport
        from xpra.x11.bindings.keyboard_bindings import X11KeyboardBindings    #@UnresolvedImport
        keyboard_bindings = X11KeyboardBindings()
        mappings = keyboard_bindings.get_keycode_mappings()
        b = bencode(mappings)
        print("bencode(%s)=%s" % (mappings, b))
        d = bdecode(b)
        print("bdecode(%s)=%s" % (b, d))
    except ImportError, e:
        print("test_large_dict() skipped because of: %s" % e)
def decode(data, protocol_flags):
    if protocol_flags & FLAGS_RENCODE:
        if not has_rencode:
            raise InvalidPacketEncodingException("rencode is not available")
        if not use_rencode:
            raise InvalidPacketEncodingException("rencode is disabled")
        return list(rencode_loads(data))
    elif protocol_flags & FLAGS_YAML:
        if not has_yaml:
            raise InvalidPacketEncodingException("yaml is not available")
        if not use_yaml:
            raise InvalidPacketEncodingException("yaml is disabled")
        return list(yaml_decode(data))
    else:
        if not has_bencode:
            raise InvalidPacketEncodingException("bencode is not available")
        if not use_bencode:
            raise InvalidPacketEncodingException("bencode is disabled")
        packet, l = bdecode(data)
        assert l==len(data)
        return packet
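# Hedged usage sketch (not from the original source): decode() falls back to the
# bencode path when neither FLAGS_RENCODE nor FLAGS_YAML is set in protocol_flags.
# Assumes a matching bencode() is importable and that has_bencode/use_bencode are
# both true in this module.
def example_decode_roundtrip():
    raw = bencode(["hello", {"version": "1.0"}])
    packet = decode(raw, 0)     # 0: no protocol flags, so the bencode branch is taken
    assert packet[0] == "hello"
    return packet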
def t(v, encstr=None):
    be = bencode(v)
    print("bencode(%s)=%s" % (v, be))
    if encstr:
        assert be==encstr
    restored = bdecode(be)
    print("decode(%s)=%s" % (be, restored))
    rlist = restored[0]
    if len(rlist)!=len(v):
        print("MISMATCH!")
        print("v=%s" % v)
        print("l=%s" % rlist)
    assert len(rlist)==2
    assert rlist[0]==v[0]
    for ok,ov in v[1].items():
        d = rlist[1]
        if ok not in d:
            print("restored dict is missing %s" % ok)
            return rlist
        rv = d.get(ok)
        if rv!=ov:
            print("value for %s does not match: %s vs %s" % (ok, ov, rv))
            return rlist
    return rlist
def do_bdecode(data):
    packet, l = bdecode(data)
    assert len(data) == l, "expected %i bytes, but got %i" % (l, len(data))
    return packet
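# Hedged illustration (not from the original source): as exercised by the tests
# above, bdecode() returns the decoded value together with the number of bytes
# consumed, so do_bdecode() rejects trailing data via its length assertion while
# a plain bdecode() call simply reports where the remainder starts.
def example_partial_bdecode():
    data = bencode(["ping", 1]) + "garbage"
    value, consumed = bdecode(data)
    assert data[consumed:] == "garbage"
    return value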
def do_read_parse_thread_loop(self):
    """
        Process the individual network packets placed in _read_queue.
        Concatenate the raw packet data, then try to parse it.
        Extract the individual packets from the potentially large buffer,
        saving the rest of the buffer for later, and optionally decompress this data
        and re-construct the one python-object-packet from potentially multiple packets (see packet_index).
        The 8 bytes packet header gives us information on the packet index, packet size and compression.
        The actual processing of the packet is done via the callback process_packet_cb,
        this will be called from this parsing thread so any calls that need to be made
        from the UI thread will need to use a callback (usually via 'idle_add')
    """
    read_buffer = None
    payload_size = -1
    padding = None
    packet_index = 0
    compression_level = False
    raw_packets = {}
    while not self._closed:
        buf = self._read_queue.get()
        if not buf:
            log("read thread: empty marker, exiting")
            self.idle_add(self.close)
            return
        if read_buffer:
            read_buffer = read_buffer + buf
        else:
            read_buffer = buf
        bl = len(read_buffer)
        while not self._closed:
            bl = len(read_buffer)
            if bl<=0:
                break
            if payload_size<0:
                head = read_buffer[:8]
                if read_buffer[0] not in ("P", ord("P")):
                    self._invalid_header(read_buffer)
                    return
                if bl<8:
                    break   #packet still too small
                #packet format: struct.pack('cBBBL', ...) - 8 bytes
                _, protocol_flags, compression_level, packet_index, data_size = unpack_header(head)
                #sanity check size (will often fail if not an xpra client):
                if data_size>self.abs_max_packet_size:
                    self._invalid_header(read_buffer)
                    return
                bl = len(read_buffer)-8
                if protocol_flags & Protocol.FLAGS_CIPHER:
                    if self.cipher_in_block_size==0 or not self.cipher_in_name:
                        log.warn("received cipher block but we don't have a cipher to decrypt it with, not an xpra client?")
                        self._invalid_header(read_buffer)
                        return
                    padding = (self.cipher_in_block_size - data_size % self.cipher_in_block_size) * " "
                    payload_size = data_size + len(padding)
                else:
                    #no cipher, no padding:
                    padding = None
                    payload_size = data_size
                assert payload_size>0
                read_buffer = read_buffer[8:]
                if payload_size>self.max_packet_size:
                    #this packet is seemingly too big, but check again from the main UI thread
                    #this gives 'set_max_packet_size' a chance to run from "hello"
                    def check_packet_size(size_to_check, packet_header):
                        if not self._closed:
                            log("check_packet_size(%s, 0x%s) limit is %s", size_to_check, repr_ellipsized(packet_header), self.max_packet_size)
                            if size_to_check>self.max_packet_size:
                                self._call_connection_lost("invalid packet: size requested is %s (maximum allowed is %s - packet header: 0x%s), dropping this connection!"
                                                           % (size_to_check, self.max_packet_size, repr_ellipsized(packet_header)))
                        return False
                    self.timeout_add(1000, check_packet_size, payload_size, read_buffer[:32])
            if bl<payload_size:
                # incomplete packet, wait for the rest to arrive
                break
            #chop this packet from the buffer:
            if len(read_buffer)==payload_size:
                raw_string = read_buffer
                read_buffer = ''
            else:
                raw_string = read_buffer[:payload_size]
                read_buffer = read_buffer[payload_size:]
            #decrypt if needed:
            data = raw_string
            if self.cipher_in and protocol_flags & Protocol.FLAGS_CIPHER:
                log("received %s encrypted bytes with %s padding", payload_size, len(padding))
                data = self.cipher_in.decrypt(raw_string)
                if padding:
                    def debug_str(s):
                        try:
                            return list(bytearray(s))
                        except:
                            return list(str(s))
                    if not data.endswith(padding):
                        log("decryption failed: string does not end with '%s': %s (%s) -> %s (%s)",
                            padding, debug_str(raw_string), type(raw_string), debug_str(data), type(data))
                        self._connection_lost("encryption error (wrong key?)")
                        return
                    data = data[:-len(padding)]
            #uncompress if needed:
            if compression_level>0:
                try:
                    if compression_level & LZ4_FLAG:
                        assert has_lz4
                        data = LZ4_uncompress(data)
                    else:
                        data = decompress(data)
                except Exception, e:
                    if self.cipher_in:
                        return self._call_connection_lost("decompression failed (invalid encryption key?): %s" % e)
                    return self._call_connection_lost("decompression failed: %s" % e)
            if self.cipher_in and not (protocol_flags & Protocol.FLAGS_CIPHER):
                return self._call_connection_lost("unencrypted packet dropped: %s" % repr_ellipsized(data))
            if self._closed:
                return
            if packet_index>0:
                #raw packet, store it and continue:
                raw_packets[packet_index] = data
                payload_size = -1
                packet_index = 0
                if len(raw_packets)>=4:
                    return self._call_connection_lost("too many raw packets: %s" % len(raw_packets))
                continue
            #final packet (packet_index==0), decode it:
            try:
                if protocol_flags & Protocol.FLAGS_RENCODE:
                    assert has_rencode, "we don't support rencode mode but the other end sent us a rencoded packet! not an xpra client?"
                    packet = list(rencode_loads(data))
                else:
                    #if sys.version>='3':
                    #    data = data.decode("latin1")
                    packet, l = bdecode(data)
                    assert l==len(data)
            except ValueError, e:
                log.error("value error reading packet: %s", e, exc_info=True)
                if self._closed:
                    return
                log("failed to parse packet: %s", binascii.hexlify(data))
                msg = "gibberish received: %s, packet index=%s, packet size=%s, buffer size=%s, error=%s" % (repr_ellipsized(data), packet_index, payload_size, bl, e)
                self.gibberish(msg, data)
                return
            if self._closed:
                return
            payload_size = -1
            padding = None
            #add any raw packets back into it:
            if raw_packets:
                for index,raw_data in raw_packets.items():
                    #replace placeholder with the raw_data packet data:
                    packet[index] = raw_data
                raw_packets = {}
            packet_type = packet[0]
            if self.receive_aliases and type(packet_type)==int and packet_type in self.receive_aliases:
                packet_type = self.receive_aliases.get(packet_type)
                packet[0] = packet_type
            self.input_stats[packet_type] = self.output_stats.get(packet_type, 0)+1
            self.input_packetcount += 1
            log("processing packet %s", packet_type)
            self._process_packet_cb(self, packet)
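# Hedged sketch (an assumption, not the actual xpra unpack_header): the comment in
# the loop above describes an 8-byte header packed as struct.pack('cBBBL', ...);
# with network byte order '!' that is 1+1+1+1+4 = 8 bytes carrying the magic 'P',
# protocol_flags, compression_level, packet_index and data_size.
import struct

def example_unpack_header(head):
    assert len(head) == 8
    magic, protocol_flags, compression_level, packet_index, data_size = struct.unpack("!cBBBL", head)
    assert magic in ("P", b"P")
    return protocol_flags, compression_level, packet_index, data_size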
def do_read_parse_thread_loop(self):
    """
        Process the individual network packets placed in _read_queue.
        We concatenate them, then try to parse them.
        We extract the individual packets from the potentially large buffer,
        saving the rest of the buffer for later, and optionally decompress this data
        and re-construct the one python-object-packet from potentially multiple packets (see packet_index).
        The 8 bytes packet header gives us information on the packet index, packet size and compression.
        The actual processing of the packet is done in the main thread via gobject.idle_add
    """
    read_buffer = None
    payload_size = -1
    padding = None
    packet_index = 0
    compression_level = False
    raw_packets = {}
    while not self._closed:
        buf = self._read_queue.get()
        if not buf:
            debug("read thread: empty marker, exiting")
            scheduler.idle_add(self.close)
            return
        if read_buffer:
            read_buffer = read_buffer + buf
        else:
            read_buffer = buf
        bl = len(read_buffer)
        while not self._closed:
            bl = len(read_buffer)
            if bl<=0:
                break
            if payload_size<0:
                if read_buffer[0] not in ("P", ord("P")):
                    err = "invalid packet header byte: '%s', not an xpra client?" % hex(ord(read_buffer[0]))
                    if len(read_buffer)>1:
                        err += " read buffer=0x%s" % repr_ellipsized(read_buffer)
                        if len(read_buffer)>40:
                            err += "..."
                    self._gibberish(err, read_buffer[:8])
                    return
                if bl<8:
                    break   #packet still too small
                #packet format: struct.pack('cBBBL', ...) - 8 bytes
                try:
                    _, protocol_flags, compression_level, packet_index, data_size = unpack_header(read_buffer[:8])
                except Exception, e:
                    self._gibberish("failed to parse packet header: 0x%s: %s" % (repr_ellipsized(read_buffer[:8]), e), read_buffer[:8])
                    return
                read_buffer = read_buffer[8:]
                bl = len(read_buffer)
                if protocol_flags & Protocol.FLAGS_CIPHER:
                    assert self.cipher_in_block_size>0, "received cipher block but we don't have a cipher to decrypt it with"
                    padding = (self.cipher_in_block_size - data_size % self.cipher_in_block_size) * " "
                    payload_size = data_size + len(padding)
                else:
                    #no cipher, no padding:
                    padding = None
                    payload_size = data_size
                assert payload_size>0
                if payload_size>self.max_packet_size:
                    #this packet is seemingly too big, but check again from the main UI thread
                    #this gives 'set_max_packet_size' a chance to run from "hello"
                    def check_packet_size(size_to_check, packet_header):
                        if not self._closed:
                            debug("check_packet_size(%s, 0x%s) limit is %s", size_to_check, repr_ellipsized(packet_header), self.max_packet_size)
                            if size_to_check>self.max_packet_size:
                                self._call_connection_lost("invalid packet: size requested is %s (maximum allowed is %s - packet header: 0x%s), dropping this connection!"
                                                           % (size_to_check, self.max_packet_size, repr_ellipsized(packet_header)))
                        return False
                    scheduler.timeout_add(1000, check_packet_size, payload_size, read_buffer[:32])
            if bl<payload_size:
                # incomplete packet, wait for the rest to arrive
                break
            #chop this packet from the buffer:
            if len(read_buffer)==payload_size:
                raw_string = read_buffer
                read_buffer = ''
            else:
                raw_string = read_buffer[:payload_size]
                read_buffer = read_buffer[payload_size:]
            #decrypt if needed:
            data = raw_string
            if self.cipher_in and protocol_flags & Protocol.FLAGS_CIPHER:
                debug("received %s encrypted bytes with %s padding", payload_size, len(padding))
                data = self.cipher_in.decrypt(raw_string)
                if padding:
                    def debug_str(s):
                        try:
                            return list(bytearray(s))
                        except:
                            return list(str(s))
                    if not data.endswith(padding):
                        log("decryption failed: string does not end with '%s': %s (%s) -> %s (%s)",
                            padding, debug_str(raw_string), type(raw_string), debug_str(data), type(data))
                        self._connection_lost("encryption error (wrong key?)")
                        return
                    data = data[:-len(padding)]
            #uncompress if needed:
            if compression_level>0:
                if self.chunked_compression:
                    data = decompress(data)
                else:
                    data = self._decompressor.decompress(data)
            if self.cipher_in and not (protocol_flags & Protocol.FLAGS_CIPHER):
                return self._call_connection_lost("unencrypted packet dropped: %s" % repr_ellipsized(data))
            if self._closed:
                return
            if packet_index>0:
                #raw packet, store it and continue:
                raw_packets[packet_index] = data
                payload_size = -1
                packet_index = 0
                if len(raw_packets)>=4:
                    return self._call_connection_lost("too many raw packets: %s" % len(raw_packets))
                continue
            #final packet (packet_index==0), decode it:
            try:
                if protocol_flags & Protocol.FLAGS_RENCODE:
                    assert has_rencode, "we don't support rencode mode but the other end sent us a rencoded packet!"
                    packet = list(rencode_loads(data))
                else:
                    #if sys.version>='3':
                    #    data = data.decode("latin1")
                    packet, l = bdecode(data)
                    assert l==len(data)
            except ValueError, e:
                log.error("value error reading packet: %s", e, exc_info=True)
                if self._closed:
                    return
                msg = "gibberish received: %s, packet index=%s, packet size=%s, buffer size=%s, error=%s" % (repr_ellipsized(data), packet_index, payload_size, bl, e)
                self._gibberish(msg, data)
                return
            if self._closed:
                return
            payload_size = -1
            padding = None
            #add any raw packets back into it:
            if raw_packets:
                for index,raw_data in raw_packets.items():
                    #replace placeholder with the raw_data packet data:
                    packet[index] = raw_data
                raw_packets = {}
            self.input_packetcount += 1
            self._process_packet_cb(self, packet)
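# Hedged illustration (an assumption, not from the source): packets with
# packet_index > 0 carry raw binary chunks that were sent separately; the final
# packet (packet_index == 0) contains placeholders at those indexes which get
# overwritten with the stored raw data, as in the "add any raw packets back into
# it" step above.
def example_reassemble(packet, raw_packets):
    for index, raw_data in raw_packets.items():
        packet[index] = raw_data
    return packet

# e.g. example_reassemble(["draw", 123, None], {2: "<pixel data>"})
# -> ["draw", 123, "<pixel data>"]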