def _encode_decode(self, encode, decode, message): result = encode(message) try: decode(None, 0, result[0]) except DropPacket: raise except: pass return result
def _encode_decode(self, encode, decode, message): result = encode(message) try: decode(None, 0, result[0]) except DropPacket: raise except: pass return result
def on_created_e2e(self, messages):
    # Handle created-e2e replies: finish the Diffie-Hellman handshake with
    # the remote side and build a rendezvous circuit towards the
    # rendezvous point it chose.
    for message in messages:
        # Pop the cache entry stored when the e2e request was sent.
        cache = self.request_cache.pop(u"e2e-request", message.payload.identifier)
        # Verify the responder's DH values and derive the shared secret,
        # then the session keys used for end-to-end encryption.
        shared_secret = self.crypto.verify_and_generate_shared_secret(
            cache.hop.dh_secret, message.payload.key, message.payload.auth, cache.hop.public_key.key.pk)
        session_keys = self.crypto.generate_session_keys(shared_secret)
        # Decrypt the rendezvous-point info: rp_info[0] is its address,
        # rp_info[1] the rendezvous cookie.
        _, rp_info = decode(
            self.crypto.decrypt_str(message.payload.rp_sock_addr,
                                    session_keys[EXIT_NODE],
                                    session_keys[EXIT_NODE_SALT]))
        if self.notifier:
            self.notifier.notify(NTFY_TUNNEL, NTFY_ONCREATED_E2E,
                                 cache.info_hash.encode('hex')[:6], rp_info[0])
        # Since it is the seeder that chose the rendezvous_point, we're essentially losing 1 hop of anonymity
        # at the downloader end. To compensate we add an extra hop.
        self.create_circuit(
            self.hops[cache.info_hash] + 1,
            CIRCUIT_TYPE_RENDEZVOUS,
            # Default arguments bind the current values, so the callback keeps
            # its own cookie/session_keys/info_hash/sock_addr.
            callback=lambda circuit, cookie=rp_info[1], session_keys=session_keys,
            info_hash=cache.info_hash, sock_addr=cache.sock_addr: self.create_link_e2e(
                circuit, cookie, session_keys, info_hash, sock_addr),
            required_endpoint=rp_info[0],
            info_hash=cache.info_hash)
def on_dht_response(self, messages):
    # Handle DHT lookup responses: request introduction keys from each newly
    # discovered peer, skipping own download points and peers contacted in
    # the last 60 seconds (the blacklist).
    for message in messages:
        self.request_cache.pop(u"dht-request", message.payload.identifier)
        info_hash = message.payload.info_hash
        _, peers = decode(message.payload.peers)
        peers = set(peers)
        self.tunnel_logger.info("Received dht response containing %d peers" % len(peers))
        blacklist = self.dht_blacklist[info_hash]
        if self.notifier:
            self.notifier.notify(NTFY_TUNNEL, NTFY_DHT_LOOKUP, info_hash.encode('hex')[:6], peers)
        # cleanup dht_blacklist
        # Iterate backwards so pop(i) does not shift entries not yet visited.
        for i in xrange(len(blacklist) - 1, -1, -1):
            if time.time() - blacklist[i][0] > 60:
                blacklist.pop(i)
        # Peers we must not contact: our own rendezvous download points plus
        # everything still on the blacklist.
        exclude = [rp[2] for rp in self.my_download_points.values()] + [sock_addr for _, sock_addr in blacklist]
        for peer in peers:
            if peer not in exclude:
                self.tunnel_logger.info("Requesting key from dht peer %s", peer)
                # Blacklist this sock_addr for a period of at least 60s
                self.dht_blacklist[info_hash].append((time.time(), peer))
                self.create_key_request(info_hash, peer)
def _decode_encr_response(self, placeholder, offset, data):
    """Decode an encr-response payload: (identifier, prefs, his_prefs).

    Raises DropPacket when the payload is malformed.

    Bug fix: when ``str_prefs`` was empty, ``hashes`` was never assigned
    (unlike ``his_hashes``, which had an else-branch) and the final
    ``implement`` call raised UnboundLocalError.
    """
    try:
        offset, payload = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the encr-payload")

    str_identifier, str_prefs, str_hprefs = payload
    identifier, = unpack_from("!H", str_identifier)

    # Encrypted preferences: a sequence of 128-byte ciphertext blocks.
    length = len(str_prefs)
    if length % 128 != 0:
        raise DropPacket("Invalid number of bytes available (encr_res)")
    if length:
        hashpack = "128s" * (length / 128)
        hashes = unpack_from("!" + hashpack, str_prefs)
        hashes = [bytes_to_long(hash) for hash in hashes]
    else:
        # BUG FIX: previously left 'hashes' unbound for an empty str_prefs.
        hashes = []

    # Hashed preferences: a sequence of 20-byte digests.
    length = len(str_hprefs)
    if length % 20 != 0:
        raise DropPacket("Invalid number of bytes available (encr_res)")
    if length:
        hashpack = "20s" * (length / 20)
        his_hashes = list(unpack_from("!" + hashpack, str_hprefs))
    else:
        his_hashes = []

    return offset, placeholder.meta.payload.implement(identifier, hashes, his_hashes)
def _decode_mark_torrent(self, placeholder, offset, data):
    """Decode a mark-torrent payload: (infohash, type, timestamp)."""
    try:
        offset, dic = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the payload")

    if "infohash" not in dic:
        raise DropPacket("Missing 'infohash'")
    infohash = dic["infohash"]
    if not isinstance(infohash, str) or len(infohash) != 20:
        raise DropPacket("Invalid 'infohash' type or value")

    if "timestamp" not in dic:
        raise DropPacket("Missing 'timestamp'")
    timestamp = dic["timestamp"]
    if not isinstance(timestamp, (int, long)):
        raise DropPacket("Invalid 'timestamp' type or value")

    if "type" not in dic:
        raise DropPacket("Missing 'type'")
    mark_type = dic["type"]
    if not isinstance(mark_type, unicode) or len(mark_type) >= 25:
        raise DropPacket("Invalid 'type' type or value")

    return offset, placeholder.meta.payload.implement(infohash, mark_type, timestamp)
def _decode_modification(self, placeholder, offset, data):
    """Decode a modification payload.

    Required keys: modification-type/-value, timestamp, and a pointer
    (mid + global-time) to the message being modified; optional keys point
    to the previous modification.  Raises DropPacket on malformed input and
    DelayPacketByMissingMember/-Message when the modified message cannot be
    resolved yet.
    """
    try:
        offset, dic = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the payload")

    if "modification-type" not in dic:
        raise DropPacket("Missing 'modification-type'")
    modification_type = dic["modification-type"]
    if not isinstance(modification_type, unicode):
        raise DropPacket("Invalid 'modification_type' type")

    if "modification-value" not in dic:
        raise DropPacket("Missing 'modification-value'")
    modification_value = dic["modification-value"]
    if not (isinstance(modification_value, unicode) and len(modification_value) < 1024):
        raise DropPacket("Invalid 'modification_value' type or value")

    if "timestamp" not in dic:
        raise DropPacket("Missing 'timestamp'")
    timestamp = dic["timestamp"]
    if not isinstance(timestamp, (int, long)):
        raise DropPacket("Invalid 'timestamp' type or value")

    if "modification-on-mid" not in dic:
        raise DropPacket("Missing 'modification-on-mid'")
    modification_on_mid = dic["modification-on-mid"]
    if not (isinstance(modification_on_mid, str) and len(modification_on_mid) == 20):
        raise DropPacket("Invalid 'modification-on-mid' type or value")

    if "modification-on-global-time" not in dic:
        raise DropPacket("Missing 'modification-on-global-time'")
    modification_on_global_time = dic["modification-on-global-time"]
    if not isinstance(modification_on_global_time, (int, long)):
        raise DropPacket("Invalid 'modification-on-global-time' type")

    # Resolve the message being modified; if unavailable, delay this packet
    # until the missing member or message arrives.
    try:
        packet_id, packet, message_name = self._get_message(modification_on_global_time, modification_on_mid)
        modification_on = Packet(self._community.get_meta_message(message_name), packet, packet_id)
    except DropPacket:
        member = self._community.get_member(mid=modification_on_mid)
        if not member:
            raise DelayPacketByMissingMember(self._community, modification_on_mid)
        raise DelayPacketByMissingMessage(self._community, member, modification_on_global_time)

    prev_modification_mid = dic.get("prev-modification-mid", None)
    if prev_modification_mid and not (isinstance(prev_modification_mid, str) and len(prev_modification_mid) == 20):
        raise DropPacket("Invalid 'prev-modification-mid' type or value")

    prev_modification_global_time = dic.get("prev-modification-global-time", None)
    if prev_modification_global_time and not isinstance(prev_modification_global_time, (int, long)):
        raise DropPacket("Invalid 'prev-modification-global-time' type")

    # Resolving the previous modification is best-effort: any failure leaves
    # it unresolved (the pointer fields may legitimately be absent/None).
    # Was a bare 'except:'; narrowed to Exception so SystemExit and
    # KeyboardInterrupt are no longer swallowed.
    try:
        packet_id, packet, message_name = self._get_message(prev_modification_global_time, prev_modification_mid)
        prev_modification_packet = Packet(self._community.get_meta_message(message_name), packet, packet_id)
    except Exception:
        prev_modification_packet = None

    return offset, placeholder.meta.payload.implement(modification_type, modification_value, timestamp,
                                                      modification_on, prev_modification_packet,
                                                      prev_modification_mid, prev_modification_global_time)
def _decode_mark_torrent(self, placeholder, offset, data):
    """Decode a mark-torrent payload: (infohash, type, timestamp)."""
    try:
        offset, dic = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the payload")

    if "infohash" not in dic:
        raise DropPacket("Missing 'infohash'")
    infohash = dic["infohash"]
    if not isinstance(infohash, str) or len(infohash) != 20:
        raise DropPacket("Invalid 'infohash' type or value")

    if "timestamp" not in dic:
        raise DropPacket("Missing 'timestamp'")
    timestamp = dic["timestamp"]
    if not isinstance(timestamp, (int, long)):
        raise DropPacket("Invalid 'timestamp' type or value")

    if "type" not in dic:
        raise DropPacket("Missing 'type'")
    mark_type = dic["type"]
    if not isinstance(mark_type, unicode) or len(mark_type) >= 25:
        raise DropPacket("Invalid 'type' type or value")

    return offset, placeholder.meta.payload.implement(infohash, mark_type, timestamp)
def on_created_e2e(self, messages):
    # Handle created-e2e replies: finish the Diffie-Hellman handshake with
    # the remote side and build a rendezvous circuit towards the
    # rendezvous point it chose.
    for message in messages:
        # Pop the cache entry stored when the e2e request was sent.
        cache = self.request_cache.pop(u"e2e-request", message.payload.identifier)
        # Verify the responder's DH values and derive the shared secret,
        # then the session keys used for end-to-end encryption.
        shared_secret = self.crypto.verify_and_generate_shared_secret(cache.hop.dh_secret, message.payload.key,
                                                                      message.payload.auth,
                                                                      cache.hop.public_key.key.pk)
        session_keys = self.crypto.generate_session_keys(shared_secret)
        # Decrypt the rendezvous-point info: rp_info[0] is its address,
        # rp_info[1] the rendezvous cookie.
        _, rp_info = decode(self.crypto.decrypt_str(message.payload.rp_sock_addr,
                                                    session_keys[EXIT_NODE], session_keys[EXIT_NODE_SALT]))
        if self.notifier:
            self.notifier.notify(NTFY_TUNNEL, NTFY_ONCREATED_E2E, cache.info_hash.encode('hex')[:6], rp_info[0])
        # Since it is the seeder that chose the rendezvous_point, we're essentially losing 1 hop of anonymity
        # at the downloader end. To compensate we add an extra hop.
        # Default arguments on the lambda bind the current values, so the
        # callback keeps its own cookie/session_keys/info_hash/sock_addr.
        self.create_circuit(self.hops[cache.info_hash] + 1,
                            CIRCUIT_TYPE_RENDEZVOUS,
                            callback=lambda circuit, cookie=rp_info[1], session_keys=session_keys,
                            info_hash=cache.info_hash, sock_addr=cache.sock_addr: self.create_link_e2e(
                                circuit, cookie, session_keys, info_hash, sock_addr),
                            required_endpoint=rp_info[0],
                            info_hash=cache.info_hash)
def on_dht_response(self, messages):
    # Handle DHT lookup responses: request introduction keys from each newly
    # discovered peer, skipping own download points and peers contacted in
    # the last 60 seconds (the blacklist).
    for message in messages:
        self.request_cache.pop(u"dht-request", message.payload.identifier)
        info_hash = message.payload.info_hash
        _, peers = decode(message.payload.peers)
        peers = set(peers)
        self.tunnel_logger.info("Received dht response containing %d peers" % len(peers))
        blacklist = self.dht_blacklist[info_hash]
        if self.notifier:
            self.notifier.notify(NTFY_TUNNEL, NTFY_DHT_LOOKUP, info_hash.encode('hex')[:6], peers)
        # cleanup dht_blacklist
        # Iterate backwards so pop(i) does not shift entries not yet visited.
        for i in xrange(len(blacklist) - 1, -1, -1):
            if time.time() - blacklist[i][0] > 60:
                blacklist.pop(i)
        # Peers we must not contact: our own rendezvous download points plus
        # everything still on the blacklist.
        exclude = [rp[2] for rp in self.my_download_points.values()] + [sock_addr for _, sock_addr in blacklist]
        for peer in peers:
            if peer not in exclude:
                self.tunnel_logger.info("Requesting key from dht peer %s", peer)
                # Blacklist this sock_addr for a period of at least 60s
                self.dht_blacklist[info_hash].append((time.time(), peer))
                self.create_key_request(info_hash, peer)
def _decode_comment(self, placeholder, offset, data):
    # Decode a comment payload.  Required keys: text, timestamp.  Optional:
    # reply-to/reply-after pointers, playlist pointer, infohash — each
    # validated only when present.
    try:
        offset, dic = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the payload")
    if not "text" in dic:
        raise DropPacket("Missing 'text'")
    text = dic["text"]
    if not (isinstance(text, unicode) and len(text) < 1024):
        raise DropPacket("Invalid 'text' type or value")
    if not "timestamp" in dic:
        raise DropPacket("Missing 'timestamp'")
    timestamp = dic["timestamp"]
    if not isinstance(timestamp, (int, long)):
        raise DropPacket("Invalid 'timestamp' type or value")
    # Optional pointers: a mid is a 20-byte member id, a global-time an int.
    reply_to_mid = dic.get("reply-to-mid", None)
    if reply_to_mid and not (isinstance(reply_to_mid, str) and len(reply_to_mid) == 20):
        raise DropPacket("Invalid 'reply-to-mid' type or value")
    reply_to_global_time = dic.get("reply-to-global-time", None)
    if reply_to_global_time and not isinstance(reply_to_global_time, (int, long)):
        raise DropPacket("Invalid 'reply-to-global-time' type")
    reply_after_mid = dic.get("reply-after-mid", None)
    if reply_after_mid and not (isinstance(reply_after_mid, str) and len(reply_after_mid) == 20):
        raise DropPacket("Invalid 'reply-after-mid' type or value")
    reply_after_global_time = dic.get("reply-after-global-time", None)
    if reply_after_global_time and not isinstance(reply_after_global_time, (int, long)):
        raise DropPacket("Invalid 'reply-after-global-time' type")
    playlist_mid = dic.get("playlist-mid", None)
    if playlist_mid and not (isinstance(playlist_mid, str) and len(playlist_mid) == 20):
        raise DropPacket("Invalid 'playlist-mid' type or value")
    playlist_global_time = dic.get("playlist-global-time", None)
    if playlist_global_time and not isinstance(playlist_global_time, (int, long)):
        raise DropPacket("Invalid 'playlist-global-time' type")
    if playlist_mid and playlist_global_time:
        # Resolve the referenced playlist message; if it is not available
        # yet, delay this packet until the missing member/message arrives.
        try:
            packet_id, packet, message_name = self._get_message(playlist_global_time, playlist_mid)
            playlist = Packet(self._community.get_meta_message(message_name), packet, packet_id)
        except DropPacket:
            members = self._community.dispersy.get_members_from_id(playlist_mid)
            if not members:
                raise DelayPacketByMissingMember(self._community, playlist_mid)
            member = members[0]
            raise DelayPacketByMissingMessage(self._community, member, playlist_global_time)
    else:
        playlist = None
    infohash = dic.get("infohash", None)
    if infohash and not (isinstance(infohash, str) and len(infohash) == 20):
        raise DropPacket("Invalid 'infohash' type or value")
    return offset, placeholder.meta.payload.implement(text, timestamp, reply_to_mid, reply_to_global_time,
                                                      reply_after_mid, reply_after_global_time, playlist, infohash)
def __init__(self, data=None):
    """Create a TrustChainBlock: genesis defaults, or populated from a db row.

    When *data* is given it is the 8-tuple (transaction, public_key,
    sequence_number, link_public_key, link_sequence_number, previous_hash,
    signature, insert_time) as stored in the database.
    """
    super(TrustChainBlock, self).__init__()
    if data is None:
        # data
        self.transaction = {}
        # identity
        self.public_key = EMPTY_PK
        self.sequence_number = GENESIS_SEQ
        # linked identity
        self.link_public_key = EMPTY_PK
        self.link_sequence_number = UNKNOWN_SEQ
        # validation
        self.previous_hash = GENESIS_HASH
        self.signature = EMPTY_SIG
        # debug stuff
        self.insert_time = None
    else:
        _, self.transaction = decode(str(data[0]))
        self.public_key = data[1]
        self.sequence_number = data[2]
        self.link_public_key = data[3]
        self.link_sequence_number = data[4]
        self.previous_hash = data[5]
        self.signature = data[6]
        self.insert_time = data[7]
        # Database drivers may hand back buffer objects; normalise each of
        # the binary fields to str.
        for attr in ("public_key", "link_public_key", "previous_hash", "signature"):
            value = getattr(self, attr)
            if isinstance(value, buffer):
                setattr(self, attr, str(value))
def _decode_comment(self, placeholder, offset, data):
    # Decode a comment payload.  Required keys: text, timestamp.  Optional:
    # reply-to/reply-after pointers, playlist pointer, infohash — each
    # validated only when present.
    try:
        offset, dic = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the payload")
    if not "text" in dic:
        raise DropPacket("Missing 'text'")
    text = dic["text"]
    if not (isinstance(text, unicode) and len(text) < 1024):
        raise DropPacket("Invalid 'text' type or value")
    if not "timestamp" in dic:
        raise DropPacket("Missing 'timestamp'")
    timestamp = dic["timestamp"]
    if not isinstance(timestamp, (int, long)):
        raise DropPacket("Invalid 'timestamp' type or value")
    # Optional pointers: a mid is a 20-byte member id, a global-time an int.
    reply_to_mid = dic.get("reply-to-mid", None)
    if reply_to_mid and not (isinstance(reply_to_mid, str) and len(reply_to_mid) == 20):
        raise DropPacket("Invalid 'reply-to-mid' type or value")
    reply_to_global_time = dic.get("reply-to-global-time", None)
    if reply_to_global_time and not isinstance(reply_to_global_time, (int, long)):
        raise DropPacket("Invalid 'reply-to-global-time' type")
    reply_after_mid = dic.get("reply-after-mid", None)
    if reply_after_mid and not (isinstance(reply_after_mid, str) and len(reply_after_mid) == 20):
        raise DropPacket("Invalid 'reply-after-mid' type or value")
    reply_after_global_time = dic.get("reply-after-global-time", None)
    if reply_after_global_time and not isinstance(reply_after_global_time, (int, long)):
        raise DropPacket("Invalid 'reply-after-global-time' type")
    playlist_mid = dic.get("playlist-mid", None)
    if playlist_mid and not (isinstance(playlist_mid, str) and len(playlist_mid) == 20):
        raise DropPacket("Invalid 'playlist-mid' type or value")
    playlist_global_time = dic.get("playlist-global-time", None)
    if playlist_global_time and not isinstance(playlist_global_time, (int, long)):
        raise DropPacket("Invalid 'playlist-global-time' type")
    if playlist_mid and playlist_global_time:
        # Resolve the referenced playlist message; if it is not available
        # yet, delay this packet until the missing member/message arrives.
        try:
            packet_id, packet, message_name = self._get_message(playlist_global_time, playlist_mid)
            playlist = Packet(self._community.get_meta_message(message_name), packet, packet_id)
        except DropPacket:
            member = self._community.get_member(mid=playlist_mid)
            if not member:
                raise DelayPacketByMissingMember(self._community, playlist_mid)
            raise DelayPacketByMissingMessage(self._community, member, playlist_global_time)
    else:
        playlist = None
    infohash = dic.get("infohash", None)
    if infohash and not (isinstance(infohash, str) and len(infohash) == 20):
        raise DropPacket("Invalid 'infohash' type or value")
    return offset, placeholder.meta.payload.implement(text, timestamp, reply_to_mid, reply_to_global_time,
                                                      reply_after_mid, reply_after_global_time, playlist, infohash)
def _decode_channelsearch(self, placeholder, offset, data):
    """Decode a channel-search payload: a list of unicode keywords."""
    try:
        offset, keywords = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the channelcast-payload")

    if not isinstance(keywords, list):
        raise DropPacket("Invalid payload type")
    if any(not isinstance(keyword, unicode) for keyword in keywords):
        raise DropPacket("Invalid 'keyword' type")

    return offset, placeholder.meta.payload.implement(keywords)
def _decode_channelsearch(self, placeholder, offset, data):
    """Decode a channel-search payload: a list of unicode keywords."""
    try:
        offset, keywords = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the channelcast-payload")

    if not isinstance(keywords, list):
        raise DropPacket("Invalid payload type")
    if any(not isinstance(keyword, unicode) for keyword in keywords):
        raise DropPacket("Invalid 'keyword' type")

    return offset, placeholder.meta.payload.implement(keywords)
def _decode_moderation(self, placeholder, offset, data):
    """Decode a moderation payload and resolve the message it moderates.

    Raises DropPacket on malformed input; raises a DelayPacket variant when
    the cause message cannot be resolved yet.
    """
    try:
        offset, dic = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the payload")

    if "text" not in dic:
        raise DropPacket("Missing 'text'")
    text = dic["text"]
    if not isinstance(text, unicode) or len(text) >= 1024:
        raise DropPacket("Invalid 'text' type or value")

    if "timestamp" not in dic:
        raise DropPacket("Missing 'timestamp'")
    timestamp = dic["timestamp"]
    if not isinstance(timestamp, (int, long)):
        raise DropPacket("Invalid 'timestamp' type or value")

    if "severity" not in dic:
        raise DropPacket("Missing 'severity'")
    severity = dic["severity"]
    if not isinstance(severity, (int, long)):
        raise DropPacket("Invalid 'severity' type or value")

    cause_mid = dic.get("cause-mid", None)
    if not isinstance(cause_mid, str) or len(cause_mid) != 20:
        raise DropPacket("Invalid 'cause-mid' type or value")
    cause_global_time = dic.get("cause-global-time", None)
    if not isinstance(cause_global_time, (int, long)):
        raise DropPacket("Invalid 'cause-global-time' type")

    # Resolve the message being moderated; delay the packet when the member
    # or message is not known yet.
    try:
        packet_id, packet, message_name = self._get_message(cause_global_time, cause_mid)
        cause_packet = Packet(self._community.get_meta_message(message_name), packet, packet_id)
    except DropPacket:
        members = self._community.dispersy.get_members_from_id(cause_mid)
        if not members:
            raise DelayPacketByMissingMember(self._community, cause_mid)
        raise DelayPacketByMissingMessage(self._community, members[0], cause_global_time)

    return offset, placeholder.meta.payload.implement(text, timestamp, severity, cause_packet)
def _decode_search_request(self, placeholder, offset, data):
    """Decode a search-request payload: (identifier, keywords[, bloom filter]).

    Bug fix: the DropPacket message was corrupted by a stray paste
    ("Unable to decodr 21, 2012 e the search-payload"); restored to the
    intended wording.
    """
    try:
        offset, payload = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the search-payload")

    if len(payload) < 2:
        raise DropPacket("Invalid payload length")

    identifier, keywords = payload[:2]
    if len(identifier) != 2:
        raise DropPacket("Unable to decode the search-payload, got %d bytes expected 2" % (len(identifier)))
    identifier, = unpack_from('!H', identifier)

    if not isinstance(keywords, list):
        raise DropPacket("Invalid 'keywords' type")
    for keyword in keywords:
        if not isinstance(keyword, unicode):
            raise DropPacket("Invalid 'keyword' type")

    if len(payload) > 5:
        # NOTE(review): payload[2:6] yields 4 items unpacked into 3 names,
        # which always raises ValueError when this branch is taken — the
        # slice was probably meant to be payload[2:5]. Left unchanged here;
        # verify against the wire format before fixing.
        functions, prefix, bytes_ = payload[2:6]

        if not isinstance(functions, int):
            raise DropPacket("Invalid functions type")
        if not 0 < functions:
            raise DropPacket("Invalid functions value")

        size = len(bytes_)
        if not 0 < size:
            raise DropPacket("Invalid size of bloomfilter")
        if not size % 8 == 0:
            raise DropPacket("Invalid size of bloomfilter, must be a multiple of eight")

        if not isinstance(prefix, str):
            raise DropPacket("Invalid prefix type")
        if not 0 <= len(prefix) < 256:
            raise DropPacket("Invalid prefix length")

        bloom_filter = BloomFilter(bytes_, functions, prefix=prefix)
    else:
        bloom_filter = None

    return offset, placeholder.meta.payload.implement(identifier, keywords, bloom_filter)
def _decode_torrent(self, placeholder, offset, data):
    """Decode a zlib-compressed torrent payload.

    Payload: ((infohash + timestamp), name, files, trackers); consumes the
    remainder of *data*.
    """
    try:
        uncompressed = zlib.decompress(data[offset:])
    except zlib.error:
        raise DropPacket("Invalid zlib data")
    # The compressed blob extends to the end of the packet.
    offset = len(data)

    try:
        _, values = decode(uncompressed)
    except ValueError:
        raise DropPacket("Unable to decode the torrent-payload")

    infohash_time, name, files, trackers = values
    if len(infohash_time) != 28:
        raise DropPacket("Unable to decode the torrent-payload, got %d bytes expected 28" % (len(infohash_time)))
    infohash, timestamp = unpack_from('!20sQ', infohash_time)

    if not isinstance(name, unicode):
        raise DropPacket("Invalid 'name' type")

    if not isinstance(files, tuple):
        raise DropPacket("Invalid 'files' type")
    if not files:
        raise DropPacket("Should have at least one file")
    for file_entry in files:
        if len(file_entry) != 2:
            raise DropPacket("Invalid 'file_len' type")
        path, length = file_entry
        if not isinstance(path, unicode):
            raise DropPacket("Invalid 'files_path' type is %s" % type(path))
        if not isinstance(length, (int, long)):
            raise DropPacket("Invalid 'files_length' type is %s" % type(length))

    if not isinstance(trackers, tuple):
        raise DropPacket("Invalid 'trackers' type")
    for tracker in trackers:
        if not isinstance(tracker, str):
            raise DropPacket("Invalid 'tracker' type")

    return offset, placeholder.meta.payload.implement(infohash, timestamp, name, files, trackers)
def _decode_example(self, placeholder, offset, data):
    """Decode an example payload: a (text, amount) tuple."""
    try:
        offset, values = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the example-payload")

    if not isinstance(values, tuple):
        raise DropPacket("Invalid payload type")

    text, amount = values
    if not isinstance(text, str):
        raise DropPacket("Invalid 'text' type")
    if not isinstance(amount, int):
        raise DropPacket("Invalid 'amount' type")

    return offset, placeholder.meta.payload.implement(text, amount)
def on_key_response(self, messages):
    # Handle key-response messages.  A response arriving directly over a
    # socket (not from a circuit) means we are an intermediate hop and must
    # relay it back towards the requester; a response over our own circuit
    # completes the key request and feeds the peer-exchange (pex) state.
    for message in messages:
        if not message.source.startswith(u"circuit_"):
            cache = self.request_cache.pop(u"key-request", message.payload.identifier)
            self.tunnel_logger.info('On key response: forward message because received over socket')
            meta = self.get_meta_message(u'key-response')
            relay_message = meta.impl(distribution=(self.global_time,),
                                      payload=(cache.identifier, message.payload.public_key,
                                               message.payload.pex_peers))
            self.send_packet([Candidate(cache.return_sock_addr, False)],
                             u"key-response",
                             TUNNEL_PREFIX + relay_message.packet)
        else:
            # pop key-request cache and notify gui
            self.tunnel_logger.info("On key response: received keys")
            cache = self.request_cache.pop(u"key-request", message.payload.identifier)
            _, pex_peers = decode(message.payload.pex_peers)
            if self.notifier:
                self.notifier.notify(NTFY_TUNNEL, NTFY_KEY_RESPONSE,
                                     cache.info_hash.encode('hex')[:6], cache.circuit.circuit_id)

            # Cache this peer and key for pex via key-response
            self.tunnel_logger.info("Added key to peer exchange cache")
            self.infohash_pex[cache.info_hash].add((cache.sock_addr, message.payload.public_key))

            # Add received pex_peers to own list of known peers for this infohash
            for pex_peer in pex_peers:
                pex_peer_sock, pex_peer_key = pex_peer
                self.infohash_pex[cache.info_hash].add((pex_peer_sock, pex_peer_key))

            # Initate end-to-end circuits for all known peers in the pex list
            for peer in self.infohash_pex[cache.info_hash]:
                peer_sock, peer_key = peer
                if cache.info_hash not in self.infohash_ip_circuits:
                    self.tunnel_logger.info("Create end-to-end on pex_peer %s" % repr(peer_sock))
                    self.create_e2e(cache.circuit, peer_sock, cache.info_hash, peer_key)
def _decode_torrent_request(self, placeholder, offset, data):
    """Decode a torrent-request payload: a dict of {cid: infohashes}."""
    try:
        offset, requested = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the torrent-request")

    if not isinstance(requested, dict):
        raise DropPacket("Invalid payload type")
    # Every channel id and infohash must be a 20-byte binary string.
    for cid, infohashes in requested.iteritems():
        if not isinstance(cid, str) or len(cid) != 20:
            raise DropPacket("Invalid 'cid' type or value")
        for infohash in infohashes:
            if not isinstance(infohash, str) or len(infohash) != 20:
                raise DropPacket("Invalid 'infohash' type or value")

    return offset, placeholder.meta.payload.implement(requested)
def _decode_channelcast(self, placeholder, offset, data):
    """Decode a channelcast payload: a dict of {cid: infohashes}."""
    try:
        offset, channels = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the channelcast-payload")

    if not isinstance(channels, dict):
        raise DropPacket("Invalid payload type")
    # Every channel id and infohash must be a 20-byte binary string.
    for cid, infohashes in channels.iteritems():
        if not isinstance(cid, str) or len(cid) != 20:
            raise DropPacket("Invalid 'cid' type or value")
        for infohash in infohashes:
            if not isinstance(infohash, str) or len(infohash) != 20:
                raise DropPacket("Invalid 'infohash' type or value")

    return offset, placeholder.meta.payload.implement(channels)
def _decode_channel(self, placeholder, offset, data):
    """Decode a channel payload: (name, description)."""
    try:
        offset, values = decode(data, offset)
        if len(values) != 2:
            raise ValueError
    except ValueError:
        raise DropPacket("Unable to decode the channel-payload")

    name, description = values
    if not isinstance(name, unicode) or len(name) >= 256:
        raise DropPacket("Invalid 'name' type or value")
    if not isinstance(description, unicode) or len(description) >= 1024:
        raise DropPacket("Invalid 'description' type or value")

    return offset, placeholder.meta.payload.implement(name, description)
def _decode_channel(self, placeholder, offset, data):
    """Decode a channel payload: (name, description)."""
    try:
        offset, values = decode(data, offset)
        if len(values) != 2:
            raise ValueError
    except ValueError:
        raise DropPacket("Unable to decode the channel-payload")

    name, description = values
    if not isinstance(name, unicode) or len(name) >= 256:
        raise DropPacket("Invalid 'name' type or value")
    if not isinstance(description, unicode) or len(description) >= 1024:
        raise DropPacket("Invalid 'description' type or value")

    return offset, placeholder.meta.payload.implement(name, description)
def unpack(cls, data, offset=0):
    """
    Unpacks a block from a buffer
    :param data: The buffer to unpack from
    :param offset: Optionally, the offset at which to start unpacking
    :return: The TrustChainBlock that was unpacked from the buffer
    """
    block = TrustChainBlock()
    # Fixed-size header: identity, link identity, previous hash, signature.
    (block.public_key, block.sequence_number, block.link_public_key,
     block.link_sequence_number, block.previous_hash,
     block.signature) = unpack_from(block_pack_format, data, offset)
    offset += block_pack_size
    # Variable-size tail: a 4-byte length prefix followed by the encoded
    # transaction.
    tx_len, = struct.unpack("!I", data[offset:offset + 4])
    offset += 4
    _, block.transaction = decode(data[offset:offset + tx_len])
    return block
def _decode_moderation(self, placeholder, offset, data):
    # Decode a moderation payload and resolve the message it moderates.
    # Raises DropPacket on malformed input; raises a DelayPacket variant
    # when the cause message cannot be resolved yet.
    try:
        offset, dic = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the payload")
    if not "text" in dic:
        raise DropPacket("Missing 'text'")
    text = dic["text"]
    if not (isinstance(text, unicode) and len(text) < 1024):
        raise DropPacket("Invalid 'text' type or value")
    if not "timestamp" in dic:
        raise DropPacket("Missing 'timestamp'")
    timestamp = dic["timestamp"]
    if not isinstance(timestamp, (int, long)):
        raise DropPacket("Invalid 'timestamp' type or value")
    if not "severity" in dic:
        raise DropPacket("Missing 'severity'")
    severity = dic["severity"]
    if not isinstance(severity, (int, long)):
        raise DropPacket("Invalid 'severity' type or value")
    # The cause pointer (mid + global time) is effectively required: .get is
    # used but missing values fail the validation below.
    cause_mid = dic.get("cause-mid", None)
    if not (isinstance(cause_mid, str) and len(cause_mid) == 20):
        raise DropPacket("Invalid 'cause-mid' type or value")
    cause_global_time = dic.get("cause-global-time", None)
    if not isinstance(cause_global_time, (int, long)):
        raise DropPacket("Invalid 'cause-global-time' type")
    # Resolve the moderated message; delay the packet when the member or
    # message is not known yet.
    try:
        packet_id, packet, message_name = self._get_message(cause_global_time, cause_mid)
        cause_packet = Packet(self._community.get_meta_message(message_name), packet, packet_id)
    except DropPacket:
        members = self._community.dispersy.get_members_from_id(cause_mid)
        if not members:
            raise DelayPacketByMissingMember(self._community, cause_mid)
        member = members[0]
        raise DelayPacketByMissingMessage(self._community, member, cause_global_time)
    return offset, placeholder.meta.payload.implement(text, timestamp, severity, cause_packet)
def _decode_search_request(self, placeholder, offset, data):
    """Decode a search-request payload: (identifier, keywords[, bloom filter]).

    Bug fix: the DropPacket message was corrupted by a stray paste
    ("Unable to decodr 21, 2012 e the search-payload"); restored to the
    intended wording.
    """
    try:
        offset, payload = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the search-payload")

    if len(payload) < 2:
        raise DropPacket("Invalid payload length")

    identifier, keywords = payload[:2]
    if len(identifier) != 2:
        raise DropPacket("Unable to decode the search-payload, got %d bytes expected 2" % (len(identifier)))
    identifier, = unpack_from('!H', identifier)

    if not isinstance(keywords, list):
        raise DropPacket("Invalid 'keywords' type")
    for keyword in keywords:
        if not isinstance(keyword, unicode):
            raise DropPacket("Invalid 'keyword' type")

    if len(payload) > 5:
        # NOTE(review): payload[2:6] yields 4 items unpacked into 3 names,
        # which always raises ValueError when this branch is taken — the
        # slice was probably meant to be payload[2:5]. Left unchanged here;
        # verify against the wire format before fixing.
        functions, prefix, bytes_ = payload[2:6]

        if not isinstance(functions, int):
            raise DropPacket("Invalid functions type")
        if not 0 < functions:
            raise DropPacket("Invalid functions value")

        size = len(bytes_)
        if not 0 < size:
            raise DropPacket("Invalid size of bloomfilter")
        if not size % 8 == 0:
            raise DropPacket("Invalid size of bloomfilter, must be a multiple of eight")

        if not isinstance(prefix, str):
            raise DropPacket("Invalid prefix type")
        if not 0 <= len(prefix) < 256:
            raise DropPacket("Invalid prefix length")

        bloom_filter = BloomFilter(bytes_, functions, prefix=prefix)
    else:
        bloom_filter = None

    return offset, placeholder.meta.payload.implement(identifier, keywords, bloom_filter)
def _decode_simi_response(self, placeholder, offset, data):
    """Decode a simi-response payload: (identifier, prefs, hprefs, bundled).

    Bug fix: when ``str_prefs`` was empty for a response, ``hashes`` was
    never assigned — causing an UnboundLocalError on the first iteration, or
    silently reusing the previous iteration's hashes afterwards.  It is now
    reset to [] per iteration, mirroring ``his_hashes``.
    """
    try:
        offset, payload = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the simi-payload")

    identifier, responses = payload[:2]
    if len(identifier) != 2:
        raise DropPacket("Unable to decode the search-response-payload, got %d bytes expected 2" % (len(identifier)))
    identifier, = unpack_from("!H", identifier)

    prefs = hprefs = None
    bundled_responses = []
    for str_mid, str_prefs, str_hprefs in responses:
        # Encrypted preferences: 128-byte ciphertext blocks.
        length = len(str_prefs)
        if length % 128 != 0:
            raise DropPacket("Invalid number of bytes available (encr_res)")
        if length:
            hashpack = "128s" * (length / 128)
            hashes = unpack_from("!" + hashpack, str_prefs)
            hashes = [bytes_to_long(hash) for hash in hashes]
        else:
            # BUG FIX: previously unbound/stale when str_prefs was empty.
            hashes = []

        # Hashed preferences: 20-byte digests.
        length = len(str_hprefs)
        if length % 20 != 0:
            raise DropPacket("Invalid number of bytes available (encr_res)")
        if length:
            hashpack = "20s" * (length / 20)
            his_hashes = list(unpack_from("!" + hashpack, str_hprefs))
        else:
            his_hashes = []

        if str_mid:
            str_mid, = unpack_from("!20s", str_mid)
            bundled_responses.append((str_mid, (hashes, his_hashes)))
        else:
            # No mid: these are the direct responder's own preferences.
            prefs = hashes
            hprefs = his_hashes

    return offset, placeholder.meta.payload.implement(identifier, prefs, hprefs, bundled_responses)
def _decode_torrent(self, placeholder, offset, data):
    """Decode a zlib-compressed torrent payload.

    Payload: ((infohash + timestamp), name, files, trackers); consumes the
    remainder of *data*.

    Bug fix: ``zlib.decompress`` was unguarded, so malformed packets raised
    a raw ``zlib.error`` instead of ``DropPacket``; now wrapped like the
    sibling torrent decoder.
    """
    try:
        uncompressed_data = zlib.decompress(data[offset:])
    except zlib.error:
        raise DropPacket("Invalid zlib data")
    # The compressed blob extends to the end of the packet.
    offset = len(data)

    try:
        _, values = decode(uncompressed_data)
    except ValueError:
        raise DropPacket("Unable to decode the torrent-payload")

    infohash_time, name, files, trackers = values
    if len(infohash_time) != 28:
        raise DropPacket("Unable to decode the torrent-payload, got %d bytes expected 28" % (len(infohash_time)))
    infohash, timestamp = unpack_from('!20sQ', infohash_time)

    if not isinstance(name, unicode):
        raise DropPacket("Invalid 'name' type")

    if not isinstance(files, tuple):
        raise DropPacket("Invalid 'files' type")
    if len(files) == 0:
        raise DropPacket("Should have at least one file")
    for file_entry in files:  # renamed from 'file' to avoid shadowing the builtin
        if len(file_entry) != 2:
            raise DropPacket("Invalid 'file_len' type")
        path, length = file_entry
        if not isinstance(path, unicode):
            raise DropPacket("Invalid 'files_path' type is %s" % type(path))
        if not isinstance(length, (int, long)):
            raise DropPacket("Invalid 'files_length' type is %s" % type(length))

    if not isinstance(trackers, tuple):
        raise DropPacket("Invalid 'trackers' type")
    for tracker in trackers:
        if not isinstance(tracker, str):
            raise DropPacket("Invalid 'tracker' type")

    return offset, placeholder.meta.payload.implement(infohash, timestamp, name, files, trackers)
def _decode_channelsearch_response(self, placeholder, offset, data):
    """Decode a channel-search response: (keywords, {cid: infohashes})."""
    try:
        offset, values = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the channelcast-payload")

    if not isinstance(values, tuple):
        raise DropPacket("Invalid payload type")

    keywords, torrents = values
    for keyword in keywords:
        if not isinstance(keyword, unicode):
            raise DropPacket("Invalid 'keyword' type")
    # Every channel id and infohash must be a 20-byte binary string.
    for cid, infohashes in torrents.iteritems():
        if not isinstance(cid, str) or len(cid) != 20:
            raise DropPacket("Invalid 'cid' type or value")
        for infohash in infohashes:
            if not isinstance(infohash, str) or len(infohash) != 20:
                raise DropPacket("Invalid 'infohash' type or value")

    return offset, placeholder.meta.payload.implement(keywords, torrents)
def _decode_channelsearch_response(self, placeholder, offset, data):
    """Validate and decode a channel-search response payload.

    The payload is a tuple of (keywords, torrents) where torrents maps a
    20-byte channel id to a collection of 20-byte infohashes.
    """
    try:
        offset, payload = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the channelcast-payload")
    if not isinstance(payload, tuple):
        raise DropPacket("Invalid payload type")

    keywords, torrents = payload
    for keyword in keywords:
        if not isinstance(keyword, unicode):
            raise DropPacket("Invalid 'keyword' type")

    for cid, infohashes in torrents.iteritems():
        # De Morgan'd form of: not (is str AND len == 20)
        if not isinstance(cid, str) or len(cid) != 20:
            raise DropPacket("Invalid 'cid' type or value")
        for infohash in infohashes:
            if not isinstance(infohash, str) or len(infohash) != 20:
                raise DropPacket("Invalid 'infohash' type or value")

    return offset, placeholder.meta.payload.implement(keywords, torrents)
def on_key_response(self, messages):
    """Handle incoming key-response messages.

    Messages that arrived over a plain socket (source does not start with
    u"circuit_") are re-wrapped and relayed back toward the original
    requester; messages that arrived over a circuit carry the keys we asked
    for, so we record them and kick off end-to-end circuit creation.
    """
    for message in messages:
        if not message.source.startswith(u"circuit_"):
            # Received directly over a socket: we are acting as a relay.
            # Pop our own cache entry to recover where the request came from.
            cache = self.request_cache.pop(u"key-request", message.payload.identifier)
            self.tunnel_logger.info('On key response: forward message because received over socket')
            meta = self.get_meta_message(u'key-response')
            # Re-issue the response under the ORIGINAL requester's identifier.
            relay_message = meta.impl(distribution=(self.global_time,), payload=(cache.identifier, message.payload.public_key, message.payload.pex_peers))
            self.send_packet([Candidate(cache.return_sock_addr, False)], u"key-response", TUNNEL_PREFIX + relay_message.packet)
        else:
            # pop key-request cache and notify gui
            self.tunnel_logger.info("On key response: received keys")
            cache = self.request_cache.pop(u"key-request", message.payload.identifier)
            _, pex_peers = decode(message.payload.pex_peers)
            if self.notifier:
                self.notifier.notify(NTFY_TUNNEL, NTFY_KEY_RESPONSE, cache.info_hash.encode('hex')[:6], cache.circuit.circuit_id)
            # Cache this peer and key for pex via key-response
            self.tunnel_logger.info("Added key to peer exchange cache")
            self.infohash_pex[cache.info_hash].add((cache.sock_addr, message.payload.public_key))
            # Add received pex_peers to own list of known peers for this infohash
            for pex_peer in pex_peers:
                pex_peer_sock, pex_peer_key = pex_peer
                self.infohash_pex[cache.info_hash].add((pex_peer_sock, pex_peer_key))
            # Initate end-to-end circuits for all known peers in the pex list
            for peer in self.infohash_pex[cache.info_hash]:
                peer_sock, peer_key = peer
                # NOTE(review): this guard does not depend on `peer`; it looks
                # loop-invariant, but create_e2e may update
                # infohash_ip_circuits mid-loop — confirm before hoisting.
                if cache.info_hash not in self.infohash_ip_circuits:
                    self.tunnel_logger.info("Create end-to-end on pex_peer %s" % repr(peer_sock))
                    self.create_e2e(cache.circuit, peer_sock, cache.info_hash, peer_key)
def _decode_payload(self, placeholder, offset, data, types):
    """Decode a payload whose field layout is described by ``types``.

    Price/Quantity fields consume two slots (value plus wallet-id); plain
    str/int fields are taken verbatim; any other type is constructed from a
    single slot. Raises DropPacket on decode or conversion failure.
    """
    try:
        offset, payload = decode(data, offset)
    except (ValueError, AssertionError, KeyError):
        raise DropPacket("Unable to decode the payload")

    args = []
    index = 0
    for arg_type in types:
        try:
            if arg_type in (Price, Quantity):
                # These carry an additional wallet ID in the next slot.
                args.append(arg_type(payload[index], INV_ASSET_MAP[payload[index + 1]]))
                index += 2
            elif arg_type in (str, int):
                # Primitive values are used as-is.
                args.append(payload[index])
                index += 1
            else:
                args.append(arg_type(payload[index]))
                index += 1
        except (ValueError, KeyError):
            raise DropPacket("Invalid '" + arg_type.__name__ + "' type")

    return offset, placeholder.meta.payload.implement(*args)
def test_decode(self):
    """decode() parses a two-entry dictionary and reports bytes consumed."""
    expected_mapping = {'foo': 'bar', 'moo': 'milk'}
    self.assertEqual(decode("a2d3sfoo3sbar3smoo4smilk", 0), (24, expected_mapping))
def _decode_search_response(self, placeholder, offset, data):
    """Decode a search response: a 2-byte identifier plus a list of results.

    Each result is a 9-tuple of (infohash, swarmname, length, nrfiles,
    category_list, creation_date, seeders, leechers, cid); every field is
    type-checked and a DropPacket is raised on the first violation.
    """
    try:
        offset, payload = decode(data, offset)
    except (ValueError, KeyError):
        raise DropPacket("Unable to decode the search-reponse-payload")
    if len(payload) < 2:
        raise DropPacket("Invalid payload length")

    identifier, results = payload[:2]
    if len(identifier) != 2:
        raise DropPacket("Unable to decode the search-response-payload, got %d bytes expected 2" % (len(identifier)))
    identifier, = unpack_from('!H', identifier)

    if not isinstance(results, list):
        raise DropPacket("Invalid 'results' type")

    for hit in results:
        if not isinstance(hit, tuple):
            raise DropPacket("Invalid result type")
        if len(hit) < 9:
            raise DropPacket("Invalid result length")
        (infohash, swarmname, length, nrfiles, category_list,
         creation_date, seeders, leechers, cid) = hit[:9]

        if not isinstance(infohash, str):
            raise DropPacket("Invalid infohash type")
        if len(infohash) != 20:
            raise DropPacket("Invalid infohash length")
        if not isinstance(swarmname, unicode):
            raise DropPacket("Invalid swarmname type")
        if not isinstance(length, long):
            raise DropPacket("Invalid length type '%s'" % type(length))
        if not isinstance(nrfiles, int):
            raise DropPacket("Invalid nrfiles type")
        if not isinstance(category_list, list) or not all(isinstance(key, unicode) for key in category_list):
            raise DropPacket("Invalid category_list type")
        if not isinstance(creation_date, long):
            raise DropPacket("Invalid creation_date type")
        if not isinstance(seeders, int):
            raise DropPacket("Invalid seeders type '%s'" % type(seeders))
        if not isinstance(leechers, int):
            raise DropPacket("Invalid leechers type '%s'" % type(leechers))

        # The channel id is optional; when present it must be a 20-byte string.
        if cid:
            if not isinstance(cid, str):
                raise DropPacket("Invalid cid type")
            if len(cid) != 20:
                raise DropPacket("Invalid cid length")

    return offset, placeholder.meta.payload.implement(identifier, results)
def test_decode_wrong_stream_type(self):
    """Feeding a list instead of a string stream should raise."""
    bad_stream = ["a", "b"]
    decode(bad_stream)
def test_decode_wrong_offset_type(self):
    """A non-integer offset should raise."""
    bad_offset = "42"
    decode("abc", bad_offset)
def test_decode_wrong_version_num(self):
    """A stream with an unsupported version marker should raise."""
    bad_version_stream = "b2i42"
    decode(bad_version_stream, 0)
def _decode_search_response(self, placeholder, offset, data):
    """Decode a search response with an 11-field result tuple.

    Each result is (infohash, swarmname, length, nrfiles, categorykeys,
    creation_date, seeders, leechers, swift_hash, swift_torrent_hash, cid).
    Raises DropPacket on the first malformed field.

    Fix: the swift_torrent_hash type-error message was a garbled/split
    string literal ("Invalid \\nswift_torrent_hash type"); reconstructed as
    "Invalid swift_torrent_hash type".
    """
    try:
        offset, payload = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the search-reponse-payload")
    if len(payload) < 2:
        raise DropPacket("Invalid payload length")

    identifier, results = payload[:2]
    if len(identifier) != 2:
        raise DropPacket("Unable to decode the search-response-payload, got %d bytes expected 2" % (len(identifier)))
    identifier, = unpack_from('!H', identifier)

    if not isinstance(results, list):
        raise DropPacket("Invalid 'results' type")

    for result in results:
        if not isinstance(result, tuple):
            raise DropPacket("Invalid result type")
        if len(result) < 11:
            raise DropPacket("Invalid result length")
        infohash, swarmname, length, nrfiles, categorykeys, creation_date, seeders, leechers, swift_hash, swift_torrent_hash, cid = result[:11]

        if not isinstance(infohash, str):
            raise DropPacket("Invalid infohash type")
        if len(infohash) != 20:
            raise DropPacket("Invalid infohash length")
        if not isinstance(swarmname, unicode):
            raise DropPacket("Invalid swarmname type")
        if not isinstance(length, long):
            raise DropPacket("Invalid length type '%s'" % type(length))
        if not isinstance(nrfiles, int):
            raise DropPacket("Invalid nrfiles type")
        if not isinstance(categorykeys, list):
            raise DropPacket("Invalid categorykeys type")
        if not all(isinstance(key, unicode) for key in categorykeys):
            raise DropPacket("Invalid categorykey type")
        if not isinstance(creation_date, long):
            raise DropPacket("Invalid creation_date type")
        if not isinstance(seeders, int):
            raise DropPacket("Invalid seeders type '%s'" % type(seeders))
        if not isinstance(leechers, int):
            raise DropPacket("Invalid leechers type '%s'" % type(leechers))

        # The three trailing hashes are optional; when present each must be
        # a raw 20-byte string.
        if swift_hash:
            if not isinstance(swift_hash, str):
                raise DropPacket("Invalid swift_hash type '%s'" % type(swift_hash))
            if len(swift_hash) != 20:
                raise DropPacket("Invalid swift_hash length")
        if swift_torrent_hash:
            if not isinstance(swift_torrent_hash, str):
                raise DropPacket("Invalid swift_torrent_hash type")
            if len(swift_torrent_hash) != 20:
                raise DropPacket("Invalid swift_torrent_hash length")
        if cid:
            if not isinstance(cid, str):
                raise DropPacket("Invalid cid type")
            if len(cid) != 20:
                raise DropPacket("Invalid cid length")

    return offset, placeholder.meta.payload.implement(identifier, results)