def _decode_encr_response(self, placeholder, offset, data):
    try:
        offset, payload = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the encr-payload")

    str_identifier, str_prefs, str_hprefs = payload

    identifier, = unpack_from('!H', str_identifier)

    length = len(str_prefs)
    if length % 128 != 0:
        raise DropPacket("Invalid number of bytes available (encr_res)")
    if length:
        hashpack = '128s' * (length / 128)
        hashes = unpack_from('!' + hashpack, str_prefs)
        hashes = [bytes_to_long(hash) for hash in hashes]
    else:
        # guard against an empty preference list, otherwise 'hashes' is unbound below
        hashes = []

    length = len(str_hprefs)
    if length % 20 != 0:
        raise DropPacket("Invalid number of bytes available (encr_res)")
    if length:
        hashpack = '20s' * (length / 20)
        his_hashes = list(unpack_from('!' + hashpack, str_hprefs))
    else:
        his_hashes = []

    return offset, placeholder.meta.payload.implement(identifier, hashes, his_hashes)

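# Minimal sketch (not part of the original conversion class): the decoders above
# rely on struct's repeated-format trick, where a format such as '128s' * n lets
# unpack_from split one concatenated string into n fixed-size chunks in a single
# call.  The helper below only illustrates that pattern; its name and the default
# chunk size are hypothetical (128 mirrors the encrypted preferences, 20 would
# mirror the plain hashes).
def _split_fixed_size_chunks_example(blob, chunk_size=128):
    from struct import unpack_from
    if len(blob) % chunk_size != 0:
        raise ValueError("blob is not a whole number of chunks")
    # e.g. chunk_size=128 yields '128s128s...' repeated len(blob) / 128 times
    hashpack = ('%ds' % chunk_size) * (len(blob) / chunk_size)
    return list(unpack_from('!' + hashpack, blob))
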
def _decode_introduction_request(self, placeholder, offset, data):
    offset, payload = BinaryConversion._decode_introduction_request(self, placeholder, offset, data)

    if len(data) > offset:
        if len(data) < offset + 5:
            raise DropPacket("Insufficient packet size")

        functions, size = unpack_from('!BH', data, offset)
        offset += 3

        prefix = data[offset]
        offset += 1

        if functions <= 0 or size <= 0 or size % 8 != 0:
            raise DropPacket("Invalid bloom filter")

        length = size / 8
        if length != len(data) - offset:
            raise DropPacket("Invalid number of bytes available (irq) %d, %d, %d" %
                             (length, len(data) - offset, size))

        orders_bloom_filter = BloomFilter(data[offset:offset + length], functions, prefix=prefix)
        offset += length
        payload.set_orders_bloom_filter(orders_bloom_filter)

    return offset, payload

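# Hedged sketch of the extra bytes that the orders bloom filter variant of
# _decode_introduction_request above expects after the standard
# introduction-request fields: a one-byte function count, a two-byte size in
# bits, a one-byte prefix, then size / 8 bytes of filter data.  The helper only
# builds that byte layout for illustration; it does not use the real
# BloomFilter class and its name is hypothetical.
def _pack_orders_bloom_filter_example(functions, prefix, filter_bytes):
    from struct import pack
    size = len(filter_bytes) * 8  # the size field is expressed in bits
    return pack('!BH', functions, size) + prefix + filter_bytes
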
def _decode_torrent_collect_request(self, placeholder, offset, data):
    if len(data) < offset + 4:
        raise DropPacket("Insufficient packet size")

    identifier, hashtype = unpack_from('!HH', data, offset)
    offset += 4

    length = len(data) - offset
    if length % 46 != 0:
        raise DropPacket("Invalid number of bytes available (tcr)")

    if length:
        hashpack = '20s20sHHH' * (length / 46)
        hashes = unpack_from('!' + hashpack, data, offset)
        offset += length

        torrents = []
        for i in range(0, len(hashes), 5):
            torrents.append([hashes[i], hashes[i + 1], hashes[i + 2], hashes[i + 3], hashes[i + 4]])
    else:
        torrents = []

    return offset, placeholder.meta.payload.implement(identifier, hashtype, torrents)

def _decode_votecast(self, placeholder, offset, data):
    if len(data) < offset + 26:
        raise DropPacket("Unable to decode the payload")

    cid, vote, timestamp = unpack_from('!20shl', data, offset)
    if vote not in [-1, 0, 2]:
        raise DropPacket("Invalid 'vote' type or value")

    return offset + 26, placeholder.meta.payload.implement(cid, vote, timestamp)

def _decode_missing_channel(self, placeholder, offset, data):
    if len(data) < offset + 1:
        raise DropPacket("Unable to decode the payload")

    includeSnapshot, = unpack_from('!B', data, offset)
    if includeSnapshot not in (0, 1):
        raise DropPacket("Unable to decode includeSnapshot")
    includeSnapshot = bool(includeSnapshot)

    return offset + 1, placeholder.meta.payload.implement(includeSnapshot)

def _decode_channelsearch(self, placeholder, offset, data):
    try:
        offset, payload = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the channelcast-payload")

    if not isinstance(payload, list):
        raise DropPacket("Invalid payload type")

    for keyword in payload:
        if not isinstance(keyword, unicode):
            raise DropPacket("Invalid 'keyword' type")

    return offset, placeholder.meta.payload.implement(payload)

def _decode_text(self, placeholder, offset, data):
    if len(data) < offset + 1:
        raise DropPacket("Insufficient packet size")

    text_length, = unpack_from("!B", data, offset)
    offset += 1

    try:
        text = data[offset:offset + text_length].decode("UTF-8")
        offset += text_length
    except UnicodeError:
        raise DropPacket("Unable to decode UTF-8")

    return offset, placeholder.meta.payload.implement(text)

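# Hedged sketch of the encode side implied by _decode_text above: a one-byte
# length prefix followed by the UTF-8 bytes of the text.  This helper is an
# illustration only (it is not the community's actual encoder), but any string
# it produces would be accepted by the decoder above.
def _encode_text_example(text):
    from struct import pack
    data = text.encode("UTF-8")
    # the length prefix is a single unsigned byte, so at most 255 bytes fit
    assert len(data) < 256, "text too long for a one-byte length prefix"
    return pack("!B", len(data)) + data
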
def _decode_barter_record(self, placeholder, offset, data):
    if len(data) < offset + 21:
        raise DropPacket("Insufficient packet size (_decode_barter_record)")
    cycle, upload_first_to_second, upload_second_to_first, length = unpack_from(">LQQB", data, offset)
    offset += 21

    if len(data) < offset + length:
        raise DropPacket("Insufficient packet size (_decode_barter_record)")
    effort = EffortHistory(data[offset:offset + length], cycle * CYCLE_SIZE)
    offset += length

    # the following parameters are used for debugging only
    if len(data) < offset + 104:
        raise DropPacket("Insufficient packet size (_decode_barter_record)")
    (first_timestamp, first_upload, first_download, first_total_up, first_total_down,
     first_associated_up, first_associated_down,
     second_timestamp, second_upload, second_download, second_total_up, second_total_down,
     second_associated_up, second_associated_down) = unpack_from(">LQQQQQQLQQQQQQ", data, offset)
    offset += 104

    return offset, placeholder.meta.payload.implement(cycle, effort,
                                                      upload_first_to_second, upload_second_to_first,
                                                      # the following parameters are used for debugging only
                                                      float(first_timestamp), first_upload, first_download,
                                                      first_total_up, first_total_down,
                                                      first_associated_up, first_associated_down,
                                                      float(second_timestamp), second_upload, second_download,
                                                      second_total_up, second_total_down,
                                                      second_associated_up, second_associated_down)

def _decode_ping_pong(self, placeholder, offset, data):
    if len(data) < offset + 3:
        raise DropPacket("Insufficient packet size (_decode_ping_pong)")
    key_length, identifier = self._struct_BH.unpack_from(data, offset)
    offset += 3

    if len(data) < offset + key_length:
        raise DropPacket("Insufficient packet size (_decode_ping_pong)")
    try:
        member = Member(data[offset:offset + key_length])
    except:
        raise DropPacket("Invalid public key (_decode_ping_pong)")
    offset += key_length

    return offset, placeholder.meta.payload.Implementation(placeholder.meta.payload, identifier, member)

def _decode_channel(self, placeholder, offset, data):
    try:
        offset, values = decode(data, offset)
        if len(values) != 2:
            raise ValueError
    except ValueError:
        raise DropPacket("Unable to decode the channel-payload")

    name = values[0]
    if not (isinstance(name, unicode) and len(name) < 256):
        raise DropPacket("Invalid 'name' type or value")

    description = values[1]
    if not (isinstance(description, unicode) and len(description) < 1024):
        raise DropPacket("Invalid 'description' type or value")

    return offset, placeholder.meta.payload.implement(name, description)

def _decode_barter_record(self, placeholder, offset, data):
    if len(data) < offset + 8:
        raise DropPacket("Insufficient packet size")

    first_upload, second_upload = unpack_from("!LL", data, offset)
    offset += 8

    return offset, placeholder.meta.payload.implement(first_upload, second_upload)

def _decode_half_block(placeholder, offset, data):
    """
    Decode an incoming half block message.
    :param placeholder:
    :param offset: Start of the HalfBlock message in the data.
    :param data: ByteStream containing the message.
    :return: (offset, HalfBlockPayload.impl)
    """
    if len(data) < offset + block_pack_size:
        raise DropPacket("Unable to decode the payload")

    try:
        _, block = TrustChainBlock.unpack(data, offset)
    except (IndexError, ValueError):
        raise DropPacket("Invalid block contents")

    return len(data), placeholder.meta.payload.implement(block)

def _decode_torrent_request(self, placeholder, offset, data):
    try:
        offset, payload = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the torrent-request")

    if not isinstance(payload, dict):
        raise DropPacket("Invalid payload type")

    for cid, infohashes in payload.iteritems():
        if not (isinstance(cid, str) and len(cid) == 20):
            raise DropPacket("Invalid 'cid' type or value")

        for infohash in infohashes:
            if not (isinstance(infohash, str) and len(infohash) == 20):
                raise DropPacket("Invalid 'infohash' type or value")

    return offset, placeholder.meta.payload.implement(payload)

def _decode_ping(self, placeholder, offset, data):
    if len(data) < offset + 6:
        raise DropPacket("Insufficient packet size")

    circuit_id, identifier = unpack_from('!IH', data, offset)
    offset += 6

    return offset, placeholder.meta.payload.implement(circuit_id, identifier)

def _decode_destroy(self, placeholder, offset, data):
    if len(data) < offset + 6:
        raise DropPacket("Insufficient packet size")

    circuit_id, reason = unpack_from('!IB', data, offset)
    offset += 6

    return offset, placeholder.meta.payload.implement(circuit_id, reason)

def _decode_contact(self, placeholder, offset, data):
    if len(data) < offset + 2:
        raise DropPacket("Insufficient packet size")

    identifier, = unpack_from("!H", data, offset)
    offset += 2

    return offset, placeholder.meta.payload.implement(identifier)

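# Illustration (not part of the conversion class): the fixed-size headers read
# by _decode_ping, _decode_destroy and _decode_contact above are plain struct
# packs, so the matching encode side is a single pack() call.  The format
# strings mirror the decoders; the helper names are hypothetical.
def _encode_ping_example(circuit_id, identifier):
    from struct import pack
    # a 4-byte unsigned circuit id followed by a 2-byte unsigned identifier
    return pack('!IH', circuit_id, identifier)

def _encode_contact_example(identifier):
    from struct import pack
    # a single 2-byte unsigned identifier
    return pack('!H', identifier)
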
def _decode_simi_response(self, placeholder, offset, data):
    try:
        offset, payload = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the simi-payload")

    identifier, responses = payload[:2]

    if len(identifier) != 2:
        raise DropPacket("Unable to decode the search-response-payload, got %d bytes expected 2" % (len(identifier)))
    identifier, = unpack_from('!H', identifier)

    prefs = hprefs = None
    bundled_responses = []
    for str_mid, str_prefs, str_hprefs in responses:
        length = len(str_prefs)
        if length % 128 != 0:
            raise DropPacket("Invalid number of bytes available (encr_res)")
        if length:
            hashpack = '128s' * (length / 128)
            hashes = unpack_from('!' + hashpack, str_prefs)
            hashes = [bytes_to_long(hash) for hash in hashes]
        else:
            # guard against an empty preference list, otherwise 'hashes' is unbound below
            hashes = []

        length = len(str_hprefs)
        if length % 20 != 0:
            raise DropPacket("Invalid number of bytes available (encr_res)")
        if length:
            hashpack = '20s' * (length / 20)
            his_hashes = list(unpack_from('!' + hashpack, str_hprefs))
        else:
            his_hashes = []

        if str_mid:
            str_mid, = unpack_from("!20s", str_mid)
            bundled_responses.append((str_mid, (hashes, his_hashes)))
        else:
            prefs = hashes
            hprefs = his_hashes

    return offset, placeholder.meta.payload.implement(identifier, prefs, hprefs, bundled_responses)

def _decode_torrent(self, placeholder, offset, data):
    uncompressed_data = zlib.decompress(data[offset:])
    offset = len(data)

    try:
        _, values = decode(uncompressed_data)
    except ValueError:
        raise DropPacket("Unable to decode the torrent-payload")

    infohash_time, name, files, trackers = values
    if len(infohash_time) != 28:
        raise DropPacket("Unable to decode the torrent-payload, got %d bytes expected 28" % (len(infohash_time)))
    infohash, timestamp = unpack_from('!20sQ', infohash_time)

    if not isinstance(name, unicode):
        raise DropPacket("Invalid 'name' type")

    if not isinstance(files, tuple):
        raise DropPacket("Invalid 'files' type")
    if len(files) == 0:
        raise DropPacket("Should have at least one file")

    for file in files:
        if len(file) != 2:
            raise DropPacket("Invalid 'file_len' type")

        path, length = file
        if not isinstance(path, unicode):
            raise DropPacket("Invalid 'files_path' type is %s" % type(path))
        if not isinstance(length, (int, long)):
            raise DropPacket("Invalid 'files_length' type is %s" % type(length))

    if not isinstance(trackers, tuple):
        raise DropPacket("Invalid 'trackers' type")
    for tracker in trackers:
        if not isinstance(tracker, str):
            raise DropPacket("Invalid 'tracker' type")

    return offset, placeholder.meta.payload.implement(infohash, timestamp, name, files, trackers)

def _decode_moderation(self, placeholder, offset, data):
    try:
        offset, dic = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the payload")

    if "text" not in dic:
        raise DropPacket("Missing 'text'")
    text = dic["text"]
    if not (isinstance(text, unicode) and len(text) < 1024):
        raise DropPacket("Invalid 'text' type or value")

    if "timestamp" not in dic:
        raise DropPacket("Missing 'timestamp'")
    timestamp = dic["timestamp"]
    if not isinstance(timestamp, (int, long)):
        raise DropPacket("Invalid 'timestamp' type or value")

    if "severity" not in dic:
        raise DropPacket("Missing 'severity'")
    severity = dic["severity"]
    if not isinstance(severity, (int, long)):
        raise DropPacket("Invalid 'severity' type or value")

    cause_mid = dic.get("cause-mid", None)
    if not (isinstance(cause_mid, str) and len(cause_mid) == 20):
        raise DropPacket("Invalid 'cause-mid' type or value")

    cause_global_time = dic.get("cause-global-time", None)
    if not isinstance(cause_global_time, (int, long)):
        raise DropPacket("Invalid 'cause-global-time' type")

    try:
        packet_id, packet, message_name = self._get_message(cause_global_time, cause_mid)
        cause_packet = Packet(self._community.get_meta_message(message_name), packet, packet_id)

    except DropPacket:
        members = self._community.dispersy.get_members_from_id(cause_mid)
        if not members:
            raise DelayPacketByMissingMember(self._community, cause_mid)
        member = members[0]
        raise DelayPacketByMissingMessage(self._community, member, cause_global_time)

    return offset, placeholder.meta.payload.implement(text, timestamp, severity, cause_packet)

def _decode_mark_torrent(self, placeholder, offset, data):
    try:
        offset, dic = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the payload")

    if "infohash" not in dic:
        raise DropPacket("Missing 'infohash'")
    infohash = dic["infohash"]
    if not (isinstance(infohash, str) and len(infohash) == 20):
        raise DropPacket("Invalid 'infohash' type or value")

    if "timestamp" not in dic:
        raise DropPacket("Missing 'timestamp'")
    timestamp = dic["timestamp"]
    if not isinstance(timestamp, (int, long)):
        raise DropPacket("Invalid 'timestamp' type or value")

    if "type" not in dic:
        raise DropPacket("Missing 'type'")
    type = dic["type"]
    if not (isinstance(type, unicode) and len(type) < 25):
        raise DropPacket("Invalid 'type' type or value")

    return offset, placeholder.meta.payload.implement(infohash, type, timestamp)

def _decode_identifier(self, placeholder, offset, data):
    if len(data) < offset + 8:
        raise DropPacket("Insufficient packet size (_decode_identifier)")

    identifier, = self._struct_Q.unpack_from(data, offset)
    offset += 8

    return offset, placeholder.meta.payload.Implementation(placeholder.meta.payload, identifier)

def _decode_introduction_request(self, placeholder, offset, data):
    offset, payload = BinaryConversion._decode_introduction_request(self, placeholder, offset, data)

    # if there are still bytes in this request, treat them as the taste_bloom_filter
    has_stuff = len(data) > offset
    if has_stuff:
        if len(data) < offset + 8:
            raise DropPacket("Insufficient packet size")

        num_preferences, functions, size = unpack_from('!IBH', data, offset)
        offset += 7

        prefix = data[offset]
        offset += 1

        if not 0 < num_preferences:
            raise DropPacket("Invalid num_preferences value")
        if not 0 < functions:
            raise DropPacket("Invalid functions value")
        if not 0 < size:
            raise DropPacket("Invalid size value")
        if not size % 8 == 0:
            raise DropPacket("Invalid size value, must be a multiple of eight")

        length = int(ceil(size / 8))
        if not length == len(data) - offset:
            raise DropPacket("Invalid number of bytes available (irq) %d, %d, %d" %
                             (length, len(data) - offset, size))

        taste_bloom_filter = BloomFilter(data[offset:offset + length], functions, prefix=prefix)
        offset += length

        payload.set_num_preferences(num_preferences)
        payload.set_taste_bloom_filter(taste_bloom_filter)

    return offset, payload

def _decode_half_block(placeholder, offset, data):
    """
    Decode an incoming half block message.
    :param placeholder:
    :param offset: Start of the HalfBlock message in the data.
    :param data: ByteStream containing the message.
    :return: (offset, HalfBlockPayload.impl)
    """
    if len(data) < offset + block_pack_size:
        raise DropPacket("Unable to decode the payload")

    return offset + block_pack_size, placeholder.meta.payload.implement(MultiChainBlock.unpack(data, offset))

def _decode_cell(self, placeholder, offset, data):
    circuit_id, = unpack_from('!I', data, offset)
    offset += 4

    if data[offset] not in self._decode_message_map:
        raise DropPacket("Invalid message")
    message_type = self._decode_message_map[data[offset]].meta.name
    offset += 1

    encrypted_message = data[offset:]
    offset += len(encrypted_message)

    return offset, placeholder.meta.payload.implement(circuit_id, message_type, encrypted_message)

def _decode_channelsearch_response(self, placeholder, offset, data):
    try:
        offset, payload = decode(data, offset)
    except ValueError:
        raise DropPacket("Unable to decode the channelcast-payload")

    if not isinstance(payload, tuple):
        raise DropPacket("Invalid payload type")

    keywords, torrents = payload
    for keyword in keywords:
        if not isinstance(keyword, unicode):
            raise DropPacket("Invalid 'keyword' type")

    for cid, infohashes in torrents.iteritems():
        if not (isinstance(cid, str) and len(cid) == 20):
            raise DropPacket("Invalid 'cid' type or value")

        for infohash in infohashes:
            if not (isinstance(infohash, str) and len(infohash) == 20):
                raise DropPacket("Invalid 'infohash' type or value")

    return offset, placeholder.meta.payload.implement(keywords, torrents)

def _decode_signature(placeholder, offset, data):
    """
    Decode an incoming signature message.
    :param placeholder:
    :param offset: Start of the SIGNATURE message in the data.
    :param data: ByteStream containing the message.
    :return: (offset, SIGNATURE.impl)
    """
    if len(data) < offset + signature_size:
        raise DropPacket("Unable to decode the payload")

    values = unpack_from(signature_format, data, offset)
    offset += signature_size

    return offset, placeholder.meta.payload.implement(*values)

def _decode_crawl_response(placeholder, offset, data):
    """
    Decode an incoming crawl response message.
    :param placeholder:
    :param offset: Start of the CrawlResponse message in the data.
    :param data: ByteStream containing the message.
    :return: (offset, CrawlResponse.impl)
    """
    if len(data) < offset + crawl_response_size:
        raise DropPacket("Unable to decode the payload")

    values = unpack_from(crawl_response_format, data, offset)
    offset += crawl_response_size

    return offset, placeholder.meta.payload.implement(*values)

def _decode_payload(self, placeholder, offset, data, types):
    try:
        offset, payload = decode(data, offset)
    except (ValueError, AssertionError, KeyError):
        raise DropPacket("Unable to decode the payload")

    args = []
    cur_ind = 0
    for arg_type in types:
        try:
            if arg_type == Price or arg_type == Quantity:
                # Price and Quantity carry an additional wallet ID
                args.append(arg_type(payload[cur_ind], INV_ASSET_MAP[payload[cur_ind + 1]]))
                cur_ind += 2
            elif arg_type == str or arg_type == int:
                args.append(payload[cur_ind])
                cur_ind += 1
            else:
                args.append(arg_type(payload[cur_ind]))
                cur_ind += 1
        except (ValueError, KeyError):
            raise DropPacket("Invalid '" + arg_type.__name__ + "' type")

    return offset, placeholder.meta.payload.implement(*args)

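# Hedged usage sketch for the generic _decode_payload above: a concrete message
# decoder would pass the expected field types in wire order, with Price and
# Quantity each consuming two payload entries (value plus wallet ID).  The
# field list and method name below are placeholders, not the real message
# definition.
def _decode_order_example(self, placeholder, offset, data):
    return self._decode_payload(placeholder, offset, data,
                                [str, int, Price, Quantity])
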
def _decode_crawl_request(placeholder, offset, data):
    """
    Decode an incoming crawl request message.
    :param placeholder:
    :param offset: Start of the CrawlRequest message in the data.
    :param data: ByteStream containing the message.
    :return: (offset, CrawlRequest.impl)
    """
    if len(data) < offset + crawl_request_size:
        raise DropPacket("Unable to decode the payload")

    who, seq, limit = unpack_from(crawl_request_format, data, offset)

    return offset + crawl_request_size, placeholder.meta.payload.implement(seq)

def _decode_introduction_request(self, placeholder, offset, data):
    offset, payload = BinaryConversion._decode_introduction_request(self, placeholder, offset, data)

    # if there are still bytes in this request, treat them as the mid of the candidate to introduce us to
    has_stuff = len(data) > offset
    if has_stuff:
        length = len(data) - offset
        if length != 20:
            raise DropPacket("Invalid number of bytes available (ir)")

        candidate_mid, = unpack_from('!20s', data, offset)
        payload.set_introduce_me_to(candidate_mid)

        offset += length

    return offset, payload