Example #1
    def __init__(self, my_peer, endpoint, network, metadata_store, notifier=None):
        super(GigaChannelCommunity, self).__init__(my_peer, endpoint, network)
        self.metadata_store = metadata_store
        self.add_message_handler(self.NEWS_PUSH_MESSAGE, self.on_blob)
        self.add_message_handler(self.SEARCH_REQUEST, self.on_search_request)
        self.add_message_handler(self.SEARCH_RESPONSE, self.on_search_response)
        self.request_cache = RequestCache()
        self.notifier = notifier
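
The heart of this constructor is binding the numeric message identifiers (NEWS_PUSH_MESSAGE, SEARCH_REQUEST, SEARCH_RESPONSE) to handler callbacks. A minimal, self-contained sketch of that dispatch pattern, independent of IPv8 (all names below are illustrative):

    handlers = {}

    def add_message_handler(message_id, callback):
        handlers[message_id] = callback

    def on_packet(message_id, payload):
        handlers[message_id](payload)  # dispatch to the registered handler

    add_message_handler(1, lambda payload: print("news push:", payload))
    on_packet(1, b"compressed metadata blob")  # news push: b'compressed metadata blob'
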
Example #2
    def __init__(self, *args, **kwargs):
        super(PubSubCommunity, self).__init__(*args, **kwargs)
        self.logger = logging.getLogger(self.__class__.__name__)
        self.request_cache = RequestCache()

        # Register messages
        self.decode_map.update({
            chr(MSG_SUBSCRIBE): self.on_subscribe,
            chr(MSG_SUBSCRIPTION): self.on_subscription_status
        })

        # Peer bookkeeping: we push content updates to our subscribers
        # and receive updates from the publishers we subscribe to.
        self.subscribers = set()
        self.publishers = set()
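
Note that the handlers are registered in a decode_map keyed by chr(message_id), which suggests the first byte of an incoming packet selects the handler (an assumption based on the chr() keying; this snippet alone does not confirm it). A self-contained sketch with hypothetical message ids:

    MSG_SUBSCRIBE, MSG_SUBSCRIPTION = 21, 22  # hypothetical message ids
    decode_map = {
        chr(MSG_SUBSCRIBE): lambda data: "subscribe request: " + data,
        chr(MSG_SUBSCRIPTION): lambda data: "subscription status: " + data,
    }

    packet = chr(MSG_SUBSCRIBE) + "payload"
    print(decode_map[packet[0]](packet[1:]))  # subscribe request: payload
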
Example #3
class PubSubCommunity(Community):
    """
    This community is designed as a base community for all future communities that desire a publish-subscribe
    model for content dissemination. It provides a few basic primitives, such as subscribing/unsubscribing to
    publisher peers and publishing/broadcasting content to subscriber peers.

    All derived communities should implement the publish_next_content() method, which is responsible for
    publishing the next available content to all subscribers.
    """

    def __init__(self, *args, **kwargs):
        super(PubSubCommunity, self).__init__(*args, **kwargs)
        self.logger = logging.getLogger(self.__class__.__name__)
        self.request_cache = RequestCache()

        # Register messages
        self.decode_map.update({
            chr(MSG_SUBSCRIBE): self.on_subscribe,
            chr(MSG_SUBSCRIPTION): self.on_subscription_status
        })

        # Peer bookkeeping: we push content updates to our subscribers
        # and receive updates from the publishers we subscribe to.
        self.subscribers = set()
        self.publishers = set()

    def start(self):
        """
        Starts the community by subscribing to peers and periodically publishing content updates to
        the subscribers.
        """
        # Subscribe peers
        self.subscribe_peers()

        def start_publishing():
            # Update the publisher and subscriber list
            self.refresh_peer_list()

            # Publish the new content from the content repository
            self.publish_next_content()

        self.register_task("start_publishing", LoopingCall(start_publishing)).start(PUBLISH_INTERVAL, False)

    @inlineCallbacks
    def unload(self):
        self.request_cache.clear()
        self.cancel_pending_task("start_publishing")
        yield super(PubSubCommunity, self).unload()

    def subscribe_peers(self):
        """
        Subscribes to connected peers. The peers are first sorted by trust score in descending order, and a
        content subscribe request is sent to the top peers.
        This method is called periodically through refresh_peer_list() in the start_publishing() loop, so it can
        compensate for disconnected peers by subscribing to new ones.
        Note that existing publisher peers are not dropped even if new peers with a higher trust score are found;
        only the remaining publisher slots are filled with the new top peers.
        """
        num_publishers = len(self.publishers)
        num_peers = len(self.get_peers())
        # If we have some free publisher slots and there are peers available
        if num_publishers < MAX_PUBLISHERS and num_publishers < num_peers:
            available_publishers = [peer for peer in self.get_peers() if peer not in self.publishers]
            sorted_peers = sorted(available_publishers,
                                  key=lambda _peer: self.trustchain.get_trust(_peer) if self.trustchain else 1,
                                  reverse=True)
            for peer in sorted_peers[: MAX_PUBLISHERS - num_publishers]:
                self.subscribe(peer, subscribe=True)

    def refresh_peer_list(self):
        """
        Updates the publishers and subscribers lists by filtering out disconnected peers. It also calls
        subscribe_peers() to replenish the available publisher slots if necessary.
        """
        peers = self.get_peers()
        self.publishers = set([peer for peer in self.publishers if peer in peers])
        self.subscribers = set([peer for peer in self.subscribers if peer in peers])

        # subscribe peers if necessary
        self.subscribe_peers()

    def unsubscribe_peers(self):
        """
        Unsubscribes from the existing publishers by sending a content subscribe request with subscribe=False.
        It then clears its publishers list.
        - Called at community unload.
        """
        for peer in copy(self.publishers):
            self.subscribe(peer, subscribe=False)
        self.publishers.clear()

    def subscribe(self, peer, subscribe=True):
        """
        Method to send a content subscribe/unsubscribe message. The message is sent to each individual publisher
        peer we want to subscribe to or unsubscribe from.
        """
        cache = self.request_cache.add(ContentRequest(self.request_cache, MSG_SUBSCRIBE, None))
        # Remove the publisher peer immediately if the user is unsubscribing
        if not subscribe:
            self.publishers.remove(peer)

        # Create subscription packet and send it
        subscription = ContentSubscription(cache.number, subscribe)
        packet = self.create_message_packet(MSG_SUBSCRIBE, subscription)
        self.broadcast_message(packet, peer=peer)

    def on_subscribe(self, source_address, data):
        """
        Message handler for content subscribe message. It handles both subscribe and unsubscribe requests.
        Upon successful subscription or unsubscription, it sends a confirmation subscription message with the status.
        In case of subscription, it also publishes a list of recently checked torrents to the subscriber.
        """
        auth, _, payload = self._ez_unpack_auth(ContentSubscription, data)
        peer = self.get_peer_from_auth(auth, source_address)

        # Subscribe or unsubscribe peer
        subscribed = peer in self.subscribers

        if payload.subscribe and not subscribed:
            if len(self.subscribers) < MAX_SUBSCRIBERS:
                self.subscribers.add(peer)
                subscribed = True

        elif not payload.subscribe and subscribed:
            self.subscribers.remove(peer)
            subscribed = False

        # Send subscription response
        self.send_subscription_status(peer, payload.identifier, subscribed=subscribed)

        return subscribed

    def send_subscription_status(self, peer, identifier, subscribed=True):
        """
        Method to send a content subscription message. The subscription message is sent in response to a content
        subscribe or unsubscribe message.
        """
        if peer not in self.get_peers():
            self.logger.error(ERROR_UNKNOWN_PEER)
            return

        subscription = ContentSubscription(identifier, subscribed)
        packet = self.create_message_packet(MSG_SUBSCRIPTION, subscription)
        self.broadcast_message(packet, peer=peer)

    def on_subscription_status(self, source_address, data):
        """
        Message handler for content subscription message. Content subscription message is sent by the publisher stating
        the status of the subscription in response to subscribe or unsubscribe request.

        If the subscription message has subscribe=True, the subscription was successful, so the peer is added to
        the publishers list. Otherwise, the peer is removed from the publishers list if it is still present.
        """
        auth, _, payload = self._ez_unpack_auth(ContentSubscription, data)
        peer = self.get_peer_from_auth(auth, source_address)

        cache = self.request_cache.pop(u'request', payload.identifier)
        if not cache:
            return

        if payload.subscribe:
            self.publishers.add(peer)
        elif peer in self.publishers:
            self.publishers.remove(peer)

    def create_message_packet(self, message_type, payload):
        """
        Helper method that creates a message packet of the given type with the provided payload.
        """
        auth = BinMemberAuthenticationPayload(self.my_peer.public_key.key_to_bin()).to_pack_list()
        dist = GlobalTimeDistributionPayload(self.claim_global_time()).to_pack_list()
        payload = payload if isinstance(payload, list) else payload.to_pack_list()
        return self._ez_pack(self._prefix, message_type, [auth, dist, payload])

    def broadcast_message(self, packet, peer=None):
        """
        Helper method to send the message packet to a single peer, or to broadcast it to all subscribers.
        """
        if peer is not None:
            self.endpoint.send(peer.address, packet)
            return

        for _peer in self.subscribers:
            self.endpoint.send(_peer.address, packet)

    def get_peer_from_auth(self, auth, source_address):
        """
        Constructs a Peer object from the message's auth payload and source_address.
        Kept as a separate method so that the peer can be mocked in tests.
        """
        return Peer(auth.public_key_bin, source_address)

    def pack_sized(self, payload_list, fit_size, start_index=0):
        """
        Packs a list of Payload objects to fit into a given size limit.
        :param payload_list: List<Payload> list of payload objects
        :param fit_size: The maximum allowed size for the payload field to fit into a UDP packet.
        :param start_index: Index of the list at which to start packing
        :return: tuple of (packed string, index of the first unpacked item, number of items packed)
        """
        assert isinstance(payload_list, list)
        serialized_results = ''
        size = 0
        current_index = start_index
        num_payloads = len(payload_list)
        while current_index < num_payloads:
            item = payload_list[current_index]
            packed_item = self.serializer.pack_multiple(item.to_pack_list())[0]
            packed_item_length = len(packed_item)
            if size + packed_item_length > fit_size:
                break
            else:
                size += packed_item_length
                serialized_results += packed_item
            current_index += 1
        return serialized_results, current_index, current_index - start_index

    @abstractmethod
    def publish_next_content(self):
        """ Method responsible for publishing content during periodic push """
        pass
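
Two of the mechanisms above are easy to isolate. First, subscribe_peers() fills free publisher slots greedily by trust score: sort the candidates, take the top free slots. A self-contained sketch with hypothetical trust values:

    publishers = {"peer_a"}
    candidates = ["peer_b", "peer_c", "peer_d"]
    trust = {"peer_b": 3, "peer_c": 7, "peer_d": 1}  # hypothetical trust scores
    MAX_PUBLISHERS = 3

    free_slots = MAX_PUBLISHERS - len(publishers)
    for peer in sorted(candidates, key=trust.get, reverse=True)[:free_slots]:
        publishers.add(peer)
    print(sorted(publishers))  # ['peer_a', 'peer_b', 'peer_c']

Second, pack_sized() is a greedy cut-off: serialize items in order and stop as soon as the next item would overflow fit_size. The same logic on plain byte strings, with no serializer involved:

    items = [b"aa", b"bbb", b"cccc", b"dd"]
    fit_size = 7
    packed, size, index = b"", 0, 0
    while index < len(items) and size + len(items[index]) <= fit_size:
        packed += items[index]
        size += len(items[index])
        index += 1
    print(packed, index)  # b'aabbb' 2 -- b'cccc' would exceed the 7-byte limit

The stopping index returned by pack_sized() lets a caller resume packing the remainder into the next packet.
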
Example #4
class GigaChannelCommunity(Community):
    """
    Community to gossip around gigachannels.
    """

    master_peer = Peer(unhexlify("4c69624e61434c504b3ab5791362b5e98090310c10194e7406a553134e3e2f88bcc5c8a2e1dd249d323"
                                 "ebb20ca9528cb8b1b0db890ef876589a6d6ba80ded85e5ebab33acd57c8ead9db"))

    NEWS_PUSH_MESSAGE = 1
    SEARCH_REQUEST = 2
    SEARCH_RESPONSE = 3

    def __init__(self, my_peer, endpoint, network, metadata_store, notifier=None):
        super(GigaChannelCommunity, self).__init__(my_peer, endpoint, network)
        self.metadata_store = metadata_store
        self.add_message_handler(self.NEWS_PUSH_MESSAGE, self.on_blob)
        self.add_message_handler(self.SEARCH_REQUEST, self.on_search_request)
        self.add_message_handler(self.SEARCH_RESPONSE, self.on_search_response)
        self.request_cache = RequestCache()
        self.notifier = notifier

    @inlineCallbacks
    def unload(self):
        self.request_cache.clear()
        yield super(GigaChannelCommunity, self).unload()

    def send_random_to(self, peer):
        """
        Send random entries from our subscribed channels to another peer.

        :param peer: the peer to send to
        :type peer: Peer
        :returns: None
        """
        # Choose some random entries and try to pack them into maximum_payload_size bytes
        md_list = []
        with db_session:
            # TODO: once the health table is available, send popular torrents instead
            channel_l = list(self.metadata_store.ChannelMetadata.get_random_channels(1, only_subscribed=True))
            if not channel_l:
                return
            md_list.extend(channel_l + list(channel_l[0].get_random_torrents(max_entries - 1)))
            blob = entries_to_chunk(md_list, maximum_payload_size)[0] if md_list else None
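        # Note: md_list cannot be empty here (we return early when channel_l is empty), so blob is never None.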
        self.endpoint.send(peer.address, self.ezr_pack(self.NEWS_PUSH_MESSAGE, RawBlobPayload(blob)))

    @lazy_wrapper(RawBlobPayload)
    def on_blob(self, peer, blob):
        """
        Callback for when a MetadataBlob message comes in.

        :param peer: the peer that sent us the blob
        :param blob: payload raw data
        """
        try:
            with db_session:
                try:
                    md_list = self.metadata_store.process_compressed_mdblob(blob.raw_blob)
                except (TransactionIntegrityError, CacheIndexError) as err:
                    self._logger.error("DB transaction error when tried to process payload: %s", str(err))
                    return
        # Unfortunately, we have to catch the exceptions twice, because Pony can raise them both on exit from
        # the db_session and at the line of code itself.
        except (TransactionIntegrityError, CacheIndexError) as err:
            self._logger.error("DB transaction error when tried to process payload: %s", str(err))
            return

        # Notify the GUI about the discovered torrents and channels
        self.notify_discovered_metadata(md_list)

        # Check whether the peer that sent us this metadata has an older version of it than we do,
        # and queue the newer version to be sent back.
        self.respond_with_updated_metadata(peer, md_list)

    def respond_with_updated_metadata(self, peer, md_list):
        """
        Responds to the peer with updated metadata, if any is present in the metadata list.
        :param peer: the peer to respond to
        :param md_list: Metadata list
        :return: None
        """
        with db_session:
            reply_list = [md for md, result in md_list if
                          (md and (md.metadata_type == CHANNEL_TORRENT)) and (result == GOT_NEWER_VERSION)]
            reply_blob = entries_to_chunk(reply_list, maximum_payload_size)[0] if reply_list else None
        if reply_blob:
            self.endpoint.send(peer.address, self.ezr_pack(self.NEWS_PUSH_MESSAGE, RawBlobPayload(reply_blob)))
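        # Example: given md_list = [(channel_md, GOT_NEWER_VERSION), (torrent_md, GOT_NEWER_VERSION),
        # (other_channel_md, UNKNOWN_CHANNEL)], only channel_md is packed into the reply: an entry must be
        # a channel AND our local copy must be the newer version.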

    def notify_discovered_metadata(self, md_list):
        """
        Notify about the discovered metadata through the event notifier.
        :param md_list: Metadata list
        :return: None
        """
        with db_session:
            new_channels = [(dict(type='channel', **(md.to_simple_dict()))) for md, result in md_list
                            if md and md.metadata_type == CHANNEL_TORRENT and result == UNKNOWN_CHANNEL]

        if self.notifier and new_channels:
            self.notifier.notify(NTFY_CHANNEL, NTFY_DISCOVERED, None, {"results": new_channels})

    def send_search_request(self, query_filter, metadata_type='', sort_by=None, sort_asc=0, hide_xxx=True, uuid=None):
        """
        Sends a search request to at most max_search_peers from the peer list. The request is stored in the
        request cache. The previous cache is cleared before adding a new search request, to prevent stale
        results from being pushed to the GUI.
        Returns: a request cache number that uniquely identifies each search request
        """
        sort_by = sort_by or "HEALTH"
        search_candidates = self.get_peers()[:max_search_peers]
        search_request_cache = SearchRequestCache(self.request_cache, uuid, search_candidates)
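        # Drop any previous search from the cache first, so late responses for an old query cannot reach the GUI.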
        self.request_cache.clear()
        self.request_cache.add(search_request_cache)

        search_request_payload = SearchRequestPayload(search_request_cache.number, query_filter.encode('utf8'),
                                                      metadata_type, sort_by, sort_asc, hide_xxx)
        self._logger.info("Started remote search for query:%s", query_filter)

        for peer in search_candidates:
            self.endpoint.send(peer.address, self.ezr_pack(self.SEARCH_REQUEST, search_request_payload))
        return search_request_cache.number

    @lazy_wrapper(SearchRequestPayload)
    def on_search_request(self, peer, request):
        # Caution: SQL injection
        # Since the 'query_filter' string is passed as-is to fetch the results, there is a potential for
        # SQL injection. However, Pony performs proper variable binding, so this should be relatively safe.
        query_filter = request.query_filter.decode('utf8')
        # Check if the query_filter is a simple query
        if not is_simple_match_query(query_filter):
            self.logger.error("Dropping a complex remote search query:%s", query_filter)
            return

        metadata_type = {'': [REGULAR_TORRENT, CHANNEL_TORRENT],
                         "channel": CHANNEL_TORRENT,
                         "torrent": REGULAR_TORRENT}.get(request.metadata_type, REGULAR_TORRENT)

        request_dict = {
            "first": 1,
            "last": max_entries,
            "sort_by": request.sort_by,
            "sort_asc": request.sort_asc,
            "query_filter": query_filter,
            "hide_xxx": request.hide_xxx,
            "metadata_type": metadata_type,
            "exclude_legacy": True
        }

        result_blob = None
        with db_session:
            db_results, total = self.metadata_store.TorrentMetadata.get_entries(**request_dict)
            if total > 0:
                result_blob = entries_to_chunk(db_results[:max_entries], maximum_payload_size)[0]
        if result_blob:
            self.endpoint.send(peer.address, self.ezr_pack(self.SEARCH_RESPONSE,
                                                           SearchResponsePayload(request.id, result_blob)))

    @lazy_wrapper(SearchResponsePayload)
    def on_search_response(self, peer, response):
        search_request_cache = self.request_cache.get(u"remote-search-request", response.id)
        if not search_request_cache or not search_request_cache.process_peer_response(peer):
            return

        with db_session:
            try:
                metadata_result = self.metadata_store.process_compressed_mdblob(response.raw_blob)
            except (TransactionIntegrityError, CacheIndexError) as err:
                self._logger.error("DB transaction error when tried to process search payload: %s", str(err))
                return

            search_results = [dict(type={REGULAR_TORRENT: 'torrent', CHANNEL_TORRENT: 'channel'}[r.metadata_type],
                                   **r.to_simple_dict())
                              for (r, _) in metadata_result
                              if r and r.metadata_type in (CHANNEL_TORRENT, REGULAR_TORRENT)]
        if self.notifier and search_results:
            self.notifier.notify(SIGNAL_GIGACHANNEL_COMMUNITY, SIGNAL_ON_SEARCH_RESULTS, None,
                                 {"uuid": search_request_cache.uuid, "results": search_results})

        # Send the updated metadata if any to the responding peer
        self.respond_with_updated_metadata(peer, metadata_result)
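
A closing note on on_search_request(): the metadata_type lookup is a plain dict .get() with a default, so an empty string means "search both channels and torrents" and any unrecognized value falls back to torrents only. A runnable sketch, with illustrative numeric codes standing in for the real constants:

    REGULAR_TORRENT, CHANNEL_TORRENT = 300, 400  # illustrative stand-ins for the real constants

    def resolve_metadata_type(requested):
        return {'': [REGULAR_TORRENT, CHANNEL_TORRENT],
                "channel": CHANNEL_TORRENT,
                "torrent": REGULAR_TORRENT}.get(requested, REGULAR_TORRENT)

    print(resolve_metadata_type(''))         # [300, 400]
    print(resolve_metadata_type("channel"))  # 400
    print(resolve_metadata_type("bogus"))    # 300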