Example #1
def test_squash_mdblobs_multiple_chunks(metadata_store):
    r = random.Random(123)
    md_list = [
        metadata_store.TorrentMetadata(
            title=''.join(
                r.choice(string.ascii_uppercase + string.digits)
                for _ in range(20)),
            infohash=database_blob(random_infohash()),
            torrent_date=datetime.utcfromtimestamp(100),
        ) for _ in range(0, 10)
    ]
    # Test splitting into multiple chunks
    chunk, index = entries_to_chunk(md_list, chunk_size=900)
    chunk2, _ = entries_to_chunk(md_list, chunk_size=900, start_index=index)
    dict_list = [d.to_dict()["signature"] for d in md_list]
    for d in md_list:
        d.delete()
    assert dict_list[:index] == [
        d.md_obj.to_dict()["signature"]
        for d in metadata_store.process_compressed_mdblob(
            chunk, skip_personal_metadata_payload=False)
    ]

    assert dict_list[index:] == [
        d.md_obj.to_dict()["signature"]
        for d in metadata_store.process_compressed_mdblob(
            chunk2, skip_personal_metadata_payload=False)
    ]
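Every example on this page revolves around entries_to_chunk(md_list, chunk_size, start_index=0), which packs serialized metadata entries into a single LZ4-compressed blob and returns (blob, next_index). Note that the examples span different versions of the function: test_mdblob_dont_fit_exception (example #4) expects an oversized single entry to raise, while test_data_dont_fit_in_mdblob (example #10) expects at least one entry to be packed regardless of chunk_size. A minimal sketch of the newer contract, assuming a hypothetical per-entry serialized() method and recompressing from scratch at each step for clarity (the real implementation presumably streams the compression):

import lz4.frame

def entries_to_chunk(md_list, chunk_size, start_index=0):
    """Pack entries starting at start_index into one LZ4 blob.

    Returns (blob, next_index), where next_index points at the first
    entry that did not fit, so callers can resume from it (see the
    send_db_results examples below).
    """
    if start_index >= len(md_list):
        # Covers both the empty-list and the out-of-range start_index
        # corner cases exercised in test_data_dont_fit_in_mdblob
        raise Exception("Could not serialize chunk: nothing to pack")
    index = start_index + 1  # at least one entry is always packed
    blob = lz4.frame.compress(md_list[start_index].serialized())
    while index < len(md_list):
        candidate = lz4.frame.compress(
            b''.join(e.serialized() for e in md_list[start_index:index + 1]))
        if len(candidate) > chunk_size:
            break
        blob, index = candidate, index + 1
    return blob, index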
Example #2
def test_squash_mdblobs_multiple_chunks(self):
    with db_session:
        md_list = [
            self.mds.TorrentMetadata(
                title=''.join(
                    random.choice(string.ascii_uppercase + string.digits)
                    for _ in range(20)),
                infohash=database_blob(random_infohash()),
            ) for _ in range(0, 10)
        ]
        # Test splitting into multiple chunks
        chunk, index = entries_to_chunk(md_list, chunk_size=900)
        chunk2, _ = entries_to_chunk(md_list,
                                     chunk_size=900,
                                     start_index=index)
        dict_list = [d.to_dict()["signature"] for d in md_list]
        for d in md_list:
            d.delete()
    self.assertListEqual(
        dict_list[:index],
        [
            d[0].to_dict()["signature"]
            for d in self.mds.process_compressed_mdblob(
                chunk, skip_personal_metadata_payload=False)
        ],
    )
    self.assertListEqual(
        dict_list[index:],
        [
            d[0].to_dict()["signature"]
            for d in self.mds.process_compressed_mdblob(
                chunk2, skip_personal_metadata_payload=False)
        ],
    )
Example #3
    def _prepare_gossip_blob_cache(self):
        # Choose some random entries and try to pack them into maximum_payload_size bytes
        with db_session:
            # Generate and cache the gossip blob for the personal channel
            personal_channels = list(
                self.metadata_store.ChannelMetadata.get_my_channels().where(
                    lambda g: g.num_entries > 0).random(1))
            personal_channel = personal_channels[0] if personal_channels else None
            md_list = (
                [personal_channel] +
                list(personal_channel.get_random_contents(max_entries - 1))
                if personal_channel else None)
            # md_list always starts with the channel entry itself, so only
            # gossip it when at least one content entry was packed as well
            self.gossip_blob_personal_channel = (
                entries_to_chunk(md_list, maximum_payload_size)[0]
                if md_list and len(md_list) > 1 else None)

            # Generate and cache the gossip blob for a subscribed channel
            # TODO: once the health table is added, send popular torrents instead
            channel_l = list(
                self.metadata_store.ChannelMetadata.get_random_channels(
                    1, only_subscribed=True, only_downloaded=True))
            md_list = (
                channel_l + list(channel_l[0].get_random_contents(max_entries - 1))
                if channel_l else None)
            self.gossip_blob = entries_to_chunk(
                md_list, maximum_payload_size)[0] if md_list else None
        self.metadata_store.disconnect_thread()
Example #4
def test_mdblob_dont_fit_exception(metadata_store):
    with pytest.raises(Exception):
        md_list = [
            metadata_store.TorrentMetadata(title='test' + str(x),
                                           infohash=random_infohash())
            for x in range(0, 1)
        ]
        entries_to_chunk(md_list, chunk_size=1)
Example #5
def send_db_results(self, peer, request_payload_id, db_results):
    index = 0
    while index < len(db_results):
        data, index = entries_to_chunk(db_results,
                                       self.settings.maximum_payload_size,
                                       start_index=index)
        self.ez_send(peer, SelectResponsePayload(request_payload_id, data))
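Judging by test_data_dont_fit_in_mdblob, entries_to_chunk always advances the index by at least one entry per call, so the while loop above is guaranteed to terminate. The same pagination idiom can be factored into a generator; iter_chunks below is an illustrative sketch, not part of the codebase:

def iter_chunks(db_results, max_size):
    # Yield successive compressed blobs until every entry is packed;
    # termination holds because each call packs at least one entry
    index = 0
    while index < len(db_results):
        data, index = entries_to_chunk(db_results, max_size, start_index=index)
        yield data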
Example #6
    def send_db_results(self,
                        peer,
                        request_payload_id,
                        db_results,
                        force_eva_response=False):

        # Special case of empty results list - sending empty lz4 archive
        if len(db_results) == 0:
            self.ez_send(
                peer,
                SelectResponsePayload(request_payload_id, LZ4_EMPTY_ARCHIVE))
            return

        index = 0
        while index < len(db_results):
            transfer_size = (self.eva_protocol.binary_size_limit
                             if force_eva_response else
                             self.settings.maximum_payload_size)
            data, index = entries_to_chunk(db_results,
                                           transfer_size,
                                           start_index=index)
            payload = SelectResponsePayload(request_payload_id, data)
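            # A chunk larger than maximum_payload_size cannot go out as a
            # regular message, so fall back to the EVA bulk-transfer
            # protocol, tagging the transfer with the packed request id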
            if force_eva_response or len(data) > self.settings.maximum_payload_size:
                self.eva_send_binary(
                    peer,
                    struct.pack('>i', request_payload_id),
                    self.ezr_pack(payload.msg_id, payload))
            else:
                self.ez_send(peer, payload)
Example #7
def gen_have_newer_results_blob(md_list):
    with db_session:
        reply_list = [
            md for md, result in md_list
            if (md and (md.metadata_type == CHANNEL_TORRENT)) and (
                result == GOT_NEWER_VERSION)
        ]
        return entries_to_chunk(
            reply_list, maximum_payload_size)[0] if reply_list else None
Example #8
def _get_search_results():
    with db_session:
        db_results = self.metadata_store.MetadataNode.get_entries(
            **request_dict)
        result = entries_to_chunk(
            db_results[:max_entries],
            maximum_payload_size)[0] if db_results else None
    self.metadata_store.disconnect_thread()
    return result
Example #9
    async def on_remote_select(self, peer, request):
        request_sanitized = sanitize_query(json.loads(request.json), self.settings.max_response_size)
        db_results = await self.mds.MetadataNode.get_entries_threaded(**request_sanitized)
        if not db_results:
            return

        index = 0
        while index < len(db_results):
            data, index = entries_to_chunk(db_results, self.settings.maximum_payload_size, start_index=index)
            self.ez_send(peer, SelectResponsePayload(request.id, data))
Example #10
def test_data_dont_fit_in_mdblob(metadata_store):
    import random as rng  # pylint: disable=import-outside-toplevel

    rng.seed(123)
    md_list = [
        metadata_store.TorrentMetadata(
            title='test' + str(x),
            infohash=random_infohash(rng),
            id_=rng.randint(0, 100000000),
            timestamp=rng.randint(0, 100000000),
        ) for x in range(0, 1)
    ]
    chunk, index = entries_to_chunk(md_list, chunk_size=1)
    assert index == 1
    assert len(chunk) == 206

    # Test corner case of empty list and/or too big index
    with pytest.raises(Exception):
        entries_to_chunk(md_list, chunk_size=1000, start_index=1000)
    with pytest.raises(Exception):
        entries_to_chunk([], chunk_size=1)
Example #11
def test_squash_mdblobs(metadata_store):
    chunk_size = metadata_store.ChannelMetadata._CHUNK_SIZE_LIMIT
    md_list = [
        metadata_store.TorrentMetadata(
            title=''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(20)),
            infohash=database_blob(random_infohash()),
            torrent_date=datetime.utcfromtimestamp(100),
        )
        for _ in range(0, 10)
    ]
    chunk, _ = entries_to_chunk(md_list, chunk_size=chunk_size)
    dict_list = [d.to_dict()["signature"] for d in md_list]
    for d in md_list:
        d.delete()
    assert dict_list == [
        d[0].to_dict()["signature"]
        for d in metadata_store.process_compressed_mdblob(chunk, skip_personal_metadata_payload=False)
    ]
Example #12
def test_squash_mdblobs(self):
    chunk_size = self.mds.ChannelMetadata._CHUNK_SIZE_LIMIT
    md_list = [
        self.mds.TorrentMetadata(
            title=''.join(
                random.choice(string.ascii_uppercase + string.digits)
                for _ in range(20)),
            infohash=database_blob(random_infohash()),
        ) for _ in range(0, 10)
    ]
    chunk, _ = entries_to_chunk(md_list, chunk_size=chunk_size)
    dict_list = [d.to_dict()["signature"] for d in md_list]
    for d in md_list:
        d.delete()
    self.assertListEqual(
        dict_list,
        [
            d[0].to_dict()["signature"]
            for d in self.mds.process_compressed_mdblob(
                chunk, skip_personal_metadata_payload=False)
        ],
    )
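The process_compressed_mdblob calls in the squash tests above are the consuming side of entries_to_chunk. A rough sketch of that direction, assuming the blob is a plain LZ4 frame and that the store exposes a method for handling the concatenated payloads (process_squashed_mdblob is an assumption about the API, mirroring the sketch after example #1):

import lz4.frame

def process_compressed_mdblob(metadata_store, compressed_data, **kwargs):
    # Undo entries_to_chunk: decompress the LZ4 blob, then hand the
    # concatenated serialized payloads to the store for processing
    raw = lz4.frame.decompress(compressed_data)
    return metadata_store.process_squashed_mdblob(raw, **kwargs)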