# Example 1
def test_process_payload_reject_older_entry(metadata_store):
    """
    Check that process_payload() rejects a payload carrying an older version of
    an already known metadata entry and reports ObjState.LOCAL_VERSION_NEWER.
    """
    signing_key = default_eccrypto.generate_key("curve25519")

    stale = metadata_store.TorrentMetadata(title='blabla',
                                           timestamp=11,
                                           id_=3,
                                           infohash=random_infohash(),
                                           sign_with=signing_key)
    stale_payload = stale._payload_class(**stale.to_dict())
    stale.delete()

    # Re-create the same logical entry (same id_ and key) with a newer timestamp.
    fresh = metadata_store.TorrentMetadata(title='blabla',
                                           timestamp=12,
                                           id_=3,
                                           infohash=random_infohash(),
                                           sign_with=signing_key)

    # The stale payload must be rejected in favour of the newer local entry.
    outcome = metadata_store.process_payload(stale_payload,
                                             skip_personal_metadata_payload=False)
    assert outcome[0] == ProcessingResult(md_obj=fresh,
                                          obj_state=ObjState.LOCAL_VERSION_NEWER)
# Example 2
def add_fake_torrents_channels(metadata_store):
    """Populate the store with ten channels, each holding five torrents with health info."""
    infohashes = []

    torrents_per_channel = 5
    with db_session:
        for chan_idx in range(10):
            ext_key = default_eccrypto.generate_key('curve25519')
            channel = metadata_store.ChannelMetadata(
                title='channel%d' % chan_idx,
                subscribed=(chan_idx % 2 == 0),
                num_entries=torrents_per_channel,
                infohash=random_infohash(),
                id_=123,
                sign_with=ext_key,
                version=10,
                local_version=(chan_idx % 11),
            )
            for tor_idx in range(torrents_per_channel):
                tor_infohash = random_infohash()
                infohashes.append(tor_infohash)
                torrent = metadata_store.TorrentMetadata(
                    origin_id=channel.id_,
                    title='torrent%d' % tor_idx,
                    infohash=tor_infohash,
                    sign_with=ext_key,
                )
                # Seeder count derived from the infohash prefix keeps it deterministic.
                torrent.health.seeders = int.from_bytes(torrent.infohash[:2],
                                                        byteorder="big")
                torrent.health.self_checked = bool(tor_idx % 2 == 1)
                # Odd torrents were "checked" a week ago, even ones just now.
                torrent.health.last_check = int(
                    time()) - (60 * 60 * 24 * 7 if tor_idx % 2 else 0)
def test_get_entries_for_infohashes(metadata_store):
    """Verify the `infohash` vs `infohash_set` filters of get_entries_query()."""
    present_1 = random_infohash()
    present_2 = random_infohash()
    absent = random_infohash()

    for ih in (present_1, present_2):
        metadata_store.TorrentMetadata(title='title',
                                       infohash=ih,
                                       size=0,
                                       sign_with=TEST_PERSONAL_KEY)

    def query_size(*args, **kwargs):
        return len(metadata_store.get_entries_query(*args, **kwargs))

    # A single infohash object works as a filter.
    assert query_size(infohash=absent) == 0
    assert query_size(infohash=present_1) == 1

    # A set of infohashes works as well.
    assert query_size(infohash_set={present_1, present_2}) == 2

    # When both filters are given, only `infohash_set` is honoured.
    assert query_size(infohash=present_1,
                      infohash_set={present_1, present_2}) == 2
# Example 4
def my_channel(metadata_store, tags_db):
    """
    Generate two channels with some torrents and add a few (random) tags to these
    torrents.

    :param metadata_store: the metadata store to populate.
    :param tags_db: the tags database to register tags in.
    :return: the first generated channel.
    """
    def _populate(chan, title_fmt):
        # First five torrents are created with status NEW, the next four keep
        # the default status; each torrent gets tagged under its own infohash.
        for ind in range(5):
            infohash = random_infohash()
            metadata_store.TorrentMetadata(origin_id=chan.id_,
                                           title=title_fmt % ind,
                                           status=NEW,
                                           infohash=infohash)
            tag_torrent(infohash, tags_db)
        for ind in range(5, 9):
            infohash = random_infohash()
            metadata_store.TorrentMetadata(origin_id=chan.id_,
                                           title=title_fmt % ind,
                                           infohash=infohash)
            tag_torrent(infohash, tags_db)

    with db_session:
        chan = metadata_store.ChannelMetadata.create_channel('test', 'test')
        _populate(chan, 'torrent%d')

        chan2 = metadata_store.ChannelMetadata.create_channel('test2', 'test2')
        # BUG FIX: the last batch of chan2 torrents was previously created with a
        # fresh random_infohash() while tag_torrent() tagged a *different* infohash,
        # so those tags never matched any torrent. The torrent and its tags now
        # share the same infohash.
        _populate(chan2, 'torrentB%d')
        return chan
# Example 5
def test_get_num_channels_nodes(metadata_store):
    """Check the get_num_channels() and get_num_torrents() counters."""
    # Two channels signed with the default key (distinct id_ values)...
    for channel_id in (0, 123):
        metadata_store.ChannelMetadata(title='testchan',
                                       id_=channel_id,
                                       infohash=random_infohash())
    # ...plus two unsigned channels carrying distinct fake public keys.
    for digit in ('0', '1'):
        metadata_store.ChannelMetadata(
            title='testchan',
            id_=0,
            public_key=unhexlify(digit * 20),
            signature=unhexlify(digit * 64),
            skip_key_check=True,
            infohash=random_infohash(),
        )

    # A few plain torrents that must not count as channels.
    for num in range(3):
        metadata_store.TorrentMetadata(title='test' + str(num),
                                       status=NEW,
                                       infohash=random_infohash())

    assert metadata_store.get_num_channels() == 4
    assert metadata_store.get_num_torrents() == 3
async def test_update_multiple_metadata_entries(metadata_store,
                                                add_fake_torrents_channels,
                                                rest_api):
    """
    Test updating attributes of several metadata entities at once with a PATCH request to REST API
    """
    # Malformed/empty JSON must be handled gracefully with a 400.
    await do_request(rest_api,
                     'metadata',
                     expected_code=400,
                     request_type='PATCH',
                     post_data='abc')

    # Updating a non-existent entry must yield a 404.
    await do_request(
        rest_api,
        'metadata',
        post_data=[{'public_key': hexlify(b'1' * 64), 'id': 111}],
        expected_code=404,
        request_type='PATCH',
    )

    with db_session:
        torrent_md = metadata_store.TorrentMetadata(title='old1',
                                                    infohash=random_infohash())
        channel_md = metadata_store.ChannelMetadata(title='old2',
                                                    infohash=random_infohash(),
                                                    subscribed=False)

    NEW_NAME1 = "updated1"
    NEW_NAME2 = "updated2"
    await do_request(
        rest_api,
        'metadata',
        post_data=[
            {
                'public_key': hexlify(torrent_md.public_key),
                'id': torrent_md.id_,
                'title': NEW_NAME1
            },
            {
                'public_key': hexlify(channel_md.public_key),
                'id': channel_md.id_,
                'title': NEW_NAME2,
                'subscribed': 1
            },
        ],
        expected_code=200,
        request_type='PATCH',
    )

    with db_session:
        # Both entries must be renamed and flagged UPDATED; the channel subscribed.
        updated_torrent = metadata_store.ChannelNode.get(rowid=torrent_md.rowid)
        assert updated_torrent.title == NEW_NAME1
        assert updated_torrent.status == UPDATED

        updated_channel = metadata_store.ChannelNode.get(rowid=channel_md.rowid)
        assert updated_channel.title == NEW_NAME2
        assert updated_channel.status == UPDATED
        assert updated_channel.subscribed
async def test_delete_multiple_metadata_entries(rest_api, metadata_store):
    """
    Test deleting multiple entries with JSON REST API
    """
    with db_session:
        first = metadata_store.TorrentMetadata(title='old1',
                                               infohash=random_infohash())
        second = metadata_store.TorrentMetadata(title='old2',
                                                infohash=random_infohash())
        assert metadata_store.ChannelNode.select().count() == 2

    # Address both entries by (public_key, id) in a single DELETE request.
    delete_data = [{'public_key': hexlify(entry.public_key), 'id': entry.id_}
                   for entry in (first, second)]
    await do_request(rest_api,
                     'metadata',
                     post_data=delete_data,
                     expected_code=200,
                     request_type='DELETE')

    with db_session:
        assert metadata_store.ChannelNode.select().count() == 0
# Example 8
def test_process_payload_reject_older(metadata_store):
    """
    A torrent payload whose timestamp is older than the local_version of its
    (grand)parent channel must be dropped: it is a remote peer pushing back an
    entry that was already deleted locally. A torrent whose hierarchy is broken
    (orphaned root) must still be accepted as a new object.
    """
    key = default_eccrypto.generate_key("curve25519")
    channel = metadata_store.ChannelMetadata(
        title='bla',
        version=123,
        timestamp=12,
        local_version=12,
        infohash=random_infohash(),
        sign_with=key,
    )

    def deleted_torrent_payload(parent_id):
        # Create a torrent older than the channel, snapshot its payload, delete it.
        torrent = metadata_store.TorrentMetadata(title='blabla',
                                                 timestamp=11,
                                                 origin_id=parent_id,
                                                 infohash=random_infohash(),
                                                 sign_with=key)
        payload = torrent._payload_class(**torrent.to_dict())
        torrent.delete()
        return payload

    # Direct child of the channel: rejected, nothing re-added.
    assert metadata_store.process_payload(deleted_torrent_payload(channel.id_)) == []
    assert metadata_store.TorrentMetadata.get(title='blabla') is None

    # Torrent nested inside collections under the channel: still rejected.
    folder_1 = metadata_store.CollectionNode(origin_id=channel.id_, sign_with=key)
    folder_2 = metadata_store.CollectionNode(origin_id=folder_1.id_, sign_with=key)
    assert metadata_store.process_payload(deleted_torrent_payload(folder_2.id_)) == []
    assert metadata_store.TorrentMetadata.get(title='blabla') is None

    # Broken hierarchy (root collection is orphaned): the torrent is added anyway.
    folder_1 = metadata_store.CollectionNode(origin_id=123123, sign_with=key)
    folder_2 = metadata_store.CollectionNode(origin_id=folder_1.id_, sign_with=key)
    result = metadata_store.process_payload(deleted_torrent_payload(folder_2.id_))
    assert result[0].obj_state == ObjState.NEW_OBJECT
    assert metadata_store.TorrentMetadata.get(title='blabla')
def test_get_magnet(metadata_store):
    """
    Test converting torrent metadata to a magnet link
    """
    plain = metadata_store.TorrentMetadata.from_dict({"infohash": random_infohash()})
    assert plain.get_magnet()

    # A title containing a non-BMP unicode character must not break magnet generation.
    unicode_titled = metadata_store.TorrentMetadata.from_dict({
        'title': '\U0001f4a9',
        "infohash": random_infohash(),
    })
    assert unicode_titled.get_magnet()
def test_get_entries_health_checked_after(metadata_store):
    """Query for torrents whose health was checked after a given moment in time."""
    # One torrent checked just now...
    recent = metadata_store.TorrentMetadata(infohash=random_infohash())
    recent.health.last_check = int(time())

    # ...and one checked a while ago.
    stale = metadata_store.TorrentMetadata(infohash=random_infohash())
    stale.health.last_check = recent.health.last_check - 10000

    # Only the recently-checked torrent falls inside the requested time window.
    selected = metadata_store.get_entries(
        health_checked_after=stale.health.last_check + 1)
    assert selected == [recent]
# Example 11
def needle_in_haystack_mds(metadata_store):
    """Fill the store with one channel, 100 'hay' torrents and two 'needle' entries."""
    hay_count = 100
    with db_session:
        metadata_store.ChannelMetadata(title='test',
                                       tags='test',
                                       subscribed=True,
                                       infohash=random_infohash())
        for idx in range(hay_count):
            metadata_store.TorrentMetadata(title='hay ' + str(idx),
                                           infohash=random_infohash())
        for needle_title in ('needle', 'needle2'):
            metadata_store.TorrentMetadata(title=needle_title,
                                           infohash=random_infohash())
    return metadata_store
async def test_get_my_channel_tags_xxx(metadata_store, tags_db,
                                       mock_dlmgr_get_download, my_channel,
                                       rest_api):  # pylint: disable=redefined-outer-name
    """
    Test whether XXX tags are correctly filtered
    """
    with db_session:
        chan = metadata_store.ChannelMetadata.create_channel('test', 'test')
        infohash = random_infohash()
        metadata_store.TorrentMetadata(origin_id=chan.id_,
                                       title='taggedtorrent',
                                       status=NEW,
                                       infohash=infohash)
        # Declare "wrongterm" an XXX term so tags containing it are filtered.
        default_xxx_filter.xxx_terms = {"wrongterm"}

        # Attach one safe tag and three variations on the XXX term.
        tag_torrent(infohash, tags_db,
                    tags=["totally safe", "wrongterm", "wRonGtErM", "a wrongterm b"])

        json_dict = await do_request(
            rest_api,
            'channels/%s/%d?metadata_type=%d&hide_xxx=1' %
            (hexlify(my_channel.public_key), chan.id_, REGULAR_TORRENT),
            expected_code=200,
        )

    # Only the single non-XXX tag ("totally safe") should survive the filter.
    assert len(json_dict['results']) == 1
    assert len(json_dict['results'][0]["tags"]) == 1
# Example 13
async def test_get_channels_peers(rest_api, endpoint, metadata_store,
                                  mock_gigachannel_community):  # pylint: disable=W0621, C0321
    """
    Test getting debug info about the state of channels to peers mapping
    """
    mapping = mock_gigachannel_community.channels_peers = ChannelsPeersMapping()

    peer_key = default_eccrypto.generate_key("curve25519")
    chan_key = default_eccrypto.generate_key("curve25519")
    with db_session:
        chan = metadata_store.ChannelMetadata(sign_with=chan_key,
                                              name="bla",
                                              infohash=random_infohash())

    # Register a single known peer for the channel.
    known_peer = Peer(peer_key, ("1.2.3.4", 5))
    mapping.add(known_peer, chan.public_key, chan.id_)

    result = await do_request(
        rest_api,
        'remote_query/channels_peers',
        request_type="GET",
        expected_code=200,
    )

    # The debug endpoint must report the channel along with its known peer.
    entry = result["channels_list"][0]
    assert entry["channel_name"] == chan.title
    assert entry["channel_pk"] == hexlify(chan.public_key)
    assert entry["channel_id"] == chan.id_
    assert entry["peers"][0][0] == hexlify(known_peer.mid)
# Example 14
 def update_properties(self, update_dict):
     """
     Update this collection node's attributes from *update_dict*.

     Guards against moving the collection into itself/its descendants or into an
     orphaned hierarchy. If the node ends up at the root level (origin_id == 0)
     while still being a plain collection, it is re-created as a ChannelMetadata
     entry.

     :param update_dict: mapping of attribute names to new values.
     :return: the updated entry (possibly a brand-new ChannelMetadata instance).
     :raises ValueError: when the move target does not exist, is the node itself
         or one of its descendants, or sits in an orphaned hierarchy.
     """
     # Sanity checks: check that we don't create a recursive dependency or an orphaned channel
     new_origin_id = update_dict.get('origin_id', self.origin_id)
     if new_origin_id not in (0, self.origin_id):
         # Moving under a different, non-root parent: the parent must exist
         # under the same public key...
         new_parent = CollectionNode.get(public_key=self.public_key,
                                         id_=new_origin_id)
         if not new_parent:
             raise ValueError("Target collection does not exists")
         root_path = new_parent.get_parent_nodes()
         # ...must not be this node itself or one of its descendants
         # (root_path[:-1] excludes the parent itself)...
         if new_origin_id == self.id_ or self in root_path[:-1]:
             raise ValueError(
                 "Can't move collection into itself or its descendants!"
             )
         # ...and its chain of parents must terminate at a root (origin_id == 0).
         if root_path[0].origin_id != 0:
             raise ValueError(
                 "Tried to move collection into an orphaned hierarchy!")
     updated_self = super().update_properties(update_dict)
     if updated_self.origin_id == 0 and self.metadata_type == COLLECTION_NODE:
         # Coerce to ChannelMetadata
         # ACHTUNG! This is a somewhat awkward way to re-create the entry as an instance of
         # another class. Be very careful with it!
         # rowid/metadata_type/timestamp are dropped so the new entry gets fresh
         # ones; a new infohash and re-signing make it a valid channel entry.
         self_dict = updated_self.to_dict()
         updated_self.delete(recursive=False)
         self_dict.pop("rowid")
         self_dict.pop("metadata_type")
         self_dict.pop("timestamp")
         self_dict['infohash'] = random_infohash()
         self_dict["sign_with"] = self._my_key
         updated_self = db.ChannelMetadata.from_dict(self_dict)
     return updated_self
def test_serialization(metadata_store):
    """
    Test converting channel metadata to serialized data
    """
    channel = metadata_store.ChannelMetadata.from_dict({"infohash": random_infohash()})
    assert channel.serialized()
# Example 16
    async def test_get_known_subscribed_peers_for_node(self):
        """A peer known for a channel is reported for all its descendants, but not for orphans."""
        key = default_eccrypto.generate_key("curve25519")
        with db_session:
            channel = self.overlay(0).mds.ChannelMetadata(
                origin_id=0, infohash=random_infohash(), sign_with=key)
            folder1 = self.overlay(0).mds.CollectionNode(origin_id=channel.id_,
                                                         sign_with=key)
            folder2 = self.overlay(0).mds.CollectionNode(origin_id=folder1.id_,
                                                         sign_with=key)
            # A collection whose parent does not exist at all.
            orphan = self.overlay(0).mds.CollectionNode(origin_id=123123,
                                                        sign_with=key)

        source_peer = self.nodes[1].my_peer
        self.overlay(0).channels_peers.add(source_peer, channel.public_key,
                                           channel.id_)

        # The subscribed peer is visible from the channel and any nested folder...
        for node in (channel, folder1, folder2):
            assert self.overlay(0).get_known_subscribed_peers_for_node(
                node.public_key, node.id_) == [source_peer]
        # ...but not from a node outside the channel's hierarchy.
        assert self.overlay(0).get_known_subscribed_peers_for_node(
            orphan.public_key, orphan.id_) == []
def test_serialization(metadata_store):
    """
    Test converting torrent metadata to serialized data
    """
    torrent = metadata_store.TorrentMetadata.from_dict({"infohash": random_infohash()})
    assert torrent.serialized()
async def test_get_entry(rest_api, metadata_store):
    """
    Test getting an entry with REST API GET request
    """
    torrent_kwargs = {
        "title": "bla",
        "infohash": random_infohash(),
        "tracker_info": "http://sometracker.local/announce"
    }
    description_kwargs = {
        "text":
        json.dumps({
            "description_text": "*{{}bla <\\> [)]// /ee2323㋛㋛㋛  ",
            "channel_thumbnail": "ffffff.jpg"
        })
    }
    for md_type, kwargs in ((metadata_store.TorrentMetadata, torrent_kwargs),
                            (metadata_store.ChannelDescription,
                             description_kwargs)):
        with db_session:
            md = md_type(**kwargs)
            # Mark COMMITTED so the REST layer serves the entry.
            md.status = COMMITTED
        await do_request(
            rest_api,
            'metadata/%s/%i' % (hexlify(md.public_key), md.id_),
            expected_json=md.to_simple_dict(),
        )
# Example 19
    async def test_remote_select_query_back(self):
        """
        Test querying back preview contents for previously unknown channels.
        """
        channels_count = 5
        torrents_per_channel = 4  # preview torrents fetched per unknown channel

        mds0 = self.nodes[0].overlay.mds
        mds1 = self.nodes[1].overlay.mds

        with db_session:
            # Node 0 holds the channels, each populated with torrents whose
            # seeder counts are 0..torrents_per_channel-1.
            for _ in range(channels_count):
                chan = mds0.ChannelMetadata.create_channel("channel", "")
                for seeders in range(torrents_per_channel):
                    torrent = mds0.TorrentMetadata(origin_id=chan.id_,
                                                   infohash=random_infohash())
                    torrent.health.seeders = seeders

        remote_peer = self.nodes[0].my_peer
        self.nodes[1].overlay.send_remote_select(remote_peer,
                                                 metadata_type=[CHANNEL_TORRENT])

        await self.deliver_messages(timeout=0.5)

        with db_session:
            channels = list(mds1.ChannelMetadata.select(lambda g: g.title == "channel"))
            assert len(channels) == channels_count
            # Each unknown channel triggered a query-back for its preview torrents.
            torrents = list(mds1.TorrentMetadata.select(
                lambda g: g.metadata_type == REGULAR_TORRENT))
            assert len(torrents) == channels_count * torrents_per_channel
            assert {t.health.seeders for t in torrents} == set(range(torrents_per_channel))
# Example 20
def test_multiple_squashed_commit_and_read(metadata_store):
    """
    Test committing entries into several squashed blobs and reading them back
    """
    # Force a tiny chunk size so the commit is split over several .mdblob files.
    metadata_store.ChannelMetadata._CHUNK_SIZE_LIMIT = 500

    num_entries = 10
    channel = metadata_store.ChannelMetadata.create_channel('testchan')
    entries = [
        metadata_store.TorrentMetadata(origin_id=channel.id_,
                                       title='test' + str(n),
                                       status=NEW,
                                       infohash=random_infohash())
        for n in range(num_entries)
    ]
    channel.commit_channel_torrent()

    # Wipe local state so the channel dir must be re-processed from disk.
    channel.local_version = 0
    for entry in entries:
        entry.delete()

    channel_dir = Path(metadata_store.ChannelMetadata._channels_dir) / channel.dirname
    # Make sure the commit was broken into more than one .mdblob file.
    assert len(os.listdir(channel_dir)) > 1

    metadata_store.process_channel_dir(channel_dir,
                                       channel.public_key,
                                       channel.id_,
                                       skip_personal_metadata_payload=False)
    assert len(channel.contents) == num_entries
def rnd_torrent():
    """Return template kwargs for a randomly-infohashed 'video' torrent entry."""
    return dict(title="",
                infohash=random_infohash(),
                torrent_date=datetime(1970, 1, 1),
                tags="video")
# Example 22
def test_squash_mdblobs_multiple_chunks(metadata_store):
    """Entries squashed into two chunks must deserialize back with identical signatures."""
    rng = random.Random(123)

    def random_title():
        return ''.join(rng.choice(string.ascii_uppercase + string.digits)
                       for _ in range(20))

    md_list = [
        metadata_store.TorrentMetadata(
            title=random_title(),
            infohash=random_infohash(rng),
            id_=rng.randint(0, 100000000),
            torrent_date=int2time(rng.randint(0, 4000000)),
            timestamp=rng.randint(0, 100000000),
        ) for _ in range(10)
    ]

    # Split the serialization into two chunks of at most 900 bytes each.
    chunk, index = entries_to_chunk(md_list, chunk_size=900)
    chunk2, _ = entries_to_chunk(md_list, chunk_size=900, start_index=index)

    signatures = [entry.to_dict()["signature"] for entry in md_list]
    for entry in md_list:
        entry.delete()

    def restored_signatures(blob):
        return [
            res.md_obj.to_dict()["signature"]
            for res in metadata_store.process_compressed_mdblob(
                blob, skip_personal_metadata_payload=False)
        ]

    # Round-tripping each chunk must recover the corresponding signatures in order.
    assert signatures[:index] == restored_signatures(chunk)
    assert signatures[index:] == restored_signatures(chunk2)
 def generate_channel(recurse=False, status=NEW):
     """Create a top-level channel with thumbnail, descriptions and per-status torrents."""
     toplevel_channel = metadata_store.ChannelMetadata.create_channel('root', 'test')
     metadata_store.ChannelThumbnail(
         public_key=toplevel_channel.public_key,
         origin_id=toplevel_channel.id_,
         binary_data=os.urandom(20000),
         data_type="image/png",
     )
     metadata_store.ChannelDescription(
         public_key=toplevel_channel.public_key,
         origin_id=toplevel_channel.id_,
         json_text='{"description_text":"foobar"}',
     )
     toplevel_channel.status = status
     # One torrent per status; optionally recurse into nested collections
     # covering every status combination.
     for torrent_status in status_types:
         metadata_store.TorrentMetadata(infohash=random_infohash(),
                                        origin_id=toplevel_channel.id_,
                                        status=torrent_status)
         if recurse:
             for status_combination in all_status_combinations():
                 generate_collection(toplevel_channel,
                                     torrent_status,
                                     status_combination,
                                     recurse=recurse)
     metadata_store.ChannelDescription(
         text="foobar",
         origin_id=toplevel_channel.id_,
     )
     return toplevel_channel
# Example 24
def test_skip_processing_of_received_personal_channel_torrents(metadata_store):
    """
    Test that personal torrent is ignored by default when processing the torrent metadata payload
    """
    channel = metadata_store.ChannelMetadata.create_channel('testchan')
    torrent = metadata_store.TorrentMetadata(origin_id=channel.id_,
                                             title='test',
                                             status=NEW,
                                             infohash=random_infohash())
    channel.commit_channel_torrent()
    torrent.delete()

    channel_dir = Path(metadata_store.ChannelMetadata._channels_dir) / channel.dirname
    assert os.listdir(Path.fix_win_long_file(channel_dir))

    # Personal metadata payloads are skipped by default, so nothing is restored.
    channel.local_version = 0
    metadata_store.process_channel_dir(channel_dir, channel.public_key,
                                       channel.id_)
    assert not channel.contents

    # With skipping disabled, the torrent is restored from the channel dir.
    channel.local_version = 0
    metadata_store.process_channel_dir(channel_dir,
                                       channel.public_key,
                                       channel.id_,
                                       skip_personal_metadata_payload=False)
    assert len(channel.contents) == 1
 def new_channel(**kwargs):
     """Create a subscribed, shared NEW channel; defaults overridable via kwargs."""
     defaults = dict(subscribed=True,
                     share=True,
                     status=NEW,
                     infohash=random_infohash())
     defaults.update(kwargs)
     return metadata_store.ChannelMetadata(**defaults)
# Example 26
async def test_reject_malformed_channel(gigachannel_manager, metadata_store):  # pylint: disable=unused-argument, redefined-outer-name
    """A channel torrent whose dirname does not match the channel must not be downloaded."""
    global initiated_download
    with db_session:
        channel = metadata_store.ChannelMetadata(title="bla1",
                                                 public_key=b'123',
                                                 infohash=random_infohash())

    def metainfo_wrong_name(*args, **kwargs):
        return succeed({b'info': {b'name': b'bla'}})

    def metainfo_matching_name(*args, **kwargs):
        return succeed({b'info': {b'name': channel.dirname.encode('utf-8')}})

    initiated_download = False

    def fake_start_download(*_, **__):
        # Record that a download was started and hand back a finished mock.
        global initiated_download
        initiated_download = True
        mock_dl = MockObject()
        mock_dl.future_finished = succeed(None)
        return mock_dl

    gigachannel_manager.download_manager.start_download = fake_start_download

    # A mismatching dirname must be skipped.
    gigachannel_manager.download_manager.get_metainfo = metainfo_wrong_name
    await gigachannel_manager.download_channel(channel)
    assert not initiated_download

    with patch.object(TorrentDef, "__init__", lambda *_, **__: None):
        # A matching dirname triggers the download.
        gigachannel_manager.download_manager.get_metainfo = metainfo_matching_name
        await gigachannel_manager.download_channel(channel)
        assert initiated_download
# Example 27
def test_process_channel_dir_file(tmpdir, metadata_store):
    """
    Test whether we are able to process files in a directory containing node metadata
    """
    node_md = metadata_store.TorrentMetadata(title='test',
                                             infohash=random_infohash())
    metadata_path = tmpdir / 'metadata.data'
    node_md.to_file(metadata_path)
    # After deleting the entry, loading the file should re-add it to the database.
    node_md.delete()
    loaded = metadata_store.process_mdblob_file(metadata_path,
                                                skip_personal_metadata_payload=False)
    assert loaded[0].md_obj.title == 'test'

    # A DeletedMetadata blob should remove the matching existing entry.
    metadata = metadata_store.TorrentMetadata(infohash=b'1' * 20)
    public_key = metadata.public_key
    metadata.to_delete_file(metadata_path)
    metadata_sig = metadata.signature
    loaded = metadata_store.process_mdblob_file(metadata_path,
                                                skip_personal_metadata_payload=False,
                                                channel_public_key=public_key)
    assert loaded == []
    # The original metadata must be gone from the store.
    assert metadata_store.TorrentMetadata.get(signature=metadata_sig) is None

    # An unknown metadata type must raise UnknownBlobTypeException.
    invalid_path = tmpdir / 'invalidtype.mdblob'
    make_wrong_payload(invalid_path)
    with pytest.raises(UnknownBlobTypeException):
        metadata_store.process_mdblob_file(invalid_path,
                                           skip_personal_metadata_payload=False)
async def test_copy_torrents_to_collection(rest_api, metadata_store):
    """
    Test if we can copy torrents from an external channel(s) to a personal channel/collection
    """
    channel = metadata_store.ChannelMetadata.create_channel('my chan')
    ext_key = default_eccrypto.generate_key("curve25519")
    with db_session:
        # One torrent signed by an external key and one free-for-all entry.
        signed_md = metadata_store.TorrentMetadata(sign_with=ext_key,
                                                   id_=111,
                                                   title="bla1",
                                                   infohash=random_infohash())
        ffa_md = metadata_store.TorrentMetadata(public_key=b"",
                                                id_=222,
                                                title="bla2-ffa",
                                                infohash=random_infohash())

    request_data = [signed_md.to_simple_dict(), ffa_md.to_simple_dict()]
    # Copying into an existing personal channel succeeds.
    await do_request(
        rest_api,
        'collections/%s/%i/copy' % (hexlify(channel.public_key), channel.id_),
        post_data=request_data,
        request_type='POST',
    )
    with db_session:
        assert len(channel.contents) == 2

    # A non-existent target collection yields 404.
    await do_request(
        rest_api,
        'collections/%s/%i/copy' % (hexlify(b"0" * 64), 777),
        post_data=request_data,
        request_type='POST',
        expected_code=404,
    )

    # Referencing unknown source entries yields 400.
    await do_request(
        rest_api,
        'collections/%s/%i/copy' % (hexlify(channel.public_key), channel.id_),
        post_data=[{'public_key': hexlify(b"1" * 64), 'id': 12333}],
        request_type='POST',
        expected_code=400,
    )
# Example 29
    async def test_gigachannel_search(self):
        """
        Test searching several nodes for metadata entries based on title text
        """
        # Disable the query-back mechanism so it does not interfere with this test.
        for node in self.nodes:
            node.overlay.rqc_settings.max_channel_query_back = 0

        await self.introduce_nodes()

        U_CHANNEL = "ubuntu channel"
        U_TORRENT = "ubuntu torrent"

        with db_session:
            # Node 0: two channels, one matching the query.
            self.nodes[0].overlay.mds.ChannelMetadata.create_channel(U_CHANNEL, "")
            self.nodes[0].overlay.mds.ChannelMetadata.create_channel(
                "debian channel", "")

        with db_session:
            # Node 1: two torrents, one matching the query.
            self.nodes[1].overlay.mds.TorrentMetadata(title=U_TORRENT,
                                                      infohash=random_infohash())
            self.nodes[1].overlay.mds.TorrentMetadata(title="debian torrent",
                                                      infohash=random_infohash())

        notifier = Notifier(loop=self.loop)
        notifier.notify = Mock()
        self.nodes[2].overlay.notifier = notifier

        self.nodes[2].overlay.send_search_request(txt_filter="ubuntu*")

        await self.deliver_messages(timeout=0.5)

        # Both matching entries must have reached the notifier callback.
        titles = sorted(call.args[1]["results"][0]["name"]
                        for call in notifier.notify.call_args_list)
        assert titles == [U_CHANNEL, U_TORRENT]

        with db_session:
            mds2 = self.nodes[2].overlay.mds
            assert mds2.ChannelNode.select().count() == 2
            assert mds2.ChannelNode.select(
                lambda g: g.title in (U_CHANNEL, U_TORRENT)).count() == 2
def test_list_contents(metadata_store, torrent_template):
    """
    Test whether a correct list with channel content is returned from the database
    """
    # Channel 1 (first key) holds a single torrent.
    metadata_store.ChannelNode._my_key = default_eccrypto.generate_key('low')
    channel1 = metadata_store.ChannelMetadata(infohash=random_infohash())
    metadata_store.TorrentMetadata.from_dict(
        dict(torrent_template, origin_id=channel1.id_))

    # Channel 2 (second key) holds two torrents with distinct infohashes.
    metadata_store.ChannelNode._my_key = default_eccrypto.generate_key('low')
    channel2 = metadata_store.ChannelMetadata(infohash=random_infohash())
    for ih in (b"1", b"2"):
        metadata_store.TorrentMetadata.from_dict(
            dict(torrent_template, infohash=ih, origin_id=channel2.id_))

    assert len(channel1.contents_list) == 1
    assert len(channel2.contents_list) == 2
    assert channel2.contents_len == 2