Example #1
    def test_cleanup_pony_experimental_db(self):
        # Create a Pony database of older experimental version
        pony_db = os.path.join(self.session_base_dir, 'pony.db')
        pony_db_bak = os.path.join(self.session_base_dir, 'pony2.db')
        my_key = default_eccrypto.generate_key(u"curve25519")
        mds = MetadataStore(pony_db, self.session_base_dir, my_key)
        mds.shutdown()
        shutil.copyfile(pony_db, pony_db_bak)

        connection = sqlite3.connect(pony_db)
        with connection:
            cursor = connection.cursor()
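            # Drop MiscData (the table holding db_version) so the database looks like the older experimental format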
            cursor.execute("DROP TABLE MiscData")
        connection.close()

        # Assert older experimental version is deleted
        self.assertFalse(cleanup_pony_experimental_db(pony_db))
        self.assertFalse(os.path.exists(pony_db))

        # Assert recent database version is left untouched
        self.assertFalse(cleanup_pony_experimental_db(pony_db_bak))
        self.assertTrue(os.path.exists(pony_db_bak))

        # Assert a DatabaseError is raised for a garbled db and the file is left untouched
        garbled_db = os.path.join(self.session_base_dir, 'garbled.db')
        with open(garbled_db, 'w') as f:
            f.write("123")
        self.assertRaises(sqlite3.DatabaseError, cleanup_pony_experimental_db,
                          garbled_db)
        self.assertTrue(os.path.exists(garbled_db))
Example #2
    def setUp(self):
        yield super(TestMetadataStore, self).setUp()
        my_key = default_eccrypto.generate_key(u"curve25519")

        self.mds = MetadataStore(
            os.path.join(self.session_base_dir, 'test.db'),
            self.session_base_dir, my_key)
Example #3
    def test_cleanup_pony_experimental_db(self):
        # Create a Pony database of older experimental version
        pony_db = os.path.join(self.session_base_dir, 'pony.db')
        pony_db_bak = os.path.join(self.session_base_dir, 'pony2.db')
        my_key = default_eccrypto.generate_key(u"curve25519")
        mds = MetadataStore(pony_db, self.session_base_dir, my_key)
        mds.shutdown()
        shutil.copyfile(pony_db, pony_db_bak)

        connection = sqlite3.connect(pony_db)
        with connection:
            cursor = connection.cursor()
            cursor.execute("DROP TABLE MiscData")
        connection.close()

        # Assert older experimental version is deleted
        self.assertFalse(cleanup_pony_experimental_db(pony_db))
        self.assertFalse(os.path.exists(pony_db))

        # Assert recent database version is left untouched
        self.assertFalse(cleanup_pony_experimental_db(pony_db_bak))
        self.assertTrue(os.path.exists(pony_db_bak))

        # Assert a DatabaseError is raised for a garbled db and the file is left untouched
        garbled_db = os.path.join(self.session_base_dir, 'garbled.db')
        with open(garbled_db, 'w') as f:
            f.write("123")
        self.assertRaises(sqlite3.DatabaseError, cleanup_pony_experimental_db, garbled_db)
        self.assertTrue(os.path.exists(garbled_db))
Example #4
class TestTrackerState(TriblerCoreTest):
    """
    Contains various tests for the TrackerState class.
    """
    @inlineCallbacks
    def setUp(self):
        yield super(TestTrackerState, self).setUp()
        self.my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(":memory:", self.session_base_dir,
                                 self.my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestTrackerState, self).tearDown()

    @db_session
    def test_create_tracker_state(self):
        ts = self.mds.TrackerState(
            url='http://tracker.tribler.org:80/announce')
        self.assertEqual(list(self.mds.TrackerState.select())[0], ts)

    @db_session
    def test_canonicalize_tracker_state(self):
        ts = self.mds.TrackerState(
            url='http://tracker.tribler.org:80/announce/')
        self.assertEqual(
            self.mds.TrackerState.get(
                url='http://tracker.tribler.org/announce'), ts)

    @db_session
    def test_canonicalize_raise_on_malformed_url(self):
        self.assertRaises(MalformedTrackerURLException,
                          self.mds.TrackerState,
                          url='udp://tracker.tribler.org/announce/')
Example #5
class TestTrackerState(TriblerCoreTest):
    """
    Contains various tests for the TrackerState class.
    """
    @inlineCallbacks
    def setUp(self):
        yield super(TestTrackerState, self).setUp()
        self.my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(":memory:", self.session_base_dir, self.my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestTrackerState, self).tearDown()

    @db_session
    def test_create_tracker_state(self):
        ts = self.mds.TrackerState(url='http://tracker.tribler.org:80/announce')
        self.assertEqual(list(self.mds.TrackerState.select())[0], ts)

    @db_session
    def test_canonicalize_tracker_state(self):
        ts = self.mds.TrackerState(url='http://tracker.tribler.org:80/announce/')
        self.assertEqual(self.mds.TrackerState.get(url='http://tracker.tribler.org/announce'), ts)

    @db_session
    def test_canonicalize_raise_on_malformed_url(self):
        self.assertRaises(MalformedTrackerURLException, self.mds.TrackerState,
                          url='udp://tracker.tribler.org/announce/')
Example #6
    def setUp(self):
        yield super(TestUpgradeDB72ToPony, self).setUp()

        self.my_key = default_eccrypto.generate_key(u"curve25519")
        mds_db = os.path.join(self.session_base_dir, 'test.db')
        mds_channels_dir = self.session_base_dir

        self.mds = MetadataStore(mds_db, mds_channels_dir, self.my_key)
        self.m = DispersyToPonyMigration(OLD_DB_SAMPLE)
        self.m.initialize(self.mds)
Example #7
 def setUp(self):
     yield super(TestChannelMetadata, self).setUp()
     self.torrent_template = {
         "title": "",
         "infohash": "",
         "torrent_date": datetime(1970, 1, 1),
         "tags": "video"
     }
     self.my_key = default_eccrypto.generate_key(u"curve25519")
     self.mds = MetadataStore(":memory:", self.session_base_dir,
                              self.my_key)
Example #8
    def setUp(self):
        yield super(TestContentRepository, self).setUp()
        self.my_key = default_eccrypto.generate_key(u"curve25519")
        mds = MetadataStore(':memory:', self.session_base_dir, self.my_key)
        self.content_repository = ContentRepository(mds)

        # Add some content to the metadata database
        with db_session:
            mds.ChannelMetadata.create_channel('test', 'test')
            for torrent_ind in xrange(5):
                torrent = mds.TorrentMetadata(title='torrent%d' % torrent_ind, infohash=('%d' % torrent_ind) * 20)
                torrent.health.seeders = torrent_ind + 1
Example #9
 def setUp(self):
     yield super(TestTorrentMetadata, self).setUp()
     self.torrent_template = {
         "title": "",
         "infohash": "",
         "torrent_date": datetime(1970, 1, 1),
         "tags": "video"
     }
     self.my_key = default_eccrypto.generate_key(u"curve25519")
     self.mds = MetadataStore(
         os.path.join(self.session_base_dir, 'test.db'),
         self.session_base_dir, self.my_key)
Example #10
 def test_upgrade_72_to_pony(self):
     OLD_DB_SAMPLE = os.path.abspath(os.path.join(os.path.abspath(
         os.path.dirname(os.path.realpath(__file__))), '..', 'data', 'upgrade_databases', 'tribler_v29.sdb'))
     old_database_path = os.path.join(self.session.config.get_state_dir(), 'sqlite', 'tribler.sdb')
     new_database_path = os.path.join(self.session.config.get_state_dir(), 'sqlite', 'metadata.db')
     channels_dir = os.path.join(self.session.config.get_chant_channels_dir())
     shutil.copyfile(OLD_DB_SAMPLE, old_database_path)
     self.upgrader.upgrade_72_to_pony()
     yield self.upgrader.finished_deferred
     mds = MetadataStore(new_database_path, channels_dir, self.session.trustchain_keypair)
     with db_session:
         self.assertEqual(mds.TorrentMetadata.select().count(), 24)
     mds.shutdown()
Example #11
    def test_new_db_version_ok(self):
        pony_db = os.path.join(self.session_base_dir, 'pony.db')
        my_key = default_eccrypto.generate_key(u"curve25519")
        mds = MetadataStore(pony_db, self.session_base_dir, my_key)
        mds.shutdown()

        self.assertTrue(new_db_version_ok(pony_db))

        connection = sqlite3.connect(pony_db)
        with connection:
            cursor = connection.cursor()
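            # Bump db_version to a value the current code does not expect, so the version check should fail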
            cursor.execute("UPDATE MiscData SET value = 12313512 WHERE name == 'db_version'")
        self.assertFalse(new_db_version_ok(pony_db))
Example #12
File: upgrade.py Project: zippav/tribler
    def upgrade_72_to_pony(self):
        old_database_path = os.path.join(self.session.config.get_state_dir(), 'sqlite', 'tribler.sdb')
        new_database_path = os.path.join(self.session.config.get_state_dir(), 'sqlite', 'metadata.db')
        channels_dir = os.path.join(self.session.config.get_chant_channels_dir())

        self.pony_upgrader = DispersyToPonyMigration(old_database_path, self.update_status, logger=self._logger)
        if not should_upgrade(old_database_path, new_database_path, logger=self._logger):
            return
        # We have to create the Metadata Store object because the LaunchManyCore has not been started yet
        mds = MetadataStore(new_database_path, channels_dir, self.session.trustchain_keypair)
        self.pony_upgrader.initialize(mds)
        self.finished_deferred = self.pony_upgrader.do_migration()
        mds.shutdown()
Example #13
    def test_new_db_version_ok(self):
        pony_db = os.path.join(self.session_base_dir, 'pony.db')
        my_key = default_eccrypto.generate_key(u"curve25519")
        mds = MetadataStore(pony_db, self.session_base_dir, my_key)
        mds.shutdown()

        self.assertTrue(new_db_version_ok(pony_db))

        connection = sqlite3.connect(pony_db)
        with connection:
            cursor = connection.cursor()
            cursor.execute(
                "UPDATE MiscData SET value = 12313512 WHERE name == 'db_version'"
            )
        self.assertFalse(new_db_version_ok(pony_db))
Example #14
    def create_node(self, *args, **kwargs):
        mds = MetadataStore(
            os.path.join(self.temporary_directory(), 'test.db'),
            self.temporary_directory(), self.shared_key)

        # Add some content to the metadata database
        with db_session:
            mds.ChannelMetadata.create_channel('test', 'test')
            for torrent_ind in xrange(5):
                torrent = mds.TorrentMetadata(title='torrent%d' % torrent_ind,
                                              infohash=('%d' % torrent_ind) *
                                              20)
                torrent.health.seeders = torrent_ind + 1

        return MockIPv8(u"curve25519", PopularityCommunity, metadata_store=mds)
Example #15
 def test_store_clock(self):
     my_key = default_eccrypto.generate_key(u"curve25519")
     mds2 = MetadataStore(os.path.join(self.session_base_dir, 'test.db'),
                          self.session_base_dir, my_key)
     tick = mds2.clock.tick()
     mds2.shutdown()
     mds2 = MetadataStore(os.path.join(self.session_base_dir, 'test.db'),
                          self.session_base_dir, my_key)
     self.assertEqual(mds2.clock.clock, tick)
     mds2.shutdown()
Example #16
    def test_already_upgraded(self):
        pony_db = os.path.join(self.session_base_dir, 'pony.db')
        my_key = default_eccrypto.generate_key(u"curve25519")
        mds = MetadataStore(pony_db, self.session_base_dir, my_key)
        mds.shutdown()

        self.assertFalse(already_upgraded(pony_db))

        mds = MetadataStore(pony_db, self.session_base_dir, my_key)
        with db_session:
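            # Record that the 7.2 conversion finished, so the store should now be reported as already upgraded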
            mds.MiscData(name=CONVERSION_FROM_72, value=CONVERSION_FINISHED)
        mds.shutdown()

        self.assertTrue(already_upgraded(pony_db))
Example #17
 def create_node(self, *args, **kwargs):
     metadata_store = MetadataStore(
         os.path.join(self.temporary_directory(), "%d.db" % self.count),
         self.temporary_directory(),
         default_eccrypto.generate_key(u"curve25519"))
     kwargs['metadata_store'] = metadata_store
     node = super(TestGigaChannelUnits, self).create_node(*args, **kwargs)
     self.count += 1
     return node
Example #18
 def setUp(self):
     yield super(TestChannelMetadata, self).setUp()
     self.torrent_template = {
         "title": "",
         "infohash": "",
         "torrent_date": datetime(1970, 1, 1),
         "tags": "video"
     }
     self.my_key = default_eccrypto.generate_key(u"curve25519")
     self.mds = MetadataStore(":memory:", self.session_base_dir, self.my_key)
Example #19
    def setUp(self):
        yield super(TestUpgradeDB72ToPony, self).setUp()

        self.my_key = default_eccrypto.generate_key(u"curve25519")
        mds_db = os.path.join(self.session_base_dir, 'test.db')
        mds_channels_dir = self.session_base_dir

        self.mds = MetadataStore(mds_db, mds_channels_dir, self.my_key)
        self.m = DispersyToPonyMigration(OLD_DB_SAMPLE)
        self.m.initialize(self.mds)
Example #20
 def setUp(self):
     yield super(TestTorrentMetadata, self).setUp()
     self.torrent_template = {
         "title": "",
         "infohash": "",
         "torrent_date": datetime(1970, 1, 1),
         "tags": "video"
     }
     self.my_key = default_eccrypto.generate_key(u"curve25519")
     self.mds = MetadataStore(os.path.join(self.session_base_dir, 'test.db'), self.session_base_dir,
                              self.my_key)
Example #21
    def setUp(self):
        yield super(TestGigaChannelManager, self).setUp()
        self.torrent_template = {
            "title": "",
            "infohash": "",
            "torrent_date": datetime(1970, 1, 1),
            "tags": "video"
        }
        my_key = default_eccrypto.generate_key(u"curve25519")
        self.mock_session = MockObject()
        self.mock_session.lm = MockObject()
        self.mock_session.lm.mds = MetadataStore(":memory:", self.session_base_dir, my_key)

        self.chanman = GigaChannelManager(self.mock_session)
        self.torrents_added = 0
Example #22
    def setUp(self):
        yield super(TestStatisticsEndpoint, self).setUp()

        self.mock_ipv8 = MockIPv8(
            u"low",
            TrustChainCommunity,
            working_directory=self.session.config.get_state_dir())
        self.mock_ipv8.overlays = [self.mock_ipv8.overlay]
        self.mock_ipv8.endpoint.bytes_up = 100
        self.mock_ipv8.endpoint.bytes_down = 20
        self.session.lm.ipv8 = self.mock_ipv8
        self.session.config.set_ipv8_enabled(True)
        my_key = default_eccrypto.generate_key(u"curve25519")
        self.session.lm.mds = MetadataStore(
            os.path.join(self.session_base_dir, 'test.db'),
            self.session_base_dir, my_key)
Example #23
    def test_already_upgraded(self):
        pony_db = os.path.join(self.session_base_dir, 'pony.db')
        my_key = default_eccrypto.generate_key(u"curve25519")
        mds = MetadataStore(pony_db, self.session_base_dir, my_key)
        mds.shutdown()

        self.assertFalse(already_upgraded(pony_db))

        mds = MetadataStore(pony_db, self.session_base_dir, my_key)
        with db_session:
            mds.MiscData(name=CONVERSION_FROM_72, value=CONVERSION_FINISHED)
        mds.shutdown()

        self.assertTrue(already_upgraded(pony_db))
Example #24
class TestMetadataStore(TriblerCoreTest):
    """
    This class contains tests for the metadata store.
    """
    DATA_DIR = os.path.join(
        os.path.abspath(os.path.dirname(os.path.realpath(__file__))), '..',
        '..', 'data')
    CHANNEL_DIR = os.path.join(
        DATA_DIR, 'sample_channel',
        'd24941643ff471e40d7761c71f4e3a4c21a4a5e89b0281430d01e78a4e46')
    CHANNEL_METADATA = os.path.join(DATA_DIR, 'sample_channel',
                                    'channel.mdblob')

    @inlineCallbacks
    def setUp(self):
        yield super(TestMetadataStore, self).setUp()
        my_key = default_eccrypto.generate_key(u"curve25519")

        self.mds = MetadataStore(
            os.path.join(self.session_base_dir, 'test.db'),
            self.session_base_dir, my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestMetadataStore, self).tearDown()

    @db_session
    def test_process_channel_dir_file(self):
        """
        Test whether we are able to process files in a directory containing torrent metadata
        """

        test_torrent_metadata = self.mds.TorrentMetadata(title='test')
        metadata_path = os.path.join(self.session_base_dir, 'metadata.data')
        test_torrent_metadata.to_file(metadata_path)
        # Delete this torrent metadata now; it should be added to the database again when the file is loaded
        test_torrent_metadata.delete()
        loaded_metadata = self.mds.process_mdblob_file(metadata_path)
        self.assertEqual(loaded_metadata[0].title, 'test')

        # Test whether we delete existing metadata when loading a DeletedMetadata blob
        metadata = self.mds.TorrentMetadata(infohash='1' * 20)
        metadata.to_delete_file(metadata_path)
        loaded_metadata = self.mds.process_mdblob_file(metadata_path)
        # Make sure the original metadata is deleted
        self.assertListEqual(loaded_metadata, [])
        self.assertIsNone(self.mds.TorrentMetadata.get(infohash='1' * 20))

        # Test an unknown metadata type, this should raise an exception
        invalid_metadata = os.path.join(self.session_base_dir,
                                        'invalidtype.mdblob')
        make_wrong_payload(invalid_metadata)
        self.assertRaises(UnknownBlobTypeException,
                          self.mds.process_mdblob_file, invalid_metadata)

    @db_session
    def test_squash_mdblobs(self):
        chunk_size = self.mds.ChannelMetadata._CHUNK_SIZE_LIMIT
        md_list = [
            self.mds.TorrentMetadata(title='test' + str(x))
            for x in xrange(0, 10)
        ]
        chunk, _ = entries_to_chunk(md_list, chunk_size=chunk_size)
        self.assertItemsEqual(md_list, self.mds.process_squashed_mdblob(chunk))

        # Test splitting into multiple chunks
        chunk, index = entries_to_chunk(md_list, chunk_size=1000)
        chunk += entries_to_chunk(md_list, chunk_size=1000,
                                  start_index=index)[0]
        self.assertItemsEqual(md_list, self.mds.process_squashed_mdblob(chunk))

    @db_session
    def test_multiple_squashed_commit_and_read(self):
        """
        Test committing entries into several squashed blobs and reading them back
        """
        self.mds.ChannelMetadata._CHUNK_SIZE_LIMIT = 500

        num_entries = 10
        channel = self.mds.ChannelMetadata(title='testchan')
        md_list = [
            self.mds.TorrentMetadata(title='test' + str(x))
            for x in xrange(0, num_entries)
        ]
        channel.commit_channel_torrent()

        channel.local_version = 0
        for md in md_list:
            md.delete()

        channel_dir = os.path.join(self.mds.channels_dir, channel.dir_name)
        # Make sure it was broken into more than one .mdblob file
        self.assertTrue(len(os.listdir(channel_dir)) > 1)
        self.mds.process_channel_dir(channel_dir, channel.public_key)
        self.assertEqual(num_entries, len(channel.contents))

    @db_session
    def test_process_channel_dir(self):
        """
        Test processing a directory containing metadata blobs
        """
        payload = ChannelMetadataPayload.from_file(self.CHANNEL_METADATA)
        channel = self.mds.ChannelMetadata.process_channel_metadata_payload(
            payload)
        self.assertFalse(channel.contents_list)
        self.mds.process_channel_dir(self.CHANNEL_DIR, channel.public_key)
        self.assertEqual(len(channel.contents_list), 3)
        self.assertEqual(channel.local_version, 3)
Example #25
class TestTorrentMetadata(TriblerCoreTest):
    """
    Contains various tests for the torrent metadata type.
    """

    @inlineCallbacks
    def setUp(self):
        yield super(TestTorrentMetadata, self).setUp()
        self.torrent_template = {
            "title": "",
            "infohash": "",
            "torrent_date": datetime(1970, 1, 1),
            "tags": "video"
        }
        self.my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(os.path.join(self.session_base_dir, 'test.db'), self.session_base_dir,
                                 self.my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestTorrentMetadata, self).tearDown()

    @db_session
    def test_serialization(self):
        """
        Test converting torrent metadata to serialized data
        """
        torrent_metadata = self.mds.TorrentMetadata.from_dict({})
        self.assertTrue(torrent_metadata.serialized())

    @db_session
    def test_get_magnet(self):
        """
        Test converting torrent metadata to a magnet link
        """
        torrent_metadata = self.mds.TorrentMetadata.from_dict({})
        self.assertTrue(torrent_metadata.get_magnet())

    @db_session
    def test_search_keyword(self):
        """
        Test searching in a database with some torrent metadata inserted
        """
        torrent1 = self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="foo bar 123", tags="video"))
        torrent2 = self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="eee 123", tags="video"))
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="xoxoxo bar", tags="video"))
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="xoxoxo bar", tags="audio"))

        # Search for torrents with the keyword 'foo', it should return one result
        results = self.mds.TorrentMetadata.search_keyword("foo")
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].rowid, torrent1.rowid)

        # Search for torrents with the keyword 'eee', it should return one result
        results = self.mds.TorrentMetadata.search_keyword("eee")
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].rowid, torrent2.rowid)

        # Search for torrents with the keyword '123', it should return two results
        results = self.mds.TorrentMetadata.search_keyword("123")
        self.assertEqual(len(results), 2)

        # Search for torrents with the keyword 'video', it should return three results
        results = self.mds.TorrentMetadata.search_keyword("video")
        self.assertEqual(len(results), 3)

    def test_search_empty_query(self):
        """
        Test whether an empty query returns nothing
        """
        self.assertFalse(self.mds.TorrentMetadata.search_keyword(None))

    @db_session
    def test_unicode_search(self):
        """
        Test searching in the database with unicode characters
        """
        self.mds.TorrentMetadata.from_dict(dict(self.torrent_template, title=u"я маленький апельсин"))
        results = self.mds.TorrentMetadata.search_keyword(u"маленький")
        self.assertEqual(1, len(results))

    @db_session
    def test_wildcard_search(self):
        """
        Test searching in the database with a wildcard
        """
        self.mds.TorrentMetadata.from_dict(dict(self.torrent_template, title="foobar 123"))
        self.mds.TorrentMetadata.from_dict(dict(self.torrent_template, title="foobla 123"))
        self.assertEqual(0, len(self.mds.TorrentMetadata.search_keyword("*")))
        self.assertEqual(1, len(self.mds.TorrentMetadata.search_keyword("foobl*")))
        self.assertEqual(2, len(self.mds.TorrentMetadata.search_keyword("foo*")))

    @db_session
    def test_stemming_search(self):
        """
        Test searching in the database with stemmed words
        """
        torrent = self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="mountains sheep", tags="video"))

        # Search with the word 'mountain' should return the torrent with 'mountains' in the title
        results = self.mds.TorrentMetadata.search_keyword("mountain")
        self.assertEqual(torrent.rowid, results[0].rowid)

        # Search with the word 'sheeps' should return the torrent with 'sheep' in the title
        results = self.mds.TorrentMetadata.search_keyword("sheeps")
        self.assertEqual(torrent.rowid, results[0].rowid)

    @db_session
    def test_get_autocomplete_terms(self):
        """
        Test fetching autocompletion terms from the database
        """
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="mountains sheep", tags="video"))
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="regular sheepish guy", tags="video"))

        autocomplete_terms = self.mds.TorrentMetadata.get_auto_complete_terms("shee", 10)
        self.assertIn('sheep', autocomplete_terms)

        autocomplete_terms = self.mds.TorrentMetadata.get_auto_complete_terms("shee", 10)
        self.assertIn('sheepish', autocomplete_terms)

    @db_session
    def test_get_autocomplete_terms_max(self):
        """
        Test fetching autocompletion terms from the database with a maximum number of terms
        """
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="mountains sheeps wolf", tags="video"))
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="lakes sheep", tags="video"))
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="regular sheepish guy", tags="video"))

        autocomplete_terms = self.mds.TorrentMetadata.get_auto_complete_terms("sheep", 2)
        self.assertEqual(len(autocomplete_terms), 2)
Example #26
    def init(self):
        # Wallets
        if self.session.config.get_bitcoinlib_enabled():
            try:
                from Tribler.Core.Modules.wallet.btc_wallet import BitcoinWallet, BitcoinTestnetWallet
                wallet_path = os.path.join(self.session.config.get_state_dir(),
                                           'wallet')
                btc_wallet = BitcoinWallet(wallet_path)
                btc_testnet_wallet = BitcoinTestnetWallet(wallet_path)
                self.wallets[btc_wallet.get_identifier()] = btc_wallet
                self.wallets[
                    btc_testnet_wallet.get_identifier()] = btc_testnet_wallet
            except Exception as exc:
                self._logger.error("bitcoinlib library cannot be loaded: %s",
                                   exc)

        if self.session.config.get_chant_enabled():
            channels_dir = os.path.join(
                self.session.config.get_chant_channels_dir())
            database_path = os.path.join(self.session.config.get_state_dir(),
                                         'sqlite', 'metadata.db')
            self.mds = MetadataStore(database_path, channels_dir,
                                     self.session.trustchain_keypair)

        if self.session.config.get_dummy_wallets_enabled():
            # For debugging purposes, we create dummy wallets
            dummy_wallet1 = DummyWallet1()
            self.wallets[dummy_wallet1.get_identifier()] = dummy_wallet1

            dummy_wallet2 = DummyWallet2()
            self.wallets[dummy_wallet2.get_identifier()] = dummy_wallet2

        if self.ipv8:
            self.ipv8_start_time = time.time()
            self.load_ipv8_overlays()
            self.enable_ipv8_statistics()

        tunnel_community_ports = self.session.config.get_tunnel_community_socks5_listen_ports()
        self.session.config.set_anon_proxy_settings(
            2, ("127.0.0.1", tunnel_community_ports))

        if self.session.config.get_libtorrent_enabled():
            self.session.readable_status = STATE_START_LIBTORRENT
            from Tribler.Core.Libtorrent.LibtorrentMgr import LibtorrentMgr
            self.ltmgr = LibtorrentMgr(self.session)
            self.ltmgr.initialize()
            for port, protocol in self.upnp_ports:
                self.ltmgr.add_upnp_mapping(port, protocol)

        if self.session.config.get_chant_enabled():
            self.gigachannel_manager = GigaChannelManager(self.session)
            self.gigachannel_manager.start()

        # add task for tracker checking
        if self.session.config.get_torrent_checking_enabled():
            self.session.readable_status = STATE_START_TORRENT_CHECKER
            self.torrent_checker = TorrentChecker(self.session)
            self.torrent_checker.initialize()

        if self.api_manager:
            self.session.readable_status = STATE_START_API_ENDPOINTS
            self.api_manager.root_endpoint.start_endpoints()

        if self.session.config.get_watch_folder_enabled():
            self.session.readable_status = STATE_START_WATCH_FOLDER
            self.watch_folder = WatchFolder(self.session)
            self.watch_folder.start()

        if self.session.config.get_credit_mining_enabled():
            self.session.readable_status = STATE_START_CREDIT_MINING
            from Tribler.Core.CreditMining.CreditMiningManager import CreditMiningManager
            self.credit_mining_manager = CreditMiningManager(self.session)

        if self.session.config.get_resource_monitor_enabled():
            self.resource_monitor = ResourceMonitor(self.session)
            self.resource_monitor.start()

        if self.session.config.get_version_checker_enabled():
            self.version_check_manager = VersionCheckManager(self.session)
            self.version_check_manager.start()

        self.session.set_download_states_callback(self.sesscb_states_callback)

        if self.session.config.get_ipv8_enabled() and self.session.config.get_trustchain_enabled():
            self.payout_manager = PayoutManager(self.trustchain_community,
                                                self.dht_community)

        self.initComplete = True
Example #27
class TestMetadata(TriblerCoreTest):
    """
    Contains various tests for the ChannelNode type.
    """

    @inlineCallbacks
    def setUp(self):
        yield super(TestMetadata, self).setUp()
        self.my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(':memory:', self.session_base_dir, self.my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestMetadata, self).tearDown()

    @db_session
    def test_to_dict(self):
        """
        Test whether converting metadata to a dictionary works
        """
        metadata = self.mds.ChannelNode.from_dict({})
        self.assertTrue(metadata.to_dict())

    @db_session
    def test_serialization(self):
        """
        Test converting metadata to serialized data and back
        """
        metadata1 = self.mds.ChannelNode.from_dict({})
        serialized1 = metadata1.serialized()
        metadata1.delete()
        orm.flush()

        metadata2 = self.mds.ChannelNode.from_payload(ChannelNodePayload.from_signed_blob(serialized1))
        serialized2 = metadata2.serialized()
        self.assertEqual(serialized1, serialized2)

        # Test no signature exception
        metadata2_dict = metadata2.to_dict()
        metadata2_dict.pop("signature")
        self.assertRaises(InvalidSignatureException, ChannelNodePayload, **metadata2_dict)

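        # Corrupt the tail of the serialized blob so its signature no longer validates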
        serialized3 = serialized2[:-5] + "\xee" * 5
        self.assertRaises(InvalidSignatureException, ChannelNodePayload.from_signed_blob, serialized3)
        # Test bypass signature check
        ChannelNodePayload.from_signed_blob(serialized3, check_signature=False)


    @db_session
    def test_key_mismatch_exception(self):
        mismatched_key = default_eccrypto.generate_key(u"curve25519")
        metadata = self.mds.ChannelNode.from_dict({})
        self.assertRaises(KeysMismatchException, metadata.serialized, key=mismatched_key)

    @db_session
    def test_to_file(self):
        """
        Test writing metadata to a file
        """
        metadata = self.mds.ChannelNode.from_dict({})
        file_path = os.path.join(self.session_base_dir, 'metadata.file')
        metadata.to_file(file_path)
        self.assertTrue(os.path.exists(file_path))

    @db_session
    def test_has_valid_signature(self):
        """
        Test whether a signature can be validated correctly
        """
        metadata = self.mds.ChannelNode.from_dict({})
        self.assertTrue(metadata.has_valid_signature())

        md_dict = metadata.to_dict()

        # Mess with the signature
        metadata.signature = 'a'
        self.assertFalse(metadata.has_valid_signature())

        # Create metadata with wrong key
        metadata.delete()
        md_dict.update(public_key=database_blob("aaa"))
        md_dict.pop("rowid")

        metadata = self.mds.ChannelNode(skip_key_check=True, **md_dict)
        self.assertFalse(metadata.has_valid_signature())

        key = default_eccrypto.generate_key(u"curve25519")
        metadata2 = self.mds.ChannelNode(sign_with=key, **md_dict)
        self.assertEqual(database_blob(key.pub().key_to_bin()[10:]), metadata2.public_key)
        md_dict2 = metadata2.to_dict()
        md_dict2["signature"] = md_dict["signature"]
        self.assertRaises(InvalidSignatureException, self.mds.ChannelNode, **md_dict2)

    @db_session
    def test_from_payload(self):
        """
        Test converting a metadata payload to a metadata object
        """
        metadata = self.mds.ChannelNode.from_dict({})
        metadata_dict = metadata.to_dict()
        metadata.delete()
        orm.flush()
        metadata_payload = ChannelNodePayload(**metadata_dict)
        self.assertTrue(self.mds.ChannelNode.from_payload(metadata_payload))
Example #28
class TestTorrentMetadata(TriblerCoreTest):
    """
    Contains various tests for the torrent metadata type.
    """
    @inlineCallbacks
    def setUp(self):
        yield super(TestTorrentMetadata, self).setUp()
        self.my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(':memory:', self.session_base_dir,
                                 self.my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestTorrentMetadata, self).tearDown()

    @db_session
    def test_serialization(self):
        """
        Test converting torrent metadata to serialized data
        """
        torrent_metadata = self.mds.TorrentMetadata.from_dict(
            {"infohash": str(random.getrandbits(160))})
        self.assertTrue(torrent_metadata.serialized())

    @db_session
    def test_get_magnet(self):
        """
        Test converting torrent metadata to a magnet link
        """
        torrent_metadata = self.mds.TorrentMetadata.from_dict(
            {"infohash": str(random.getrandbits(160))})
        self.assertTrue(torrent_metadata.get_magnet())
        torrent_metadata2 = self.mds.TorrentMetadata.from_dict(
            {'title': u'\U0001f4a9', "infohash": str(random.getrandbits(160))})
        self.assertTrue(torrent_metadata2.get_magnet())

    @db_session
    def test_search_keyword(self):
        """
        Test searching in a database with some torrent metadata inserted
        """
        torrent1 = self.mds.TorrentMetadata.from_dict(
            dict(rnd_torrent(), title="foo bar 123"))
        torrent2 = self.mds.TorrentMetadata.from_dict(
            dict(rnd_torrent(), title="eee 123"))
        self.mds.TorrentMetadata.from_dict(
            dict(rnd_torrent(), title="xoxoxo bar"))
        self.mds.TorrentMetadata.from_dict(
            dict(rnd_torrent(), title="xoxoxo bar"))
        self.mds.TorrentMetadata.from_dict(dict(rnd_torrent(), title=u"\""))
        self.mds.TorrentMetadata.from_dict(dict(rnd_torrent(), title=u"\'"))
        orm.flush()

        # Search for torrents with the keyword 'foo', it should return one result
        results = self.mds.TorrentMetadata.search_keyword("foo")[:]
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].rowid, torrent1.rowid)

        # Search for torrents with the keyword 'eee', it should return one result
        results = self.mds.TorrentMetadata.search_keyword("eee")[:]
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].rowid, torrent2.rowid)

        # Search for torrents with the keyword '123', it should return two results
        results = self.mds.TorrentMetadata.search_keyword("123")[:]
        self.assertEqual(len(results), 2)

    def test_search_empty_query(self):
        """
        Test whether an empty query returns nothing
        """
        self.assertFalse(self.mds.TorrentMetadata.search_keyword(None)[:])

    @db_session
    def test_unicode_search(self):
        """
        Test searching in the database with unicode characters
        """
        self.mds.TorrentMetadata.from_dict(
            dict(rnd_torrent(), title=u"я маленький апельсин"))
        results = self.mds.TorrentMetadata.search_keyword(u"маленький")[:]
        self.assertEqual(1, len(results))

    @db_session
    def test_wildcard_search(self):
        """
        Test searching in the database with a wildcard
        """
        self.mds.TorrentMetadata.from_dict(
            dict(rnd_torrent(), title="foobar 123"))
        self.mds.TorrentMetadata.from_dict(
            dict(rnd_torrent(), title="foobla 123"))
        self.assertEqual(0,
                         len(self.mds.TorrentMetadata.search_keyword("*")[:]))
        self.assertEqual(
            1, len(self.mds.TorrentMetadata.search_keyword("foobl*")[:]))
        self.assertEqual(
            2, len(self.mds.TorrentMetadata.search_keyword("foo*")[:]))
        self.assertEqual(
            1,
            len(
                self.mds.TorrentMetadata.search_keyword(
                    "(\"12\"* AND \"foobl\"*)")[:]))

    @db_session
    def test_stemming_search(self):
        """
        Test searching in the database with stemmed words
        """
        torrent = self.mds.TorrentMetadata.from_dict(
            dict(rnd_torrent(), title="mountains sheep", tags="video"))

        # Search with the word 'mountain' should return the torrent with 'mountains' in the title
        results = self.mds.TorrentMetadata.search_keyword("mountain")[:]
        self.assertEqual(torrent.rowid, results[0].rowid)

        # Search with the word 'sheeps' should return the torrent with 'sheep' in the title
        results = self.mds.TorrentMetadata.search_keyword("sheeps")[:]
        self.assertEqual(torrent.rowid, results[0].rowid)

    @db_session
    def test_get_autocomplete_terms(self):
        """
        Test fetching autocompletion terms from the database
        """
        self.mds.TorrentMetadata.from_dict(
            dict(rnd_torrent(), title="mountains sheep", tags="video"))
        self.mds.TorrentMetadata.from_dict(
            dict(rnd_torrent(), title="regular sheepish guy", tags="video"))

        autocomplete_terms = self.mds.TorrentMetadata.get_auto_complete_terms(
            "shee", 10)
        self.assertIn('sheep', autocomplete_terms)

        autocomplete_terms = self.mds.TorrentMetadata.get_auto_complete_terms(
            "shee", 10)
        self.assertIn('sheepish', autocomplete_terms)

        autocomplete_terms = self.mds.TorrentMetadata.get_auto_complete_terms(
            "", 10)
        self.assertEqual([], autocomplete_terms)

    @db_session
    def test_get_autocomplete_terms_max(self):
        """
        Test fetching autocompletion terms from the database with a maximum number of terms
        """
        self.mds.TorrentMetadata.from_dict(
            dict(rnd_torrent(), title="mountains sheeps wolf", tags="video"))
        self.mds.TorrentMetadata.from_dict(
            dict(rnd_torrent(), title="lakes sheep", tags="video"))
        self.mds.TorrentMetadata.from_dict(
            dict(rnd_torrent(), title="regular sheepish guy", tags="video"))

        autocomplete_terms = self.mds.TorrentMetadata.get_auto_complete_terms(
            "sheep", 2)
        self.assertEqual(len(autocomplete_terms), 2)
        # Check that the special character "." is handled without errors
        autocomplete_terms = self.mds.TorrentMetadata.get_auto_complete_terms(
            ".", 2)

    @db_session
    def test_get_entries(self):
        """
        Test whether we can get torrents
        """

        # First we create a few channels and add some torrents to these channels
        tlist = []
        for ind in xrange(5):
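            # Use a fresh key per iteration so each channel (and the torrents created after it) gets its own public key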
            self.mds.ChannelNode._my_key = default_eccrypto.generate_key(
                'curve25519')
            _ = self.mds.ChannelMetadata(title='channel%d' % ind,
                                         subscribed=(ind % 2 == 0),
                                         infohash=str(random.getrandbits(160)))
            tlist.extend([
                self.mds.TorrentMetadata(title='torrent%d' % torrent_ind,
                                         infohash=str(random.getrandbits(160)))
                for torrent_ind in xrange(5)
            ])
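        # Flag the last torrent as xxx and mark the one before it for deletion, so the filtered query below skips both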
        tlist[-1].xxx = 1
        tlist[-2].status = TODELETE

        torrents, count = self.mds.TorrentMetadata.get_entries(first=1, last=5)
        self.assertEqual(5, len(torrents))
        self.assertEqual(25, count)

        # Test fetching torrents in a channel
        channel_pk = self.mds.ChannelNode._my_key.pub().key_to_bin()[10:]
        torrents, count = self.mds.TorrentMetadata.get_entries(
            first=1, last=10, sort_by='title', channel_pk=channel_pk)
        self.assertEqual(5, len(torrents))
        self.assertEqual(5, count)

        torrents, count = self.mds.TorrentMetadata.get_entries(
            channel_pk=channel_pk, hide_xxx=True, exclude_deleted=True)[:]

        self.assertListEqual(tlist[-5:-2], list(torrents))
        self.assertEqual(count, 3)

    @db_session
    def test_metadata_conflicting(self):
        tdict = dict(rnd_torrent(),
                     title="lakes sheep",
                     tags="video",
                     infohash='\x00\xff')
        md = self.mds.TorrentMetadata.from_dict(tdict)
        self.assertFalse(md.metadata_conflicting(tdict))
        self.assertTrue(md.metadata_conflicting(dict(tdict, title="bla")))
        tdict.pop('title')
        self.assertFalse(md.metadata_conflicting(tdict))

    @db_session
    def test_update_properties(self):
        """
        Test the updating of several properties of a TorrentMetadata object
        """
        metadata = self.mds.TorrentMetadata(title='torrent',
                                            infohash=str(
                                                random.getrandbits(160)))
        self.assertRaises(NotImplementedError, metadata.update_properties, {
            "status": 3,
            "name": "bla"
        })
        self.assertRaises(NotImplementedError, metadata.update_properties,
                          {"name": "bla"})

        # Test updating the status only
        metadata.update_properties({"status": 456})
        self.assertEqual(metadata.status, 456)
Example #29
class TestMetadataStore(TriblerCoreTest):
    """
    This class contains tests for the metadata store.
    """
    DATA_DIR = os.path.join(
        os.path.abspath(os.path.dirname(os.path.realpath(__file__))), '..',
        '..', 'data')
    SAMPLE_DIR = os.path.join(DATA_DIR, 'sample_channel')
    # Just get the first and only subdir there, and assume it is the sample channel dir
    CHANNEL_DIR = [
        os.path.join(SAMPLE_DIR, subdir) for subdir in os.listdir(SAMPLE_DIR)
        if os.path.isdir(os.path.join(SAMPLE_DIR, subdir))
        and len(subdir) == CHANNEL_DIR_NAME_LENGTH
    ][0]
    CHANNEL_METADATA = os.path.join(DATA_DIR, 'sample_channel',
                                    'channel.mdblob')

    @inlineCallbacks
    def setUp(self):
        yield super(TestMetadataStore, self).setUp()
        my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(":memory:", self.session_base_dir, my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestMetadataStore, self).tearDown()

    def test_store_clock(self):
        my_key = default_eccrypto.generate_key(u"curve25519")
        mds2 = MetadataStore(os.path.join(self.session_base_dir, 'test.db'),
                             self.session_base_dir, my_key)
        tick = mds2.clock.tick()
        mds2.shutdown()
        mds2 = MetadataStore(os.path.join(self.session_base_dir, 'test.db'),
                             self.session_base_dir, my_key)
        self.assertEqual(mds2.clock.clock, tick)
        mds2.shutdown()

    @db_session
    def test_process_channel_dir_file(self):
        """
        Test whether we are able to process files in a directory containing node metadata
        """

        test_node_metadata = self.mds.TorrentMetadata(title='test',
                                                      infohash=database_blob(
                                                          os.urandom(20)))
        metadata_path = os.path.join(self.session_base_dir, 'metadata.data')
        test_node_metadata.to_file(metadata_path)
        # Delete this torrent metadata now; it should be added to the database again when the file is loaded
        test_node_metadata.delete()
        loaded_metadata = self.mds.process_mdblob_file(metadata_path)
        self.assertEqual(loaded_metadata[0][0].title, 'test')

        # Test whether we delete existing metadata when loading a DeletedMetadata blob
        metadata = self.mds.TorrentMetadata(infohash='1' * 20)
        metadata.to_delete_file(metadata_path)
        loaded_metadata = self.mds.process_mdblob_file(metadata_path)
        # Make sure the original metadata is deleted
        self.assertEqual(loaded_metadata[0], (None, 6))
        self.assertIsNone(self.mds.TorrentMetadata.get(infohash='1' * 20))

        # Test an unknown metadata type, this should raise an exception
        invalid_metadata = os.path.join(self.session_base_dir,
                                        'invalidtype.mdblob')
        make_wrong_payload(invalid_metadata)
        self.assertRaises(UnknownBlobTypeException,
                          self.mds.process_mdblob_file, invalid_metadata)

    @db_session
    def test_squash_mdblobs(self):
        chunk_size = self.mds.ChannelMetadata._CHUNK_SIZE_LIMIT
        md_list = [
            self.mds.TorrentMetadata(title=''.join(
                random.choice(string.ascii_uppercase + string.digits)
                for _ in range(20)),
                                     infohash=database_blob(os.urandom(20)))
            for _ in range(0, 10)
        ]
        chunk, _ = entries_to_chunk(md_list, chunk_size=chunk_size)
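        # Remember the signatures so we can verify that the same entries are recreated from the compressed blob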
        dict_list = [d.to_dict()["signature"] for d in md_list]
        for d in md_list:
            d.delete()
        self.assertListEqual(dict_list, [
            d[0].to_dict()["signature"]
            for d in self.mds.process_compressed_mdblob(chunk)
        ])

    @db_session
    def test_squash_mdblobs_multiple_chunks(self):
        md_list = [
            self.mds.TorrentMetadata(title=''.join(
                random.choice(string.ascii_uppercase + string.digits)
                for _ in range(20)),
                                     infohash=database_blob(os.urandom(20)))
            for _ in range(0, 10)
        ]
        # Test splitting into multiple chunks
        chunk, index = entries_to_chunk(md_list, chunk_size=900)
        chunk2, _ = entries_to_chunk(md_list,
                                     chunk_size=900,
                                     start_index=index)
        dict_list = [d.to_dict()["signature"] for d in md_list]
        for d in md_list:
            d.delete()
        self.assertListEqual(dict_list[:index], [
            d[0].to_dict()["signature"]
            for d in self.mds.process_compressed_mdblob(chunk)
        ])
        self.assertListEqual(dict_list[index:], [
            d[0].to_dict()["signature"]
            for d in self.mds.process_compressed_mdblob(chunk2)
        ])

    @db_session
    def test_multiple_squashed_commit_and_read(self):
        """
        Test committing entries into several squashed blobs and reading them back
        """
        self.mds.ChannelMetadata._CHUNK_SIZE_LIMIT = 500

        num_entries = 10
        channel = self.mds.ChannelMetadata(title='testchan',
                                           infohash=database_blob(
                                               os.urandom(20)))
        md_list = [
            self.mds.TorrentMetadata(title='test' + str(x),
                                     status=NEW,
                                     infohash=database_blob(os.urandom(20)))
            for x in range(0, num_entries)
        ]
        channel.commit_channel_torrent()

        channel.local_version = 0
        for md in md_list:
            md.delete()

        channel_dir = os.path.join(self.mds.channels_dir, channel.dir_name)
        # Make sure it was broken into more than one .mdblob file
        self.assertTrue(len(os.listdir(channel_dir)) > 1)
        self.mds.process_channel_dir(channel_dir, channel.public_key)
        self.assertEqual(num_entries, len(channel.contents))

    @db_session
    def test_process_invalid_compressed_mdblob(self):
        """
        Test whether processing an invalid compressed mdblob does not crash Tribler
        """
        self.assertFalse(self.mds.process_compressed_mdblob("abcdefg"))

    @db_session
    def test_process_channel_dir(self):
        """
        Test processing a directory containing metadata blobs
        """
        payload = ChannelMetadataPayload.from_file(self.CHANNEL_METADATA)
        channel = self.mds.ChannelMetadata.process_channel_metadata_payload(
            payload)
        self.assertFalse(channel.contents_list)
        self.mds.process_channel_dir(self.CHANNEL_DIR, channel.public_key)
        self.assertEqual(len(channel.contents_list), 3)
        self.assertEqual(channel.timestamp, 1551110113007)
        self.assertEqual(channel.local_version, channel.timestamp)

    @db_session
    def test_process_payload(self):
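        # Helper that returns an entry, its signed payload, and the corresponding delete payload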
        def get_payloads(entity_class):
            c = entity_class(infohash=database_blob(os.urandom(20)))
            payload = c._payload_class.from_signed_blob(c.serialized())
            deleted_payload = DeletedMetadataPayload.from_signed_blob(
                c.serialized_delete())
            return c, payload, deleted_payload

        _, node_payload, node_deleted_payload = get_payloads(
            self.mds.ChannelNode)

        self.assertFalse(self.mds.process_payload(node_payload))
        self.assertEqual([(None, DELETED_METADATA)],
                         self.mds.process_payload(node_deleted_payload))
        # Do nothing in case it is unknown/abstract payload type, like ChannelNode
        self.assertFalse(self.mds.process_payload(node_payload))

        # Check if node metadata object is properly created on payload processing
        node, node_payload, node_deleted_payload = get_payloads(
            self.mds.TorrentMetadata)
        node_dict = node.to_dict()
        node.delete()
        result = self.mds.process_payload(node_payload)
        self.assertEqual(UNKNOWN_TORRENT, result[0][1])
        self.assertEqual(node_dict['metadata_type'],
                         result[0][0].to_dict()['metadata_type'])

        # Check the same for a channel
        node, node_payload, node_deleted_payload = get_payloads(
            self.mds.ChannelMetadata)
        node_dict = node.to_dict()
        node.delete()

        # Check that there is no action if the signature on the delete object is unknown
        self.assertFalse(self.mds.process_payload(node_deleted_payload))
        result = self.mds.process_payload(node_payload)
        self.assertEqual(UNKNOWN_CHANNEL, result[0][1])
        self.assertEqual(node_dict['metadata_type'],
                         result[0][0].to_dict()['metadata_type'])

    @db_session
    def test_process_payload_merge_entries(self):
        # Check the corner case where the new entry must replace two old entries:
        # one with a matching infohash, and another one with a matching id_
        node = self.mds.TorrentMetadata(infohash=database_blob(os.urandom(20)))
        node_dict = node.to_dict()
        node.delete()

        node2 = self.mds.TorrentMetadata(
            infohash=database_blob(os.urandom(20)))
        node2_dict = node2.to_dict()
        node2.delete()

        node_updated = self.mds.TorrentMetadata(
            infohash=node_dict["infohash"],
            id_=node2_dict["id_"],
            timestamp=node2_dict["timestamp"] + 1)
        node_updated_payload = node_updated._payload_class.from_signed_blob(
            node_updated.serialized())
        node_updated.delete()

        self.mds.TorrentMetadata(**node_dict)
        self.mds.TorrentMetadata(**node2_dict)

        result = self.mds.process_payload(node_updated_payload)
        self.assertIn((None, DELETED_METADATA), result)
        self.assertIn((self.mds.TorrentMetadata.get(), UPDATED_OUR_VERSION),
                      result)
        self.assertEqual(
            database_blob(self.mds.TorrentMetadata.select()[:][0].signature),
            database_blob(node_updated_payload.signature))

    @db_session
    def test_process_payload_reject_older(self):
        # Check there is no action if the processed payload has a timestamp that is less than the
        # local_version of the corresponding local channel. (I.e. remote peer trying to push back a deleted entry)
        channel = self.mds.ChannelMetadata(title='bla',
                                           version=123,
                                           local_version=12,
                                           infohash=database_blob(
                                               os.urandom(20)))
        torrent = self.mds.TorrentMetadata(title='blabla',
                                           timestamp=11,
                                           origin_id=channel.id_,
                                           infohash=database_blob(
                                               os.urandom(20)))
        payload = torrent._payload_class(**torrent.to_dict())
        torrent.delete()
        self.assertFalse(self.mds.process_payload(payload))

    @db_session
    def test_process_payload_reject_older_entry_with_known_infohash_or_merge(
            self):
        # Check that an older payload with a known infohash gets rejected (GOT_NEWER_VERSION), while a newer
        # payload with the same infohash deletes the old entry and replaces it
        torrent = self.mds.TorrentMetadata(title='blabla',
                                           timestamp=10,
                                           id_=10,
                                           infohash=database_blob(
                                               os.urandom(20)))
        payload = torrent._payload_class(**torrent.to_dict())
        torrent.delete()

        torrent2 = self.mds.TorrentMetadata(title='blabla',
                                            timestamp=11,
                                            id_=3,
                                            infohash=payload.infohash)
        payload2 = torrent._payload_class(**torrent2.to_dict())
        torrent2.delete()

        torrent3 = self.mds.TorrentMetadata(title='blabla',
                                            timestamp=12,
                                            id_=4,
                                            infohash=payload.infohash)
        payload3 = torrent._payload_class(**torrent3.to_dict())
        torrent3.delete()

        self.mds.process_payload(payload2)
        self.assertEqual(GOT_NEWER_VERSION,
                         self.mds.process_payload(payload)[0][1])

        # In this corner case the newly arrived payload contains a newer node
        # that has the same infohash as the one that is already there.
        # The older one should be deleted, and the newer one should be installed instead.
        results = self.mds.process_payload(payload3)
        self.assertIn((None, DELETED_METADATA), results)
        self.assertIn((self.mds.TorrentMetadata.get(), UNKNOWN_TORRENT),
                      results)

    @db_session
    def test_get_num_channels_nodes(self):
        self.mds.ChannelMetadata(title='testchan',
                                 id_=0,
                                 infohash=database_blob(os.urandom(20)))
        self.mds.ChannelMetadata(title='testchan',
                                 id_=123,
                                 infohash=database_blob(os.urandom(20)))
        self.mds.ChannelMetadata(title='testchan',
                                 id_=0,
                                 public_key=unhexlify('0' * 20),
                                 signature=unhexlify('0' * 64),
                                 skip_key_check=True,
                                 infohash=database_blob(os.urandom(20)))
        self.mds.ChannelMetadata(title='testchan',
                                 id_=0,
                                 public_key=unhexlify('1' * 20),
                                 signature=unhexlify('1' * 64),
                                 skip_key_check=True,
                                 infohash=database_blob(os.urandom(20)))

        _ = [
            self.mds.TorrentMetadata(title='test' + str(x),
                                     status=NEW,
                                     infohash=database_blob(os.urandom(20)))
            for x in range(0, 3)
        ]

        self.assertEqual(4, self.mds.get_num_channels())
        self.assertEqual(3, self.mds.get_num_torrents())
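
The process_payload assertions above show that MetadataStore.process_payload returns a list of (metadata, status) pairs, with metadata being None for DELETED_METADATA results. A minimal sketch of consuming that result outside a test; the names mds, payload and handle_incoming_entry are assumptions, while the status constants are the ones used in the assertions above:

with db_session:
    for md, status in mds.process_payload(payload):
        if status == DELETED_METADATA:
            continue  # the payload removed a local entry; md is None in this case
        if status in (UPDATED_OUR_VERSION, GOT_NEWER_VERSION, UNKNOWN_TORRENT):
            handle_incoming_entry(md, status)  # hypothetical application callback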
Example #30
    def setUp(self):
        yield super(TestMetadataStore, self).setUp()
        my_key = default_eccrypto.generate_key(u"curve25519")

        self.mds = MetadataStore(os.path.join(self.session_base_dir, 'test.db'), self.session_base_dir,
                                 my_key)
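
This fragment omits the @inlineCallbacks decorator and the matching tearDown that the complete TestMetadataStore example further down in this listing uses; the full pattern, taken from that example, is:

    @inlineCallbacks
    def setUp(self):
        yield super(TestMetadataStore, self).setUp()
        my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(os.path.join(self.session_base_dir, 'test.db'),
                                 self.session_base_dir, my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestMetadataStore, self).tearDown()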
Example #31
class TestMetadata(TriblerCoreTest):
    """
    Contains various tests for the Metadata type.
    """
    @inlineCallbacks
    def setUp(self):
        yield super(TestMetadata, self).setUp()
        self.my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(
            os.path.join(self.session_base_dir, 'test.db'),
            self.session_base_dir, self.my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestMetadata, self).tearDown()

    @db_session
    def test_to_dict(self):
        """
        Test whether converting metadata to a dictionary works
        """
        metadata = self.mds.Metadata.from_dict({})
        self.assertTrue(metadata.to_dict())

    @db_session
    def test_serialization(self):
        """
        Test converting metadata to serialized data and back
        """
        metadata1 = self.mds.Metadata.from_dict({})
        serialized1 = metadata1.serialized()
        metadata1.delete()

        metadata2 = self.mds.Metadata.from_payload(
            MetadataPayload.from_signed_blob(serialized1))
        serialized2 = metadata2.serialized()
        self.assertEqual(serialized1, serialized2)

    @db_session
    def test_key_mismatch_exception(self):
        mismatched_key = default_eccrypto.generate_key(u"curve25519")
        metadata = self.mds.Metadata.from_dict({})
        self.assertRaises(KeysMismatchException,
                          metadata.serialized,
                          key=mismatched_key)

    @db_session
    def test_to_file(self):
        """
        Test writing metadata to a file
        """
        metadata = self.mds.Metadata.from_dict({})
        file_path = os.path.join(self.session_base_dir, 'metadata.file')
        metadata.to_file(file_path)
        self.assertTrue(os.path.exists(file_path))

    @db_session
    def test_has_valid_signature(self):
        """
        Test whether a signature can be validated correctly
        """
        metadata = self.mds.Metadata.from_dict({})
        self.assertTrue(metadata.has_valid_signature())

        saved_key = metadata.public_key
        # Mess with the public key
        metadata.public_key = 'a'
        self.assertFalse(metadata.has_valid_signature())

        # Mess with the signature
        metadata.public_key = saved_key
        metadata.signature = 'a'
        self.assertFalse(metadata.has_valid_signature())

    @db_session
    def test_from_payload(self):
        """
        Test converting a metadata payload to a metadata object
        """
        metadata = self.mds.Metadata.from_dict({})
        metadata_dict = metadata.to_dict()
        metadata.delete()
        metadata_payload = MetadataPayload(**metadata_dict)
        self.assertTrue(self.mds.Metadata.from_payload(metadata_payload))
Example #32
class TestUpgradeDB72ToPony(TriblerCoreTest):
    @inlineCallbacks
    def setUp(self):
        yield super(TestUpgradeDB72ToPony, self).setUp()

        self.my_key = default_eccrypto.generate_key(u"curve25519")
        mds_db = os.path.join(self.session_base_dir, 'test.db')
        mds_channels_dir = self.session_base_dir

        self.mds = MetadataStore(mds_db, mds_channels_dir, self.my_key)
        self.m = DispersyToPonyMigration(OLD_DB_SAMPLE)
        self.m.initialize(self.mds)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestUpgradeDB72ToPony, self).tearDown()

    def test_get_personal_channel_title(self):
        self.assertTrue(self.m.personal_channel_title)

    def test_get_old_torrents_count(self):
        self.assertEqual(self.m.get_old_torrents_count(), 19)

    def test_get_personal_torrents_count(self):
        self.assertEqual(self.m.get_personal_channel_torrents_count(), 2)

    def test_convert_personal_channel(self):
        def check_channel():
            self.m.convert_personal_channel()
            my_channel = self.mds.ChannelMetadata.get_my_channel()
            self.assertEqual(len(my_channel.contents_list), 2)
            self.assertEqual(my_channel.num_entries, 2)
            for t in my_channel.contents_list:
                self.assertTrue(t.has_valid_signature())
            self.assertTrue(my_channel.has_valid_signature())
            self.assertEqual(self.m.personal_channel_title[:200],
                             my_channel.title)

        check_channel()

        # Now check the case where previous conversion of the personal channel had failed
        with db_session:
            self.mds.MiscData.get_for_update(
                name=CONVERSION_FROM_72_PERSONAL).value = CONVERSION_STARTED
        check_channel()

    @db_session
    def test_convert_all_channels(self):
        def check_conversion():
            self.m.convert_discovered_torrents()
            self.m.convert_discovered_channels()
            chans = self.mds.ChannelMetadata.get_entries()

            self.assertEqual(len(chans[0]), 2)
            for c in chans[0]:
                self.assertNotEqual(self.m.personal_channel_title[:200],
                                    c.title[:200])
                self.assertEqual(c.status, LEGACY_ENTRY)
                self.assertTrue(c.contents_list)
                for t in c.contents_list:
                    self.assertEqual(t.status, LEGACY_ENTRY)

        check_conversion()

        # Now check the case where the previous conversion failed at channels conversion
        with db_session:
            self.mds.MiscData.get_for_update(
                name=CONVERSION_FROM_72_CHANNELS).value = CONVERSION_STARTED
        check_conversion()

        # Now check the case where the previous conversion stopped at torrents conversion
        with db_session:
            self.mds.MiscData.get_for_update(
                name=CONVERSION_FROM_72_CHANNELS).delete()
            self.mds.MiscData.get_for_update(
                name=CONVERSION_FROM_72_DISCOVERED).value = CONVERSION_STARTED
            for d in self.mds.TorrentMetadata.select()[:10][:10]:
                d.delete()
        check_conversion()

    @db_session
    def test_update_trackers(self):
        tr = self.mds.TrackerState(
            url="http://ipv6.torrent.ubuntu.com:6969/announce")
        self.m.update_trackers_info()
        self.assertEqual(tr.failures, 2)
        self.assertEqual(tr.alive, True)
        self.assertEqual(tr.last_check, 1548776649)
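
A rough sketch of driving the same 7.2-to-Pony migration outside a test, using only the calls exercised above; the path and directory variables are assumptions:

my_key = default_eccrypto.generate_key(u"curve25519")
mds = MetadataStore(metadata_db_path, channels_dir, my_key)    # assumed paths
migration = DispersyToPonyMigration(old_tribler_db_path)       # path to the old 7.2 sqlite db
migration.initialize(mds)
migration.convert_personal_channel()
migration.convert_discovered_torrents()
migration.convert_discovered_channels()
migration.update_trackers_info()
mds.shutdown()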
Example #33
class TestChannelMetadata(TriblerCoreTest):
    """
    Contains various tests for the channel metadata type.
    """
    DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(os.path.realpath(__file__))), '..', '..', 'data')
    CHANNEL_METADATA = os.path.join(DATA_DIR, 'sample_channel', 'channel.mdblob')

    @inlineCallbacks
    def setUp(self):
        yield super(TestChannelMetadata, self).setUp()
        self.torrent_template = {
            "title": "",
            "infohash": "",
            "torrent_date": datetime(1970, 1, 1),
            "tags": "video"
        }
        self.my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(":memory:", self.session_base_dir, self.my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestChannelMetadata, self).tearDown()

    @staticmethod
    def get_sample_torrent_dict(my_key):
        """
        Utility method to return a dictionary with torrent information.
        """
        return {
            "infohash": database_blob("1" * 20),
            "size": 123,
            "torrent_date": datetime.utcnow(),
            "tags": "bla",
            "id_": 123,
            "public_key": database_blob(my_key.pub().key_to_bin()[10:]),
            "title": "lalala"
        }

    @staticmethod
    def get_sample_channel_dict(my_key):
        """
        Utility method to return a dictionary with channel information.
        """
        return dict(TestChannelMetadata.get_sample_torrent_dict(my_key), votes=222, subscribed=False, timestamp=1)

    @db_session
    def test_serialization(self):
        """
        Test converting channel metadata to serialized data
        """
        channel_metadata = self.mds.ChannelMetadata.from_dict({"infohash": str(random.getrandbits(160))})
        self.assertTrue(channel_metadata.serialized())

    @db_session
    def test_list_contents(self):
        """
        Test whether a correct list with channel content is returned from the database
        """
        self.mds.ChannelNode._my_key = default_eccrypto.generate_key('low')
        channel1 = self.mds.ChannelMetadata(infohash=str(random.getrandbits(160)))
        self.mds.TorrentMetadata.from_dict(dict(self.torrent_template))

        self.mds.ChannelNode._my_key = default_eccrypto.generate_key('low')
        channel2 = self.mds.ChannelMetadata(infohash=str(random.getrandbits(160)))
        self.mds.TorrentMetadata.from_dict(dict(self.torrent_template, infohash="1"))
        self.mds.TorrentMetadata.from_dict(dict(self.torrent_template, infohash="2"))

        self.assertEqual(1, len(channel1.contents_list))
        self.assertEqual(2, len(channel2.contents_list))
        self.assertEqual(2, channel2.contents_len)

    @db_session
    def test_create_channel(self):
        """
        Test whether creating a channel works as expected
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel('test', 'test')

        self.assertTrue(channel_metadata)
        self.assertRaises(DuplicateChannelIdError,
                          self.mds.ChannelMetadata.create_channel, 'test', 'test')

    @db_session
    def test_update_metadata(self):
        """
        Test whether metadata is correctly updated and signed
        """
        sample_channel_dict = TestChannelMetadata.get_sample_channel_dict(self.my_key)
        channel_metadata = self.mds.ChannelMetadata.from_dict(sample_channel_dict)
        self.mds.TorrentMetadata.from_dict(self.torrent_template)
        update_dict = {
            "id_": 222,
            "tags": "eee",
            "title": "qqq"
        }
        channel_metadata.update_metadata(update_dict=update_dict)
        self.assertDictContainsSubset(update_dict, channel_metadata.to_dict())

    @db_session
    def test_process_channel_metadata_payload(self):
        """
        Test whether a channel metadata payload is processed correctly
        """
        payload = ChannelMetadataPayload.from_file(self.CHANNEL_METADATA)
        channel_metadata = self.mds.ChannelMetadata.process_channel_metadata_payload(payload)
        self.assertTrue(channel_metadata)

        # Check that we do not add it again
        self.mds.ChannelMetadata.process_channel_metadata_payload(payload)
        self.assertEqual(len(self.mds.ChannelMetadata.select()), 1)

        # Check that we always take the latest version
        channel_metadata.timestamp -= 1
        self.assertEqual(channel_metadata.timestamp, 1551110113006)
        channel_metadata = self.mds.ChannelMetadata.process_channel_metadata_payload(payload)
        self.assertEqual(channel_metadata.timestamp, 1551110113007)
        self.assertEqual(len(self.mds.ChannelMetadata.select()), 1)

    @db_session
    def test_get_dirname(self):
        """
        Test whether the correct directory name is returned for channel metadata
        """
        sample_channel_dict = TestChannelMetadata.get_sample_channel_dict(self.my_key)
        channel_metadata = self.mds.ChannelMetadata.from_dict(sample_channel_dict)

        self.assertEqual(len(channel_metadata.dir_name), CHANNEL_DIR_NAME_LENGTH)

    @db_session
    def test_get_channel_with_dirname(self):
        sample_channel_dict = TestChannelMetadata.get_sample_channel_dict(self.my_key)
        channel_metadata = self.mds.ChannelMetadata.from_dict(sample_channel_dict)
        dirname = channel_metadata.dir_name
        channel_result = self.mds.ChannelMetadata.get_channel_with_dirname(dirname)
        self.assertEqual(channel_metadata, channel_result)

        # Test for corner-case of channel PK starting with zeroes
        channel_metadata.public_key = database_blob(unhexlify('0' * 128))
        channel_result = self.mds.ChannelMetadata.get_channel_with_dirname(channel_metadata.dir_name)
        self.assertEqual(channel_metadata, channel_result)

    @db_session
    def test_get_channel_with_id(self):
        """
        Test retrieving a channel with a specific ID
        """
        self.assertIsNone(self.mds.ChannelMetadata.get_channel_with_id('a' * 20))
        channel_metadata = self.mds.ChannelMetadata.create_channel('test', 'test')
        self.assertIsNotNone(self.mds.ChannelMetadata.get_channel_with_id(channel_metadata.public_key))

    @db_session
    def test_add_metadata_to_channel(self):
        """
        Test whether adding new torrents to a channel works as expected
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel('test', 'test')
        original_channel = channel_metadata.to_dict()
        md = self.mds.TorrentMetadata.from_dict(dict(self.torrent_template, status=NEW))
        channel_metadata.commit_channel_torrent()

        self.assertEqual(channel_metadata.id_, ROOT_CHANNEL_ID)
        self.assertLess(original_channel["timestamp"], channel_metadata.timestamp)
        self.assertLess(md.timestamp, channel_metadata.timestamp)
        self.assertEqual(channel_metadata.num_entries, 1)

    @db_session
    def test_add_torrent_to_channel(self):
        """
        Test adding a torrent to your channel
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel('test', 'test')
        tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)
        channel_metadata.add_torrent_to_channel(tdef, {'description': 'blabla'})
        self.assertTrue(channel_metadata.contents_list)
        self.assertRaises(DuplicateTorrentFileError, channel_metadata.add_torrent_to_channel, tdef, None)

    @db_session
    def test_restore_torrent_in_channel(self):
        """
        Test if the torrent scheduled for deletion is restored/updated after the user tries to re-add it.
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel('test', 'test')
        tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)
        md = channel_metadata.add_torrent_to_channel(tdef, None)

        # Check correct re-add
        md.status = TODELETE
        md_updated = channel_metadata.add_torrent_to_channel(tdef, None)
        self.assertEqual(md.status, COMMITTED)
        self.assertEqual(md_updated, md)
        self.assertTrue(md.has_valid_signature())

        # Check update of torrent properties from a new tdef
        md.status = TODELETE
        new_tracker_address = u'http://tribler.org/announce'
        tdef.torrent_parameters['announce'] = new_tracker_address
        md_updated = channel_metadata.add_torrent_to_channel(tdef, None)
        self.assertEqual(md_updated, md)
        self.assertEqual(md.status, NEW)
        self.assertEqual(md.tracker_info, new_tracker_address)
        self.assertTrue(md.has_valid_signature())
        # In addition, check that the trackers table was properly updated
        self.assertEqual(len(md.health.trackers), 2)

    @db_session
    def test_delete_torrent_from_channel(self):
        """
        Test deleting a torrent from your channel
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel('test', 'test')
        tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)

        # Check that nothing is committed when deleting uncommitted torrent metadata
        channel_metadata.add_torrent_to_channel(tdef, None)
        channel_metadata.delete_torrent(tdef.get_infohash())
        self.assertEqual(0, len(channel_metadata.contents_list))

        # Check append-only deletion process
        channel_metadata.add_torrent_to_channel(tdef, None)
        channel_metadata.commit_channel_torrent()
        self.assertEqual(1, len(channel_metadata.contents_list))
        channel_metadata.delete_torrent(tdef.get_infohash())
        channel_metadata.commit_channel_torrent()
        self.assertEqual(0, len(channel_metadata.contents_list))

    @db_session
    def test_commit_channel_torrent(self):
        channel = self.mds.ChannelMetadata.create_channel('test', 'test')
        tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)
        channel.add_torrent_to_channel(tdef, None)
        # The first run should return the infohash, the second should return None, because nothing was really done
        self.assertTrue(channel.commit_channel_torrent())
        self.assertFalse(channel.commit_channel_torrent())

    @db_session
    def test_consolidate_channel_torrent(self):
        """
        Test completely re-committing your channel
        """
        channel = self.mds.ChannelMetadata.create_channel('test', 'test')
        my_dir = os.path.abspath(os.path.join(self.mds.channels_dir, channel.dir_name))
        tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)

        # 1st torrent
        channel.add_torrent_to_channel(tdef, None)
        channel.commit_channel_torrent()

        # 2nd torrent
        md = self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, public_key=channel.public_key, status=NEW))
        channel.commit_channel_torrent()

        # Delete entry
        channel.delete_torrent(tdef.get_infohash())
        channel.commit_channel_torrent()

        self.assertEqual(1, len(channel.contents_list))
        self.assertEqual(3, len(os.listdir(my_dir)))
        channel.consolidate_channel_torrent()
        self.assertEqual(1, len(os.listdir(my_dir)))

    def test_mdblob_dont_fit_exception(self):
        with db_session:
            md_list = [self.mds.TorrentMetadata(title='test' + str(x), infohash=str(random.getrandbits(160))) for x in
                       xrange(0, 1)]
        self.assertRaises(Exception, entries_to_chunk, md_list, chunk_size=1)

    @db_session
    def test_get_channels(self):
        """
        Test whether we can get channels
        """

        # First we create a few channels
        for ind in xrange(10):
            self.mds.ChannelNode._my_key = default_eccrypto.generate_key('low')
            _ = self.mds.ChannelMetadata(title='channel%d' % ind, subscribed=(ind % 2 == 0),
                                         infohash=str(random.getrandbits(160)))
        channels = self.mds.ChannelMetadata.get_entries(first=1, last=5)
        self.assertEqual(len(channels[0]), 5)
        self.assertEqual(channels[1], 10)

        # Test filtering
        channels = self.mds.ChannelMetadata.get_entries(first=1, last=5, query_filter='channel5')
        self.assertEqual(len(channels[0]), 1)

        # Test sorting
        channels = self.mds.ChannelMetadata.get_entries(first=1, last=10, sort_by='title', sort_asc=False)
        self.assertEqual(len(channels[0]), 10)
        self.assertEqual(channels[0][0].title, 'channel9')

        # Test fetching subscribed channels
        channels = self.mds.ChannelMetadata.get_entries(first=1, last=10, sort_by='title', subscribed=True)
        self.assertEqual(len(channels[0]), 5)

    @db_session
    def test_get_channel_name(self):
        infohash = "\x00" * 20
        title = "testchan"
        chan = self.mds.ChannelMetadata(title=title, infohash=database_blob(infohash))
        dirname = chan.dir_name

        self.assertEqual(title, self.mds.ChannelMetadata.get_channel_name(dirname, infohash))
        chan.infohash = "\x11" * 20
        self.assertEqual("OLD:" + title, self.mds.ChannelMetadata.get_channel_name(dirname, infohash))
        chan.delete()
        self.assertEqual(dirname, self.mds.ChannelMetadata.get_channel_name(dirname, infohash))

    @db_session
    def check_add(self, torrents_in_dir, errors, recursive):
        TEST_TORRENTS_DIR = os.path.join(os.path.abspath(os.path.dirname(os.path.realpath(__file__))),
                                         '..', '..', '..', 'data', 'linux_torrents')
        chan = self.mds.ChannelMetadata.create_channel(title='testchan')
        torrents, e = chan.add_torrents_from_dir(TEST_TORRENTS_DIR, recursive)
        self.assertEqual(torrents_in_dir, len(torrents))
        self.assertEqual(errors, len(e))
        with db_session:
            q = self.mds.TorrentMetadata.select(lambda g: g.metadata_type == REGULAR_TORRENT)
            self.assertEqual(torrents_in_dir - len(e), q.count())

    def test_add_torrents_from_dir(self):
        self.check_add(9, 0, recursive=False)

    def test_add_torrents_from_dir_recursive(self):
        self.check_add(11, 1, recursive=True)
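
Condensed from the tests above, the personal-channel workflow they exercise looks roughly like this; mds stands in for the store the tests call self.mds, and TORRENT_UBUNTU_FILE is the same sample torrent the tests load:

with db_session:
    channel = mds.ChannelMetadata.create_channel('test', 'test')
    tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)
    channel.add_torrent_to_channel(tdef, {'description': 'blabla'})
    channel.commit_channel_torrent()              # writes the channel mdblob/torrent
    channel.delete_torrent(tdef.get_infohash())
    channel.commit_channel_torrent()              # commits the deletion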
Example #34
class TestMetadata(TriblerCoreTest):
    """
    Contains various tests for the Metadata type.
    """

    @inlineCallbacks
    def setUp(self):
        yield super(TestMetadata, self).setUp()
        self.my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(os.path.join(self.session_base_dir, 'test.db'), self.session_base_dir,
                                 self.my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestMetadata, self).tearDown()

    @db_session
    def test_to_dict(self):
        """
        Test whether converting metadata to a dictionary works
        """
        metadata = self.mds.Metadata.from_dict({})
        self.assertTrue(metadata.to_dict())

    @db_session
    def test_serialization(self):
        """
        Test converting metadata to serialized data and back
        """
        metadata1 = self.mds.Metadata.from_dict({})
        serialized1 = metadata1.serialized()
        metadata1.delete()

        metadata2 = self.mds.Metadata.from_payload(MetadataPayload.from_signed_blob(serialized1))
        serialized2 = metadata2.serialized()
        self.assertEqual(serialized1, serialized2)

    @db_session
    def test_key_mismatch_exception(self):
        mismatched_key = default_eccrypto.generate_key(u"curve25519")
        metadata = self.mds.Metadata.from_dict({})
        self.assertRaises(KeysMismatchException, metadata.serialized, key=mismatched_key)

    @db_session
    def test_to_file(self):
        """
        Test writing metadata to a file
        """
        metadata = self.mds.Metadata.from_dict({})
        file_path = os.path.join(self.session_base_dir, 'metadata.file')
        metadata.to_file(file_path)
        self.assertTrue(os.path.exists(file_path))

    @db_session
    def test_has_valid_signature(self):
        """
        Test whether a signature can be validated correctly
        """
        metadata = self.mds.Metadata.from_dict({})
        self.assertTrue(metadata.has_valid_signature())

        saved_key = metadata.public_key
        # Mess with the public key
        metadata.public_key = 'a'
        self.assertFalse(metadata.has_valid_signature())

        # Mess with the signature
        metadata.public_key = saved_key
        metadata.signature = 'a'
        self.assertFalse(metadata.has_valid_signature())

    @db_session
    def test_from_payload(self):
        """
        Test converting a metadata payload to a metadata object
        """
        metadata = self.mds.Metadata.from_dict({})
        metadata_dict = metadata.to_dict()
        metadata.delete()
        metadata_payload = MetadataPayload(**metadata_dict)
        self.assertTrue(self.mds.Metadata.from_payload(metadata_payload))
Example #35
class TestChannelMetadata(TriblerCoreTest):
    """
    Contains various tests for the channel metadata type.
    """
    DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(os.path.realpath(__file__))), '..', '..', 'data')
    CHANNEL_METADATA = os.path.join(DATA_DIR, 'sample_channel', 'channel.mdblob')

    @inlineCallbacks
    def setUp(self):
        yield super(TestChannelMetadata, self).setUp()
        self.torrent_template = {
            "title": "",
            "infohash": "",
            "torrent_date": datetime(1970, 1, 1),
            "tags": "video"
        }
        self.my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(os.path.join(self.session_base_dir, 'test.db'), self.session_base_dir,
                                 self.my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestChannelMetadata, self).tearDown()

    @staticmethod
    def get_sample_torrent_dict(my_key):
        """
        Utility method to return a dictionary with torrent information.
        """
        return {
            "infohash": buffer("1" * 20),
            "size": 123,
            "timestamp": datetime.utcnow(),
            "torrent_date": datetime.utcnow(),
            "tags": "bla",
            "tc_pointer": 123,
            "public_key": buffer(my_key.pub().key_to_bin()),
            "title": "lalala"
        }

    @staticmethod
    def get_sample_channel_dict(my_key):
        """
        Utility method to return a dictionary with channel information.
        """
        return dict(TestChannelMetadata.get_sample_torrent_dict(my_key), votes=222, subscribed=False, version=1)

    @db_session
    def test_serialization(self):
        """
        Test converting channel metadata to serialized data
        """
        channel_metadata = self.mds.ChannelMetadata.from_dict({})
        self.assertTrue(channel_metadata.serialized())

    @db_session
    def test_list_contents(self):
        """
        Test whether a correct list with channel content is returned from the database
        """
        pub_key1 = default_eccrypto.generate_key('low').pub().key_to_bin()
        pub_key2 = default_eccrypto.generate_key('low').pub().key_to_bin()

        channel1 = self.mds.ChannelMetadata(public_key=pub_key1)
        self.mds.TorrentMetadata.from_dict(dict(self.torrent_template, public_key=pub_key1))

        channel2 = self.mds.ChannelMetadata(public_key=pub_key2)
        self.mds.TorrentMetadata.from_dict(dict(self.torrent_template, public_key=pub_key2))
        self.mds.TorrentMetadata.from_dict(dict(self.torrent_template, public_key=pub_key2))

        self.assertEqual(1, len(channel1.contents_list))
        self.assertEqual(2, len(channel2.contents_list))
        self.assertEqual(2, channel2.contents_len)

    @db_session
    def test_create_channel(self):
        """
        Test whether creating a channel works as expected
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel('test', 'test')

        self.assertTrue(channel_metadata)
        self.assertRaises(DuplicateChannelNameError,
                          self.mds.ChannelMetadata.create_channel, 'test', 'test')

    @db_session
    def test_update_metadata(self):
        """
        Test whether metadata is correctly updated and signed
        """
        sample_channel_dict = TestChannelMetadata.get_sample_channel_dict(self.my_key)
        channel_metadata = self.mds.ChannelMetadata.from_dict(sample_channel_dict)
        self.mds.TorrentMetadata.from_dict(self.torrent_template)
        update_dict = {
            "tc_pointer": 222,
            "tags": "eee",
            "title": "qqq"
        }
        channel_metadata.update_metadata(update_dict=update_dict)
        self.assertDictContainsSubset(update_dict, channel_metadata.to_dict())

    @db_session
    def test_process_channel_metadata_payload(self):
        """
        Test whether a channel metadata payload is processed correctly
        """
        payload = ChannelMetadataPayload.from_file(self.CHANNEL_METADATA)
        channel_metadata = self.mds.ChannelMetadata.process_channel_metadata_payload(payload)
        self.assertTrue(channel_metadata)

        # Check that we do not add it again
        self.mds.ChannelMetadata.process_channel_metadata_payload(payload)
        self.assertEqual(len(self.mds.ChannelMetadata.select()), 1)

        # Check that we always take the latest version
        channel_metadata.version -= 1
        self.assertEqual(channel_metadata.version, 2)
        channel_metadata = self.mds.ChannelMetadata.process_channel_metadata_payload(payload)
        self.assertEqual(channel_metadata.version, 3)
        self.assertEqual(len(self.mds.ChannelMetadata.select()), 1)

    @db_session
    def test_get_dirname(self):
        """
        Test whether the correct directory name is returned for channel metadata
        """
        sample_channel_dict = TestChannelMetadata.get_sample_channel_dict(self.my_key)
        channel_metadata = self.mds.ChannelMetadata.from_dict(sample_channel_dict)

        self.assertEqual(len(channel_metadata.dir_name), 60)

    @db_session
    def test_get_channel_with_id(self):
        """
        Test retrieving a channel with a specific ID
        """
        self.assertIsNone(self.mds.ChannelMetadata.get_channel_with_id('a' * 20))
        channel_metadata = self.mds.ChannelMetadata.create_channel('test', 'test')
        self.assertIsNotNone(self.mds.ChannelMetadata.get_channel_with_id(channel_metadata.public_key))

    @db_session
    def test_add_metadata_to_channel(self):
        """
        Test whether adding new torrents to a channel works as expected
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel('test', 'test')
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, public_key=channel_metadata.public_key))
        channel_metadata.commit_channel_torrent()

        self.assertEqual(channel_metadata.version, 1)
        self.assertEqual(channel_metadata.size, 1)

    @db_session
    def test_add_torrent_to_channel(self):
        """
        Test adding a torrent to your channel
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel('test', 'test')
        tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)
        channel_metadata.add_torrent_to_channel(tdef, None)
        self.assertTrue(channel_metadata.contents_list)
        self.assertRaises(DuplicateTorrentFileError, channel_metadata.add_torrent_to_channel, tdef, None)

    @db_session
    def test_delete_torrent_from_channel(self):
        """
        Test deleting a torrent from your channel
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel('test', 'test')
        tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)

        # Check that nothing is committed when deleting uncommitted torrent metadata
        channel_metadata.add_torrent_to_channel(tdef, None)
        channel_metadata.delete_torrent_from_channel(tdef.get_infohash())
        self.assertEqual(0, len(channel_metadata.contents_list))

        # Check append-only deletion process
        channel_metadata.add_torrent_to_channel(tdef, None)
        channel_metadata.commit_channel_torrent()
        self.assertEqual(1, len(channel_metadata.contents_list))
        channel_metadata.delete_torrent_from_channel(tdef.get_infohash())
        channel_metadata.commit_channel_torrent()
        self.assertEqual(0, len(channel_metadata.contents_list))

    @db_session
    def test_consolidate_channel_torrent(self):
        """
        Test completely re-committing your channel
        """
        channel = self.mds.ChannelMetadata.create_channel('test', 'test')
        my_dir = os.path.abspath(os.path.join(self.mds.channels_dir, channel.dir_name))
        tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)

        # 1st torrent
        channel.add_torrent_to_channel(tdef, None)
        channel.commit_channel_torrent()

        # 2nd torrent
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, public_key=channel.public_key))
        channel.commit_channel_torrent()

        # Delete entry
        channel.delete_torrent_from_channel(tdef.get_infohash())
        channel.commit_channel_torrent()

        self.assertEqual(1, len(channel.contents_list))
        self.assertEqual(3, len(os.listdir(my_dir)))
        channel.consolidate_channel_torrent()
        self.assertEqual(1, len(os.listdir(my_dir)))

    def test_mdblob_dont_fit_exception(self):
        with db_session:
            md_list = [self.mds.TorrentMetadata(title='test' + str(x)) for x in xrange(0, 1)]
        self.assertRaises(Exception, entries_to_chunk, md_list, chunk_size=1)
Example #36
    my_channel.commit_channel_torrent()

    t2 = my_channel.add_torrent_to_channel(TorrentDef.load(TORRENT_VIDEO_FILE),
                                           None)
    _ = mds.TorrentMetadata.from_dict(gen_random_entry())
    _ = mds.TorrentMetadata.from_dict(gen_random_entry())
    my_channel.commit_channel_torrent()

    my_channel.delete_torrent(t2.infohash)
    my_channel.commit_channel_torrent()

    # Rename files to stable names
    mdblob_name = os.path.join(SAMPLE_DIR, my_channel.dir_name + ".mdblob")
    torrent_name = os.path.join(SAMPLE_DIR, my_channel.dir_name + ".torrent")

    os.rename(mdblob_name, CHANNEL_METADATA)
    os.rename(torrent_name, CHANNEL_TORRENT)

    # Update channel
    _ = mds.TorrentMetadata.from_dict(gen_random_entry())
    my_channel.commit_channel_torrent()

    # Rename updated files to stable names
    os.rename(mdblob_name, CHANNEL_METADATA_UPDATED)
    os.rename(torrent_name, CHANNEL_TORRENT_UPDATED)


if __name__ == "__main__":
    mds = MetadataStore(":memory:", SAMPLE_DIR, my_key)
    gen_sample_channel(mds)
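
The names used above (mds, my_channel, my_key, gen_random_entry, SAMPLE_DIR and the CHANNEL_* constants) come from the elided top of this sample-generation script. A purely hypothetical stand-in for gen_random_entry, built only from fields used elsewhere in these examples, might look like:

def gen_random_entry():
    # hypothetical helper: returns a dict accepted by TorrentMetadata.from_dict
    return {
        "title": "test" + str(random.randint(0, 1000000)),
        "infohash": database_blob(os.urandom(20)),
        "torrent_date": datetime(1970, 1, 1),
        "size": 123,
        "tags": "video",
        "status": NEW,
    }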
Example #37
    def init(self):
        if self.dispersy:
            from Tribler.dispersy.community import HardKilledCommunity

            self._logger.info("lmc: Starting Dispersy...")

            self.session.readable_status = STATE_STARTING_DISPERSY
            now = timemod.time()
            success = self.dispersy.start(self.session.autoload_discovery)

            diff = timemod.time() - now
            if success:
                self._logger.info("lmc: Dispersy started successfully in %.2f seconds [port: %d]",
                                  diff, self.dispersy.wan_address[1])
            else:
                self._logger.info("lmc: Dispersy failed to start in %.2f seconds", diff)

            self.upnp_ports.append((self.dispersy.wan_address[1], 'UDP'))

            from Tribler.dispersy.crypto import M2CryptoSK
            private_key = self.dispersy.crypto.key_to_bin(
                M2CryptoSK(filename=self.session.config.get_permid_keypair_filename()))
            self.session.dispersy_member = blockingCallFromThread(reactor, self.dispersy.get_member,
                                                                  private_key=private_key)

            blockingCallFromThread(reactor, self.dispersy.define_auto_load, HardKilledCommunity,
                                   self.session.dispersy_member, load=True)

            if self.session.config.get_megacache_enabled():
                self.dispersy.database.attach_commit_callback(self.session.sqlite_db.commit_now)

            # notify dispersy finished loading
            self.session.notifier.notify(NTFY_DISPERSY, NTFY_STARTED, None)

            self.session.readable_status = STATE_LOADING_COMMUNITIES

        # We should load the mainline DHT before loading the IPv8 overlays since the DHT is used for the tunnel overlay.
        if self.session.config.get_mainline_dht_enabled():
            self.session.readable_status = STATE_START_MAINLINE_DHT
            from Tribler.Core.DecentralizedTracking import mainlineDHT
            self.mainline_dht = mainlineDHT.init(('127.0.0.1', self.session.config.get_mainline_dht_port()),
                                                 self.session.config.get_state_dir())
            self.upnp_ports.append((self.session.config.get_mainline_dht_port(), 'UDP'))

        # Wallets
        if self.session.config.get_bitcoinlib_enabled():
            try:
                from Tribler.Core.Modules.wallet.btc_wallet import BitcoinWallet, BitcoinTestnetWallet
                wallet_path = os.path.join(self.session.config.get_state_dir(), 'wallet')
                btc_wallet = BitcoinWallet(wallet_path)
                btc_testnet_wallet = BitcoinTestnetWallet(wallet_path)
                self.wallets[btc_wallet.get_identifier()] = btc_wallet
                self.wallets[btc_testnet_wallet.get_identifier()] = btc_testnet_wallet
            except ImportError:
                self._logger.error("bitcoinlib library cannot be found, Bitcoin wallet not available!")

        if self.session.config.get_dummy_wallets_enabled():
            # For debugging purposes, we create dummy wallets
            dummy_wallet1 = DummyWallet1()
            self.wallets[dummy_wallet1.get_identifier()] = dummy_wallet1

            dummy_wallet2 = DummyWallet2()
            self.wallets[dummy_wallet2.get_identifier()] = dummy_wallet2

        if self.ipv8:
            self.ipv8_start_time = time.time()
            self.load_ipv8_overlays()
            self.enable_ipv8_statistics()

        if self.dispersy:
            self.load_dispersy_communities()

        tunnel_community_ports = self.session.config.get_tunnel_community_socks5_listen_ports()
        self.session.config.set_anon_proxy_settings(2, ("127.0.0.1", tunnel_community_ports))

        if self.session.config.get_channel_search_enabled() and self.session.config.get_dispersy_enabled():
            self.session.readable_status = STATE_INITIALIZE_CHANNEL_MGR
            from Tribler.Core.Modules.channel.channel_manager import ChannelManager
            self.channel_manager = ChannelManager(self.session)
            self.channel_manager.initialize()

        if self.session.config.get_libtorrent_enabled():
            self.session.readable_status = STATE_START_LIBTORRENT
            from Tribler.Core.Libtorrent.LibtorrentMgr import LibtorrentMgr
            self.ltmgr = LibtorrentMgr(self.session)
            self.ltmgr.initialize()
            for port, protocol in self.upnp_ports:
                self.ltmgr.add_upnp_mapping(port, protocol)

        # add task for tracker checking
        if self.session.config.get_torrent_checking_enabled():
            self.session.readable_status = STATE_START_TORRENT_CHECKER
            self.torrent_checker = TorrentChecker(self.session)
            self.torrent_checker.initialize()

        if self.rtorrent_handler and self.session.config.get_dispersy_enabled():
            self.session.readable_status = STATE_START_REMOTE_TORRENT_HANDLER
            self.rtorrent_handler.initialize()

        if self.api_manager:
            self.session.readable_status = STATE_START_API_ENDPOINTS
            self.api_manager.root_endpoint.start_endpoints()

        if self.session.config.get_watch_folder_enabled():
            self.session.readable_status = STATE_START_WATCH_FOLDER
            self.watch_folder = WatchFolder(self.session)
            self.watch_folder.start()

        if self.session.config.get_credit_mining_enabled():
            self.session.readable_status = STATE_START_CREDIT_MINING
            from Tribler.Core.CreditMining.CreditMiningManager import CreditMiningManager
            self.credit_mining_manager = CreditMiningManager(self.session)

        if self.session.config.get_resource_monitor_enabled():
            self.resource_monitor = ResourceMonitor(self.session)
            self.resource_monitor.start()

        if self.session.config.get_version_checker_enabled():
            self.version_check_manager = VersionCheckManager(self.session)
            self.version_check_manager.start()

        if self.session.config.get_chant_enabled():
            channels_dir = os.path.join(self.session.config.get_chant_channels_dir())
            database_path = os.path.join(self.session.config.get_state_dir(), 'sqlite', 'metadata.db')
            self.mds = MetadataStore(database_path, channels_dir, self.session.trustchain_keypair)

        self.session.set_download_states_callback(self.sesscb_states_callback)

        if self.session.config.get_ipv8_enabled() and self.session.config.get_trustchain_enabled():
            self.payout_manager = PayoutManager(self.trustchain_community, self.dht_community)

        self.initComplete = True
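
The chant block near the end of init() wires MetadataStore into the session with an on-disk database; the test examples in this listing use the same three-argument constructor (database path, channels directory, key), typically with an in-memory database. A side-by-side sketch with assumed variable names:

# production wiring (as in init() above): on-disk database under the state dir
mds = MetadataStore(database_path, channels_dir, trustchain_keypair)
# test wiring (as in the setUp examples in this listing): throwaway in-memory database
mds = MetadataStore(":memory:", channels_dir, my_key)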
Example #38
class TestMetadataStore(TriblerCoreTest):
    """
    This class contains tests for the metadata store.
    """
    DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(os.path.realpath(__file__))), '..', '..', 'data')
    CHANNEL_DIR = os.path.join(DATA_DIR, 'sample_channel',
                               'd24941643ff471e40d7761c71f4e3a4c21a4a5e89b0281430d01e78a4e46')
    CHANNEL_METADATA = os.path.join(DATA_DIR, 'sample_channel', 'channel.mdblob')

    @inlineCallbacks
    def setUp(self):
        yield super(TestMetadataStore, self).setUp()
        my_key = default_eccrypto.generate_key(u"curve25519")

        self.mds = MetadataStore(os.path.join(self.session_base_dir, 'test.db'), self.session_base_dir,
                                 my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestMetadataStore, self).tearDown()

    @db_session
    def test_process_channel_dir_file(self):
        """
        Test whether we are able to process files in a directory containing torrent metadata
        """

        test_torrent_metadata = self.mds.TorrentMetadata(title='test')
        metadata_path = os.path.join(self.session_base_dir, 'metadata.data')
        test_torrent_metadata.to_file(metadata_path)
        # We delete this TorrentMetadata entry now; it should be added to the database again when the file is loaded
        test_torrent_metadata.delete()
        loaded_metadata = self.mds.process_mdblob_file(metadata_path)
        self.assertEqual(loaded_metadata[0].title, 'test')

        # Test whether we delete existing metadata when loading a DeletedMetadata blob
        metadata = self.mds.TorrentMetadata(infohash='1' * 20)
        metadata.to_delete_file(metadata_path)
        loaded_metadata = self.mds.process_mdblob_file(metadata_path)
        # Make sure the original metadata is deleted
        self.assertListEqual(loaded_metadata, [])
        self.assertIsNone(self.mds.TorrentMetadata.get(infohash='1' * 20))

        # Test an unknown metadata type, this should raise an exception
        invalid_metadata = os.path.join(self.session_base_dir, 'invalidtype.mdblob')
        make_wrong_payload(invalid_metadata)
        self.assertRaises(UnknownBlobTypeException, self.mds.process_mdblob_file, invalid_metadata)

    @db_session
    def test_squash_mdblobs(self):
        chunk_size = self.mds.ChannelMetadata._CHUNK_SIZE_LIMIT
        md_list = [self.mds.TorrentMetadata(title='test' + str(x)) for x in xrange(0, 10)]
        chunk, _ = entries_to_chunk(md_list, chunk_size=chunk_size)
        self.assertItemsEqual(md_list, self.mds.process_squashed_mdblob(chunk))

        # Test splitting into multiple chunks
        chunk, index = entries_to_chunk(md_list, chunk_size=1000)
        chunk += entries_to_chunk(md_list, chunk_size=1000, start_index=index)[0]
        self.assertItemsEqual(md_list, self.mds.process_squashed_mdblob(chunk))

    @db_session
    def test_multiple_squashed_commit_and_read(self):
        """
        Test committing entries into several squashed blobs and reading them back
        """
        self.mds.ChannelMetadata._CHUNK_SIZE_LIMIT = 500

        num_entries = 10
        channel = self.mds.ChannelMetadata(title='testchan')
        md_list = [self.mds.TorrentMetadata(title='test' + str(x)) for x in xrange(0, num_entries)]
        channel.commit_channel_torrent()

        channel.local_version = 0
        for md in md_list:
            md.delete()

        channel_dir = os.path.join(self.mds.channels_dir, channel.dir_name)
        self.assertTrue(len(os.listdir(channel_dir)) > 1)  # make sure it was broken into more than one .mdblob file
        self.mds.process_channel_dir(channel_dir, channel.public_key)
        self.assertEqual(num_entries, len(channel.contents))

    @db_session
    def test_process_channel_dir(self):
        """
        Test processing a directory containing metadata blobs
        """
        payload = ChannelMetadataPayload.from_file(self.CHANNEL_METADATA)
        channel = self.mds.ChannelMetadata.process_channel_metadata_payload(payload)
        self.assertFalse(channel.contents_list)
        self.mds.process_channel_dir(self.CHANNEL_DIR, channel.public_key)
        self.assertEqual(len(channel.contents_list), 3)
        self.assertEqual(channel.local_version, 3)
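
A sketch of consuming a downloaded channel directory with the calls shown in this test class; mds and the two path variables are assumptions:

with db_session:
    payload = ChannelMetadataPayload.from_file(channel_mdblob_path)
    channel = mds.ChannelMetadata.process_channel_metadata_payload(payload)
    mds.process_channel_dir(channel_dir_path, channel.public_key)
    # channel.local_version and channel.contents_list now reflect the processed blobs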
Example #39
class TestUpgradeDB72ToPony(TriblerCoreTest):
    @inlineCallbacks
    def setUp(self):
        yield super(TestUpgradeDB72ToPony, self).setUp()

        self.my_key = default_eccrypto.generate_key(u"curve25519")
        mds_db = os.path.join(self.session_base_dir, 'test.db')
        mds_channels_dir = self.session_base_dir

        self.mds = MetadataStore(mds_db, mds_channels_dir, self.my_key)
        self.m = DispersyToPonyMigration(OLD_DB_SAMPLE)
        self.m.initialize(self.mds)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestUpgradeDB72ToPony, self).tearDown()

    def test_get_personal_channel_title(self):
        self.assertTrue(self.m.personal_channel_title)

    def test_get_old_torrents_count(self):
        self.assertEqual(self.m.get_old_torrents_count(), 19)

    def test_get_personal_torrents_count(self):
        self.assertEqual(self.m.get_personal_channel_torrents_count(), 2)

    def test_convert_personal_channel(self):
        def check_channel():
            self.m.convert_personal_channel()
            my_channel = self.mds.ChannelMetadata.get_my_channel()
            self.assertEqual(len(my_channel.contents_list), 2)
            self.assertEqual(my_channel.num_entries, 2)
            for t in my_channel.contents_list:
                self.assertTrue(t.has_valid_signature())
            self.assertTrue(my_channel.has_valid_signature())
            self.assertEqual(self.m.personal_channel_title[:200], my_channel.title)

        check_channel()

        # Now check the case where previous conversion of the personal channel had failed
        with db_session:
            self.mds.MiscData.get_for_update(name=CONVERSION_FROM_72_PERSONAL).value = CONVERSION_STARTED
        check_channel()

    @db_session
    def test_convert_all_channels(self):
        def check_conversion():
            self.m.convert_discovered_torrents()
            self.m.convert_discovered_channels()
            chans = self.mds.ChannelMetadata.get_entries()

            self.assertEqual(len(chans[0]), 2)
            for c in chans[0]:
                self.assertNotEqual(self.m.personal_channel_title[:200], c.title[:200])
                self.assertEqual(c.status, LEGACY_ENTRY)
                self.assertTrue(c.contents_list)
                for t in c.contents_list:
                    self.assertEqual(t.status, LEGACY_ENTRY)
        check_conversion()

        # Now check the case where the previous conversion failed at channels conversion
        with db_session:
            self.mds.MiscData.get_for_update(name=CONVERSION_FROM_72_CHANNELS).value = CONVERSION_STARTED
        check_conversion()

        # Now check the case where the previous conversion stopped at torrents conversion
        with db_session:
            self.mds.MiscData.get_for_update(name=CONVERSION_FROM_72_CHANNELS).delete()
            self.mds.MiscData.get_for_update(name=CONVERSION_FROM_72_DISCOVERED).value = CONVERSION_STARTED
            for d in self.mds.TorrentMetadata.select()[:10][:10]:
                d.delete()
        check_conversion()

    @db_session
    def test_update_trackers(self):
        tr = self.mds.TrackerState(url="http://ipv6.torrent.ubuntu.com:6969/announce")
        self.m.update_trackers_info()
        self.assertEqual(tr.failures, 2)
        self.assertEqual(tr.alive, True)
        self.assertEqual(tr.last_check, 1548776649)
Example #40
    def setUp(self):
        yield super(TestTorrentMetadata, self).setUp()
        self.my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(':memory:', self.session_base_dir,
                                 self.my_key)
Example #41
class TestMetadata(TriblerCoreTest):
    """
    Contains various tests for the ChannelNode type.
    """
    @inlineCallbacks
    def setUp(self):
        yield super(TestMetadata, self).setUp()
        self.my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(':memory:', self.session_base_dir,
                                 self.my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestMetadata, self).tearDown()

    @db_session
    def test_to_dict(self):
        """
        Test whether converting metadata to a dictionary works
        """
        metadata = self.mds.ChannelNode.from_dict({})
        self.assertTrue(metadata.to_dict())

    @db_session
    def test_serialization(self):
        """
        Test converting metadata to serialized data and back
        """
        metadata1 = self.mds.ChannelNode.from_dict({})
        serialized1 = metadata1.serialized()
        metadata1.delete()
        orm.flush()

        metadata2 = self.mds.ChannelNode.from_payload(
            ChannelNodePayload.from_signed_blob(serialized1))
        serialized2 = metadata2.serialized()
        self.assertEqual(serialized1, serialized2)

        # Test no signature exception
        metadata2_dict = metadata2.to_dict()
        metadata2_dict.pop("signature")
        self.assertRaises(InvalidSignatureException, ChannelNodePayload,
                          **metadata2_dict)

        serialized3 = serialized2[:-5] + "\xee" * 5
        self.assertRaises(InvalidSignatureException,
                          ChannelNodePayload.from_signed_blob, serialized3)
        # Test bypass signature check
        ChannelNodePayload.from_signed_blob(serialized3, check_signature=False)

    @db_session
    def test_key_mismatch_exception(self):
        mismatched_key = default_eccrypto.generate_key(u"curve25519")
        metadata = self.mds.ChannelNode.from_dict({})
        self.assertRaises(KeysMismatchException,
                          metadata.serialized,
                          key=mismatched_key)

    @db_session
    def test_to_file(self):
        """
        Test writing metadata to a file
        """
        metadata = self.mds.ChannelNode.from_dict({})
        file_path = os.path.join(self.session_base_dir, 'metadata.file')
        metadata.to_file(file_path)
        self.assertTrue(os.path.exists(file_path))

    @db_session
    def test_has_valid_signature(self):
        """
        Test whether a signature can be validated correctly
        """
        metadata = self.mds.ChannelNode.from_dict({})
        self.assertTrue(metadata.has_valid_signature())

        md_dict = metadata.to_dict()

        # Mess with the signature
        metadata.signature = 'a'
        self.assertFalse(metadata.has_valid_signature())

        # Create metadata with wrong key
        metadata.delete()
        md_dict.update(public_key=database_blob("aaa"))
        md_dict.pop("rowid")

        metadata = self.mds.ChannelNode(skip_key_check=True, **md_dict)
        self.assertFalse(metadata.has_valid_signature())

        key = default_eccrypto.generate_key(u"curve25519")
        metadata2 = self.mds.ChannelNode(sign_with=key, **md_dict)
        self.assertEqual(database_blob(key.pub().key_to_bin()[10:]),
                         metadata2.public_key)
        md_dict2 = metadata2.to_dict()
        md_dict2["signature"] = md_dict["signature"]
        self.assertRaises(InvalidSignatureException, self.mds.ChannelNode,
                          **md_dict2)

    @db_session
    def test_from_payload(self):
        """
        Test converting a metadata payload to a metadata object
        """
        metadata = self.mds.ChannelNode.from_dict({})
        metadata_dict = metadata.to_dict()
        metadata.delete()
        orm.flush()
        metadata_payload = ChannelNodePayload(**metadata_dict)
        self.assertTrue(self.mds.ChannelNode.from_payload(metadata_payload))
Example #42
    def setUp(self):
        yield super(TestMetadataStore, self).setUp()
        my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(":memory:", self.session_base_dir, my_key)
Example #43
class TestChannelMetadata(TriblerCoreTest):
    """
    Contains various tests for the channel metadata type.
    """
    DATA_DIR = os.path.join(
        os.path.abspath(os.path.dirname(os.path.realpath(__file__))), '..',
        '..', 'data')
    CHANNEL_METADATA = os.path.join(DATA_DIR, 'sample_channel',
                                    'channel.mdblob')

    @inlineCallbacks
    def setUp(self):
        yield super(TestChannelMetadata, self).setUp()
        self.torrent_template = {
            "title": "",
            "infohash": "",
            "torrent_date": datetime(1970, 1, 1),
            "tags": "video"
        }
        self.my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(":memory:", self.session_base_dir,
                                 self.my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestChannelMetadata, self).tearDown()

    @staticmethod
    def get_sample_torrent_dict(my_key):
        """
        Utility method to return a dictionary with torrent information.
        """
        return {
            "infohash": database_blob("1" * 20),
            "size": 123,
            "torrent_date": datetime.utcnow(),
            "tags": "bla",
            "id_": 123,
            "public_key": database_blob(my_key.pub().key_to_bin()[10:]),
            "title": "lalala"
        }

    @staticmethod
    def get_sample_channel_dict(my_key):
        """
        Utility method to return a dictionary with channel information.
        """
        return dict(TestChannelMetadata.get_sample_torrent_dict(my_key),
                    votes=222,
                    subscribed=False,
                    timestamp=1)

    @db_session
    def test_serialization(self):
        """
        Test converting channel metadata to serialized data
        """
        channel_metadata = self.mds.ChannelMetadata.from_dict(
            {"infohash": str(random.getrandbits(160))})
        self.assertTrue(channel_metadata.serialized())

    @db_session
    def test_list_contents(self):
        """
        Test whether a correct list with channel content is returned from the database
        """
        self.mds.ChannelNode._my_key = default_eccrypto.generate_key('low')
        channel1 = self.mds.ChannelMetadata(
            infohash=str(random.getrandbits(160)))
        self.mds.TorrentMetadata.from_dict(dict(self.torrent_template))

        self.mds.ChannelNode._my_key = default_eccrypto.generate_key('low')
        channel2 = self.mds.ChannelMetadata(
            infohash=str(random.getrandbits(160)))
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, infohash="1"))
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, infohash="2"))

        self.assertEqual(1, len(channel1.contents_list))
        self.assertEqual(2, len(channel2.contents_list))
        self.assertEqual(2, channel2.contents_len)

    @db_session
    def test_create_channel(self):
        """
        Test whether creating a channel works as expected
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel(
            'test', 'test')

        self.assertTrue(channel_metadata)
        self.assertRaises(DuplicateChannelIdError,
                          self.mds.ChannelMetadata.create_channel, 'test',
                          'test')

    @db_session
    def test_update_metadata(self):
        """
        Test whether metadata is correctly updated and signed
        """
        sample_channel_dict = TestChannelMetadata.get_sample_channel_dict(
            self.my_key)
        channel_metadata = self.mds.ChannelMetadata.from_dict(
            sample_channel_dict)
        self.mds.TorrentMetadata.from_dict(self.torrent_template)
        update_dict = {"id_": 222, "tags": "eee", "title": "qqq"}
        channel_metadata.update_metadata(update_dict=update_dict)
        self.assertDictContainsSubset(update_dict, channel_metadata.to_dict())

    @db_session
    def test_process_channel_metadata_payload(self):
        """
        Test whether a channel metadata payload is processed correctly
        """
        payload = ChannelMetadataPayload.from_file(self.CHANNEL_METADATA)
        channel_metadata = self.mds.ChannelMetadata.process_channel_metadata_payload(
            payload)
        self.assertTrue(channel_metadata)

        # Check that we do not add it again
        self.mds.ChannelMetadata.process_channel_metadata_payload(payload)
        self.assertEqual(len(self.mds.ChannelMetadata.select()), 1)

        # Check that we always take the latest version
        channel_metadata.timestamp -= 1
        self.assertEqual(channel_metadata.timestamp, 1551110113006)
        channel_metadata = self.mds.ChannelMetadata.process_channel_metadata_payload(
            payload)
        self.assertEqual(channel_metadata.timestamp, 1551110113007)
        self.assertEqual(len(self.mds.ChannelMetadata.select()), 1)

    @db_session
    def test_get_dirname(self):
        """
        Test whether the correct directory name is returned for channel metadata
        """
        sample_channel_dict = TestChannelMetadata.get_sample_channel_dict(
            self.my_key)
        channel_metadata = self.mds.ChannelMetadata.from_dict(
            sample_channel_dict)

        self.assertEqual(len(channel_metadata.dir_name),
                         CHANNEL_DIR_NAME_LENGTH)

    @db_session
    def test_get_channel_with_dirname(self):
        sample_channel_dict = TestChannelMetadata.get_sample_channel_dict(
            self.my_key)
        channel_metadata = self.mds.ChannelMetadata.from_dict(
            sample_channel_dict)
        dirname = channel_metadata.dir_name
        channel_result = self.mds.ChannelMetadata.get_channel_with_dirname(
            dirname)
        self.assertEqual(channel_metadata, channel_result)

        # Test for corner-case of channel PK starting with zeroes
        channel_metadata.public_key = database_blob(unhexlify('0' * 128))
        channel_result = self.mds.ChannelMetadata.get_channel_with_dirname(
            channel_metadata.dir_name)
        self.assertEqual(channel_metadata, channel_result)

    @db_session
    def test_get_channel_with_id(self):
        """
        Test retrieving a channel with a specific ID
        """
        self.assertIsNone(
            self.mds.ChannelMetadata.get_channel_with_id('a' * 20))
        channel_metadata = self.mds.ChannelMetadata.create_channel(
            'test', 'test')
        self.assertIsNotNone(
            self.mds.ChannelMetadata.get_channel_with_id(
                channel_metadata.public_key))

    @db_session
    def test_add_metadata_to_channel(self):
        """
        Test whether adding new torrents to a channel works as expected
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel(
            'test', 'test')
        original_channel = channel_metadata.to_dict()
        md = self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, status=NEW))
        channel_metadata.commit_channel_torrent()

        self.assertEqual(channel_metadata.id_, ROOT_CHANNEL_ID)
        self.assertLess(original_channel["timestamp"],
                        channel_metadata.timestamp)
        self.assertLess(md.timestamp, channel_metadata.timestamp)
        self.assertEqual(channel_metadata.num_entries, 1)

    @db_session
    def test_add_torrent_to_channel(self):
        """
        Test adding a torrent to your channel
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel(
            'test', 'test')
        tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)
        channel_metadata.add_torrent_to_channel(tdef,
                                                {'description': 'blabla'})
        self.assertTrue(channel_metadata.contents_list)
        self.assertRaises(DuplicateTorrentFileError,
                          channel_metadata.add_torrent_to_channel, tdef, None)

    @db_session
    def test_restore_torrent_in_channel(self):
        """
        Test if the torrent scheduled for deletion is restored/updated after the user tries to re-add it.
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel(
            'test', 'test')
        tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)
        md = channel_metadata.add_torrent_to_channel(tdef, None)

        # Check correct re-add
        md.status = TODELETE
        md_updated = channel_metadata.add_torrent_to_channel(tdef, None)
        self.assertEqual(md.status, COMMITTED)
        self.assertEqual(md_updated, md)
        self.assertTrue(md.has_valid_signature)

        # Check update of torrent properties from a new tdef
        md.status = TODELETE
        new_tracker_address = u'http://tribler.org/announce'
        tdef.torrent_parameters['announce'] = new_tracker_address
        md_updated = channel_metadata.add_torrent_to_channel(tdef, None)
        self.assertEqual(md_updated, md)
        self.assertEqual(md.status, NEW)
        self.assertEqual(md.tracker_info, new_tracker_address)
        self.assertTrue(md.has_valid_signature)
        # In addition, check that the trackers table was properly updated
        self.assertEqual(len(md.health.trackers), 2)

    @db_session
    def test_delete_torrent_from_channel(self):
        """
        Test deleting a torrent from your channel
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel(
            'test', 'test')
        tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)

        # Check that nothing is committed when deleting uncommitted torrent metadata
        channel_metadata.add_torrent_to_channel(tdef, None)
        channel_metadata.delete_torrent(tdef.get_infohash())
        self.assertEqual(0, len(channel_metadata.contents_list))

        # Check append-only deletion process
        channel_metadata.add_torrent_to_channel(tdef, None)
        channel_metadata.commit_channel_torrent()
        self.assertEqual(1, len(channel_metadata.contents_list))
        channel_metadata.delete_torrent(tdef.get_infohash())
        channel_metadata.commit_channel_torrent()
        self.assertEqual(0, len(channel_metadata.contents_list))

    @db_session
    def test_commit_channel_torrent(self):
        channel = self.mds.ChannelMetadata.create_channel('test', 'test')
        tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)
        channel.add_torrent_to_channel(tdef, None)
        # The first run should return the infohash, the second should return None, because nothing was really done
        self.assertTrue(channel.commit_channel_torrent())
        self.assertFalse(channel.commit_channel_torrent())

    @db_session
    def test_consolidate_channel_torrent(self):
        """
        Test completely re-committing a channel
        """
        channel = self.mds.ChannelMetadata.create_channel('test', 'test')
        my_dir = os.path.abspath(
            os.path.join(self.mds.channels_dir, channel.dir_name))
        tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)

        # 1st torrent
        channel.add_torrent_to_channel(tdef, None)
        channel.commit_channel_torrent()

        # 2nd torrent
        md = self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template,
                 public_key=channel.public_key,
                 status=NEW))
        channel.commit_channel_torrent()

        # Delete entry
        channel.delete_torrent(tdef.get_infohash())
        channel.commit_channel_torrent()

        self.assertEqual(1, len(channel.contents_list))
        self.assertEqual(3, len(os.listdir(my_dir)))
        channel.consolidate_channel_torrent()
        self.assertEqual(1, len(os.listdir(my_dir)))

    def test_mdblob_dont_fit_exception(self):
        with db_session:
            md_list = [
                self.mds.TorrentMetadata(title='test' + str(x),
                                         infohash=str(random.getrandbits(160)))
                for x in xrange(0, 1)
            ]
        self.assertRaises(Exception, entries_to_chunk, md_list, chunk_size=1)

    @db_session
    def test_get_channels(self):
        """
        Test whether we can get channels
        """

        # First we create a few channels
        for ind in xrange(10):
            self.mds.ChannelNode._my_key = default_eccrypto.generate_key('low')
            _ = self.mds.ChannelMetadata(title='channel%d' % ind,
                                         subscribed=(ind % 2 == 0),
                                         infohash=str(random.getrandbits(160)))
        channels = self.mds.ChannelMetadata.get_entries(first=1, last=5)
        self.assertEqual(len(channels[0]), 5)
        self.assertEqual(channels[1], 10)

        # Test filtering
        channels = self.mds.ChannelMetadata.get_entries(
            first=1, last=5, query_filter='channel5')
        self.assertEqual(len(channels[0]), 1)

        # Test sorting
        channels = self.mds.ChannelMetadata.get_entries(first=1,
                                                        last=10,
                                                        sort_by='title',
                                                        sort_asc=False)
        self.assertEqual(len(channels[0]), 10)
        self.assertEqual(channels[0][0].title, 'channel9')

        # Test fetching subscribed channels
        channels = self.mds.ChannelMetadata.get_entries(first=1,
                                                        last=10,
                                                        sort_by='title',
                                                        subscribed=True)
        self.assertEqual(len(channels[0]), 5)

    @db_session
    def test_get_channel_name(self):
        infohash = "\x00" * 20
        title = "testchan"
        chan = self.mds.ChannelMetadata(title=title,
                                        infohash=database_blob(infohash))
        dirname = chan.dir_name

        self.assertEqual(
            title,
            self.mds.ChannelMetadata.get_channel_name(dirname, infohash))
        chan.infohash = "\x11" * 20
        self.assertEqual(
            "OLD:" + title,
            self.mds.ChannelMetadata.get_channel_name(dirname, infohash))
        chan.delete()
        self.assertEqual(
            dirname,
            self.mds.ChannelMetadata.get_channel_name(dirname, infohash))

    @db_session
    def check_add(self, torrents_in_dir, errors, recursive):
        TEST_TORRENTS_DIR = os.path.join(
            os.path.abspath(os.path.dirname(os.path.realpath(__file__))), '..',
            '..', '..', 'data', 'linux_torrents')
        chan = self.mds.ChannelMetadata.create_channel(title='testchan')
        torrents, e = chan.add_torrents_from_dir(TEST_TORRENTS_DIR, recursive)
        self.assertEqual(torrents_in_dir, len(torrents))
        self.assertEqual(errors, len(e))
        with db_session:
            q = self.mds.TorrentMetadata.select(
                lambda g: g.metadata_type == REGULAR_TORRENT)
            self.assertEqual(torrents_in_dir - len(e), q.count())

    def test_add_torrents_from_dir(self):
        self.check_add(9, 0, recursive=False)

    def test_add_torrents_from_dir_recursive(self):
        self.check_add(11, 1, recursive=True)
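Taken together, the tests above outline the basic channel workflow: create a channel, stage torrents, commit, and delete. The following is a minimal, hypothetical sketch of that workflow outside the test harness; it assumes an already-initialized MetadataStore instance named mds and a path torrent_path to a .torrent file, and only illustrates the API calls exercised by the tests.

from pony.orm import db_session
from Tribler.Core.TorrentDef import TorrentDef

with db_session:
    channel = mds.ChannelMetadata.create_channel('my channel', 'an example description')
    tdef = TorrentDef.load(torrent_path)          # torrent_path is a hypothetical .torrent file path
    channel.add_torrent_to_channel(tdef, None)    # stage the torrent in the channel
    channel.commit_channel_torrent()              # write a new channel torrent/mdblob to disk
    assert len(channel.contents_list) == 1
    channel.delete_torrent(tdef.get_infohash())   # mark the entry for deletion
    channel.commit_channel_torrent()              # commit the deletion as well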
Example #44
class TriblerLaunchMany(TaskManager):

    def __init__(self):
        """ Called only once (unless we have multiple Sessions) by MainThread """
        super(TriblerLaunchMany, self).__init__()

        self.initComplete = False
        self.registered = False
        self.dispersy = None
        self.ipv8 = None
        self.ipv8_start_time = 0
        self.state_cb_count = 0
        self.previous_active_downloads = []
        self.download_states_lc = None
        self.get_peer_list = []

        self._logger = logging.getLogger(self.__class__.__name__)

        self.downloads = {}
        self.upnp_ports = []

        self.session = None
        self.session_lock = None
        self.sessdoneflag = Event()

        self.shutdownstarttime = None

        # modules
        self.torrent_store = None
        self.metadata_store = None
        self.rtorrent_handler = None
        self.tftp_handler = None
        self.api_manager = None
        self.watch_folder = None
        self.version_check_manager = None
        self.resource_monitor = None

        self.category = None
        self.peer_db = None
        self.torrent_db = None
        self.mypref_db = None
        self.votecast_db = None
        self.channelcast_db = None

        self.search_manager = None
        self.channel_manager = None

        self.video_server = None

        self.mainline_dht = None
        self.ltmgr = None
        self.tracker_manager = None
        self.torrent_checker = None
        self.tunnel_community = None
        self.trustchain_community = None
        self.wallets = {}
        self.popularity_community = None

        self.startup_deferred = Deferred()

        self.credit_mining_manager = None
        self.market_community = None
        self.dht_community = None
        self.payout_manager = None
        self.mds = None

    def register(self, session, session_lock):
        assert isInIOThread()
        if not self.registered:
            self.registered = True

            self.session = session
            self.session_lock = session_lock

            # On Mac, we bundle the root certificate for the SSL validation since Twisted is not using the root
            # certificates provided by the system trust store.
            if sys.platform == 'darwin':
                os.environ['SSL_CERT_FILE'] = os.path.join(get_lib_path(), 'root_certs_mac.pem')

            if self.session.config.get_torrent_store_enabled():
                from Tribler.Core.leveldbstore import LevelDbStore
                self.torrent_store = LevelDbStore(self.session.config.get_torrent_store_dir())
                if not self.torrent_store.get_db():
                    raise RuntimeError("Torrent store (leveldb) is None which should not normally happen")

            if self.session.config.get_metadata_enabled():
                from Tribler.Core.leveldbstore import LevelDbStore
                self.metadata_store = LevelDbStore(self.session.config.get_metadata_store_dir())
                if not self.metadata_store.get_db():
                    raise RuntimeError("Metadata store (leveldb) is None which should not normally happen")

            # torrent collecting: RemoteTorrentHandler
            if self.session.config.get_torrent_collecting_enabled() and self.session.config.get_dispersy_enabled():
                from Tribler.Core.RemoteTorrentHandler import RemoteTorrentHandler
                self.rtorrent_handler = RemoteTorrentHandler(self.session)

            # TODO(emilon): move this to a megacache component or smth
            if self.session.config.get_megacache_enabled():
                from Tribler.Core.CacheDB.SqliteCacheDBHandler import (PeerDBHandler, TorrentDBHandler,
                                                                       MyPreferenceDBHandler, VoteCastDBHandler,
                                                                       ChannelCastDBHandler)
                from Tribler.Core.Category.Category import Category

                self._logger.debug('tlm: Reading Session state from %s', self.session.config.get_state_dir())

                self.category = Category()

                # create DBHandlers
                self.peer_db = PeerDBHandler(self.session)
                self.torrent_db = TorrentDBHandler(self.session)
                self.mypref_db = MyPreferenceDBHandler(self.session)
                self.votecast_db = VoteCastDBHandler(self.session)
                self.channelcast_db = ChannelCastDBHandler(self.session)

                # initializes DBHandlers
                self.peer_db.initialize()
                self.torrent_db.initialize()
                self.mypref_db.initialize()
                self.votecast_db.initialize()
                self.channelcast_db.initialize()

                from Tribler.Core.Modules.tracker_manager import TrackerManager
                self.tracker_manager = TrackerManager(self.session)

            if self.session.config.get_video_server_enabled():
                self.video_server = VideoServer(self.session.config.get_video_server_port(), self.session)
                self.video_server.start()

            # IPv8
            if self.session.config.get_ipv8_enabled():
                from Tribler.pyipv8.ipv8.configuration import get_default_configuration
                ipv8_config = get_default_configuration()
                ipv8_config['port'] = self.session.config.get_dispersy_port()
                ipv8_config['address'] = self.session.config.get_ipv8_address()
                ipv8_config['overlays'] = []
                ipv8_config['keys'] = []  # We load the keys ourselves

                if self.session.config.get_ipv8_bootstrap_override():
                    import Tribler.pyipv8.ipv8.deprecated.community as community_file
                    community_file._DEFAULT_ADDRESSES = [self.session.config.get_ipv8_bootstrap_override()]
                    community_file._DNS_ADDRESSES = []

                self.ipv8 = IPv8(ipv8_config, enable_statistics=self.session.config.get_ipv8_statistics())

                self.session.config.set_anon_proxy_settings(
                    2, ("127.0.0.1", self.session.config.get_tunnel_community_socks5_listen_ports()))
            # Dispersy
            self.tftp_handler = None
            if self.session.config.get_dispersy_enabled():
                from Tribler.dispersy.dispersy import Dispersy
                from Tribler.dispersy.endpoint import MIMEndpoint
                from Tribler.dispersy.endpoint import IPv8toDispersyAdapter

                # set communication endpoint
                if self.session.config.get_ipv8_enabled():
                    dispersy_endpoint = IPv8toDispersyAdapter(self.ipv8.endpoint)
                else:
                    dispersy_endpoint = MIMEndpoint(self.session.config.get_dispersy_port())

                working_directory = unicode(self.session.config.get_state_dir())
                self.dispersy = Dispersy(dispersy_endpoint, working_directory)
                self.dispersy.statistics.enable_debug_statistics(False)

                # register TFTP service
                from Tribler.Core.TFTP.handler import TftpHandler
                self.tftp_handler = TftpHandler(self.session, dispersy_endpoint, "fffffffd".decode('hex'),
                                                block_size=1024)
                self.tftp_handler.initialize()

            # Torrent search
            if self.session.config.get_torrent_search_enabled() or self.session.config.get_channel_search_enabled():
                self.search_manager = SearchManager(self.session)
                self.search_manager.initialize()

        if not self.initComplete:
            self.init()

        self.session.add_observer(self.on_tribler_started, NTFY_TRIBLER, [NTFY_STARTED])
        self.session.notifier.notify(NTFY_TRIBLER, NTFY_STARTED, None)
        return self.startup_deferred

    def on_tribler_started(self, subject, changetype, objectID, *args):
        reactor.callFromThread(self.startup_deferred.callback, None)

    def load_ipv8_overlays(self):
        # Discovery Community
        with open(self.session.config.get_permid_keypair_filename(), 'r') as key_file:
            content = key_file.read()
        content = content[31:-30].replace('\n', '').decode("BASE64")
        peer = Peer(M2CryptoSK(keystring=content))
        discovery_community = DiscoveryCommunity(peer, self.ipv8.endpoint, self.ipv8.network)
        discovery_community.resolve_dns_bootstrap_addresses()
        self.ipv8.overlays.append(discovery_community)
        self.ipv8.strategies.append((RandomChurn(discovery_community), -1))

        if not self.session.config.get_dispersy_enabled():
            self.ipv8.strategies.append((RandomWalk(discovery_community), 20))

        if self.session.config.get_testnet():
            peer = Peer(self.session.trustchain_testnet_keypair)
        else:
            peer = Peer(self.session.trustchain_keypair)

        # TrustChain Community
        if self.session.config.get_trustchain_enabled():
            from Tribler.pyipv8.ipv8.attestation.trustchain.community import TrustChainCommunity, \
                TrustChainTestnetCommunity

            community_cls = TrustChainTestnetCommunity if self.session.config.get_testnet() else TrustChainCommunity
            self.trustchain_community = community_cls(peer, self.ipv8.endpoint,
                                                      self.ipv8.network,
                                                      working_directory=self.session.config.get_state_dir())
            self.ipv8.overlays.append(self.trustchain_community)
            self.ipv8.strategies.append((EdgeWalk(self.trustchain_community), 20))

            tc_wallet = TrustchainWallet(self.trustchain_community)
            self.wallets[tc_wallet.get_identifier()] = tc_wallet

        # DHT Community
        if self.session.config.get_dht_enabled():
            from Tribler.pyipv8.ipv8.dht.discovery import DHTDiscoveryCommunity

            self.dht_community = DHTDiscoveryCommunity(peer, self.ipv8.endpoint, self.ipv8.network)
            self.ipv8.overlays.append(self.dht_community)
            self.ipv8.strategies.append((RandomWalk(self.dht_community), 20))

        # Tunnel Community
        if self.session.config.get_tunnel_community_enabled():

            from Tribler.community.triblertunnel.community import TriblerTunnelCommunity, TriblerTunnelTestnetCommunity
            community_cls = TriblerTunnelTestnetCommunity if self.session.config.get_testnet() else \
                TriblerTunnelCommunity

            if self.mainline_dht:
                dht_provider = MainlineDHTProvider(self.mainline_dht, self.session.config.get_dispersy_port())
            else:
                dht_provider = DHTCommunityProvider(self.dht_community, self.session.config.get_dispersy_port())

            self.tunnel_community = community_cls(peer, self.ipv8.endpoint, self.ipv8.network,
                                                  tribler_session=self.session,
                                                  dht_provider=dht_provider,
                                                  bandwidth_wallet=self.wallets["MB"])
            self.ipv8.overlays.append(self.tunnel_community)
            self.ipv8.strategies.append((RandomWalk(self.tunnel_community), 20))

        # Market Community
        if self.session.config.get_market_community_enabled() and self.session.config.get_dht_enabled():
            from Tribler.community.market.community import MarketCommunity, MarketTestnetCommunity

            community_cls = MarketTestnetCommunity if self.session.config.get_testnet() else MarketCommunity
            self.market_community = community_cls(peer, self.ipv8.endpoint, self.ipv8.network,
                                                  tribler_session=self.session,
                                                  trustchain=self.trustchain_community,
                                                  dht=self.dht_community,
                                                  wallets=self.wallets,
                                                  working_directory=self.session.config.get_state_dir())

            self.ipv8.overlays.append(self.market_community)

            self.ipv8.strategies.append((RandomWalk(self.market_community), 20))

        # Popular Community
        if self.session.config.get_popularity_community_enabled():
            from Tribler.community.popularity.community import PopularityCommunity

            self.popularity_community = PopularityCommunity(peer, self.ipv8.endpoint, self.ipv8.network,
                                                            torrent_db=self.session.lm.torrent_db, session=self.session)

            self.ipv8.overlays.append(self.popularity_community)

            self.ipv8.strategies.append((RandomWalk(self.popularity_community), 20))

            self.popularity_community.start()

    def enable_ipv8_statistics(self):
        if self.session.config.get_ipv8_statistics():
            for overlay in self.ipv8.overlays:
                self.ipv8.endpoint.enable_community_statistics(overlay.get_prefix(), True)

    def load_dispersy_communities(self):
        self._logger.info("tribler: Preparing Dispersy communities...")
        now_time = timemod.time()
        default_kwargs = {'tribler_session': self.session}

        # Search Community
        if self.session.config.get_torrent_search_enabled() and self.dispersy:
            from Tribler.community.search.community import SearchCommunity
            self.dispersy.define_auto_load(SearchCommunity, self.session.dispersy_member, load=True,
                                           kargs=default_kwargs)

        # AllChannel Community
        if self.session.config.get_channel_search_enabled() and self.dispersy:
            from Tribler.community.allchannel.community import AllChannelCommunity
            self.dispersy.define_auto_load(AllChannelCommunity, self.session.dispersy_member, load=True,
                                           kargs=default_kwargs)

        # Channel Community
        if self.session.config.get_channel_community_enabled() and self.dispersy:
            from Tribler.community.channel.community import ChannelCommunity
            self.dispersy.define_auto_load(ChannelCommunity,
                                           self.session.dispersy_member, load=True, kargs=default_kwargs)

        # PreviewChannel Community
        if self.session.config.get_preview_channel_community_enabled() and self.dispersy:
            from Tribler.community.channel.preview import PreviewChannelCommunity
            self.dispersy.define_auto_load(PreviewChannelCommunity,
                                           self.session.dispersy_member, kargs=default_kwargs)

        self._logger.info("tribler: communities are ready in %.2f seconds", timemod.time() - now_time)

    def init(self):
        if self.dispersy:
            from Tribler.dispersy.community import HardKilledCommunity

            self._logger.info("lmc: Starting Dispersy...")

            self.session.readable_status = STATE_STARTING_DISPERSY
            now = timemod.time()
            success = self.dispersy.start(self.session.autoload_discovery)

            diff = timemod.time() - now
            if success:
                self._logger.info("lmc: Dispersy started successfully in %.2f seconds [port: %d]",
                                  diff, self.dispersy.wan_address[1])
            else:
                self._logger.info("lmc: Dispersy failed to start in %.2f seconds", diff)

            self.upnp_ports.append((self.dispersy.wan_address[1], 'UDP'))

            from Tribler.dispersy.crypto import M2CryptoSK
            private_key = self.dispersy.crypto.key_to_bin(
                M2CryptoSK(filename=self.session.config.get_permid_keypair_filename()))
            self.session.dispersy_member = blockingCallFromThread(reactor, self.dispersy.get_member,
                                                                  private_key=private_key)

            blockingCallFromThread(reactor, self.dispersy.define_auto_load, HardKilledCommunity,
                                   self.session.dispersy_member, load=True)

            if self.session.config.get_megacache_enabled():
                self.dispersy.database.attach_commit_callback(self.session.sqlite_db.commit_now)

            # notify dispersy finished loading
            self.session.notifier.notify(NTFY_DISPERSY, NTFY_STARTED, None)

            self.session.readable_status = STATE_LOADING_COMMUNITIES

        # We should load the mainline DHT before loading the IPv8 overlays since the DHT is used for the tunnel overlay.
        if self.session.config.get_mainline_dht_enabled():
            self.session.readable_status = STATE_START_MAINLINE_DHT
            from Tribler.Core.DecentralizedTracking import mainlineDHT
            self.mainline_dht = mainlineDHT.init(('127.0.0.1', self.session.config.get_mainline_dht_port()),
                                                 self.session.config.get_state_dir())
            self.upnp_ports.append((self.session.config.get_mainline_dht_port(), 'UDP'))

        # Wallets
        if self.session.config.get_bitcoinlib_enabled():
            try:
                from Tribler.Core.Modules.wallet.btc_wallet import BitcoinWallet, BitcoinTestnetWallet
                wallet_path = os.path.join(self.session.config.get_state_dir(), 'wallet')
                btc_wallet = BitcoinWallet(wallet_path)
                btc_testnet_wallet = BitcoinTestnetWallet(wallet_path)
                self.wallets[btc_wallet.get_identifier()] = btc_wallet
                self.wallets[btc_testnet_wallet.get_identifier()] = btc_testnet_wallet
            except ImportError:
                self._logger.error("bitcoinlib library cannot be found, Bitcoin wallet not available!")

        if self.session.config.get_dummy_wallets_enabled():
            # For debugging purposes, we create dummy wallets
            dummy_wallet1 = DummyWallet1()
            self.wallets[dummy_wallet1.get_identifier()] = dummy_wallet1

            dummy_wallet2 = DummyWallet2()
            self.wallets[dummy_wallet2.get_identifier()] = dummy_wallet2

        if self.ipv8:
            self.ipv8_start_time = time.time()
            self.load_ipv8_overlays()
            self.enable_ipv8_statistics()

        if self.dispersy:
            self.load_dispersy_communities()

        tunnel_community_ports = self.session.config.get_tunnel_community_socks5_listen_ports()
        self.session.config.set_anon_proxy_settings(2, ("127.0.0.1", tunnel_community_ports))

        if self.session.config.get_channel_search_enabled() and self.session.config.get_dispersy_enabled():
            self.session.readable_status = STATE_INITIALIZE_CHANNEL_MGR
            from Tribler.Core.Modules.channel.channel_manager import ChannelManager
            self.channel_manager = ChannelManager(self.session)
            self.channel_manager.initialize()

        if self.session.config.get_libtorrent_enabled():
            self.session.readable_status = STATE_START_LIBTORRENT
            from Tribler.Core.Libtorrent.LibtorrentMgr import LibtorrentMgr
            self.ltmgr = LibtorrentMgr(self.session)
            self.ltmgr.initialize()
            for port, protocol in self.upnp_ports:
                self.ltmgr.add_upnp_mapping(port, protocol)

        # add task for tracker checking
        if self.session.config.get_torrent_checking_enabled():
            self.session.readable_status = STATE_START_TORRENT_CHECKER
            self.torrent_checker = TorrentChecker(self.session)
            self.torrent_checker.initialize()

        if self.rtorrent_handler and self.session.config.get_dispersy_enabled():
            self.session.readable_status = STATE_START_REMOTE_TORRENT_HANDLER
            self.rtorrent_handler.initialize()

        if self.api_manager:
            self.session.readable_status = STATE_START_API_ENDPOINTS
            self.api_manager.root_endpoint.start_endpoints()

        if self.session.config.get_watch_folder_enabled():
            self.session.readable_status = STATE_START_WATCH_FOLDER
            self.watch_folder = WatchFolder(self.session)
            self.watch_folder.start()

        if self.session.config.get_credit_mining_enabled():
            self.session.readable_status = STATE_START_CREDIT_MINING
            from Tribler.Core.CreditMining.CreditMiningManager import CreditMiningManager
            self.credit_mining_manager = CreditMiningManager(self.session)

        if self.session.config.get_resource_monitor_enabled():
            self.resource_monitor = ResourceMonitor(self.session)
            self.resource_monitor.start()

        if self.session.config.get_version_checker_enabled():
            self.version_check_manager = VersionCheckManager(self.session)
            self.version_check_manager.start()

        if self.session.config.get_chant_enabled():
            channels_dir = os.path.join(self.session.config.get_chant_channels_dir())
            database_path = os.path.join(self.session.config.get_state_dir(), 'sqlite', 'metadata.db')
            self.mds = MetadataStore(database_path, channels_dir, self.session.trustchain_keypair)

        self.session.set_download_states_callback(self.sesscb_states_callback)

        if self.session.config.get_ipv8_enabled() and self.session.config.get_trustchain_enabled():
            self.payout_manager = PayoutManager(self.trustchain_community, self.dht_community)

        self.initComplete = True

    def on_channel_download_finished(self, download, channel_id, finished_deferred=None):
        if download.get_channel_download():
            channel_dirname = os.path.join(self.session.lm.mds.channels_dir, download.get_def().get_name())
            self.mds.process_channel_dir(channel_dirname, channel_id)
            if finished_deferred:
                finished_deferred.callback(download)

    @db_session
    def update_channel(self, payload):
        """
        We received some channel metadata, possibly over the network.
        Validate the signature, update the local metadata store and start downloading this channel if needed.
        :param payload: The channel metadata, in serialized form.
        """
        if not payload.has_valid_signature():
            raise InvalidSignatureException("The signature of the channel metadata is invalid.")

        channel = self.mds.ChannelMetadata.get_channel_with_id(payload.public_key)
        if channel:
            if float2time(payload.timestamp) > channel.timestamp:
                # Update the channel that is already there.
                self._logger.info("Updating channel metadata %s ts %s->%s", str(channel.public_key).encode("hex"),
                                  str(channel.timestamp), str(float2time(payload.timestamp)))
                channel.set(**ChannelMetadataPayload.to_dict(payload))
        else:
            # Add new channel object to DB
            channel = self.mds.ChannelMetadata.from_payload(payload)
            channel.subscribed = True

        if channel.version > channel.local_version:
            self._logger.info("Downloading new channel version %s ver %i->%i", str(channel.public_key).encode("hex"),
                              channel.local_version, channel.version)
        #TODO: handle the case where the local version is the same as the new one and is not seeded
        return self.download_channel(channel)

    def download_channel(self, channel):
        """
        Download a channel with a given infohash and title.
        :param channel: The channel metadata ORM object.
        """
        finished_deferred = Deferred()

        dcfg = DownloadStartupConfig()
        dcfg.set_dest_dir(self.mds.channels_dir)
        dcfg.set_channel_download(True)
        tdef = TorrentDefNoMetainfo(infohash=str(channel.infohash), name=channel.title)
        download = self.session.start_download_from_tdef(tdef, dcfg)
        channel_id = channel.public_key
        download.finished_callback = lambda dl: self.on_channel_download_finished(dl, channel_id, finished_deferred)
        return download, finished_deferred

    def updated_my_channel(self, new_torrent_path):
        """
        Notify the core that we updated our channel.
        :param new_torrent_path: path to the new torrent file
        """
        # Start the new download
        tdef = TorrentDef.load(new_torrent_path)
        dcfg = DownloadStartupConfig()
        dcfg.set_dest_dir(self.mds.channels_dir)
        dcfg.set_channel_download(True)
        self.add(tdef, dcfg)

    def add(self, tdef, dscfg, pstate=None, setupDelay=0, hidden=False,
            share_mode=False, checkpoint_disabled=False):
        """ Called by any thread """
        d = None
        with self.session_lock:
            if not isinstance(tdef, TorrentDefNoMetainfo) and not tdef.is_finalized():
                raise ValueError("TorrentDef not finalized")

            infohash = tdef.get_infohash()

            # Create the destination directory if it does not exist yet
            try:
                if not os.path.isdir(dscfg.get_dest_dir()):
                    os.makedirs(dscfg.get_dest_dir())
            except OSError:
                self._logger.error("Unable to create the download destination directory.")

            if dscfg.get_time_added() == 0:
                dscfg.set_time_added(int(timemod.time()))

            # Check if running or saved on disk
            if infohash in self.downloads:
                self._logger.info("Torrent already exists in the downloads. Infohash:%s", infohash.encode('hex'))

            from Tribler.Core.Libtorrent.LibtorrentDownloadImpl import LibtorrentDownloadImpl
            d = LibtorrentDownloadImpl(self.session, tdef)

            if pstate is None:  # not already resuming
                pstate = self.load_download_pstate_noexc(infohash)
                if pstate is not None:
                    self._logger.debug("tlm: add: pstate is %s %s",
                                       pstate.get('dlstate', 'status'), pstate.get('dlstate', 'progress'))

            # Store in list of Downloads, always.
            self.downloads[infohash] = d
            setup_deferred = d.setup(dscfg, pstate, wrapperDelay=setupDelay,
                                     share_mode=share_mode, checkpoint_disabled=checkpoint_disabled)
            setup_deferred.addCallback(self.on_download_handle_created)

        if d and not hidden and self.session.config.get_megacache_enabled():
            @forceDBThread
            def write_my_pref():
                torrent_id = self.torrent_db.getTorrentID(infohash)
                data = {'destination_path': d.get_dest_dir()}
                self.mypref_db.addMyPreference(torrent_id, data)

            if isinstance(tdef, TorrentDefNoMetainfo):
                self.torrent_db.addOrGetTorrentID(tdef.get_infohash())
                self.torrent_db.updateTorrent(tdef.get_infohash(), name=tdef.get_name_as_unicode())
                self.torrent_db._db.commit_now()
                write_my_pref()
            elif self.rtorrent_handler:
                self.rtorrent_handler.save_torrent(tdef, write_my_pref)
            else:
                self.torrent_db.addExternalTorrent(tdef, extra_info={'status': 'good'})
                write_my_pref()

        return d

    def on_download_handle_created(self, download):
        """
        This method is called when the download handle has been created.
        Immediately checkpoint the download and write the resume data.
        """
        return download.checkpoint()

    def remove(self, d, removecontent=False, removestate=True, hidden=False):
        """ Called by any thread """
        out = None
        with self.session_lock:
            out = d.stop_remove(removestate=removestate, removecontent=removecontent)
            infohash = d.get_def().get_infohash()
            if infohash in self.downloads:
                del self.downloads[infohash]

        if not hidden:
            self.remove_id(infohash)

        if self.tunnel_community:
            self.tunnel_community.on_download_removed(d)

        return out or succeed(None)

    def remove_id(self, infohash):
        @forceDBThread
        def do_db():
            torrent_id = self.torrent_db.getTorrentID(infohash)
            if torrent_id:
                self.mypref_db.deletePreference(torrent_id)

        if self.session.config.get_megacache_enabled():
            do_db()

    def get_downloads(self):
        """ Called by any thread """
        with self.session_lock:
            return self.downloads.values()  # copy, is mutable

    def get_download(self, infohash):
        """ Called by any thread """
        with self.session_lock:
            return self.downloads.get(infohash, None)

    def download_exists(self, infohash):
        with self.session_lock:
            return infohash in self.downloads

    @inlineCallbacks
    def update_download_hops(self, download, new_hops):
        """
        Update the number of hops for a specified download. This can be done at runtime.
        """
        infohash = binascii.hexlify(download.tdef.get_infohash())
        self._logger.info("Updating the amount of hops of download %s", infohash)
        pstate = download.get_persistent_download_config()
        pstate.set('state', 'engineresumedata', (yield download.save_resume_data()))
        yield self.session.remove_download(download)

        # copy the old download_config and change the hop count
        dscfg = download.copy()
        dscfg.set_hops(new_hops)
        # If the user wants to change the hop count to 0, don't automatically bump this up to 1 anymore
        dscfg.set_safe_seeding(False)

        self.session.start_download_from_tdef(download.tdef, dscfg, pstate=pstate)

    def update_trackers(self, infohash, trackers):
        """ Update the trackers for a download.
        :param infohash: infohash of the torrent that needs to be updated
        :param trackers: A list of tracker urls.
        """
        dl = self.get_download(infohash)
        old_def = dl.get_def() if dl else None

        if old_def:
            old_trackers = old_def.get_trackers_as_single_tuple()
            new_trackers = list(set(trackers) - set(old_trackers))
            all_trackers = list(old_trackers) + new_trackers

            if new_trackers:
                # Add new trackers to the download
                dl.add_trackers(new_trackers)

                # Create a new TorrentDef
                if isinstance(old_def, TorrentDefNoMetainfo):
                    new_def = TorrentDefNoMetainfo(old_def.get_infohash(), old_def.get_name(), dl.get_magnet_link())
                else:
                    metainfo = old_def.get_metainfo()
                    if len(all_trackers) > 1:
                        metainfo["announce-list"] = [all_trackers]
                    else:
                        metainfo["announce"] = all_trackers[0]
                    new_def = TorrentDef.load_from_dict(metainfo)

                # Set TorrentDef + checkpoint
                dl.set_def(new_def)
                dl.checkpoint()

                if isinstance(old_def, TorrentDefNoMetainfo):
                    @forceDBThread
                    def update_trackers_db(infohash, new_trackers):
                        torrent_id = self.torrent_db.getTorrentID(infohash)
                        if torrent_id is not None:
                            self.torrent_db.addTorrentTrackerMappingInBatch(torrent_id, new_trackers)
                            self.session.notifier.notify(NTFY_TORRENTS, NTFY_UPDATE, infohash)

                    if self.session.config.get_megacache_enabled():
                        update_trackers_db(infohash, new_trackers)

                elif not isinstance(old_def, TorrentDefNoMetainfo) and self.rtorrent_handler:
                    # Update collected torrents
                    self.rtorrent_handler.save_torrent(new_def)

    #
    # State retrieval
    #
    def stop_download_states_callback(self):
        """
        Stop any download states callback if present.
        """
        if self.is_pending_task_active("download_states_lc"):
            self.cancel_pending_task("download_states_lc")

    def set_download_states_callback(self, user_callback, interval=1.0):
        """
        Set the download state callback. Remove any old callback if it's present.
        """
        self.stop_download_states_callback()
        self._logger.debug("Starting the download state callback with interval %f", interval)
        self.download_states_lc = self.register_task("download_states_lc",
                                                     LoopingCall(self._invoke_states_cb, user_callback))
        self.download_states_lc.start(interval)

    def _invoke_states_cb(self, callback):
        """
        Invoke the download states callback with a list of the download states.
        """
        dslist = []
        for d in self.downloads.values():
            d.set_moreinfo_stats(True in self.get_peer_list or d.get_def().get_infohash() in
                                 self.get_peer_list)
            ds = d.network_get_state(None)
            dslist.append(ds)

        def on_cb_done(new_get_peer_list):
            self.get_peer_list = new_get_peer_list

        return deferToThread(callback, dslist).addCallback(on_cb_done)

    def sesscb_states_callback(self, states_list):
        """
        This method is called periodically (every second) with a list of the download states of the active downloads.
        """
        self.state_cb_count += 1

        # Check to see if a download has finished
        new_active_downloads = []
        do_checkpoint = False
        seeding_download_list = []

        for ds in states_list:
            state = ds.get_status()
            download = ds.get_download()
            tdef = download.get_def()
            safename = tdef.get_name_as_unicode()
            infohash = tdef.get_infohash()

            if state == DLSTATUS_DOWNLOADING:
                new_active_downloads.append(infohash)
            elif state == DLSTATUS_STOPPED_ON_ERROR:
                self._logger.error("Error during download: %s", repr(ds.get_error()))
                if self.download_exists(infohash):
                    self.get_download(infohash).stop()
                    self.session.notifier.notify(NTFY_TORRENT, NTFY_ERROR, infohash, repr(ds.get_error()))
            elif state == DLSTATUS_SEEDING:
                seeding_download_list.append({u'infohash': infohash,
                                              u'download': download})

                if infohash in self.previous_active_downloads:
                    self.session.notifier.notify(NTFY_TORRENT, NTFY_FINISHED, infohash, safename)
                    do_checkpoint = True
                elif download.get_hops() == 0 and download.get_safe_seeding():
                    # Re-add the download with anonymity enabled
                    hops = self.session.config.get_default_number_hops()
                    self.update_download_hops(download, hops)

            # Check the peers of this download every five seconds and add them to the payout manager when
            # this peer runs a Tribler instance
            if self.state_cb_count % 5 == 0 and download.get_hops() == 0 and self.payout_manager:
                for peer in download.get_peerlist():
                    if peer["extended_version"].startswith('Tribler'):
                        self.payout_manager.update_peer(peer["id"].decode('hex'), infohash, peer["dtotal"])

        self.previous_active_downloads = new_active_downloads
        if do_checkpoint:
            self.session.checkpoint_downloads()

        if self.state_cb_count % 4 == 0:
            if self.tunnel_community:
                self.tunnel_community.monitor_downloads(states_list)
            if self.credit_mining_manager:
                self.credit_mining_manager.monitor_downloads(states_list)

        return []

    #
    # Persistence methods
    #
    def load_checkpoint(self):
        """ Called by any thread """

        def do_load_checkpoint():
            with self.session_lock:
                for i, filename in enumerate(iglob(os.path.join(self.session.get_downloads_pstate_dir(), '*.state'))):
                    self.resume_download(filename, setupDelay=i * 0.1)

        if self.initComplete:
            do_load_checkpoint()
        else:
            self.register_task("load_checkpoint", reactor.callLater(1, do_load_checkpoint))

    def load_download_pstate_noexc(self, infohash):
        """ Called by any thread, assume session_lock already held """
        try:
            basename = binascii.hexlify(infohash) + '.state'
            filename = os.path.join(self.session.get_downloads_pstate_dir(), basename)
            if os.path.exists(filename):
                return self.load_download_pstate(filename)
            else:
                self._logger.info("%s not found", basename)

        except Exception:
            self._logger.exception("Exception while loading pstate: %s", infohash)

    def resume_download(self, filename, setupDelay=0):
        tdef = dscfg = pstate = None

        try:
            pstate = self.load_download_pstate(filename)

            # SWIFTPROC
            metainfo = pstate.get('state', 'metainfo')
            if 'infohash' in metainfo:
                tdef = TorrentDefNoMetainfo(metainfo['infohash'], metainfo['name'], metainfo.get('url', None))
            else:
                tdef = TorrentDef.load_from_dict(metainfo)

            if pstate.has_option('download_defaults', 'saveas') and \
                    isinstance(pstate.get('download_defaults', 'saveas'), tuple):
                pstate.set('download_defaults', 'saveas', pstate.get('download_defaults', 'saveas')[-1])

            dscfg = DownloadStartupConfig(pstate)

        except:
            # pstate is invalid or non-existing
            _, file = os.path.split(filename)

            infohash = binascii.unhexlify(file[:-6])

            torrent_data = self.torrent_store.get(infohash)
            if torrent_data:
                try:
                    tdef = TorrentDef.load_from_memory(torrent_data)
                    defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
                    dscfg = defaultDLConfig.copy()

                    if self.mypref_db is not None:
                        dest_dir = self.mypref_db.getMyPrefStatsInfohash(infohash)
                        if dest_dir and os.path.isdir(dest_dir):
                            dscfg.set_dest_dir(dest_dir)
                except ValueError:
                    self._logger.warning("tlm: torrent data invalid")

        if pstate is not None:
            has_resume_data = pstate.get('state', 'engineresumedata') is not None
            self._logger.debug("tlm: load_checkpoint: resumedata %s",
                               'len %s ' % len(pstate.get('state', 'engineresumedata')) if has_resume_data else 'None')

        if tdef and dscfg:
            if dscfg.get_dest_dir() != '':  # removed torrent ignoring
                try:
                    if self.download_exists(tdef.get_infohash()):
                        self._logger.info("tlm: not resuming checkpoint because download has already been added")
                    elif dscfg.get_credit_mining() and not self.session.config.get_credit_mining_enabled():
                        self._logger.info("tlm: not resuming checkpoint since token mining is disabled")
                    else:
                        self.add(tdef, dscfg, pstate, setupDelay=setupDelay)
                except Exception as e:
                    self._logger.exception("tlm: load check_point: exception while adding download %s", tdef)
            else:
                self._logger.info("tlm: removing checkpoint %s destdir is %s", filename, dscfg.get_dest_dir())
                os.remove(filename)
        else:
            self._logger.info("tlm: could not resume checkpoint %s %s %s", filename, tdef, dscfg)

    def checkpoint_downloads(self):
        """
        Checkpoints all running downloads in Tribler.
        Even if the list of Downloads changes in the meantime, this is not a problem.
        For removals, the local list will still hold a reference to the download, and additions are no problem
        (they just won't be included in the list of states returned via the callback).
        """
        downloads = self.downloads.values()
        deferred_list = []
        self._logger.debug("tlm: checkpointing %s downloads", len(downloads))
        for download in downloads:
            deferred_list.append(download.checkpoint())

        return DeferredList(deferred_list)

    def shutdown_downloads(self):
        """
        Shutdown all downloads in Tribler.
        """
        for download in self.downloads.values():
            download.stop()

    def remove_pstate(self, infohash):
        def do_remove():
            if not self.download_exists(infohash):
                dlpstatedir = self.session.get_downloads_pstate_dir()

                # Remove checkpoint
                hexinfohash = binascii.hexlify(infohash)
                try:
                    basename = hexinfohash + '.state'
                    filename = os.path.join(dlpstatedir, basename)
                    self._logger.debug("remove pstate: removing dlcheckpoint entry %s", filename)
                    if os.access(filename, os.F_OK):
                        os.remove(filename)
                except:
                    # Show must go on
                    self._logger.exception("Could not remove state")
            else:
                self._logger.warning("remove pstate: download is back, restarted? Canceling removal! %s",
                                      repr(infohash))
        reactor.callFromThread(do_remove)

    @inlineCallbacks
    def early_shutdown(self):
        """ Called as soon as Session shutdown is initiated. Used to start
        shutdown tasks that take some time and that can run in parallel
        with checkpointing, etc.
        :returns a Deferred that will fire once all dependencies acknowledge they have shutdown.
        """
        self._logger.info("tlm: early_shutdown")

        self.shutdown_task_manager()

        # Note: session_lock not held
        self.shutdownstarttime = timemod.time()
        if self.credit_mining_manager:
            yield self.credit_mining_manager.shutdown()
        self.credit_mining_manager = None

        if self.torrent_checker:
            yield self.torrent_checker.shutdown()
        self.torrent_checker = None

        if self.channel_manager:
            yield self.channel_manager.shutdown()
        self.channel_manager = None

        if self.search_manager:
            yield self.search_manager.shutdown()
        self.search_manager = None

        if self.rtorrent_handler:
            yield self.rtorrent_handler.shutdown()
        self.rtorrent_handler = None

        if self.video_server:
            yield self.video_server.shutdown_server()
        self.video_server = None

        if self.version_check_manager:
            self.version_check_manager.stop()
        self.version_check_manager = None

        if self.resource_monitor:
            self.resource_monitor.stop()
        self.resource_monitor = None

        self.tracker_manager = None

        if self.tftp_handler is not None:
            yield self.tftp_handler.shutdown()
        self.tftp_handler = None

        if self.tunnel_community and self.trustchain_community:
            # We unload these overlays manually since the TrustChain has to be unloaded after the tunnel overlay.
            tunnel_community = self.tunnel_community
            self.tunnel_community = None
            yield self.ipv8.unload_overlay(tunnel_community)
            trustchain_community = self.trustchain_community
            self.trustchain_community = None
            yield self.ipv8.unload_overlay(trustchain_community)

        if self.dispersy:
            self._logger.info("lmc: Shutting down Dispersy...")
            now = timemod.time()
            try:
                success = yield self.dispersy.stop()
            except:
                print_exc()
                success = False

            diff = timemod.time() - now
            if success:
                self._logger.info("lmc: Dispersy successfully shutdown in %.2f seconds", diff)
            else:
                self._logger.info("lmc: Dispersy failed to shutdown in %.2f seconds", diff)

        if self.ipv8:
            yield self.ipv8.stop(stop_reactor=False)

        if self.metadata_store is not None:
            yield self.metadata_store.close()
        self.metadata_store = None

        if self.channelcast_db is not None:
            yield self.channelcast_db.close()
        self.channelcast_db = None

        if self.votecast_db is not None:
            yield self.votecast_db.close()
        self.votecast_db = None

        if self.mypref_db is not None:
            yield self.mypref_db.close()
        self.mypref_db = None

        if self.torrent_db is not None:
            yield self.torrent_db.close()
        self.torrent_db = None

        if self.peer_db is not None:
            yield self.peer_db.close()
        self.peer_db = None

        if self.mainline_dht is not None:
            from Tribler.Core.DecentralizedTracking import mainlineDHT
            yield mainlineDHT.deinit(self.mainline_dht)
        self.mainline_dht = None

        if self.torrent_store is not None:
            yield self.torrent_store.close()
        self.torrent_store = None

        if self.watch_folder is not None:
            yield self.watch_folder.stop()
        self.watch_folder = None

        # We close the API manager as late as possible during shutdown.
        if self.api_manager is not None:
            yield self.api_manager.stop()
        self.api_manager = None

    def network_shutdown(self):
        try:
            self._logger.info("tlm: network_shutdown")

            ts = enumerate_threads()
            self._logger.info("tlm: Number of threads still running %d", len(ts))
            for t in ts:
                self._logger.info("tlm: Thread still running=%s, daemon=%s, instance=%s", t.getName(), t.isDaemon(), t)
        except:
            print_exc()

        # Stop network thread
        self.sessdoneflag.set()

        # Shutdown libtorrent session after checkpoints have been made
        if self.ltmgr is not None:
            self.ltmgr.shutdown()
            self.ltmgr = None

    def save_download_pstate(self, infohash, pstate):
        """ Called by network thread """

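        # Remember the latest persistent state so the download can be restarted later,
        # then ask libtorrent (via an anonymous task) to produce fresh resume data.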
        self.downloads[infohash].pstate_for_restart = pstate

        self.register_anonymous_task("save_pstate", self.downloads[infohash].save_resume_data())

    def load_download_pstate(self, filename):
        """ Called by any thread """
        pstate = CallbackConfigParser()
        pstate.read_file(filename)
        return pstate
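
The methods above form the teardown half of Tribler's core session manager: downloads are stopped and checkpointed, the individual managers and communities are shut down in early_shutdown(), and network_shutdown() finally stops the network thread and the libtorrent session. Below is a minimal sketch of one plausible way a caller might sequence them; the shutdown_session helper and the lm variable are hypothetical, only the three methods shown above come from the snippet itself.

from twisted.internet.defer import inlineCallbacks


@inlineCallbacks
def shutdown_session(lm):
    # Hypothetical driver; 'lm' is assumed to be the session manager whose methods are shown above.
    lm.shutdown_downloads()       # stop every active download
    yield lm.early_shutdown()     # Deferred fires once all managers/communities have shut down
    lm.network_shutdown()         # stop the network thread and libtorrent last
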
Example #45
class TestTorrentMetadata(TriblerCoreTest):
    """
    Contains various tests for the torrent metadata type.
    """
    @inlineCallbacks
    def setUp(self):
        yield super(TestTorrentMetadata, self).setUp()
        self.torrent_template = {
            "title": "",
            "infohash": "",
            "torrent_date": datetime(1970, 1, 1),
            "tags": "video"
        }
        self.my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(
            os.path.join(self.session_base_dir, 'test.db'),
            self.session_base_dir, self.my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestTorrentMetadata, self).tearDown()

    @db_session
    def test_serialization(self):
        """
        Test converting torrent metadata to serialized data
        """
        torrent_metadata = self.mds.TorrentMetadata.from_dict({})
        self.assertTrue(torrent_metadata.serialized())

    @db_session
    def test_get_magnet(self):
        """
        Test converting torrent metadata to a magnet link
        """
        torrent_metadata = self.mds.TorrentMetadata.from_dict({})
        self.assertTrue(torrent_metadata.get_magnet())

    @db_session
    def test_search_keyword(self):
        """
        Test searching in a database with some torrent metadata inserted
        """
        torrent1 = self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="foo bar 123", tags="video"))
        torrent2 = self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="eee 123", tags="video"))
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="xoxoxo bar", tags="video"))
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="xoxoxo bar", tags="audio"))

        # Search for torrents with the keyword 'foo', it should return one result
        results = self.mds.TorrentMetadata.search_keyword("foo")
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].rowid, torrent1.rowid)

        # Search for torrents with the keyword 'eee', it should return one result
        results = self.mds.TorrentMetadata.search_keyword("eee")
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].rowid, torrent2.rowid)

        # Search for torrents with the keyword '123', it should return two results
        results = self.mds.TorrentMetadata.search_keyword("123")
        self.assertEqual(len(results), 2)

        # Search for torrents with the keyword 'video', it should return three results
        results = self.mds.TorrentMetadata.search_keyword("video")
        self.assertEqual(len(results), 3)

    def test_search_empty_query(self):
        """
        Test whether an empty query returns nothing
        """
        self.assertFalse(self.mds.TorrentMetadata.search_keyword(None))

    @db_session
    def test_unicode_search(self):
        """
        Test searching in the database with unicode characters
        """
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title=u"я маленький апельсин"))
        results = self.mds.TorrentMetadata.search_keyword(u"маленький")
        self.assertEqual(1, len(results))

    @db_session
    def test_wildcard_search(self):
        """
        Test searching in the database with a wildcard
        """
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="foobar 123"))
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="foobla 123"))
        self.assertEqual(0, len(self.mds.TorrentMetadata.search_keyword("*")))
        self.assertEqual(
            1, len(self.mds.TorrentMetadata.search_keyword("foobl*")))
        self.assertEqual(2,
                         len(self.mds.TorrentMetadata.search_keyword("foo*")))

    @db_session
    def test_stemming_search(self):
        """
        Test searching in the database with stemmed words
        """
        torrent = self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="mountains sheep", tags="video"))

        # Search with the word 'mountain' should return the torrent with 'mountains' in the title
        results = self.mds.TorrentMetadata.search_keyword("mountain")
        self.assertEqual(torrent.rowid, results[0].rowid)

        # Search with the word 'sheeps' should return the torrent with 'sheep' in the title
        results = self.mds.TorrentMetadata.search_keyword("sheeps")
        self.assertEqual(torrent.rowid, results[0].rowid)

    @db_session
    def test_get_autocomplete_terms(self):
        """
        Test fetching autocompletion terms from the database
        """
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="mountains sheep", tags="video"))
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template,
                 title="regular sheepish guy",
                 tags="video"))

        autocomplete_terms = self.mds.TorrentMetadata.get_auto_complete_terms(
            "shee", 10)
        self.assertIn('sheep', autocomplete_terms)

        autocomplete_terms = self.mds.TorrentMetadata.get_auto_complete_terms(
            "shee", 10)
        self.assertIn('sheepish', autocomplete_terms)

    @db_session
    def test_get_autocomplete_terms_max(self):
        """
        Test fetching autocompletion terms from the database with a maximum number of terms
        """
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template,
                 title="mountains sheeps wolf",
                 tags="video"))
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, title="lakes sheep", tags="video"))
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template,
                 title="regular sheepish guy",
                 tags="video"))

        autocomplete_terms = self.mds.TorrentMetadata.get_auto_complete_terms(
            "sheep", 2)
        self.assertEqual(len(autocomplete_terms), 2)
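
The tests above exercise MetadataStore's full-text search features: plain keywords, unicode, wildcards, stemming and autocompletion. A condensed standalone sketch of the same calls follows; the import paths and the temporary channels directory are assumptions, while every method used appears in the tests above.

import tempfile
from datetime import datetime

from pony.orm import db_session

# Import paths below are assumptions based on the test context above.
from Tribler.Core.Modules.MetadataStore.store import MetadataStore
from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto

key = default_eccrypto.generate_key(u"curve25519")
channels_dir = tempfile.mkdtemp()                     # stand-in for the session's channels directory
mds = MetadataStore(':memory:', channels_dir, key)    # ':memory:' keeps the SQLite database in RAM

with db_session:
    mds.TorrentMetadata.from_dict(dict(title="foo bar 123", tags="video",
                                       infohash="", torrent_date=datetime(1970, 1, 1)))
    print(len(mds.TorrentMetadata.search_keyword("foo")))         # keyword match -> 1
    print(len(mds.TorrentMetadata.search_keyword("foo*")))        # wildcard prefix -> 1
    print(mds.TorrentMetadata.get_auto_complete_terms("ba", 10))  # e.g. ['bar']

mds.shutdown()
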
Example #46
    def setUp(self):
        yield super(TestMetadata, self).setUp()
        self.my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(':memory:', self.session_base_dir, self.my_key)
Example #47
class TestChannelMetadata(TriblerCoreTest):
    """
    Contains various tests for the channel metadata type.
    """
    DATA_DIR = os.path.join(
        os.path.abspath(os.path.dirname(os.path.realpath(__file__))), '..',
        '..', 'data')
    CHANNEL_METADATA = os.path.join(DATA_DIR, 'sample_channel',
                                    'channel.mdblob')

    @inlineCallbacks
    def setUp(self):
        yield super(TestChannelMetadata, self).setUp()
        self.torrent_template = {
            "title": "",
            "infohash": "",
            "torrent_date": datetime(1970, 1, 1),
            "tags": "video"
        }
        self.my_key = default_eccrypto.generate_key(u"curve25519")
        self.mds = MetadataStore(
            os.path.join(self.session_base_dir, 'test.db'),
            self.session_base_dir, self.my_key)

    @inlineCallbacks
    def tearDown(self):
        self.mds.shutdown()
        yield super(TestChannelMetadata, self).tearDown()

    @staticmethod
    def get_sample_torrent_dict(my_key):
        """
        Utility method to return a dictionary with torrent information.
        """
        return {
            "infohash": buffer("1" * 20),
            "size": 123,
            "timestamp": datetime.utcnow(),
            "torrent_date": datetime.utcnow(),
            "tags": "bla",
            "tc_pointer": 123,
            "public_key": buffer(my_key.pub().key_to_bin()),
            "title": "lalala"
        }

    @staticmethod
    def get_sample_channel_dict(my_key):
        """
        Utility method to return a dictionary with channel information.
        """
        return dict(TestChannelMetadata.get_sample_torrent_dict(my_key),
                    votes=222,
                    subscribed=False,
                    version=1)

    @db_session
    def test_serialization(self):
        """
        Test converting channel metadata to serialized data
        """
        channel_metadata = self.mds.ChannelMetadata.from_dict({})
        self.assertTrue(channel_metadata.serialized())

    @db_session
    def test_list_contents(self):
        """
        Test whether a correct list with channel content is returned from the database
        """
        pub_key1 = default_eccrypto.generate_key('low').pub().key_to_bin()
        pub_key2 = default_eccrypto.generate_key('low').pub().key_to_bin()

        channel1 = self.mds.ChannelMetadata(public_key=pub_key1)
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, public_key=pub_key1))

        channel2 = self.mds.ChannelMetadata(public_key=pub_key2)
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, public_key=pub_key2))
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, public_key=pub_key2))

        self.assertEqual(1, len(channel1.contents_list))
        self.assertEqual(2, len(channel2.contents_list))
        self.assertEqual(2, channel2.contents_len)

    @db_session
    def test_create_channel(self):
        """
        Test whether creating a channel works as expected
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel(
            'test', 'test')

        self.assertTrue(channel_metadata)
        self.assertRaises(DuplicateChannelNameError,
                          self.mds.ChannelMetadata.create_channel, 'test',
                          'test')

    @db_session
    def test_update_metadata(self):
        """
        Test whether metadata is correctly updated and signed
        """
        sample_channel_dict = TestChannelMetadata.get_sample_channel_dict(
            self.my_key)
        channel_metadata = self.mds.ChannelMetadata.from_dict(
            sample_channel_dict)
        self.mds.TorrentMetadata.from_dict(self.torrent_template)
        update_dict = {"tc_pointer": 222, "tags": "eee", "title": "qqq"}
        channel_metadata.update_metadata(update_dict=update_dict)
        self.assertDictContainsSubset(update_dict, channel_metadata.to_dict())

    @db_session
    def test_process_channel_metadata_payload(self):
        """
        Test whether a channel metadata payload is processed correctly
        """
        payload = ChannelMetadataPayload.from_file(self.CHANNEL_METADATA)
        channel_metadata = self.mds.ChannelMetadata.process_channel_metadata_payload(
            payload)
        self.assertTrue(channel_metadata)

        # Check that we do not add it again
        self.mds.ChannelMetadata.process_channel_metadata_payload(payload)
        self.assertEqual(len(self.mds.ChannelMetadata.select()), 1)

        # Check that we always take the latest version
        channel_metadata.version -= 1
        self.assertEqual(channel_metadata.version, 2)
        channel_metadata = self.mds.ChannelMetadata.process_channel_metadata_payload(
            payload)
        self.assertEqual(channel_metadata.version, 3)
        self.assertEqual(len(self.mds.ChannelMetadata.select()), 1)

    @db_session
    def test_get_dirname(self):
        """
        Test whether the correct directory name is returned for channel metadata
        """
        sample_channel_dict = TestChannelMetadata.get_sample_channel_dict(
            self.my_key)
        channel_metadata = self.mds.ChannelMetadata.from_dict(
            sample_channel_dict)

        self.assertEqual(len(channel_metadata.dir_name), 60)

    @db_session
    def test_get_channel_with_id(self):
        """
        Test retrieving a channel with a specific ID
        """
        self.assertIsNone(
            self.mds.ChannelMetadata.get_channel_with_id('a' * 20))
        channel_metadata = self.mds.ChannelMetadata.create_channel(
            'test', 'test')
        self.assertIsNotNone(
            self.mds.ChannelMetadata.get_channel_with_id(
                channel_metadata.public_key))

    @db_session
    def test_add_metadata_to_channel(self):
        """
        Test whether adding new torrents to a channel works as expected
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel(
            'test', 'test')
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template,
                 public_key=channel_metadata.public_key))
        channel_metadata.commit_channel_torrent()

        self.assertEqual(channel_metadata.version, 1)
        self.assertEqual(channel_metadata.size, 1)

    @db_session
    def test_add_torrent_to_channel(self):
        """
        Test adding a torrent to your channel
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel(
            'test', 'test')
        tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)
        channel_metadata.add_torrent_to_channel(tdef, None)
        self.assertTrue(channel_metadata.contents_list)
        self.assertRaises(DuplicateTorrentFileError,
                          channel_metadata.add_torrent_to_channel, tdef, None)

    @db_session
    def test_delete_torrent_from_channel(self):
        """
        Test deleting a torrent from your channel
        """
        channel_metadata = self.mds.ChannelMetadata.create_channel(
            'test', 'test')
        tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)

        # Check that nothing is committed when deleting uncommitted torrent metadata
        channel_metadata.add_torrent_to_channel(tdef, None)
        channel_metadata.delete_torrent_from_channel(tdef.get_infohash())
        self.assertEqual(0, len(channel_metadata.contents_list))

        # Check append-only deletion process
        channel_metadata.add_torrent_to_channel(tdef, None)
        channel_metadata.commit_channel_torrent()
        self.assertEqual(1, len(channel_metadata.contents_list))
        channel_metadata.delete_torrent_from_channel(tdef.get_infohash())
        channel_metadata.commit_channel_torrent()
        self.assertEqual(0, len(channel_metadata.contents_list))

    @db_session
    def test_consolidate_channel_torrent(self):
        """
        Test completely re-commit your channel
        """
        channel = self.mds.ChannelMetadata.create_channel('test', 'test')
        my_dir = os.path.abspath(
            os.path.join(self.mds.channels_dir, channel.dir_name))
        tdef = TorrentDef.load(TORRENT_UBUNTU_FILE)

        # 1st torrent
        channel.add_torrent_to_channel(tdef, None)
        channel.commit_channel_torrent()

        # 2nd torrent
        self.mds.TorrentMetadata.from_dict(
            dict(self.torrent_template, public_key=channel.public_key))
        channel.commit_channel_torrent()

        # Delete entry
        channel.delete_torrent_from_channel(tdef.get_infohash())
        channel.commit_channel_torrent()

        self.assertEqual(1, len(channel.contents_list))
        self.assertEqual(3, len(os.listdir(my_dir)))
        channel.consolidate_channel_torrent()
        self.assertEqual(1, len(os.listdir(my_dir)))

    def test_mdblob_dont_fit_exception(self):
        with db_session:
            md_list = [
                self.mds.TorrentMetadata(title='test' + str(x))
                for x in xrange(0, 1)
            ]
        self.assertRaises(Exception, entries_to_chunk, md_list, chunk_size=1)
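
Taken together, the tests above describe the lifecycle of a self-managed channel: create it, add or delete torrents, commit the changes into a new channel torrent, and consolidate everything into a single fresh mdblob. A minimal sketch of that flow follows; it only re-uses calls demonstrated in the tests, and it assumes the same context as the test module (an mds MetadataStore plus the TorrentDef and datetime imports), with a placeholder .torrent path.

from pony.orm import db_session

with db_session:
    channel = mds.ChannelMetadata.create_channel('my channel', 'a channel for testing')

    # First entry goes in via a TorrentDef, as in test_add_torrent_to_channel
    tdef = TorrentDef.load('/path/to/some.torrent')   # placeholder path
    channel.add_torrent_to_channel(tdef, None)
    channel.commit_channel_torrent()

    # Further entries can be added as metadata tied to the channel's public key
    mds.TorrentMetadata.from_dict(dict(title="another torrent", tags="video",
                                       infohash="", torrent_date=datetime(1970, 1, 1),
                                       public_key=channel.public_key))
    channel.commit_channel_torrent()

    # Deletion is append-only until the channel is consolidated into one fresh mdblob
    channel.delete_torrent_from_channel(tdef.get_infohash())
    channel.commit_channel_torrent()
    channel.consolidate_channel_torrent()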