Example #1
    def on_save_resume_data_alert(self, alert):
        """
        Callback for the alert that contains the resume data of a specific download.
        This resume data will be written to a file on disk.
        """
        self._logger.debug(f'On save resume data alert: {alert}')
        if self.checkpoint_disabled:
            return

        resume_data = alert.resume_data
        # Make save_path relative if the torrent is saved in the Tribler state directory
        if self.state_dir and b'save_path' in resume_data:
            save_path = Path(resume_data[b'save_path'].decode('utf8'))
            if save_path.exists():
                resume_data[b'save_path'] = str(save_path.normalize_to(self.state_dir))

        if isinstance(self.tdef, TorrentDefNoMetainfo):
            metainfo = {
                'infohash': self.tdef.get_infohash(),
                'name': self.tdef.get_name_as_unicode(),
                'url': self.tdef.get_url()
            }
        else:
            metainfo = self.tdef.get_metainfo()

        self.config.set_metainfo(metainfo)
        self.config.set_engineresumedata(resume_data)

        # Save it to file
        basename = hexlify(resume_data[b'info-hash']) + '.conf'
        filename = self.dlmgr.get_checkpoint_dir() / basename
        # Store the name for debugging purposes
        self.config.config['download_defaults']['name'] = self.tdef.get_name_as_unicode()
        self.config.write(str(filename))
        self._logger.debug('Saving download config to file %s', filename)
Example #2
    async def test_regenerate_channel_torrent(self):
        with db_session:
            chan = self.generate_personal_channel()
            chan.commit_channel_torrent()
            chan_pk, chan_id = chan.public_key, chan.id_
            channel_dir = Path(self.mock_session.mds.ChannelMetadata._channels_dir) / Path(chan.dirname)
            for f in channel_dir.iterdir():
                f.unlink()

        # Test trying to regenerate a non-existing channel
        self.assertIsNone(await self.chanman.regenerate_channel_torrent(chan_pk, chan_id + 1))

        # Mock existing downloads removal-related functions
        self.mock_session.dlmgr.get_downloads_by_name = lambda *_: [Mock()]
        downloads_to_remove = []

        async def mock_remove_download(download_obj, **_):
            downloads_to_remove.append(download_obj)

        self.mock_session.dlmgr.remove_download = mock_remove_download

        # Test regenerating an empty channel
        self.mock_session.mds.ChannelMetadata.consolidate_channel_torrent = lambda *_: None
        self.assertIsNone(await self.chanman.regenerate_channel_torrent(chan_pk, chan_id))
        self.assertEqual(1, len(downloads_to_remove))

        # Test regenerating a non-empty channel
        self.chanman.updated_my_channel = Mock()
        self.mock_session.mds.ChannelMetadata.consolidate_channel_torrent = lambda *_: Mock()
        with patch("tribler_core.modules.libtorrent.torrentdef.TorrentDef.load_from_dict"):
            await self.chanman.regenerate_channel_torrent(chan_pk, chan_id)
            self.chanman.updated_my_channel.assert_called_once()
Example #3
    async def test_move_storage(self):
        """
        Test that the move_storage method passes the new location on to libtorrent as expected
        """
        result = []

        def mock_move(s):
            result.append(s)

        tdef = self.create_tdef()
        dl = Download(self.session, tdef)
        dl.setup()
        dl.handle = Mock()
        dl.handle.move_storage = mock_move

        dl.move_storage(Path("some_path"))
        self.assertEqual("some_path", result[0])
        self.assertEqual("some_path", dl.config.get_dest_dir().name)
        await dl.shutdown()

        # Check the same thing, this time for TorrentDefNoMetainfo
        dl = Download(self.session,
                      TorrentDefNoMetainfo(random_infohash(), "some_torrent"))
        dl.setup()
        dl.move_storage(Path("some_path"))
        self.assertEqual("some_path", result[0])
        self.assertEqual("some_path", dl.config.get_dest_dir().name)
        await dl.shutdown()
Example #4
def _existing_files(path_list):
    for path in path_list:
        path = Path(path)
        if not path.exists():
            raise OSError(f'Path does not exist: {path}')
        elif path.is_file():
            yield path
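
A small usage sketch: existing regular files are yielded, existing directories are silently skipped, and a missing path raises OSError (the file names below are hypothetical):

# Hypothetical paths; adjust to files that actually exist on disk
found = list(_existing_files(['setup.py', 'docs']))
# -> [Path('setup.py')] if 'setup.py' is a file and 'docs' is an existing directory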
Example #5
def test_skip_processing_of_received_personal_channel_torrents(metadata_store):
    """
    Test that personal torrent is ignored by default when processing the torrent metadata payload
    """
    channel = metadata_store.ChannelMetadata.create_channel('testchan')
    torrent_md = metadata_store.TorrentMetadata(origin_id=channel.id_,
                                                title='test',
                                                status=NEW,
                                                infohash=random_infohash())
    channel.commit_channel_torrent()
    torrent_md.delete()

    channel_dir = Path(
        metadata_store.ChannelMetadata._channels_dir) / channel.dirname
    assert os.listdir(Path.fix_win_long_file(channel_dir))

    # By default, personal channel torrent metadata processing is skipped so there should be no torrents
    # added to the channel
    channel.local_version = 0
    metadata_store.process_channel_dir(channel_dir, channel.public_key,
                                       channel.id_)
    assert not channel.contents

    # Enable processing of personal channel torrent metadata
    channel.local_version = 0
    metadata_store.process_channel_dir(channel_dir,
                                       channel.public_key,
                                       channel.id_,
                                       skip_personal_metadata_payload=False)
    assert len(channel.contents) == 1
Example #6
def get_mdblob_sequence_number(filename):
    filepath = Path(filename)
    if filepath.suffixes == [BLOB_EXTENSION]:
        return int(filepath.stem)
    if filepath.suffixes == [BLOB_EXTENSION, '.lz4']:
        return int(Path(filepath.stem).stem)
    return None
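
Given the two branches above, the function maps mdblob file names to their sequence numbers and returns None for anything else. A minimal usage sketch, assuming BLOB_EXTENSION equals '.mdblob' (the constant is defined elsewhere in the codebase):

# Illustrative only; BLOB_EXTENSION is assumed to be '.mdblob' here
assert get_mdblob_sequence_number('42.mdblob') == 42        # plain blob
assert get_mdblob_sequence_number('42.mdblob.lz4') == 42    # lz4-compressed blob
assert get_mdblob_sequence_number('readme.txt') is None     # not a blob file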
Example #7
def get_base_path():
    """ Get absolute path to resource, works for dev and for PyInstaller """
    try:
        # PyInstaller creates a temp folder and stores path in _MEIPASS
        base_path = Path(sys._MEIPASS)
    except Exception:
        base_path = Path(tribler_core.__file__).parent
    return Path(str_path(base_path))
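
A short usage sketch: resources shipped next to tribler_core are resolved against this base path, so the same lookup works both when running from source and from a frozen PyInstaller build (the resource name below is hypothetical):

# Hypothetical resource lookup relative to the detected base path
resource_path = get_base_path() / 'some_resource.txt'
print(resource_path.exists())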
Example #8
 def create_node(self, *args, **kwargs):
     metadata_store = MetadataStore(
         Path(self.temporary_directory()) / ("%d.db" % self.count),
         Path(self.temporary_directory()),
         default_eccrypto.generate_key(u"curve25519"),
     )
     kwargs['metadata_store'] = metadata_store
     node = super(TestGigaChannelUnits, self).create_node(*args, **kwargs)
     self.count += 1
     return node
Example #9
 def _get_path_to_requirements_txt() -> Optional[Path]:
     requirements_txt = 'requirements.txt'
     if scope == Scope.core:
         import tribler_core
         return Path(tribler_core.__file__).parent / requirements_txt
     if scope == Scope.gui:
         import tribler_gui
         return Path(tribler_gui.__file__).parent / requirements_txt
     raise AttributeError(
         f'Scope is {scope} but should be in {[s for s in Scope]}')  # pylint: disable=unnecessary-comprehension
Example #10
    async def start_tribler(self, options):
        """
        Main method to startup Tribler.
        """

        async def signal_handler(sig):
            print("Received shut down signal %s" % sig)
            if not self._stopping:
                self._stopping = True
                await self.session.shutdown()
                print("Tribler shut down")
                get_event_loop().stop()
                self.process_checker.remove_lock_file()

        signal.signal(signal.SIGINT, lambda sig, _: ensure_future(signal_handler(sig)))
        signal.signal(signal.SIGTERM, lambda sig, _: ensure_future(signal_handler(sig)))

        statedir = Path(options.statedir or Path(get_appstate_dir(), '.Tribler'))
        config = TriblerConfig(statedir, config_file=statedir / 'triblerd.conf')

        # Check if we are already running a Tribler instance
        self.process_checker = ProcessChecker()
        if self.process_checker.already_running:
            print("Another Tribler instance is already using statedir %s" % config.get_state_dir())
            get_event_loop().stop()
            return

        print("Starting Tribler")

        if options.restapi > 0:
            config.set_http_api_enabled(True)
            config.set_http_api_port(options.restapi)

        if options.ipv8 > 0:
            config.set_ipv8_port(options.ipv8)
        elif options.ipv8 == 0:
            config.set_ipv8_enabled(False)

        if options.libtorrent != -1 and options.libtorrent > 0:
            config.set_libtorrent_port(options.libtorrent)

        if options.ipv8_bootstrap_override is not None:
            config.set_ipv8_bootstrap_override(options.ipv8_bootstrap_override)

        if options.testnet:
            config.set_testnet(True)

        self.session = Session(config)
        try:
            await self.session.start()
        except Exception as e:
            print(str(e))
            get_event_loop().stop()
        else:
            print("Tribler started")
Example #11
 def create_node(self, *args, **kwargs):
     metadata_store = MetadataStore(
         Path(self.temporary_directory()) / f"{self.count}.db",
         Path(self.temporary_directory()),
         default_eccrypto.generate_key("curve25519"),
         disable_sync=True,
     )
     kwargs['metadata_store'] = metadata_store
     node = super().create_node(*args, **kwargs)
     self.count += 1
     return node
Example #12
 def create_node(self, *args, **kwargs):
     metadata_store = MetadataStore(
         Path(self.temporary_directory()) / f"{self.count}.db",
         Path(self.temporary_directory()),
         default_eccrypto.generate_key("curve25519"),
         disable_sync=True,
     )
     kwargs['metadata_store'] = metadata_store
     with mock.patch('tribler_core.modules.metadata_store.community.gigachannel_community.DiscoveryBooster'):
         node = super().create_node(*args, **kwargs)
     self.count += 1
     return node
Example #13
async def test_get_path_relative(tmpdir):
    config = TriblerConfig(state_dir=tmpdir)
    config.general.log_dir = None
    assert not config.general.log_dir

    config.general.log_dir = '.'
    assert config.general.get_path_as_absolute('log_dir', tmpdir) == Path(tmpdir)

    config.general.log_dir = '1'
    assert config.general.get_path_as_absolute('log_dir', tmpdir) == Path(tmpdir) / '1'
Example #14
 def create_node(self, *args, **kwargs):
     metadata_store = MetadataStore(
         Path(self.temporary_directory()) / f"{self.count}.db",
         Path(self.temporary_directory()),
         default_eccrypto.generate_key("curve25519"),
         disable_sync=True,
     )
     self.metadata_store_set.add(metadata_store)
     kwargs['metadata_store'] = metadata_store
     kwargs['rqc_settings'] = RemoteQueryCommunitySettings()
     node = super().create_node(*args, **kwargs)
     self.count += 1
     return node
Example #15
    def create_node(self, *args, **kwargs):
        mds = MetadataStore(
            Path(self.temporary_directory()) / ("%d.db" % self.count),
            Path(self.temporary_directory()),
            default_eccrypto.generate_key("curve25519"))

        torrent_checker = MockObject()
        torrent_checker.torrents_checked = set()

        return MockIPv8("curve25519",
                        PopularityCommunity,
                        metadata_store=mds,
                        torrent_checker=torrent_checker)
Example #16
    def get_dest_dir(self):
        """ Gets the directory where to save this Download.
        """
        dest_dir = self.config['download_defaults']['saveas']
        if not dest_dir:
            dest_dir = get_default_dest_dir()
            self.set_dest_dir(dest_dir)

        # This is required to support relative paths
        if not Path(dest_dir).is_absolute():
            dest_dir = self.state_dir / dest_dir

        return Path(dest_dir)
Example #17
def get_default_dest_dir():
    """
    Returns the default dir to save content to.
    """
    tribler_downloads = Path("TriblerDownloads")
    if tribler_downloads.is_dir():
        return tribler_downloads.resolve()

    home = get_home_dir()
    downloads = home / "Downloads"
    if downloads.is_dir():
        return (downloads / tribler_downloads).resolve()

    return (home / tribler_downloads).resolve()
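
The fallback order implemented above is: an existing TriblerDownloads directory in the current working directory, then a TriblerDownloads directory inside the user's Downloads folder, and finally TriblerDownloads directly under the home directory. A sketch of the resulting values (paths are purely illustrative):

# Illustrative outcomes, assuming a Unix home directory of /home/user:
#   ./TriblerDownloads already exists  -> absolute path of ./TriblerDownloads
#   ~/Downloads exists                 -> /home/user/Downloads/TriblerDownloads
#   neither exists                     -> /home/user/TriblerDownloads
default_dir = get_default_dest_dir()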
Example #18
    def create_node(self, *args, **kwargs):
        self.metadata_store = MetadataStore(
            Path(self.temporary_directory()) / "mds.db",
            Path(self.temporary_directory()),
            default_eccrypto.generate_key("curve25519"),
            disable_sync=True,
        )
        self.tags_db = TagDatabase(
            str(Path(self.temporary_directory()) / "tags.db"))

        kwargs['metadata_store'] = self.metadata_store
        kwargs['tags_db'] = self.tags_db
        kwargs['rqc_settings'] = RemoteQueryCommunitySettings()
        return super().create_node(*args, **kwargs)
Example #19
 def create_node(self, *args, **kwargs):
     metadata_store = MetadataStore(
         Path(self.temporary_directory()) / f"{self.count}.db",
         Path(self.temporary_directory()),
         default_eccrypto.generate_key("curve25519"),
         disable_sync=True,
     )
     self.metadata_store_set.add(metadata_store)
     kwargs['metadata_store'] = metadata_store
     kwargs['settings'] = ChantSettings()
     kwargs['rqc_settings'] = RemoteQueryCommunitySettings()
     with mock.patch('tribler_core.components.gigachannel.community.gigachannel_community.DiscoveryBooster'):
         node = super().create_node(*args, **kwargs)
     self.count += 1
     return node
Example #20
 async def test_remove_with_files(self):
     """
     Testing whether the API returns 200 if a download is being removed
     """
     # Create a copy of the file, so we can remove it later
     source_file = TESTS_DATA_DIR / 'video.avi'
     tmpdir = self.temporary_directory()
     copied_file = tmpdir / Path(source_file).name
     shutil.copyfile(source_file, copied_file)
     video_tdef, _ = self.create_local_torrent(copied_file)
     dcfg = DownloadConfig()
     dcfg.set_dest_dir(tmpdir)
     download = self.session.dlmgr.start_download(tdef=video_tdef,
                                                  config=dcfg)
     infohash = get_hex_infohash(video_tdef)
     while not download.handle:
         await sleep(0.1)
     await sleep(2)
     await self.do_request('downloads/%s' % infohash,
                           post_data={"remove_data": True},
                           expected_code=200,
                           request_type='DELETE',
                           expected_json={
                               "removed": True,
                               "infohash": "c9a19e7fe5d9a6c106d6ea3c01746ac88ca3c7a5"
                           })
     while copied_file.exists():
         await sleep(0.1)
     self.assertEqual(len(self.session.dlmgr.get_downloads()), 0)
     self.assertFalse(copied_file.exists())
Example #21
def test_multiple_squashed_commit_and_read(metadata_store):
    """
    Test committing entries into several squashed blobs and reading them back
    """
    metadata_store.ChannelMetadata._CHUNK_SIZE_LIMIT = 500

    num_entries = 10
    channel = metadata_store.ChannelMetadata.create_channel('testchan')
    md_list = [
        metadata_store.TorrentMetadata(origin_id=channel.id_,
                                       title='test' + str(x),
                                       status=NEW,
                                       infohash=random_infohash())
        for x in range(0, num_entries)
    ]
    channel.commit_channel_torrent()

    channel.local_version = 0
    for md in md_list:
        md.delete()

    channel_dir = Path(
        metadata_store.ChannelMetadata._channels_dir) / channel.dirname
    # Make sure the channel torrent was split into more than one .mdblob file
    assert len(os.listdir(channel_dir)) > 1
    metadata_store.process_channel_dir(channel_dir,
                                       channel.public_key,
                                       channel.id_,
                                       skip_personal_metadata_payload=False)
    assert num_entries == len(channel.contents)
Example #22
 def set_dest_dir(self, path):
     """ Sets the directory where to save this Download.
     @param path A path of a directory.
     """
     # If something is saved inside the Tribler state dir, it should use relative path
     path = Path(path).normalize_to(self.state_dir)
     self.config['download_defaults']['saveas'] = str(path)
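
normalize_to is a Tribler-specific extension of pathlib's Path; judging from its use here and in Examples #1 and #16, it presumably yields a path relative to the given base directory when the path lies inside it, and leaves the path untouched otherwise. A rough, stand-alone sketch of that behaviour using plain pathlib (the helper name is hypothetical):

from pathlib import PurePath

def normalize_to_sketch(path, base):
    """Illustrative only: mirrors how set_dest_dir/get_dest_dir round-trip paths."""
    try:
        # Paths inside `base` are stored relative to it...
        return PurePath(path).relative_to(base)
    except ValueError:
        # ...anything outside `base` is kept as-is
        return PurePath(path)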
Example #23
 def get_files_info_json(self, download):
     """
     Return file information as JSON from a specified download.
     """
     files_json = []
     files_completion = dict(download.get_state().get_files_completion())
     selected_files = download.config.get_selected_files()
     file_index = 0
     for fn, size in download.get_def().get_files_with_length():
         files_json.append({
             "index": file_index,
             "name": str(Path(fn)),
             "size": size,
             "included": (file_index in selected_files or not selected_files),
             "progress": files_completion.get(fn, 0.0)
         })
         file_index += 1
     return files_json
Example #24
    def test_relative_paths(self):
        # Default should be taken from config.spec
        self.assertEqual(
            self.tribler_config.get_trustchain_keypair_filename(),
            path_util.abspath(self.state_dir / "ec_multichain.pem"))

        local_name = Path("somedir") / "ec_multichain.pem"
        global_name = self.state_dir / local_name
        self.tribler_config.set_trustchain_keypair_filename(global_name)

        # It should always return global path
        self.assertEqual(self.tribler_config.get_trustchain_keypair_filename(),
                         global_name)
        # But internally it should be stored as a local path string
        self.assertEqual(
            self.tribler_config.config['trustchain']['ec_keypair_filename'],
            str(local_name))

        # If it points out of the state dir, it should be saved as a global path string
        out_of_dir_name_global = path_util.abspath(self.state_dir / ".." /
                                                   "filename").resolve()
        self.tribler_config.set_trustchain_keypair_filename(
            out_of_dir_name_global)
        self.assertEqual(
            self.tribler_config.config['trustchain']['ec_keypair_filename'],
            str(out_of_dir_name_global))
Example #25
    def initialize(self):
        # Create the checkpoints directory
        (self.state_dir / STATEDIR_CHECKPOINT_DIR).mkdir(exist_ok=True)

        # Start upnp
        if self.config.upnp:
            self.get_session().start_upnp()

        if has_bep33_support() and self.download_defaults.number_hops <= len(self.socks_listen_ports or []):
            # Also listen to DHT log notifications - we need the dht_pkt_alert and extract the BEP33 bloom filters
            dht_health_session = self.get_session(self.download_defaults.number_hops)
            dht_health_session.set_alert_mask(self.default_alert_mask | lt.alert.category_t.dht_log_notification)
            self.dht_health_manager = DHTHealthManager(dht_health_session)

        # Make temporary directory for metadata collecting through DHT
        self.metadata_tmpdir = self.metadata_tmpdir or Path.mkdtemp(suffix='tribler_metainfo_tmpdir')

        # Register tasks
        self.register_task("process_alerts", self._task_process_alerts, interval=1)
        if self.dht_readiness_timeout > 0 and self.config.dht:
            self._dht_ready_task = self.register_task("check_dht_ready", self._check_dht_ready)
        self.register_task("request_torrent_updates", self._request_torrent_updates, interval=1)
        self.register_task('task_cleanup_metacache', self._task_cleanup_metainfo_cache, interval=60, delay=0)

        self.set_download_states_callback(self.sesscb_states_callback)
Example #26
    def create_node(self):
        config = TunnelCommunitySettings()
        mock_ipv8 = MockIPv8("curve25519",
                             TriblerTunnelCommunity,
                             settings={'remove_tunnel_delay': 0},
                             config=config,
                             exitnode_cache=Path(self.temporary_directory()) /
                             "exitnode_cache.dat")
        mock_ipv8.overlay.settings.max_circuits = 1

        db = BandwidthDatabase(
            db_path=MEMORY_DB,
            my_pub_key=mock_ipv8.my_peer.public_key.key_to_bin())

        # Load the bandwidth accounting community
        mock_ipv8.overlay.bandwidth_community = BandwidthAccountingCommunity(
            mock_ipv8.my_peer,
            mock_ipv8.endpoint,
            mock_ipv8.network,
            settings=BandwidthAccountingSettings(),
            database=db)
        mock_ipv8.overlay.dht_provider = MockDHTProvider(
            Peer(mock_ipv8.overlay.my_peer.key,
                 mock_ipv8.overlay.my_estimated_wan))

        return mock_ipv8
Example #27
    def test_get_files_with_length(self):
        name_bytes = b'\xe8\xaf\xad\xe8\xa8\x80\xe5\xa4\x84\xe7\x90\x86'
        name_unicode = name_bytes.decode()
        t = TorrentDef()
        t.metainfo = {
            b'info': {
                b'files': [{
                    b'path.utf-8': [name_bytes],
                    b'length': 123
                }, {
                    b'path.utf-8': [b'file.txt'],
                    b'length': 456
                }]
            }
        }
        self.assertEqual(t.get_files_with_length(), [(Path(name_unicode), 123),
                                                     (Path('file.txt'), 456)])

        t.metainfo = {
            b'info': {
                b'files': [{
                    b'path': [name_bytes],
                    b'length': 123
                }, {
                    b'path': [b'file.txt'],
                    b'length': 456
                }]
            }
        }
        self.assertEqual(t.get_files_with_length(), [(Path(name_unicode), 123),
                                                     (Path('file.txt'), 456)])

        t.metainfo = {
            b'info': {
                b'files': [{
                    b'path': [b'test\xff' + name_bytes],
                    b'length': 123
                }, {
                    b'path': [b'file.txt'],
                    b'length': 456
                }]
            }
        }
        self.assertEqual(t.get_files_with_length(),
                         [(Path('test?????????????'), 123),
                          (Path('file.txt'), 456)])

        t.metainfo = {
            b'info': {
                b'files': [{
                    b'path.utf-8': [b'test\xff' + name_bytes],
                    b'length': 123
                }, {
                    b'path': [b'file.txt'],
                    b'length': 456
                }]
            }
        }
        self.assertEqual(t.get_files_with_length(), [(Path('file.txt'), 456)])
Example #28
        def consolidate_channel_torrent(self):
            """
            Delete the channel dir contents and create it anew.
            Use it to consolidate fragmented channel torrent directories.
            """

            # Remark: there should be a way to optimize this stuff with SQL and better tree traversal algorithms
            # Cleanup entries marked for deletion

            db.CollectionNode.collapse_deleted_subtrees()
            # Note: it should be possible to stop calling get_contents_to_commit here
            commit_queue = self.get_contents_to_commit()
            for entry in commit_queue:
                if entry.status == TODELETE:
                    entry.delete()

            folder = Path(self._channels_dir) / self.dirname
            # We check if we need to re-create the channel dir in case it was deleted for some reason
            if not folder.is_dir():
                os.makedirs(folder)
            for filename in os.listdir(folder):
                file_path = folder / filename
                # We only remove mdblobs and leave the rest as it is
                if filename.endswith((BLOB_EXTENSION, BLOB_EXTENSION + '.lz4')):
                    os.unlink(Path.fix_win_long_file(file_path))

            # Channel should get a new starting timestamp and its contents should get higher timestamps
            start_timestamp = clock.tick()

            def update_timestamps_recursive(node):
                if issubclass(type(node), db.CollectionNode):
                    for child in node.contents:
                        update_timestamps_recursive(child)
                if node.status in [COMMITTED, UPDATED, NEW]:
                    node.status = UPDATED
                    node.timestamp = clock.tick()
                    node.sign()

            update_timestamps_recursive(self)

            return self.commit_channel_torrent(
                new_start_timestamp=start_timestamp)
Example #29
    async def start(self, options):
        # Determine ipv8 port
        ipv8_port = options.ipv8_port
        if ipv8_port == -1 and "HELPER_INDEX" in os.environ and "HELPER_BASE" in os.environ:
            base_port = int(os.environ["HELPER_BASE"])
            ipv8_port = base_port + int(os.environ["HELPER_INDEX"]) * 5

        statedir = Path(get_root_state_directory()) / f"tunnel-{ipv8_port}"
        config = TriblerConfig(statedir,
                               config_file=statedir / 'triblerd.conf')
        config.set_tunnel_community_socks5_listen_ports([])
        config.set_tunnel_community_random_slots(options.random_slots)
        config.set_tunnel_community_competing_slots(options.competing_slots)
        config.set_torrent_checking_enabled(False)
        config.set_ipv8_enabled(True)
        config.set_libtorrent_enabled(False)
        config.set_ipv8_port(ipv8_port)
        config.set_ipv8_address(options.ipv8_address)
        config.set_trustchain_enabled(True)
        config.set_market_community_enabled(False)
        config.set_dht_enabled(True)
        config.set_tunnel_community_exitnode_enabled(bool(options.exit))
        config.set_popularity_community_enabled(False)
        config.set_testnet(bool(options.testnet))
        config.set_chant_enabled(False)
        config.set_bootstrap_enabled(False)

        if not options.no_rest_api:
            config.set_http_api_enabled(True)
            api_port = options.restapi
            if "HELPER_INDEX" in os.environ and "HELPER_BASE" in os.environ:
                api_port = int(os.environ["HELPER_BASE"]) + 10000 + int(
                    os.environ["HELPER_INDEX"])
            config.set_http_api_port(api_port)

        if options.ipv8_bootstrap_override is not None:
            config.set_ipv8_bootstrap_override(options.ipv8_bootstrap_override)

        self.session = Session(config)

        self.log_circuits = options.log_circuits
        self.session.notifier.add_observer(NTFY.TUNNEL_REMOVE,
                                           self.circuit_removed)

        await self.session.start()

        if options.log_rejects:
            # We set this after Tribler has started since the tunnel_community won't be available otherwise
            self.session.tunnel_community.reject_callback = self.on_circuit_reject

        # Tunnel helpers store more TrustChain blocks
        self.session.trustchain_community.settings.max_db_blocks = 5000000

        self.tribler_started()
Example #30
def test_get_files_with_length(tdef):
    name_bytes = b'\xe8\xaf\xad\xe8\xa8\x80\xe5\xa4\x84\xe7\x90\x86'
    name_unicode = name_bytes.decode()
    tdef.metainfo = {
        b'info': {
            b'files': [{
                b'path.utf-8': [name_bytes],
                b'length': 123
            }, {
                b'path.utf-8': [b'file.txt'],
                b'length': 456
            }]
        }
    }
    assert tdef.get_files_with_length() == [(Path(name_unicode), 123),
                                            (Path('file.txt'), 456)]

    tdef.metainfo = {
        b'info': {
            b'files': [{
                b'path': [name_bytes],
                b'length': 123
            }, {
                b'path': [b'file.txt'],
                b'length': 456
            }]
        }
    }
    assert tdef.get_files_with_length() == [(Path(name_unicode), 123),
                                            (Path('file.txt'), 456)]

    tdef.metainfo = {
        b'info': {
            b'files': [{
                b'path': [b'test\xff' + name_bytes],
                b'length': 123
            }, {
                b'path': [b'file.txt'],
                b'length': 456
            }]
        }
    }
    assert tdef.get_files_with_length() == [(Path('test?????????????'), 123),
                                            (Path('file.txt'), 456)]

    tdef.metainfo = {
        b'info': {
            b'files': [{
                b'path.utf-8': [b'test\xff' + name_bytes],
                b'length': 123
            }, {
                b'path': [b'file.txt'],
                b'length': 456
            }]
        }
    }
    assert tdef.get_files_with_length() == [(Path('file.txt'), 456)]