Example #1
class StreamManagerComponent(Component):
    component_name = STREAM_MANAGER_COMPONENT
    depends_on = [BLOB_COMPONENT, DATABASE_COMPONENT, WALLET_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.stream_manager: typing.Optional[StreamManager] = None

    @property
    def component(self) -> typing.Optional[StreamManager]:
        return self.stream_manager

    async def get_status(self):
        if not self.stream_manager:
            return
        return {
            'managed_files': len(self.stream_manager.streams),
        }

    async def start(self):
        blob_manager = self.component_manager.get_component(BLOB_COMPONENT)
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        wallet = self.component_manager.get_component(WALLET_COMPONENT)
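        # the DHT node is optional: fall back to None when the DHT component is disabled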
        node = self.component_manager.get_component(DHT_COMPONENT)\
            if self.component_manager.has_component(DHT_COMPONENT) else None
        log.info('Starting the file manager')
        loop = asyncio.get_event_loop()
        self.stream_manager = StreamManager(
            loop, self.conf, blob_manager, wallet, storage, node,
            self.component_manager.analytics_manager)
        await self.stream_manager.start()
        log.info('Done setting up file manager')

    async def stop(self):
        self.stream_manager.stop()
Example #2
async def setup_stream_manager(self, balance=10.0, fee=None, old_sort=False):
    file_path = os.path.join(self.server_dir, "test_file")
    with open(file_path, 'wb') as f:
        f.write(os.urandom(20000000))
    descriptor = await StreamDescriptor.create_stream(
        self.loop, self.server_blob_manager.blob_dir, file_path, old_sort=old_sort)
    self.sd_hash = descriptor.sd_hash
    self.mock_wallet, self.uri = await get_mock_wallet(
        self.sd_hash, self.client_storage, self.client_wallet_dir, balance, fee)
    analytics_manager = AnalyticsManager(
        self.client_config,
        binascii.hexlify(generate_id()).decode(),
        binascii.hexlify(generate_id()).decode())
    self.stream_manager = StreamManager(
        self.loop, self.client_config, self.client_blob_manager, self.mock_wallet,
        self.client_storage, get_mock_node(self.server_from_client), analytics_manager)
    self.file_manager = FileManager(
        self.loop, self.client_config, self.mock_wallet, self.client_storage,
        analytics_manager)
    self.file_manager.source_managers['stream'] = self.stream_manager
    self.exchange_rate_manager = get_fake_exchange_rate_manager()
Example #3
    async def asyncSetUp(self):
        self.loop = asyncio.get_event_loop()
        self.key = b'deadbeef' * 4
        self.cleartext = os.urandom(20000000)

        tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(tmp_dir))
        self.conf = Config()
        self.storage = SQLiteStorage(self.conf, os.path.join(tmp_dir, "lbrynet.sqlite"))
        await self.storage.open()
        self.blob_manager = BlobManager(self.loop, tmp_dir, self.storage, self.conf)
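        # no wallet or DHT node is needed just to create a stream locally, so those arguments are None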
        self.stream_manager = StreamManager(self.loop, Config(), self.blob_manager, None, self.storage, None)

        server_tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(server_tmp_dir))
        self.server_conf = Config()
        self.server_storage = SQLiteStorage(self.server_conf, os.path.join(server_tmp_dir, "lbrynet.sqlite"))
        await self.server_storage.open()
        self.server_blob_manager = BlobManager(self.loop, server_tmp_dir, self.server_storage, self.server_conf)

        download_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(download_dir))

        # create the stream
        file_path = os.path.join(tmp_dir, "test_file")
        with open(file_path, 'wb') as f:
            f.write(self.cleartext)

        self.stream = await self.stream_manager.create_stream(file_path)
Example #4
async def start(self):
    blob_manager = self.component_manager.get_component(BLOB_COMPONENT)
    storage = self.component_manager.get_component(DATABASE_COMPONENT)
    wallet = self.component_manager.get_component(WALLET_COMPONENT)
    node = self.component_manager.get_component(DHT_COMPONENT) \
        if self.component_manager.has_component(DHT_COMPONENT) else None
    try:
        # TorrentSession may be undefined when libtorrent is unavailable;
        # in that case fall back to running without torrent support
        torrent = self.component_manager.get_component(
            LIBTORRENT_COMPONENT) if TorrentSession else None
    except NameError:
        torrent = None
    log.info('Starting the file manager')
    loop = asyncio.get_event_loop()
    self.file_manager = FileManager(
        loop, self.conf, wallet, storage,
        self.component_manager.analytics_manager)
    self.file_manager.source_managers['stream'] = StreamManager(
        loop, self.conf, blob_manager, wallet, storage, node,
    )
    if TorrentSession and LIBTORRENT_COMPONENT not in self.conf.components_to_skip:
        self.file_manager.source_managers['torrent'] = TorrentManager(
            loop, self.conf, torrent, storage,
            self.component_manager.analytics_manager)
    await self.file_manager.start()
    log.info('Done setting up file manager')
Example #5
async def mock_resolve(*args):
    # txo, manager and storage come from the enclosing test scope
    result = {txo.meta['permanent_url']: txo}
    claims = [
        StreamManager._convert_to_old_resolve_output(manager, result)[txo.meta['permanent_url']]
    ]
    await storage.save_claims(claims)
    return result
Example #6
class TestStreamManager(BlobExchangeTestBase):
    async def setup_stream_manager(self,
                                   balance=10.0,
                                   fee=None,
                                   old_sort=False):
        file_path = os.path.join(self.server_dir, "test_file")
        with open(file_path, 'wb') as f:
            f.write(os.urandom(20000000))
        descriptor = await StreamDescriptor.create_stream(
            self.loop,
            self.server_blob_manager.blob_dir,
            file_path,
            old_sort=old_sort)
        self.sd_hash = descriptor.sd_hash
        self.mock_wallet, self.uri = await get_mock_wallet(
            self.sd_hash, self.client_storage, balance, fee)
        self.stream_manager = StreamManager(
            self.loop, self.client_config, self.client_blob_manager,
            self.mock_wallet, self.client_storage,
            get_mock_node(self.server_from_client),
            AnalyticsManager(self.client_config,
                             binascii.hexlify(generate_id()).decode(),
                             binascii.hexlify(generate_id()).decode()))
        self.exchange_rate_manager = get_dummy_exchange_rate_manager(time)

    async def _test_time_to_first_bytes(self,
                                        check_post,
                                        error=None,
                                        after_setup=None):
        await self.setup_stream_manager()
        if after_setup:
            after_setup()
        checked_analytics_event = False

        async def _check_post(event):
            check_post(event)
            nonlocal checked_analytics_event
            checked_analytics_event = True

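        # intercept analytics posts so the emitted "Time To First Bytes" event can be inspected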
        self.stream_manager.analytics_manager._post = _check_post
        if error:
            with self.assertRaises(error):
                await self.stream_manager.download_stream_from_uri(
                    self.uri, self.exchange_rate_manager)
        else:
            await self.stream_manager.download_stream_from_uri(
                self.uri, self.exchange_rate_manager)
        await asyncio.sleep(0, loop=self.loop)
        self.assertTrue(checked_analytics_event)

    async def test_time_to_first_bytes(self):
        def check_post(event):
            self.assertEqual(event['event'], 'Time To First Bytes')
            total_duration = event['properties']['total_duration']
            resolve_duration = event['properties']['resolve_duration']
            head_blob_duration = event['properties']['head_blob_duration']
            sd_blob_duration = event['properties']['sd_blob_duration']
            self.assertFalse(event['properties']['added_fixed_peers'])
            self.assertEqual(event['properties']['wallet_server'],
                             "fakespv.lbry.com:50001")
            self.assertGreaterEqual(
                total_duration,
                resolve_duration + head_blob_duration + sd_blob_duration)

        await self._test_time_to_first_bytes(check_post)

    async def test_fixed_peer_delay_dht_peers_found(self):
        self.client_config.reflector_servers = [
            (self.server_from_client.address,
             self.server_from_client.tcp_port - 1)
        ]
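        # move the fixture peer out of self.server_from_client so setup sees no preconfigured peer;
        # it is fed back in only through the mocked routing table below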
        server_from_client = None
        self.server_from_client, server_from_client = server_from_client, self.server_from_client

        def after_setup():
            self.stream_manager.node.protocol.routing_table.get_peers = lambda: [
                server_from_client
            ]

        def check_post(event):
            self.assertEqual(event['event'], 'Time To First Bytes')
            total_duration = event['properties']['total_duration']
            resolve_duration = event['properties']['resolve_duration']
            head_blob_duration = event['properties']['head_blob_duration']
            sd_blob_duration = event['properties']['sd_blob_duration']

            self.assertEqual(event['event'], 'Time To First Bytes')
            self.assertEqual(event['properties']['tried_peers_count'], 1)
            self.assertEqual(event['properties']['active_peer_count'], 1)
            self.assertEqual(event['properties']['connection_failures_count'],
                             0)
            self.assertTrue(event['properties']['use_fixed_peers'])
            self.assertTrue(event['properties']['added_fixed_peers'])
            self.assertEqual(event['properties']['fixed_peer_delay'],
                             self.client_config.fixed_peer_delay)
            self.assertGreaterEqual(
                total_duration,
                resolve_duration + head_blob_duration + sd_blob_duration)

        await self._test_time_to_first_bytes(check_post,
                                             after_setup=after_setup)

    async def test_tcp_connection_failure_analytics(self):
        self.client_config.download_timeout = 3.0

        def after_setup():
            self.server.stop_server()

        def check_post(event):
            self.assertEqual(event['event'], 'Time To First Bytes')
            self.assertIsNone(event['properties']['head_blob_duration'])
            self.assertIsNone(event['properties']['sd_blob_duration'])
            self.assertFalse(event['properties']['added_fixed_peers'])
            self.assertEqual(event['properties']['connection_failures_count'],
                             1)
            self.assertEqual(
                event['properties']['error_message'],
                f'Failed to download sd blob {self.sd_hash} within timeout.')

        await self._test_time_to_first_bytes(check_post,
                                             DownloadSDTimeoutError,
                                             after_setup=after_setup)

    async def test_override_fixed_peer_delay_dht_disabled(self):
        self.client_config.reflector_servers = [
            (self.server_from_client.address,
             self.server_from_client.tcp_port - 1)
        ]
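        # skipping the DHT components should force the configured fixed_peer_delay to be overridden to 0.0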
        self.client_config.components_to_skip = ['dht', 'hash_announcer']
        self.client_config.fixed_peer_delay = 9001.0
        self.server_from_client = None

        def check_post(event):
            total_duration = event['properties']['total_duration']
            resolve_duration = event['properties']['resolve_duration']
            head_blob_duration = event['properties']['head_blob_duration']
            sd_blob_duration = event['properties']['sd_blob_duration']

            self.assertEqual(event['event'], 'Time To First Bytes')
            self.assertEqual(event['properties']['tried_peers_count'], 1)
            self.assertEqual(event['properties']['active_peer_count'], 1)
            self.assertTrue(event['properties']['use_fixed_peers'])
            self.assertTrue(event['properties']['added_fixed_peers'])
            self.assertEqual(event['properties']['fixed_peer_delay'], 0.0)
            self.assertGreaterEqual(
                total_duration,
                resolve_duration + head_blob_duration + sd_blob_duration)

        start = self.loop.time()
        await self._test_time_to_first_bytes(check_post)
        self.assertLess(self.loop.time() - start, 3)

    async def test_no_peers_timeout(self):
        # FIXME: the download should ideally fail right away if there are no peers
        # to initialize the shortlist and fixed peers are disabled
        self.server_from_client = None
        self.client_config.download_timeout = 3.0

        def check_post(event):
            self.assertEqual(event['event'], 'Time To First Bytes')
            self.assertEqual(event['properties']['error'],
                             'DownloadSDTimeoutError')
            self.assertEqual(event['properties']['tried_peers_count'], 0)
            self.assertEqual(event['properties']['active_peer_count'], 0)
            self.assertFalse(event['properties']['use_fixed_peers'])
            self.assertFalse(event['properties']['added_fixed_peers'])
            self.assertIsNone(event['properties']['fixed_peer_delay'])
            self.assertEqual(
                event['properties']['error_message'],
                f'Failed to download sd blob {self.sd_hash} within timeout.')

        start = self.loop.time()
        await self._test_time_to_first_bytes(check_post,
                                             DownloadSDTimeoutError)
        duration = self.loop.time() - start
        self.assertLessEqual(duration, 5)
        self.assertGreaterEqual(duration, 3.0)

    async def test_download_stop_resume_delete(self):
        await self.setup_stream_manager()
        received = []
        expected_events = ['Time To First Bytes', 'Download Finished']

        async def check_post(event):
            received.append(event['event'])

        self.stream_manager.analytics_manager._post = check_post

        self.assertDictEqual(self.stream_manager.streams, {})
        stream = await self.stream_manager.download_stream_from_uri(
            self.uri, self.exchange_rate_manager)
        stream_hash = stream.stream_hash
        self.assertDictEqual(self.stream_manager.streams,
                             {stream.sd_hash: stream})
        self.assertTrue(stream.running)
        self.assertFalse(stream.finished)
        self.assertTrue(
            os.path.isfile(os.path.join(self.client_dir, "test_file")))
        stored_status = await self.client_storage.run_and_return_one_or_none(
            "select status from file where stream_hash=?", stream_hash)
        self.assertEqual(stored_status, "running")

        await stream.stop()

        self.assertFalse(stream.finished)
        self.assertFalse(stream.running)
        self.assertFalse(
            os.path.isfile(os.path.join(self.client_dir, "test_file")))
        stored_status = await self.client_storage.run_and_return_one_or_none(
            "select status from file where stream_hash=?", stream_hash)
        self.assertEqual(stored_status, "stopped")

        await stream.save_file(node=self.stream_manager.node)
        await stream.finished_writing.wait()
        await asyncio.sleep(0, loop=self.loop)
        self.assertTrue(stream.finished)
        self.assertFalse(stream.running)
        self.assertTrue(
            os.path.isfile(os.path.join(self.client_dir, "test_file")))
        stored_status = await self.client_storage.run_and_return_one_or_none(
            "select status from file where stream_hash=?", stream_hash)
        self.assertEqual(stored_status, "finished")

        await self.stream_manager.delete_stream(stream, True)
        self.assertDictEqual(self.stream_manager.streams, {})
        self.assertFalse(
            os.path.isfile(os.path.join(self.client_dir, "test_file")))
        stored_status = await self.client_storage.run_and_return_one_or_none(
            "select status from file where stream_hash=?", stream_hash)
        self.assertIsNone(stored_status)
        self.assertListEqual(expected_events, received)

    async def _test_download_error_on_start(self,
                                            expected_error,
                                            timeout=None):
        error = None
        try:
            await self.stream_manager.download_stream_from_uri(
                self.uri, self.exchange_rate_manager, timeout)
        except Exception as err:
            if isinstance(err, asyncio.CancelledError):
                raise
            error = err
        self.assertEqual(expected_error, type(error))

    async def _test_download_error_analytics_on_start(self,
                                                      expected_error,
                                                      error_message,
                                                      timeout=None):
        received = []

        async def check_post(event):
            self.assertEqual("Time To First Bytes", event['event'])
            self.assertEqual(event['properties']['error_message'],
                             error_message)
            received.append(event['properties']['error'])

        self.stream_manager.analytics_manager._post = check_post
        await self._test_download_error_on_start(expected_error, timeout)
        await asyncio.sleep(0, loop=self.loop)
        self.assertListEqual([expected_error.__name__], received)

    async def test_insufficient_funds(self):
        fee = {
            'currency': 'LBC',
            'amount': 11.0,
            'address': 'bYFeMtSL7ARuG1iMpjFyrnTe4oJHSAVNXF',
            'version': '_0_0_1'
        }
        await self.setup_stream_manager(10.0, fee)
        await self._test_download_error_on_start(InsufficientFundsError, "")

    async def test_fee_above_max_allowed(self):
        fee = {
            'currency': 'USD',
            'amount': 51.0,
            'address': 'bYFeMtSL7ARuG1iMpjFyrnTe4oJHSAVNXF',
            'version': '_0_0_1'
        }
        await self.setup_stream_manager(1000000.0, fee)
        await self._test_download_error_on_start(KeyFeeAboveMaxAllowedError,
                                                 "")

    async def test_resolve_error(self):
        await self.setup_stream_manager()
        self.uri = "fake"
        await self._test_download_error_on_start(ResolveError)

    async def test_download_sd_timeout(self):
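        # stop the blob server before setup so the sd blob can never be fetched from it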
        self.server.stop_server()
        await self.setup_stream_manager()
        await self._test_download_error_analytics_on_start(
            DownloadSDTimeoutError,
            f'Failed to download sd blob {self.sd_hash} within timeout.',
            timeout=1)

    async def test_download_data_timeout(self):
        await self.setup_stream_manager()
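        # look up the head blob in the stream descriptor, then delete it from the server so the data download times out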
        with open(os.path.join(self.server_dir, self.sd_hash), 'r') as sdf:
            head_blob_hash = json.loads(sdf.read())['blobs'][0]['blob_hash']
        self.server_blob_manager.delete_blob(head_blob_hash)
        await self._test_download_error_analytics_on_start(
            DownloadDataTimeoutError,
            f'Failed to download data blobs for sd hash {self.sd_hash} within timeout.',
            timeout=1)

    async def test_unexpected_error(self):
        await self.setup_stream_manager()
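        # deleting the client blob directory causes the download to fail with an unexpected OSError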
        err_msg = f"invalid blob directory '{self.client_dir}'"
        shutil.rmtree(self.client_dir)
        await self._test_download_error_analytics_on_start(OSError,
                                                           err_msg,
                                                           timeout=1)
        os.mkdir(self.client_dir)  # so the test cleanup doesn't error

    async def test_non_head_data_timeout(self):
        await self.setup_stream_manager()
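        # blobs[-2] is the last data blob (the final descriptor entry is the empty stream terminator);
        # deleting it lets writing start but never finish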
        with open(os.path.join(self.server_dir, self.sd_hash), 'r') as sdf:
            last_blob_hash = json.loads(sdf.read())['blobs'][-2]['blob_hash']
        self.server_blob_manager.delete_blob(last_blob_hash)
        self.client_config.blob_download_timeout = 0.1
        stream = await self.stream_manager.download_stream_from_uri(
            self.uri, self.exchange_rate_manager)
        await stream.started_writing.wait()
        self.assertEqual('running', stream.status)
        self.assertIsNotNone(stream.full_path)
        self.assertGreater(stream.written_bytes, 0)
        await stream.finished_write_attempt.wait()
        self.assertEqual('stopped', stream.status)
        self.assertIsNone(stream.full_path)
        self.assertEqual(0, stream.written_bytes)

        self.stream_manager.stop()
        await self.stream_manager.start()
        self.assertEqual(1, len(self.stream_manager.streams))
        stream = list(self.stream_manager.streams.values())[0]
        self.assertEqual('stopped', stream.status)
        self.assertIsNone(stream.full_path)
        self.assertEqual(0, stream.written_bytes)

    async def test_download_then_recover_stream_on_startup(
            self, old_sort=False):
        expected_analytics_events = [
            'Time To First Bytes', 'Download Finished'
        ]
        received_events = []

        async def check_post(event):
            received_events.append(event['event'])

        await self.setup_stream_manager(old_sort=old_sort)
        self.stream_manager.analytics_manager._post = check_post

        self.assertDictEqual(self.stream_manager.streams, {})
        stream = await self.stream_manager.download_stream_from_uri(
            self.uri, self.exchange_rate_manager)
        await stream.finished_writing.wait()
        await asyncio.sleep(0, loop=self.loop)
        self.stream_manager.stop()
        self.client_blob_manager.stop()
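        # simulate missing files: remove the sd blob and every data blob from disk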
        os.remove(
            os.path.join(self.client_blob_manager.blob_dir, stream.sd_hash))
        for blob in stream.descriptor.blobs[:-1]:
            os.remove(
                os.path.join(self.client_blob_manager.blob_dir,
                             blob.blob_hash))
        await self.client_blob_manager.setup()
        await self.stream_manager.start()
        self.assertEqual(1, len(self.stream_manager.streams))
        self.assertListEqual([self.sd_hash],
                             list(self.stream_manager.streams.keys()))
        for blob_hash in [stream.sd_hash] + [
                b.blob_hash for b in stream.descriptor.blobs[:-1]
        ]:
            blob_status = await self.client_storage.get_blob_status(blob_hash)
            self.assertEqual('pending', blob_status)
        self.assertEqual('finished',
                         self.stream_manager.streams[self.sd_hash].status)

        sd_blob = self.client_blob_manager.get_blob(stream.sd_hash)
        self.assertTrue(sd_blob.file_exists)
        self.assertTrue(sd_blob.get_is_verified())
        self.assertListEqual(expected_analytics_events, received_events)

    def test_download_then_recover_old_sort_stream_on_startup(self):
        return self.test_download_then_recover_stream_on_startup(old_sort=True)