コード例 #1
0
File: stream_manager.py  Project: mdpovey/lbry
    async def _download_stream_from_claim(self, node: 'Node', download_directory: str, claim_info: typing.Dict,
                                          file_name: typing.Optional[str] = None) -> typing.Optional[ManagedStream]:
        """
        Download the stream referenced by a resolved claim and track it as a ManagedStream.

        :param node: DHT node used to find peers for the download
        :param download_directory: directory the decrypted file is written to
        :param claim_info: resolved claim dict; 'value', 'name', 'txid' and 'nout' are read here
        :param file_name: optional override for the on-disk file name
        :raises DownloadSDTimeout: if the stream descriptor never arrived
        :raises DownloadDataTimeout: if the descriptor arrived but no data bytes were written
        """

        claim = smart_decode(claim_info['value'])
        downloader = StreamDownloader(self.loop, self.config, self.blob_manager, claim.source_hash.decode(),
                                      download_directory, file_name)
        try:
            # NOTE(review): download() is not awaited — presumably it schedules the download task; confirm
            downloader.download(node)
            await downloader.got_descriptor.wait()
            log.info("got descriptor %s for %s", claim.source_hash.decode(), claim_info['name'])
        except (asyncio.TimeoutError, asyncio.CancelledError):
            # descriptor never arrived: stop the downloader and surface a descriptor timeout
            log.info("stream timeout")
            downloader.stop()
            log.info("stopped stream")
            raise DownloadSDTimeout(downloader.sd_hash)
        # persist the stream and associate it with the claim outpoint ("txid:nout")
        rowid = await self._store_stream(downloader)
        await self.storage.save_content_claim(
            downloader.descriptor.stream_hash, f"{claim_info['txid']}:{claim_info['nout']}"
        )
        stream = ManagedStream(self.loop, self.blob_manager, rowid, downloader.descriptor, download_directory,
                               file_name, downloader, ManagedStream.STATUS_RUNNING)
        stream.set_claim(claim_info, claim)
        self.streams.add(stream)
        try:
            # wait for the first decrypted bytes before declaring the download started
            await stream.downloader.wrote_bytes_event.wait()
            self.wait_for_stream_finished(stream)
            return stream
        except asyncio.CancelledError:
            downloader.stop()
            log.debug("stopped stream")
        # cancelled before any bytes were written: clean up and report a data timeout
        await self.stop_stream(stream)
        raise DownloadDataTimeout(downloader.sd_hash)
コード例 #2
0
 async def start_downloader(
         self,
         got_descriptor_time: asyncio.Future,
         downloader: StreamDownloader,
         download_id: str,
         outpoint: str,
         claim: Claim,
         resolved: typing.Dict,
         file_name: typing.Optional[str] = None) -> ManagedStream:
     """Run *downloader*, wait for its descriptor and first bytes, and return the tracked stream.

     Resolves *got_descriptor_time* with the seconds it took the descriptor to arrive.
     """
     started = self.loop.time()
     downloader.download(self.node)
     await downloader.got_descriptor.wait()
     # report descriptor latency to the caller-supplied future
     got_descriptor_time.set_result(self.loop.time() - started)
     row_id = await self._store_stream(downloader)
     await self.storage.save_content_claim(downloader.descriptor.stream_hash, outpoint)
     managed = ManagedStream(
         self.loop, self.blob_manager, row_id, downloader.descriptor, self.config.download_dir,
         file_name, downloader, ManagedStream.STATUS_RUNNING, download_id=download_id
     )
     managed.set_claim(resolved, claim)
     # don't hand the stream back until the first decrypted bytes are on disk
     await managed.downloader.wrote_bytes_event.wait()
     self.streams.add(managed)
     return managed
コード例 #3
0
File: stream_manager.py  Project: EnigmaCurry/lbry
 async def start_stream(self, stream: ManagedStream):
     """Mark *stream* running, start it on the DHT node, and hook up the claim-update callback."""
     running = ManagedStream.STATUS_RUNNING
     stream.update_status(running)
     await self.storage.change_file_status(stream.stream_hash, running)
     await stream.setup(self.node, save_file=self.config.save_files)
     # re-read claim metadata whenever storage reports a claim change for this stream
     self.storage.content_claim_callbacks[stream.stream_hash] = \
         lambda: self._update_content_claim(stream)
コード例 #4
0
 async def stop_stream(self, stream: ManagedStream):
     """Stop downloading *stream*, remove any partial output file, and persist the stopped status."""
     stream.stop_download()
     partial_on_disk = not stream.finished and os.path.isfile(stream.full_path)
     if partial_on_disk:
         # best effort: a failed delete is logged, not raised
         try:
             os.remove(stream.full_path)
         except OSError as err:
             log.warning("Failed to delete partial download %s from downloads directory: %s", stream.full_path,
                         str(err))
     if stream.running:
         stream.update_status(ManagedStream.STATUS_STOPPED)
         await self.storage.change_file_status(stream.stream_hash, ManagedStream.STATUS_STOPPED)
コード例 #5
0
 async def delete_stream(self,
                         stream: ManagedStream,
                         delete_file: typing.Optional[bool] = False):
     """Stop *stream*, forget it, and purge its blobs and DB rows (optionally the output file too)."""
     stream.stop_tasks()
     if stream.sd_hash in self.streams:
         del self.streams[stream.sd_hash]
     # the sd blob plus every content blob except the terminating (empty) entry
     to_delete = [stream.sd_hash]
     to_delete.extend(info.blob_hash for info in stream.descriptor.blobs[:-1])
     await self.blob_manager.delete_blobs(to_delete, delete_from_db=False)
     await self.storage.delete_stream(stream.descriptor)
     if delete_file and stream.output_file_exists:
         os.remove(stream.full_path)
コード例 #6
0
 async def add_stream(self, rowid: int, sd_hash: str,
                      file_name: typing.Optional[str],
                      download_directory: typing.Optional[str], status: str,
                      claim: typing.Optional['StoredStreamClaim'],
                      content_fee: typing.Optional['Transaction']):
     """Recreate a ManagedStream from stored metadata and register it under its sd hash."""
     try:
         descriptor = await self.blob_manager.get_stream_descriptor(sd_hash)
     except InvalidStreamDescriptorError as err:
         # an unreadable descriptor makes the stream unusable; skip it
         log.warning("Failed to start stream for sd %s - %s", sd_hash, str(err))
         return
     stream = ManagedStream(
         self.loop, self.config, self.blob_manager, descriptor.sd_hash, download_directory,
         file_name, status, claim, content_fee=content_fee, rowid=rowid, descriptor=descriptor,
         analytics_manager=self.analytics_manager
     )
     self.streams[sd_hash] = stream
     # re-read claim metadata whenever storage reports a claim change for this stream
     self.storage.content_claim_callbacks[stream.stream_hash] = lambda: self._update_content_claim(stream)
コード例 #7
0
    async def delete_stream(self, stream: ManagedStream, delete_file: typing.Optional[bool] = False):
        """
        Remove *stream* from tracking and delete its blobs and database rows.

        :param delete_file: also remove the decrypted output file; forced to True
            when the stream is not considered finished (a partial file is useless)
        """
        # NOTE(review): this expression is hard to follow. It is True only when a downloader
        # exists and its stream_finished_event is set; a finished stream whose event is not
        # set still gets delete_file forced True below — confirm that is the intent.
        stream_finished = False if not stream.finished and stream.downloader\
            else (stream.downloader and stream.downloader.stream_finished_event.is_set())
        if not stream_finished:
            delete_file = True
        stream.stop_download()
        self.streams.remove(stream)
        await self.storage.delete_stream(stream.descriptor)

        # the sd blob plus every content blob except the terminating (empty) entry
        blob_hashes = [stream.sd_hash]
        for blob_info in stream.descriptor.blobs[:-1]:
            blob_hashes.append(blob_info.blob_hash)
        await self.blob_manager.delete_blobs(blob_hashes)
        if delete_file:
            path = os.path.join(stream.download_directory, stream.file_name)
            if os.path.isfile(path):
                os.remove(path)
コード例 #8
0
    async def test_create_and_decrypt_one_blob_stream(self,
                                                      blobs=1,
                                                      corrupt=False):
        """
        Create a stream server-side, copy its blobs to the client by hand, and
        verify the client reassembles and decrypts the original file.

        :param blobs: number of content blobs in the stream
        :param corrupt: truncate one max-size blob to simulate corruption; the
            decrypted output file must then NOT be produced
        """
        descriptor = await self.create_stream(blobs)

        # copy the sd blob so the client can read the stream descriptor
        shutil.copy(
            os.path.join(self.server_blob_manager.blob_dir, self.sd_hash),
            os.path.join(self.client_blob_manager.blob_dir, self.sd_hash))
        self.stream = ManagedStream(self.loop, self.client_config,
                                    self.client_blob_manager, self.sd_hash,
                                    self.client_dir)

        # copy every content blob (the final descriptor entry is the empty terminator)
        for blob_info in descriptor.blobs[:-1]:
            shutil.copy(
                os.path.join(self.server_blob_manager.blob_dir,
                             blob_info.blob_hash),
                os.path.join(self.client_blob_manager.blob_dir,
                             blob_info.blob_hash))
            if corrupt and blob_info.length == MAX_BLOB_SIZE:
                # truncating the blob file leaves it unverifiable on the client
                with open(
                        os.path.join(self.client_blob_manager.blob_dir,
                                     blob_info.blob_hash), "rb+") as handle:
                    handle.truncate()
                    handle.flush()
        await self.stream.setup()
        await self.stream.finished_writing.wait()
        if corrupt:
            # a corrupted blob must prevent the decrypted file from being written
            return self.assertFalse(
                os.path.isfile(os.path.join(self.client_dir, "test_file")))

        with open(os.path.join(self.client_dir, "test_file"), "rb") as f:
            decrypted = f.read()
        self.assertEqual(decrypted, self.stream_bytes)

        # assertTrue reads better and reports better than assertEqual(True, ...)
        self.assertTrue(
            self.client_blob_manager.get_blob(self.sd_hash).get_is_verified())
        self.assertTrue(
            self.client_blob_manager.get_blob(
                self.stream.descriptor.blobs[0].blob_hash).get_is_verified())
コード例 #9
0
 async def add_stream(self, sd_hash: str, file_name: str, download_directory: str, status: str, claim):
     """Register a stream for an already-verified sd blob; no-op when the sd blob is unverified."""
     sd_blob = self.blob_manager.get_blob(sd_hash)
     if not sd_blob.get_is_verified():
         return
     descriptor = await self.blob_manager.get_stream_descriptor(sd_blob.blob_hash)
     downloader = self.make_downloader(descriptor.sd_hash, download_directory, file_name)
     stream = ManagedStream(self.loop, self.blob_manager, descriptor, download_directory,
                            file_name, downloader, status, claim)
     self.streams.add(stream)
     # re-read claim metadata whenever storage reports a claim change for this stream
     self.storage.content_claim_callbacks[stream.stream_hash] = lambda: self._update_content_claim(stream)
コード例 #10
0
    async def start_stream(self, stream: ManagedStream) -> bool:
        """
        Resume or rebuild a partial or completed stream.

        :returns: False only when a fresh download was needed and timed out waiting
            for the stream descriptor; True otherwise (including when nothing had to be done).
        """

        path = os.path.join(stream.download_directory, stream.file_name)

        # only rebuild when the stream is idle and its output file is missing
        if not stream.running and not os.path.isfile(path):
            if stream.downloader:
                stream.downloader.stop()
                stream.downloader = None

            # the directory is gone, can happen when the folder that contains a published file is deleted
            # reset the download directory to the default and update the file name
            if not os.path.isdir(stream.download_directory):
                stream.download_directory = self.config.download_dir

            stream.downloader = self.make_downloader(
                stream.sd_hash, stream.download_directory, stream.descriptor.suggested_file_name
            )
            if stream.status != ManagedStream.STATUS_FINISHED:
                await self.storage.change_file_status(stream.stream_hash, 'running')
                stream.update_status('running')
            stream.start_download(self.node)
            try:
                # bound the wait for the descriptor by the configured download timeout
                await asyncio.wait_for(self.loop.create_task(stream.downloader.got_descriptor.wait()),
                                       self.config.download_timeout)
            except asyncio.TimeoutError:
                stream.stop_download()
                stream.downloader = None
                return False
            # persist the (possibly updated) download location under the default directory
            file_name = os.path.basename(stream.downloader.output_path)
            await self.storage.change_file_download_dir_and_file_name(
                stream.stream_hash, self.config.download_dir, file_name
            )
            self.wait_for_stream_finished(stream)
            return True
        return True
コード例 #11
0
    async def add_stream(self, sd_hash: str, file_name: str, download_directory: str, status: str, claim):
        """Register a stream for a verified sd blob, skipping blobs with unreadable descriptors."""
        sd_blob = self.blob_manager.get_blob(sd_hash)
        if not sd_blob.get_is_verified():
            return
        try:
            descriptor = await self.blob_manager.get_stream_descriptor(sd_blob.blob_hash)
        except InvalidStreamDescriptorError as err:
            # an unreadable descriptor makes the stream unusable; skip it
            log.warning("Failed to start stream for sd %s - %s", sd_hash, str(err))
            return

        downloader = self.make_downloader(descriptor.sd_hash, download_directory, file_name)
        stream = ManagedStream(self.loop, self.blob_manager, descriptor, download_directory,
                               file_name, downloader, status, claim)
        self.streams.add(stream)
        # re-read claim metadata whenever storage reports a claim change for this stream
        self.storage.content_claim_callbacks[stream.stream_hash] = lambda: self._update_content_claim(stream)
コード例 #12
0
 async def load_streams_from_database(self):
     """
     Restore a ManagedStream (with an attached downloader) for every file recorded
     in storage whose sd blob is present and verified.
     """
     infos = await self.storage.get_all_lbry_files()
     for file_info in infos:
         sd_blob = self.blob_manager.get_blob(file_info['sd_hash'])
         if not sd_blob.get_is_verified():
             continue
         descriptor = await self.blob_manager.get_stream_descriptor(sd_blob.blob_hash)
         # paths are stored hex-encoded; decode each one once instead of per consumer
         download_directory = binascii.unhexlify(file_info['download_directory']).decode()
         file_name = binascii.unhexlify(file_info['file_name']).decode()
         downloader = StreamDownloader(
             self.loop, self.blob_manager, descriptor.sd_hash, self.peer_timeout,
             self.peer_connect_timeout, download_directory, file_name, self.fixed_peers
         )
         stream = ManagedStream(
             self.loop, self.blob_manager, descriptor, download_directory, file_name,
             downloader, file_info['status'], file_info['claim']
         )
         self.streams.add(stream)
コード例 #13
0
File: stream_manager.py  Project: mdpovey/lbry
 async def add_stream(self, rowid: int, sd_hash: str, file_name: str, download_directory: str, status: str,
                      claim: typing.Optional['StoredStreamClaim']):
     """Re-register a stored stream; only running streams get a downloader attached."""
     sd_blob = self.blob_manager.get_blob(sd_hash)
     if not sd_blob.get_is_verified():
         return
     try:
         descriptor = await self.blob_manager.get_stream_descriptor(sd_blob.blob_hash)
     except InvalidStreamDescriptorError as err:
         # an unreadable descriptor makes the stream unusable; skip it
         log.warning("Failed to start stream for sd %s - %s", sd_hash, str(err))
         return
     # stopped streams are tracked without an active downloader
     downloader = None
     if status == ManagedStream.STATUS_RUNNING:
         downloader = self.make_downloader(descriptor.sd_hash, download_directory, file_name)
     stream = ManagedStream(
         self.loop, self.blob_manager, rowid, descriptor, download_directory, file_name, downloader, status, claim
     )
     self.streams.add(stream)
     # re-read claim metadata whenever storage reports a claim change for this stream
     self.storage.content_claim_callbacks[stream.stream_hash] = lambda: self._update_content_claim(stream)
コード例 #14
0
    async def _download_stream_from_claim(self, node: 'Node', download_directory: str, claim_info: typing.Dict,
                                          file_name: typing.Optional[str] = None) -> typing.Optional[ManagedStream]:
        """
        Download the stream referenced by a resolved claim, persist its metadata,
        and return the resulting ManagedStream (or None on descriptor timeout
        or cancellation).
        """

        claim = ClaimDict.load_dict(claim_info['value'])
        downloader = StreamDownloader(self.loop, self.blob_manager, claim.source_hash.decode(), self.peer_timeout,
                                      self.peer_connect_timeout, download_directory, file_name, self.fixed_peers)
        try:
            downloader.download(node)
            await downloader.got_descriptor.wait()
            log.info("got descriptor %s for %s", claim.source_hash.decode(), claim_info['name'])
        except (asyncio.TimeoutError, asyncio.CancelledError):
            # no descriptor: give up quietly — callers get None rather than an exception
            log.info("stream timeout")
            await downloader.stop()
            log.info("stopped stream")
            return
        # record the stream and downloaded-file rows unless they already exist
        if not await self.blob_manager.storage.stream_exists(downloader.sd_hash):
            await self.blob_manager.storage.store_stream(downloader.sd_blob, downloader.descriptor)
        if not await self.blob_manager.storage.file_exists(downloader.sd_hash):
            await self.blob_manager.storage.save_downloaded_file(
                downloader.descriptor.stream_hash, os.path.basename(downloader.output_path), download_directory,
                0.0
            )
        # associate the stream with the claim outpoint ("txid:nout")
        await self.blob_manager.storage.save_content_claim(
            downloader.descriptor.stream_hash, f"{claim_info['txid']}:{claim_info['nout']}"
        )

        stored_claim = StoredStreamClaim(
            downloader.descriptor.stream_hash, f"{claim_info['txid']}:{claim_info['nout']}", claim_info['claim_id'],
            claim_info['name'], claim_info['amount'], claim_info['height'], claim_info['hex'],
            claim.certificate_id, claim_info['address'], claim_info['claim_sequence'],
            claim_info.get('channel_name')
        )
        stream = ManagedStream(self.loop, self.blob_manager, downloader.descriptor, download_directory,
                               os.path.basename(downloader.output_path), downloader, ManagedStream.STATUS_RUNNING,
                               stored_claim)
        self.streams.add(stream)
        try:
            # wait for the first decrypted bytes before handing the stream back
            await stream.downloader.wrote_bytes_event.wait()
            self.wait_for_stream_finished(stream)
            return stream
        except asyncio.CancelledError:
            # NOTE(review): cancellation here returns None after stopping the downloader;
            # unlike the timeout path above, the rows stored earlier are not cleaned up —
            # confirm that is intended.
            await downloader.stop()
コード例 #15
0
 async def _update_content_claim(self, stream: ManagedStream):
     """Refresh *stream*'s claim metadata from the latest claim row in storage."""
     latest = await self.storage.get_content_claim(stream.stream_hash)
     stream.set_claim(latest, latest['value'])
コード例 #16
0
class TestManagedStream(BlobExchangeTestBase):
    """Exercises ManagedStream: transfer through mocked peers, chunked responses,
    manual blob copying with decryption, and corruption handling."""

    async def create_stream(self, blob_count: int = 10):
        """Create a random *blob_count*-blob stream server-side and return its descriptor."""
        self.stream_bytes = b''
        for _ in range(blob_count):
            self.stream_bytes += os.urandom((MAX_BLOB_SIZE - 1))
        # create the stream
        file_path = os.path.join(self.server_dir, "test_file")
        with open(file_path, 'wb') as f:
            f.write(self.stream_bytes)
        descriptor = await StreamDescriptor.create_stream(
            self.loop, self.server_blob_manager.blob_dir, file_path)
        self.sd_hash = descriptor.calculate_sd_hash()
        return descriptor

    async def setup_stream(self, blob_count: int = 10):
        """Create a stream and attach a client-side ManagedStream for it."""
        await self.create_stream(blob_count)
        self.stream = ManagedStream(self.loop, self.client_config,
                                    self.client_blob_manager, self.sd_hash,
                                    self.client_dir)

    async def _test_transfer_stream(self,
                                    blob_count: int,
                                    mock_accumulate_peers=None):
        """Download a stream through a mocked DHT node and verify the written file."""
        await self.setup_stream(blob_count)
        mock_node = mock.Mock(spec=Node)

        def _mock_accumulate_peers(q1, q2):
            async def _task():
                pass

            # hand the downloader the real test server as its only peer
            q2.put_nowait([self.server_from_client])
            return q2, self.loop.create_task(_task())

        mock_node.accumulate_peers = mock_accumulate_peers or _mock_accumulate_peers
        await self.stream.setup(mock_node, save_file=True)
        await self.stream.finished_writing.wait()
        self.assertTrue(os.path.isfile(self.stream.full_path))
        self.stream.stop_download()
        # stopping after completion must not delete the finished file
        self.assertTrue(os.path.isfile(self.stream.full_path))
        with open(self.stream.full_path, 'rb') as f:
            self.assertEqual(f.read(), self.stream_bytes)
        await asyncio.sleep(0.01)

    async def test_transfer_stream(self):
        await self._test_transfer_stream(10)

    # was `@unittest.SkipTest`: decorating with the SkipTest *exception class* makes the
    # attribute non-callable so the runner silently drops it instead of reporting a skip
    @unittest.skip("100 blob stream is too slow to run routinely")
    async def test_transfer_hundred_blob_stream(self):
        await self._test_transfer_stream(100)

    async def test_transfer_stream_bad_first_peer_good_second(self):
        await self.setup_stream(2)

        mock_node = mock.Mock(spec=Node)
        q = asyncio.Queue()

        bad_peer = KademliaPeer(self.loop,
                                "127.0.0.1",
                                b'2' * 48,
                                tcp_port=3334)

        def _mock_accumulate_peers(q1, q2):
            async def _task():
                pass

            # offer the dead peer first, then the live server a second later
            q2.put_nowait([bad_peer])
            self.loop.call_later(1, q2.put_nowait, [self.server_from_client])
            return q2, self.loop.create_task(_task())

        mock_node.accumulate_peers = _mock_accumulate_peers

        await self.stream.setup(mock_node, save_file=True)
        await self.stream.finished_writing.wait()
        self.assertTrue(os.path.isfile(self.stream.full_path))
        with open(self.stream.full_path, 'rb') as f:
            self.assertEqual(f.read(), self.stream_bytes)
        # self.assertIs(self.server_from_client.tcp_last_down, None)
        # self.assertIsNot(bad_peer.tcp_last_down, None)

    async def test_client_chunked_response(self):
        self.server.stop_server()

        class ChunkedServerProtocol(BlobServerProtocol):
            # dribble the response out one byte at a time to exercise client reassembly
            def send_response(self, responses):
                to_send = []
                while responses:
                    to_send.append(responses.pop())
                for byte in BlobResponse(to_send).serialize():
                    self.transport.write(bytes([byte]))

        self.server.server_protocol_class = ChunkedServerProtocol
        self.server.start_server(33333, '127.0.0.1')
        self.assertEqual(0,
                         len(self.client_blob_manager.completed_blob_hashes))
        await asyncio.wait_for(self._test_transfer_stream(10), timeout=2)
        self.assertEqual(11,
                         len(self.client_blob_manager.completed_blob_hashes))

    async def test_create_and_decrypt_one_blob_stream(self,
                                                      blobs=1,
                                                      corrupt=False):
        """Copy blobs to the client by hand and check decryption (or corruption detection)."""
        descriptor = await self.create_stream(blobs)

        # copy blob files
        shutil.copy(
            os.path.join(self.server_blob_manager.blob_dir, self.sd_hash),
            os.path.join(self.client_blob_manager.blob_dir, self.sd_hash))
        self.stream = ManagedStream(self.loop, self.client_config,
                                    self.client_blob_manager, self.sd_hash,
                                    self.client_dir)

        for blob_info in descriptor.blobs[:-1]:
            shutil.copy(
                os.path.join(self.server_blob_manager.blob_dir,
                             blob_info.blob_hash),
                os.path.join(self.client_blob_manager.blob_dir,
                             blob_info.blob_hash))
            if corrupt and blob_info.length == MAX_BLOB_SIZE:
                # truncating the blob file leaves it unverifiable on the client
                with open(
                        os.path.join(self.client_blob_manager.blob_dir,
                                     blob_info.blob_hash), "rb+") as handle:
                    handle.truncate()
                    handle.flush()
        await self.stream.setup()
        await self.stream.finished_writing.wait()
        if corrupt:
            # a corrupted blob must prevent the decrypted file from being written
            return self.assertFalse(
                os.path.isfile(os.path.join(self.client_dir, "test_file")))

        with open(os.path.join(self.client_dir, "test_file"), "rb") as f:
            decrypted = f.read()
        self.assertEqual(decrypted, self.stream_bytes)

        # assertTrue replaces the noisier assertEqual(True, ...) form
        self.assertTrue(
            self.client_blob_manager.get_blob(self.sd_hash).get_is_verified())
        self.assertTrue(
            self.client_blob_manager.get_blob(
                self.stream.descriptor.blobs[0].blob_hash).get_is_verified())
        #
        # # its all blobs + sd blob - last blob, which is the same size as descriptor.blobs
        # self.assertEqual(len(descriptor.blobs), len(await downloader_storage.get_all_finished_blobs()))
        # self.assertEqual(
        #     [descriptor.sd_hash, descriptor.blobs[0].blob_hash], await downloader_storage.get_blobs_to_announce()
        # )
        #
        # await downloader_storage.close()
        # await self.storage.close()

    async def test_create_and_decrypt_multi_blob_stream(self):
        await self.test_create_and_decrypt_one_blob_stream(10)
コード例 #17
0
    async def download_stream_from_uri(
            self,
            uri,
            exchange_rate_manager: 'ExchangeRateManager',
            timeout: typing.Optional[float] = None,
            file_name: typing.Optional[str] = None,
            download_directory: typing.Optional[str] = None,
            save_file: typing.Optional[bool] = None,
            resolve_timeout: float = 3.0) -> ManagedStream:
        """
        Resolve *uri*, pay any claim key fee, download the stream and return its ManagedStream.

        :raises ResolveError / ResolveTimeout: when the claim cannot be resolved
        :raises KeyFeeAboveMaxAllowed / InsufficientFundsError: on fee problems
        :raises DownloadDataTimeout: when saving the file exceeded the download timeout
        """
        timeout = timeout or self.config.download_timeout
        start_time = self.loop.time()
        resolved_time = None
        stream = None
        error = None
        outpoint = None
        # normalize save_file / download_directory: an explicit file name implies saving
        if save_file is None:
            save_file = self.config.save_files
        if file_name and not save_file:
            save_file = True
        if save_file:
            download_directory = download_directory or self.config.download_dir
        else:
            download_directory = None

        try:
            # resolve the claim
            if not URL.parse(uri).has_stream:
                raise ResolveError(
                    "cannot download a channel claim, specify a /path")
            try:
                resolved_result = self._convert_to_old_resolve_output(
                    await asyncio.wait_for(self.wallet.ledger.resolve([uri]),
                                           resolve_timeout))
            except asyncio.TimeoutError:
                raise ResolveTimeout(uri)
            # persist every successfully resolved claim, skipping error entries
            await self.storage.save_claims_for_resolve([
                value for value in resolved_result.values()
                if 'error' not in value
            ])
            resolved = resolved_result.get(uri, {})
            resolved = resolved if 'value' in resolved else resolved.get(
                'claim')
            if not resolved:
                raise ResolveError(f"Failed to resolve stream at '{uri}'")
            if 'error' in resolved:
                raise ResolveError(
                    f"error resolving stream: {resolved['error']}")

            claim = Claim.from_bytes(binascii.unhexlify(resolved['protobuf']))
            outpoint = f"{resolved['txid']}:{resolved['nout']}"
            resolved_time = self.loop.time() - start_time

            # resume or update an existing stream, if the stream changed download it and delete the old one after
            updated_stream, to_replace = await self._check_update_or_replace(
                outpoint, resolved['claim_id'], claim)
            if updated_stream:
                log.info("already have stream for %s", uri)
                if save_file and updated_stream.output_file_exists:
                    save_file = False
                await updated_stream.start(node=self.node,
                                           timeout=timeout,
                                           save_now=save_file)
                if not updated_stream.output_file_exists and (
                        save_file or file_name or download_directory):
                    await updated_stream.save_file(
                        file_name=file_name,
                        download_directory=download_directory,
                        node=self.node)
                return updated_stream

            content_fee = None
            fee_amount, fee_address = None, None

            # check that the fee is payable
            if not to_replace and claim.stream.has_fee:
                fee_amount = round(
                    exchange_rate_manager.convert_currency(
                        claim.stream.fee.currency, "LBC",
                        claim.stream.fee.amount), 5)
                max_fee_amount = round(
                    exchange_rate_manager.convert_currency(
                        self.config.max_key_fee['currency'], "LBC",
                        Decimal(self.config.max_key_fee['amount'])), 5)
                if fee_amount > max_fee_amount:
                    msg = f"fee of {fee_amount} exceeds max configured to allow of {max_fee_amount}"
                    log.warning(msg)
                    raise KeyFeeAboveMaxAllowed(msg)
                balance = await self.wallet.default_account.get_balance()
                if lbc_to_dewies(str(fee_amount)) > balance:
                    msg = f"fee of {fee_amount} exceeds max available balance"
                    log.warning(msg)
                    raise InsufficientFundsError(msg)
                fee_address = claim.stream.fee.address

            stream = ManagedStream(self.loop,
                                   self.config,
                                   self.blob_manager,
                                   claim.stream.source.sd_hash,
                                   download_directory,
                                   file_name,
                                   ManagedStream.STATUS_RUNNING,
                                   content_fee=content_fee,
                                   analytics_manager=self.analytics_manager)
            log.info("starting download for %s", uri)

            before_download = self.loop.time()
            await stream.start(self.node, timeout)
            stream.set_claim(resolved, claim)
            if to_replace:  # delete old stream now that the replacement has started downloading
                await self.delete_stream(to_replace)
            elif fee_address:
                # pay the key fee only after the download actually started
                stream.content_fee = await self.wallet.send_amount_to_address(
                    lbc_to_dewies(str(fee_amount)),
                    fee_address.encode('latin1'))
                log.info("paid fee of %s for %s", fee_amount, uri)
                await self.storage.save_content_fee(stream.stream_hash,
                                                    stream.content_fee)

            self.streams[stream.sd_hash] = stream
            self.storage.content_claim_callbacks[
                stream.stream_hash] = lambda: self._update_content_claim(stream
                                                                         )
            await self.storage.save_content_claim(stream.stream_hash, outpoint)
            if save_file:
                # NOTE(review): the `loop` argument to asyncio.wait_for is deprecated since
                # Python 3.8 and removed in 3.10 — confirm the supported runtime version
                await asyncio.wait_for(stream.save_file(node=self.node),
                                       timeout -
                                       (self.loop.time() - before_download),
                                       loop=self.loop)
            return stream
        except asyncio.TimeoutError:
            error = DownloadDataTimeout(stream.sd_hash)
            raise error
        except Exception as err:  # forgive data timeout, dont delete stream
            error = err
            raise
        finally:
            # fire-and-forget analytics about time-to-first-bytes, on success and failure alike
            if self.analytics_manager and (
                    error or (stream and
                              (stream.downloader.time_to_descriptor
                               or stream.downloader.time_to_first_bytes))):
                self.loop.create_task(
                    self.analytics_manager.send_time_to_first_bytes(
                        resolved_time,
                        self.loop.time() - start_time,
                        None if not stream else stream.download_id, uri,
                        outpoint, None if not stream else len(
                            stream.downloader.blob_downloader.
                            active_connections), None if not stream else
                        len(stream.downloader.blob_downloader.scores), False
                        if not stream else stream.downloader.added_fixed_peers,
                        self.config.fixed_peer_delay
                        if not stream else stream.downloader.fixed_peers_delay,
                        None if not stream else stream.sd_hash, None if
                        not stream else stream.downloader.time_to_descriptor,
                        None if not (stream and stream.descriptor) else
                        stream.descriptor.blobs[0].blob_hash,
                        None if not (stream and stream.descriptor) else
                        stream.descriptor.blobs[0].length, None if not stream
                        else stream.downloader.time_to_first_bytes,
                        None if not error else error.__class__.__name__))
コード例 #18
0
File: test_managed_stream.py  Project: woolf-wen/lbry
 async def setup_stream(self, blob_count: int = 10):
     """Build a test stream and attach a client-side ManagedStream for it."""
     await self.create_stream(blob_count)
     self.stream = ManagedStream(
         self.loop, self.client_config, self.client_blob_manager,
         self.sd_hash, self.client_dir
     )