Example No. 1
    async def setup_blob_manager(self, save_blobs=True):
        tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(tmp_dir))
        self.config = Config(save_blobs=save_blobs)
        self.storage = SQLiteStorage(self.config, os.path.join(tmp_dir, "lbrynet.sqlite"))
        self.blob_manager = BlobManager(self.loop, tmp_dir, self.storage, self.config)
        await self.storage.open()
Example No. 2
    async def asyncSetUp(self):
        self.loop = asyncio.get_event_loop()

        self.client_dir = tempfile.mkdtemp()
        self.server_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.client_dir)
        self.addCleanup(shutil.rmtree, self.server_dir)
        self.server_config = Config(data_dir=self.server_dir, download_dir=self.server_dir, wallet=self.server_dir,
                                    reflector_servers=[])
        self.server_storage = SQLiteStorage(self.server_config, os.path.join(self.server_dir, "lbrynet.sqlite"))
        self.server_blob_manager = BlobManager(self.loop, self.server_dir, self.server_storage, self.server_config)
        self.server = BlobServer(self.loop, self.server_blob_manager, 'bQEaw42GXsgCAGio1nxFncJSyRmnztSCjP')

        self.client_config = Config(data_dir=self.client_dir, download_dir=self.client_dir, wallet=self.client_dir,
                                    reflector_servers=[])
        self.client_storage = SQLiteStorage(self.client_config, os.path.join(self.client_dir, "lbrynet.sqlite"))
        self.client_blob_manager = BlobManager(self.loop, self.client_dir, self.client_storage, self.client_config)
        self.client_peer_manager = PeerManager(self.loop)
        self.server_from_client = make_kademlia_peer(b'1' * 48, "127.0.0.1", tcp_port=33333, allow_localhost=True)

        await self.client_storage.open()
        await self.server_storage.open()
        await self.client_blob_manager.setup()
        await self.server_blob_manager.setup()

        self.server.start_server(33333, '127.0.0.1')
        self.addCleanup(self.server.stop_server)
        await self.server.started_listening.wait()
Example No. 3
    async def asyncSetUp(self):
        self.loop = asyncio.get_event_loop()
        self.key = b'deadbeef' * 4
        self.cleartext = os.urandom(20000000)

        tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(tmp_dir))
        self.conf = Config()
        self.storage = SQLiteStorage(self.conf, os.path.join(tmp_dir, "lbrynet.sqlite"))
        await self.storage.open()
        self.blob_manager = BlobManager(self.loop, tmp_dir, self.storage, self.conf)
        self.stream_manager = StreamManager(self.loop, Config(), self.blob_manager, None, self.storage, None)

        server_tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(server_tmp_dir))
        self.server_conf = Config()
        self.server_storage = SQLiteStorage(self.server_conf, os.path.join(server_tmp_dir, "lbrynet.sqlite"))
        await self.server_storage.open()
        self.server_blob_manager = BlobManager(self.loop, server_tmp_dir, self.server_storage, self.server_conf)

        download_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(download_dir))

        # create the stream
        file_path = os.path.join(tmp_dir, "test_file")
        with open(file_path, 'wb') as f:
            f.write(self.cleartext)

        self.stream = await self.stream_manager.create_stream(file_path)
Example No. 4
async def main(blob_hash: str, url: str):
    conf = Config()
    loop = asyncio.get_running_loop()
    host_url, port = url.split(":")
    try:
        # if the host portion of the url is already an ip address, use it directly
        ipaddress.ip_address(host_url)
        host = host_url
    except ValueError:
        host = None
    if not host:
        host_info = await loop.getaddrinfo(
            host_url, 'https',
            proto=socket.IPPROTO_TCP,
        )
        host = host_info[0][4][0]

    storage = SQLiteStorage(conf, os.path.join(conf.data_dir, "lbrynet.sqlite"))
    blob_manager = BlobManager(loop, os.path.join(conf.data_dir, "blobfiles"), storage, conf)
    await storage.open()
    await blob_manager.setup()

    blob = blob_manager.get_blob(blob_hash)
    success, keep = await request_blob(loop, blob, host, int(port), conf.peer_connect_timeout,
                                       conf.blob_download_timeout)
    print(f"{'downloaded' if success else 'failed to download'} {blob_hash} from {host}:{port}\n"
          f"keep connection: {keep}")
    if blob.get_is_verified():
        await blob_manager.delete_blobs([blob.blob_hash])
        print(f"deleted {blob_hash}")
Example No. 5
    async def test_old_key_sort_sd_blob(self):
        loop = asyncio.get_event_loop()
        tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(tmp_dir))
        self.conf = Config()
        storage = SQLiteStorage(self.conf, ":memory:")
        await storage.open()
        blob_manager = BlobManager(loop, tmp_dir, storage, self.conf)

        sd_bytes = b'{"stream_name": "4f62616d6120446f6e6b65792d322e73746c", "blobs": [{"length": 1153488, "blob_num' \
                   b'": 0, "blob_hash": "9fa32a249ce3f2d4e46b78599800f368b72f2a7f22b81df443c7f6bdbef496bd61b4c0079c7' \
                   b'3d79c8bb9be9a6bf86592", "iv": "0bf348867244019c9e22196339016ea6"}, {"length": 0, "blob_num": 1,' \
                   b' "iv": "9f36abae16955463919b07ed530a3d18"}], "stream_type": "lbryfile", "key": "a03742b87628aa7' \
                   b'228e48f1dcd207e48", "suggested_file_name": "4f62616d6120446f6e6b65792d322e73746c", "stream_hash' \
                   b'": "b43f4b1379780caf60d20aa06ac38fb144df61e514ebfa97537018ba73bce8fe37ae712f473ff0ba0be0eef44e1' \
                   b'60207"}'
        sd_hash = '9313d1807551186126acc3662e74d9de29cede78d4f133349ace846273ef116b9bb86be86c54509eb84840e4b032f6b2'
        stream_hash = 'b43f4b1379780caf60d20aa06ac38fb144df61e514ebfa97537018ba73bce8fe37ae712f473ff0ba0be0eef44e160207'

        blob = blob_manager.get_blob(sd_hash)
        blob.set_length(len(sd_bytes))
        writer = blob.get_blob_writer()
        writer.write(sd_bytes)
        await blob.verified.wait()
        descriptor = await StreamDescriptor.from_stream_descriptor_blob(
            loop, blob_manager.blob_dir, blob
        )
        self.assertEqual(stream_hash, descriptor.get_stream_hash())
        self.assertEqual(sd_hash, descriptor.calculate_old_sort_sd_hash())
        self.assertNotEqual(sd_hash, descriptor.calculate_sd_hash())
Example No. 6
    async def test_host_same_blob_to_multiple_peers_at_once(self):
        blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
        mock_blob_bytes = b'1' * ((2 * 2**20) - 1)

        second_client_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, second_client_dir)
        second_client_conf = Config()
        second_client_storage = SQLiteStorage(
            second_client_conf,
            os.path.join(second_client_dir, "lbrynet.sqlite"))
        second_client_blob_manager = BlobManager(self.loop, second_client_dir,
                                                 second_client_storage,
                                                 second_client_conf)
        server_from_second_client = make_kademlia_peer(b'1' * 48,
                                                       "127.0.0.1",
                                                       tcp_port=33333,
                                                       allow_localhost=True)

        await second_client_storage.open()
        await second_client_blob_manager.setup()

        await self._add_blob_to_server(blob_hash, mock_blob_bytes)

        second_client_blob = second_client_blob_manager.get_blob(blob_hash)

        # download the blob
        await asyncio.gather(
            request_blob(self.loop, second_client_blob,
                         server_from_second_client.address,
                         server_from_second_client.tcp_port, 2, 3),
            self._test_transfer_blob(blob_hash))
        await second_client_blob.verified.wait()
        self.assertTrue(second_client_blob.get_is_verified())
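Note: the _add_blob_to_server helper used above (and in Example No. 10) is not shown on this page. A plausible sketch, assuming the blob-writer pattern used in Examples No. 5 and 16, might look like this; the exact implementation is an assumption.

    async def _add_blob_to_server(self, blob_hash: str, blob_bytes: bytes):
        # hypothetical sketch: write the raw bytes into the server's blob manager
        # using the get_blob / get_blob_writer pattern shown elsewhere on this page
        blob = self.server_blob_manager.get_blob(blob_hash, len(blob_bytes))
        writer = blob.get_blob_writer()
        writer.write(blob_bytes)
        await blob.verified.wait()
        await self.server_blob_manager.blob_completed(blob)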
Example No. 7
class TestBlobManager(AsyncioTestCase):
    async def setup_blob_manager(self, save_blobs=True):
        tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(tmp_dir))
        self.config = Config(save_blobs=save_blobs)
        self.storage = SQLiteStorage(self.config, os.path.join(tmp_dir, "lbrynet.sqlite"))
        self.blob_manager = BlobManager(self.loop, tmp_dir, self.storage, self.config)
        await self.storage.open()

    async def test_memory_blobs_arent_verified_but_real_ones_are(self):
        for save_blobs in (False, True):
            await self.setup_blob_manager(save_blobs=save_blobs)
            # add a blob file
            blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
            blob_bytes = b'1' * ((2 * 2 ** 20) - 1)
            blob = self.blob_manager.get_blob(blob_hash, len(blob_bytes))
            blob.save_verified_blob(blob_bytes)
            self.assertTrue(blob.get_is_verified())
            self.blob_manager.blob_completed(blob)
            self.assertEqual(self.blob_manager.is_blob_verified(blob_hash), save_blobs)

    async def test_sync_blob_file_manager_on_startup(self):
        await self.setup_blob_manager(save_blobs=True)

        # add a blob file
        blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
        blob_bytes = b'1' * ((2 * 2 ** 20) - 1)
        with open(os.path.join(self.blob_manager.blob_dir, blob_hash), 'wb') as f:
            f.write(blob_bytes)

        # it should not have been added automatically on startup

        await self.blob_manager.setup()
        self.assertSetEqual(self.blob_manager.completed_blob_hashes, set())

        # make sure we can add the blob
        await self.blob_manager.blob_completed(self.blob_manager.get_blob(blob_hash, len(blob_bytes)))
        self.assertSetEqual(self.blob_manager.completed_blob_hashes, {blob_hash})

        # stop the blob manager and restart it, make sure the blob is there
        self.blob_manager.stop()
        self.assertSetEqual(self.blob_manager.completed_blob_hashes, set())
        await self.blob_manager.setup()
        self.assertSetEqual(self.blob_manager.completed_blob_hashes, {blob_hash})

        # test that the blob is removed upon the next startup after the file being manually deleted
        self.blob_manager.stop()

        # manually delete the blob file and restart the blob manager
        os.remove(os.path.join(self.blob_manager.blob_dir, blob_hash))
        await self.blob_manager.setup()
        self.assertSetEqual(self.blob_manager.completed_blob_hashes, set())

        # check that the deleted blob was updated in the database
        self.assertEqual(
            'pending', (
                await self.storage.run_and_return_one_or_none('select status from blob where blob_hash=?', blob_hash)
            )
        )
Example No. 8
    async def start(self):
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        data_store = None
        if DHT_COMPONENT not in self.component_manager.skip_components:
            dht_node: Node = self.component_manager.get_component(DHT_COMPONENT)
            if dht_node:
                data_store = dht_node.protocol.data_store
        blob_dir = os.path.join(self.conf.data_dir, 'blobfiles')
        if not os.path.isdir(blob_dir):
            os.mkdir(blob_dir)
        self.blob_manager = BlobManager(self.component_manager.loop, blob_dir, storage, self.conf, data_store)
        return await self.blob_manager.setup()
Example No. 9
    async def asyncSetUp(self):
        await super().asyncSetUp()

        logging.getLogger('lbry.blob_exchange').setLevel(self.VERBOSITY)
        logging.getLogger('lbry.daemon').setLevel(self.VERBOSITY)
        logging.getLogger('lbry.stream').setLevel(self.VERBOSITY)
        logging.getLogger('lbry.wallet').setLevel(self.VERBOSITY)

        self.daemon = await self.add_daemon(self.wallet_node)

        await self.account.ensure_address_gap()
        address = (await self.account.receiving.get_addresses(limit=1, only_usable=True))[0]
        sendtxid = await self.blockchain.send_to_address(address, 10)
        await self.confirm_tx(sendtxid)
        await self.generate(5)

        server_tmp_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, server_tmp_dir)
        self.server_config = Config()
        self.server_config.transaction_cache_size = 10000
        self.server_storage = SQLiteStorage(self.server_config, ':memory:')
        await self.server_storage.open()

        self.server_blob_manager = BlobManager(self.loop, server_tmp_dir, self.server_storage, self.server_config)
        self.server = BlobServer(self.loop, self.server_blob_manager, 'bQEaw42GXsgCAGio1nxFncJSyRmnztSCjP')
        self.server.start_server(5567, '127.0.0.1')
        await self.server.started_listening.wait()

        self.reflector = ReflectorServer(self.server_blob_manager)
        self.reflector.start_server(5566, '127.0.0.1')
        await self.reflector.started_listening.wait()
        self.addCleanup(self.reflector.stop_server)
Example No. 10
    async def test_host_different_blobs_to_multiple_peers_at_once(self):
        blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
        mock_blob_bytes = b'1' * ((2 * 2 ** 20) - 1)

        sd_hash = "3e2706157a59aaa47ef52bc264fce488078b4026c0b9bab649a8f2fe1ecc5e5cad7182a2bb7722460f856831a1ac0f02"
        mock_sd_blob_bytes = b"""{"blobs": [{"blob_hash": "6f53c72de100f6f007aa1b9720632e2d049cc6049e609ad790b556dba262159f739d5a14648d5701afc84b991254206a", "blob_num": 0, "iv": "3b6110c2d8e742bff66e4314863dee7e", "length": 2097152}, {"blob_hash": "18493bc7c5164b00596153859a0faffa45765e47a6c3f12198a4f7be4658111505b7f8a15ed0162306a0672c4a9b505d", "blob_num": 1, "iv": "df973fa64e73b4ff2677d682cdc32d3e", "length": 2097152}, {"blob_num": 2, "iv": "660d2dc2645da7c7d4540a466fcb0c60", "length": 0}], "key": "6465616462656566646561646265656664656164626565666465616462656566", "stream_hash": "22423c6786584974bd6b462af47ecb03e471da0ef372fe85a4e71a78bef7560c4afb0835c689f03916105404653b7bdf", "stream_name": "746573745f66696c65", "stream_type": "lbryfile", "suggested_file_name": "746573745f66696c65"}"""

        second_client_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, second_client_dir)
        second_client_conf = Config()

        second_client_storage = SQLiteStorage(second_client_conf, os.path.join(second_client_dir, "lbrynet.sqlite"))
        second_client_blob_manager = BlobManager(
            self.loop, second_client_dir, second_client_storage, second_client_conf
        )
        server_from_second_client = make_kademlia_peer(b'1' * 48, "127.0.0.1", tcp_port=33333, allow_localhost=True)

        await second_client_storage.open()
        await second_client_blob_manager.setup()

        await self._add_blob_to_server(blob_hash, mock_blob_bytes)
        await self._add_blob_to_server(sd_hash, mock_sd_blob_bytes)

        second_client_blob = second_client_blob_manager.get_blob(blob_hash)

        await asyncio.gather(
            request_blob(
                self.loop, second_client_blob, server_from_second_client.address,
                server_from_second_client.tcp_port, 2, 3
            ),
            self._test_transfer_blob(sd_hash),
            second_client_blob.verified.wait()
        )
        self.assertTrue(second_client_blob.get_is_verified())
Example No. 11
    async def asyncSetUp(self):
        self.conf = Config()
        self.storage = SQLiteStorage(self.conf, ':memory:')
        self.blob_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.blob_dir)
        self.blob_manager = BlobManager(asyncio.get_event_loop(), self.blob_dir, self.storage, self.conf)
        await self.storage.open()
Example No. 12
    async def asyncSetUp(self):
        self.tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(self.tmp_dir))
        self.loop = asyncio.get_running_loop()
        self.config = Config()
        self.storage = SQLiteStorage(self.config, ":memory:", self.loop)
        self.blob_manager = BlobManager(self.loop, self.tmp_dir, self.storage, self.config)
        await self.storage.open()
Example No. 13
class BlobComponent(Component):
    component_name = BLOB_COMPONENT
    depends_on = [DATABASE_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.blob_manager: typing.Optional[BlobManager] = None

    @property
    def component(self) -> typing.Optional[BlobManager]:
        return self.blob_manager

    async def start(self):
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        data_store = None
        if DHT_COMPONENT not in self.component_manager.skip_components:
            dht_node: Node = self.component_manager.get_component(
                DHT_COMPONENT)
            if dht_node:
                data_store = dht_node.protocol.data_store
        blob_dir = os.path.join(self.conf.data_dir, 'blobfiles')
        if not os.path.isdir(blob_dir):
            os.mkdir(blob_dir)
        self.blob_manager = BlobManager(self.component_manager.loop, blob_dir,
                                        storage, self.conf, data_store)
        return await self.blob_manager.setup()

    async def stop(self):
        self.blob_manager.stop()

    async def get_status(self):
        count = 0
        if self.blob_manager:
            count = len(self.blob_manager.completed_blob_hashes)
        return {
            'finished_blobs': count,
            'connections': {} if not self.blob_manager else
            self.blob_manager.connection_manager.status
        }
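Note: once started, the BlobManager built in start() is what the component manager hands back for BLOB_COMPONENT. A minimal sketch of reading its state through a running component manager follows; the component_manager variable and the helper name are assumptions.

async def print_blob_status(component_manager):
    # hypothetical helper: fetch the running BlobManager and report the same
    # fields that BlobComponent.get_status() exposes above
    blob_manager: BlobManager = component_manager.get_component(BLOB_COMPONENT)
    print(f"finished blobs: {len(blob_manager.completed_blob_hashes)}")
    print(f"connections: {blob_manager.connection_manager.status}")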
Example No. 14
    async def asyncSetUp(self):
        self.loop = asyncio.get_event_loop()
        self.key = b'deadbeef' * 4
        self.cleartext = os.urandom(20000000)
        self.tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(self.tmp_dir))
        self.conf = Config()
        self.storage = SQLiteStorage(self.conf, ":memory:")
        await self.storage.open()
        self.blob_manager = BlobManager(self.loop, self.tmp_dir, self.storage, self.conf)

        self.file_path = os.path.join(self.tmp_dir, "test_file")
        with open(self.file_path, 'wb') as f:
            f.write(self.cleartext)

        self.descriptor = await StreamDescriptor.create_stream(self.loop, self.tmp_dir, self.file_path, key=self.key)
        self.sd_hash = self.descriptor.calculate_sd_hash()
        self.sd_dict = json.loads(self.descriptor.as_json())
Example No. 15
async def main(address: str):
    try:
        decode_address(address)
    except Exception:
        print(f"'{address}' is not a valid lbrycrd address")
        return 1
    loop = asyncio.get_running_loop()

    conf = Config()
    storage = SQLiteStorage(conf, os.path.expanduser("~/.lbrynet/lbrynet.sqlite"))
    await storage.open()
    blob_manager = BlobManager(loop, os.path.expanduser("~/.lbrynet/blobfiles"), storage, conf)
    await blob_manager.setup()

    server = await loop.create_server(
        lambda: BlobServer(loop, blob_manager, address),
        '0.0.0.0', 4444)
    try:
        async with server:
            await server.serve_forever()
    finally:
        await storage.close()
Example No. 16
class TestReflector(AsyncioTestCase):
    async def asyncSetUp(self):
        self.loop = asyncio.get_event_loop()
        self.key = b'deadbeef' * 4
        self.cleartext = os.urandom(20000000)

        tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(tmp_dir))
        self.conf = Config()
        self.storage = SQLiteStorage(self.conf,
                                     os.path.join(tmp_dir, "lbrynet.sqlite"))
        await self.storage.open()
        self.blob_manager = BlobManager(self.loop, tmp_dir, self.storage,
                                        self.conf)
        self.addCleanup(self.blob_manager.stop)
        self.stream_manager = StreamManager(self.loop, Config(),
                                            self.blob_manager, None,
                                            self.storage, None)

        server_tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(server_tmp_dir))
        self.server_conf = Config()
        self.server_storage = SQLiteStorage(
            self.server_conf, os.path.join(server_tmp_dir, "lbrynet.sqlite"))
        await self.server_storage.open()
        self.server_blob_manager = BlobManager(self.loop, server_tmp_dir,
                                               self.server_storage,
                                               self.server_conf)
        self.addCleanup(self.server_blob_manager.stop)

        download_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(download_dir))

        # create the stream
        file_path = os.path.join(tmp_dir, "test_file")
        with open(file_path, 'wb') as f:
            f.write(self.cleartext)
        self.stream_manager.config.reflect_streams = False
        self.stream = await self.stream_manager.create(file_path)

    async def _test_reflect_stream(self,
                                   response_chunk_size=50,
                                   partial_needs=False):
        reflector = ReflectorServer(self.server_blob_manager,
                                    response_chunk_size=response_chunk_size,
                                    partial_needs=partial_needs)
        reflector.start_server(5566, '127.0.0.1')
        if partial_needs:
            server_blob = self.server_blob_manager.get_blob(
                self.stream.sd_hash)
            client_blob = self.blob_manager.get_blob(self.stream.sd_hash)
            with client_blob.reader_context() as handle:
                server_blob.set_length(client_blob.get_length())
                writer = server_blob.get_blob_writer('nobody', 0)
                writer.write(handle.read())
            self.server_blob_manager.blob_completed(server_blob)
        await reflector.started_listening.wait()
        self.addCleanup(reflector.stop_server)
        self.assertEqual(0, self.stream.reflector_progress)
        sent = await self.stream.upload_to_reflector('127.0.0.1', 5566)
        self.assertEqual(100, self.stream.reflector_progress)
        if partial_needs:
            self.assertFalse(self.stream.is_fully_reflected)
            send_more = await self.stream.upload_to_reflector(
                '127.0.0.1', 5566)
            self.assertGreater(len(send_more), 0)
            sent.extend(send_more)
            sent.append(self.stream.sd_hash)
        self.assertSetEqual(
            set(sent),
            set(
                map(
                    lambda b: b.blob_hash, self.stream.descriptor.blobs[:-1] +
                    [self.blob_manager.get_blob(self.stream.sd_hash)])))
        send_more = await self.stream.upload_to_reflector('127.0.0.1', 5566)
        self.assertEqual(len(send_more), 0)
        self.assertTrue(self.stream.is_fully_reflected)
        server_sd_blob = self.server_blob_manager.get_blob(self.stream.sd_hash)
        self.assertTrue(server_sd_blob.get_is_verified())
        # the reflected sd blob should match the client's copy
        self.assertEqual(self.blob_manager.get_blob(self.stream.sd_hash).length,
                         server_sd_blob.length)
        for blob in self.stream.descriptor.blobs[:-1]:
            server_blob = self.server_blob_manager.get_blob(blob.blob_hash)
            self.assertTrue(server_blob.get_is_verified())
            self.assertEqual(server_blob.length, blob.length)

        sent = await self.stream.upload_to_reflector('127.0.0.1', 5566)
        self.assertListEqual(sent, [])

    async def test_reflect_stream(self):
        return await asyncio.wait_for(
            self._test_reflect_stream(response_chunk_size=50),
            3,
            loop=self.loop)

    async def test_reflect_stream_but_reflector_changes_its_mind(self):
        return await asyncio.wait_for(
            self._test_reflect_stream(partial_needs=True), 3, loop=self.loop)

    async def test_reflect_stream_small_response_chunks(self):
        return await asyncio.wait_for(
            self._test_reflect_stream(response_chunk_size=30),
            3,
            loop=self.loop)

    async def test_announces(self):
        to_announce = await self.storage.get_blobs_to_announce()
        self.assertIn(self.stream.sd_hash, to_announce,
                      "sd blob not set to announce")
        self.assertNotIn(self.stream.descriptor.blobs[0].blob_hash,
                         to_announce, "head blob set to announce")

    async def test_result_from_disconnect_mid_sd_transfer(self):
        stop = asyncio.Event()
        incoming = asyncio.Event()
        reflector = ReflectorServer(self.server_blob_manager,
                                    response_chunk_size=50,
                                    stop_event=stop,
                                    incoming_event=incoming)
        reflector.start_server(5566, '127.0.0.1')
        await reflector.started_listening.wait()
        self.addCleanup(reflector.stop_server)
        self.assertEqual(0, self.stream.reflector_progress)
        reflect_task = asyncio.create_task(
            self.stream.upload_to_reflector('127.0.0.1', 5566))
        await incoming.wait()
        stop.set()
        # this used to raise (and then propagate) a CancelledError
        self.assertListEqual(await reflect_task, [])
        self.assertFalse(self.stream.is_fully_reflected)
        self.assertFalse(
            self.server_blob_manager.get_blob(
                self.stream.sd_hash).get_is_verified())

    async def test_result_from_disconnect_after_sd_transfer(self):
        stop = asyncio.Event()
        incoming = asyncio.Event()
        not_incoming = asyncio.Event()
        reflector = ReflectorServer(self.server_blob_manager,
                                    response_chunk_size=50,
                                    stop_event=stop,
                                    incoming_event=incoming,
                                    not_incoming_event=not_incoming)
        reflector.start_server(5566, '127.0.0.1')
        await reflector.started_listening.wait()
        self.addCleanup(reflector.stop_server)
        self.assertEqual(0, self.stream.reflector_progress)
        reflect_task = asyncio.create_task(
            self.stream.upload_to_reflector('127.0.0.1', 5566))
        await incoming.wait()
        await not_incoming.wait()
        stop.set()
        sent = await reflect_task
        self.assertListEqual([self.stream.sd_hash], sent)
        self.assertTrue(
            self.server_blob_manager.get_blob(
                self.stream.sd_hash).get_is_verified())
        self.assertFalse(self.stream.is_fully_reflected)

    async def test_result_from_disconnect_after_data_transfer(self):
        stop = asyncio.Event()
        incoming = asyncio.Event()
        not_incoming = asyncio.Event()
        reflector = ReflectorServer(self.server_blob_manager,
                                    response_chunk_size=50,
                                    stop_event=stop,
                                    incoming_event=incoming,
                                    not_incoming_event=not_incoming)
        reflector.start_server(5566, '127.0.0.1')
        await reflector.started_listening.wait()
        self.addCleanup(reflector.stop_server)
        self.assertEqual(0, self.stream.reflector_progress)
        reflect_task = asyncio.create_task(
            self.stream.upload_to_reflector('127.0.0.1', 5566))
        await incoming.wait()
        await not_incoming.wait()
        await incoming.wait()
        await not_incoming.wait()
        stop.set()
        sent = await reflect_task
        self.assertListEqual(
            [self.stream.sd_hash, self.stream.descriptor.blobs[0].blob_hash],
            sent)
        self.assertTrue(
            self.server_blob_manager.get_blob(
                self.stream.sd_hash).get_is_verified())
        self.assertTrue(
            self.server_blob_manager.get_blob(
                self.stream.descriptor.blobs[0].blob_hash).get_is_verified())
        self.assertFalse(self.stream.is_fully_reflected)

    async def test_result_from_disconnect_mid_data_transfer(self):
        stop = asyncio.Event()
        incoming = asyncio.Event()
        not_incoming = asyncio.Event()
        reflector = ReflectorServer(self.server_blob_manager,
                                    response_chunk_size=50,
                                    stop_event=stop,
                                    incoming_event=incoming,
                                    not_incoming_event=not_incoming)
        reflector.start_server(5566, '127.0.0.1')
        await reflector.started_listening.wait()
        self.addCleanup(reflector.stop_server)
        self.assertEqual(0, self.stream.reflector_progress)
        reflect_task = asyncio.create_task(
            self.stream.upload_to_reflector('127.0.0.1', 5566))
        await incoming.wait()
        await not_incoming.wait()
        await incoming.wait()
        stop.set()
        self.assertListEqual(await reflect_task, [self.stream.sd_hash])
        self.assertTrue(
            self.server_blob_manager.get_blob(
                self.stream.sd_hash).get_is_verified())
        self.assertFalse(
            self.server_blob_manager.get_blob(
                self.stream.descriptor.blobs[0].blob_hash).get_is_verified())
        self.assertFalse(self.stream.is_fully_reflected)

    async def test_delete_file_during_reflector_upload(self):
        stop = asyncio.Event()
        incoming = asyncio.Event()
        not_incoming = asyncio.Event()
        reflector = ReflectorServer(self.server_blob_manager,
                                    response_chunk_size=50,
                                    stop_event=stop,
                                    incoming_event=incoming,
                                    not_incoming_event=not_incoming)
        reflector.start_server(5566, '127.0.0.1')
        await reflector.started_listening.wait()
        self.addCleanup(reflector.stop_server)
        self.assertEqual(0, self.stream.reflector_progress)
        reflect_task = asyncio.create_task(
            self.stream.upload_to_reflector('127.0.0.1', 5566))
        await incoming.wait()
        await not_incoming.wait()
        await incoming.wait()
        await self.stream_manager.delete(self.stream, delete_file=True)
        # this used to raise an OSError when the deleted blob could not be read for the upload
        sent = await reflect_task
        self.assertListEqual([self.stream.sd_hash], sent)
        self.assertTrue(
            self.server_blob_manager.get_blob(
                self.stream.sd_hash).get_is_verified())
        self.assertFalse(
            self.server_blob_manager.get_blob(
                self.stream.descriptor.blobs[0].blob_hash).get_is_verified())
        self.assertFalse(self.stream.is_fully_reflected)
Example No. 17
class TestStreamAssembler(AsyncioTestCase):
    async def asyncSetUp(self):
        self.loop = asyncio.get_event_loop()
        self.key = b'deadbeef' * 4
        self.cleartext = os.urandom(20000000)

        tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(tmp_dir))
        self.conf = Config()
        self.storage = SQLiteStorage(self.conf,
                                     os.path.join(tmp_dir, "lbrynet.sqlite"))
        await self.storage.open()
        self.blob_manager = BlobManager(self.loop, tmp_dir, self.storage,
                                        self.conf)
        self.stream_manager = StreamManager(self.loop, Config(),
                                            self.blob_manager, None,
                                            self.storage, None)

        server_tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(server_tmp_dir))
        self.server_conf = Config()
        self.server_storage = SQLiteStorage(
            self.server_conf, os.path.join(server_tmp_dir, "lbrynet.sqlite"))
        await self.server_storage.open()
        self.server_blob_manager = BlobManager(self.loop, server_tmp_dir,
                                               self.server_storage,
                                               self.server_conf)

        download_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(download_dir))

        # create the stream
        file_path = os.path.join(tmp_dir, "test_file")
        with open(file_path, 'wb') as f:
            f.write(self.cleartext)

        self.stream = await self.stream_manager.create_stream(file_path)

    async def _test_reflect_stream(self, response_chunk_size):
        reflector = ReflectorServer(self.server_blob_manager,
                                    response_chunk_size=response_chunk_size)
        reflector.start_server(5566, '127.0.0.1')
        await reflector.started_listening.wait()
        self.addCleanup(reflector.stop_server)
        sent = await self.stream.upload_to_reflector('127.0.0.1', 5566)
        self.assertSetEqual(
            set(sent),
            set(
                map(
                    lambda b: b.blob_hash, self.stream.descriptor.blobs[:-1] +
                    [self.blob_manager.get_blob(self.stream.sd_hash)])))
        server_sd_blob = self.server_blob_manager.get_blob(self.stream.sd_hash)
        self.assertTrue(server_sd_blob.get_is_verified())
        # the reflected sd blob should match the client's copy
        self.assertEqual(self.blob_manager.get_blob(self.stream.sd_hash).length,
                         server_sd_blob.length)
        for blob in self.stream.descriptor.blobs[:-1]:
            server_blob = self.server_blob_manager.get_blob(blob.blob_hash)
            self.assertTrue(server_blob.get_is_verified())
            self.assertEqual(server_blob.length, blob.length)

        sent = await self.stream.upload_to_reflector('127.0.0.1', 5566)
        self.assertListEqual(sent, [])

    async def test_reflect_stream(self):
        return await asyncio.wait_for(
            self._test_reflect_stream(response_chunk_size=50),
            3,
            loop=self.loop)

    async def test_reflect_stream_small_response_chunks(self):
        return await asyncio.wait_for(
            self._test_reflect_stream(response_chunk_size=30),
            3,
            loop=self.loop)

    async def test_announces(self):
        to_announce = await self.storage.get_blobs_to_announce()
        self.assertIn(self.stream.sd_hash, to_announce,
                      "sd blob not set to announce")
        self.assertIn(self.stream.descriptor.blobs[0].blob_hash, to_announce,
                      "head blob not set to announce")