async def asyncSetUp(self):
    """Stand up a local blob server plus a client blob manager for each test.

    Creates two temp directories (client/server), each with its own
    SQLiteStorage and BlobFileManager, then starts the BlobServer on
    127.0.0.1:33333 and waits until it is listening.
    """
    self.loop = asyncio.get_event_loop()
    self.client_dir = tempfile.mkdtemp()
    self.server_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, self.client_dir)
    self.addCleanup(shutil.rmtree, self.server_dir)
    # server side: storage, blob manager, and the TCP blob server itself
    self.server_storage = SQLiteStorage(
        Config(), os.path.join(self.server_dir, "lbrynet.sqlite"))
    self.server_blob_manager = BlobFileManager(self.loop, self.server_dir, self.server_storage)
    self.server = BlobServer(self.loop, self.server_blob_manager, 'bQEaw42GXsgCAGio1nxFncJSyRmnztSCjP')
    # client side: storage, blob manager, peer bookkeeping
    self.client_storage = SQLiteStorage(
        Config(), os.path.join(self.client_dir, "lbrynet.sqlite"))
    self.client_blob_manager = BlobFileManager(self.loop, self.client_dir, self.client_storage)
    self.client_peer_manager = PeerManager(self.loop)
    # how the client addresses the server (node id is a dummy b'1'*48)
    self.server_from_client = KademliaPeer(self.loop, "127.0.0.1", b'1' * 48, tcp_port=33333)
    # storages must be opened before the blob managers are set up
    await self.client_storage.open()
    await self.server_storage.open()
    await self.client_blob_manager.setup()
    await self.server_blob_manager.setup()
    self.server.start_server(33333, '127.0.0.1')
    await self.server.started_listening.wait()
async def test_old_key_sort_sd_blob(self):
    """Verify handling of an sd blob serialized with the legacy key ordering.

    The captured sd blob bytes below were produced by an old client whose
    JSON key order differs from the modern canonical sort. Decoding must
    still yield the correct stream hash, the legacy-sort hash must round-trip
    to the original sd hash, and the modern-sort hash must differ.
    """
    loop = asyncio.get_event_loop()
    tmp_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(tmp_dir))
    storage = SQLiteStorage(Config(), ":memory:")
    await storage.open()
    blob_manager = BlobFileManager(loop, tmp_dir, storage)
    # raw sd blob bytes exactly as an old client wrote them — key order matters for hashing
    sd_bytes = b'{"stream_name": "4f62616d6120446f6e6b65792d322e73746c", "blobs": [{"length": 1153488, "blob_num' \
               b'": 0, "blob_hash": "9fa32a249ce3f2d4e46b78599800f368b72f2a7f22b81df443c7f6bdbef496bd61b4c0079c7' \
               b'3d79c8bb9be9a6bf86592", "iv": "0bf348867244019c9e22196339016ea6"}, {"length": 0, "blob_num": 1,' \
               b' "iv": "9f36abae16955463919b07ed530a3d18"}], "stream_type": "lbryfile", "key": "a03742b87628aa7' \
               b'228e48f1dcd207e48", "suggested_file_name": "4f62616d6120446f6e6b65792d322e73746c", "stream_hash' \
               b'": "b43f4b1379780caf60d20aa06ac38fb144df61e514ebfa97537018ba73bce8fe37ae712f473ff0ba0be0eef44e1' \
               b'60207"}'
    sd_hash = '9313d1807551186126acc3662e74d9de29cede78d4f133349ace846273ef116b9bb86be86c54509eb84840e4b032f6b2'
    stream_hash = 'b43f4b1379780caf60d20aa06ac38fb144df61e514ebfa97537018ba73bce8fe37ae712f473ff0ba0be0eef44e160207'
    # write the raw bytes into a blob and wait for it to verify against sd_hash
    blob = blob_manager.get_blob(sd_hash)
    blob.set_length(len(sd_bytes))
    writer = blob.open_for_writing()
    writer.write(sd_bytes)
    await blob.verified.wait()
    descriptor = await StreamDescriptor.from_stream_descriptor_blob(
        loop, blob_manager.blob_dir, blob)
    self.assertEqual(stream_hash, descriptor.get_stream_hash())
    # hashing with the legacy sort must reproduce the original sd hash;
    # the canonical sort must not
    self.assertEqual(sd_hash, descriptor.calculate_old_sort_sd_hash())
    self.assertNotEqual(sd_hash, descriptor.calculate_sd_hash())
async def test_create_blob(self):
    """Write a known blob and check it becomes verified and tracked.

    The blob starts unverified and absent from ``completed_blob_hashes``;
    after writing its bytes it must exist on disk, verify, and be tracked.
    """
    blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
    blob_bytes = b'1' * ((2 * 2**20) - 1)
    loop = asyncio.get_event_loop()
    tmp_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(tmp_dir))
    storage = SQLiteStorage(Config(), os.path.join(tmp_dir, "lbrynet.sqlite"))
    blob_manager = BlobFileManager(loop, tmp_dir, storage)
    await storage.open()
    await blob_manager.setup()

    # add the blob on the server
    blob = blob_manager.get_blob(blob_hash, len(blob_bytes))
    self.assertFalse(blob.get_is_verified())
    self.assertNotIn(blob_hash, blob_manager.completed_blob_hashes)

    writer = blob.open_for_writing()
    writer.write(blob_bytes)
    await blob.finished_writing.wait()
    # bug fix: the original called assertTrue(os.path.isfile(...), True),
    # passing True as the assertion *message* argument — a no-op second arg
    self.assertTrue(os.path.isfile(blob.file_path))
    self.assertTrue(blob.get_is_verified())
    self.assertIn(blob_hash, blob_manager.completed_blob_hashes)
class BlobComponent(Component):
    """Daemon component that owns the node's :class:`BlobFileManager`."""

    component_name = BLOB_COMPONENT
    depends_on = [DATABASE_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.blob_manager: BlobFileManager = None

    @property
    def component(self) -> typing.Optional[BlobFileManager]:
        # None until start() has run
        return self.blob_manager

    async def start(self):
        """Create the blob manager (wired to the DHT data store when the DHT
        component is enabled) and run its setup."""
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        data_store = None
        if DHT_COMPONENT not in self.component_manager.skip_components:
            dht_node: Node = self.component_manager.get_component(DHT_COMPONENT)
            # only wire in the data store when the DHT node actually exists
            data_store = dht_node.protocol.data_store if dht_node else None
        blob_dir = os.path.join(self.conf.data_dir, "blobfiles")
        self.blob_manager = BlobFileManager(asyncio.get_event_loop(), blob_dir, storage, data_store)
        return await self.blob_manager.setup()

    async def stop(self):
        self.blob_manager.stop()

    async def get_status(self):
        """Report how many blobs are fully downloaded and verified."""
        finished = len(self.blob_manager.completed_blob_hashes) if self.blob_manager else 0
        return {'finished_blobs': finished}
async def asyncSetUp(self):
    """Create a 20 MB random stream locally and an empty server-side blob store.

    After this runs, ``self.stream`` is a managed stream over ``self.cleartext``
    and ``self.server_blob_manager`` is ready to receive reflected blobs.
    """
    self.loop = asyncio.get_event_loop()
    self.key = b'deadbeef' * 4
    self.cleartext = os.urandom(20000000)
    tmp_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(tmp_dir))
    self.storage = SQLiteStorage(Config(), os.path.join(tmp_dir, "lbrynet.sqlite"))
    await self.storage.open()
    self.blob_manager = BlobFileManager(self.loop, tmp_dir, self.storage)
    self.stream_manager = StreamManager(self.loop, Config(), self.blob_manager, None, self.storage, None)
    # independent storage/blob manager playing the server role
    server_tmp_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(server_tmp_dir))
    self.server_storage = SQLiteStorage(
        Config(), os.path.join(server_tmp_dir, "lbrynet.sqlite"))
    await self.server_storage.open()
    self.server_blob_manager = BlobFileManager(self.loop, server_tmp_dir, self.server_storage)
    download_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(download_dir))

    # create the stream
    file_path = os.path.join(tmp_dir, "test_file")
    with open(file_path, 'wb') as f:
        f.write(self.cleartext)
    self.stream = await self.stream_manager.create_stream(file_path)
class TestStreamAssembler(AsyncioTestCase):
    """End-to-end reflector test: create a stream locally, upload it to a
    ReflectorServer, and verify the server holds identical verified blobs."""

    async def asyncSetUp(self):
        """Build a 20 MB random stream and an empty server-side blob store."""
        self.loop = asyncio.get_event_loop()
        self.key = b'deadbeef' * 4
        self.cleartext = os.urandom(20000000)
        tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(tmp_dir))
        self.storage = SQLiteStorage(Config(), os.path.join(tmp_dir, "lbrynet.sqlite"))
        await self.storage.open()
        self.blob_manager = BlobFileManager(self.loop, tmp_dir, self.storage)
        self.stream_manager = StreamManager(self.loop, Config(), self.blob_manager, None, self.storage, None)
        server_tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(server_tmp_dir))
        self.server_storage = SQLiteStorage(
            Config(), os.path.join(server_tmp_dir, "lbrynet.sqlite"))
        await self.server_storage.open()
        self.server_blob_manager = BlobFileManager(self.loop, server_tmp_dir, self.server_storage)
        download_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(download_dir))

        # create the stream
        file_path = os.path.join(tmp_dir, "test_file")
        with open(file_path, 'wb') as f:
            f.write(self.cleartext)
        self.stream = await self.stream_manager.create_stream(file_path)

    async def test_reflect_stream(self):
        """Upload the stream, then confirm every blob (content + sd) arrived
        verified and that a second upload sends nothing."""
        reflector = ReflectorServer(self.server_blob_manager)
        reflector.start_server(5566, '127.0.0.1')
        await reflector.started_listening.wait()
        self.addCleanup(reflector.stop_server)
        sent = await self.stream.upload_to_reflector('127.0.0.1', 5566)
        # everything except the (empty) terminator blob plus the sd blob is sent
        self.assertSetEqual(
            set(sent),
            set(map(
                lambda b: b.blob_hash,
                self.stream.descriptor.blobs[:-1] +
                [self.blob_manager.get_blob(self.stream.sd_hash)])))
        server_sd_blob = self.server_blob_manager.get_blob(self.stream.sd_hash)
        self.assertTrue(server_sd_blob.get_is_verified())
        # bug fix: the original compared server_sd_blob.length to itself
        # (a tautology); compare the server's sd blob against the client's copy
        self.assertEqual(
            self.blob_manager.get_blob(self.stream.sd_hash).length, server_sd_blob.length)
        for blob in self.stream.descriptor.blobs[:-1]:
            server_blob = self.server_blob_manager.get_blob(blob.blob_hash)
            self.assertTrue(server_blob.get_is_verified())
            self.assertEqual(server_blob.length, blob.length)
        # re-reflecting an already-reflected stream must send nothing
        sent = await self.stream.upload_to_reflector('127.0.0.1', 5566)
        self.assertListEqual(sent, [])
async def start(self):
    """Construct the node's BlobFileManager and run its setup.

    Pulls storage from the database component and, when the DHT component
    is enabled and running, wires the blob manager to the DHT data store.
    """
    storage = self.component_manager.get_component(DATABASE_COMPONENT)
    data_store = None
    if DHT_COMPONENT not in self.component_manager.skip_components:
        node: Node = self.component_manager.get_component(DHT_COMPONENT)
        # the DHT component may be registered but not yet produce a node
        data_store = node.protocol.data_store if node else None
    blob_dir = os.path.join(self.conf.data_dir, "blobfiles")
    self.blob_manager = BlobFileManager(asyncio.get_event_loop(), blob_dir, storage, data_store)
    return await self.blob_manager.setup()
async def test_create_and_decrypt_one_blob_stream(self, corrupt=False):
    """Create a stream, copy its blobs into a fresh directory, reassemble the
    plaintext there, and check verification/announce bookkeeping.

    When ``corrupt`` is True the full-size blob copy is truncated to zero
    bytes first, and the test only asserts that no output file is produced.
    """
    tmp_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(tmp_dir))
    self.storage = SQLiteStorage(Config(), ":memory:")
    await self.storage.open()
    self.blob_manager = BlobFileManager(self.loop, tmp_dir, self.storage)
    download_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(download_dir))

    # create the stream
    file_path = os.path.join(tmp_dir, "test_file")
    with open(file_path, 'wb') as f:
        f.write(self.cleartext)
    sd = await StreamDescriptor.create_stream(self.loop, tmp_dir, file_path, key=self.key)

    # copy blob files
    sd_hash = sd.calculate_sd_hash()
    shutil.copy(os.path.join(tmp_dir, sd_hash), os.path.join(download_dir, sd_hash))
    for blob_info in sd.blobs:
        if blob_info.blob_hash:
            shutil.copy(os.path.join(tmp_dir, blob_info.blob_hash),
                        os.path.join(download_dir, blob_info.blob_hash))
            if corrupt and blob_info.length == MAX_BLOB_SIZE:
                # simulate corruption: truncate the copied full-size blob to 0 bytes
                with open(os.path.join(download_dir, blob_info.blob_hash), "rb+") as handle:
                    handle.truncate()
                    handle.flush()
    downloader_storage = SQLiteStorage(Config(), os.path.join(download_dir, "lbrynet.sqlite"))
    await downloader_storage.open()

    # add the blobs to the blob table (this would happen upon a blob download finishing)
    downloader_blob_manager = BlobFileManager(self.loop, download_dir, downloader_storage)
    descriptor = await downloader_blob_manager.get_stream_descriptor(sd_hash)

    # assemble the decrypted file
    assembler = StreamAssembler(self.loop, downloader_blob_manager, descriptor.sd_hash)
    await assembler.assemble_decrypted_stream(download_dir)
    if corrupt:
        # with a truncated blob the assembler must not produce the output file
        return self.assertFalse(os.path.isfile(os.path.join(download_dir, "test_file")))

    with open(os.path.join(download_dir, "test_file"), "rb") as f:
        decrypted = f.read()
    self.assertEqual(decrypted, self.cleartext)
    self.assertEqual(True, self.blob_manager.get_blob(sd_hash).get_is_verified())
    self.assertEqual(True, self.blob_manager.get_blob(descriptor.blobs[0].blob_hash).get_is_verified())
    # its all blobs + sd blob - last blob, which is the same size as descriptor.blobs
    self.assertEqual(len(descriptor.blobs), len(await downloader_storage.get_all_finished_blobs()))
    self.assertEqual(
        [descriptor.sd_hash, descriptor.blobs[0].blob_hash],
        await downloader_storage.get_blobs_to_announce()
    )
    await downloader_storage.close()
    await self.storage.close()
async def test_create_and_decrypt_one_blob_stream(self):
    """Round-trip a stream: create it, mirror its blobs into a second
    directory, reassemble there, and compare against the original bytes."""
    src_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(src_dir))
    self.storage = SQLiteStorage(Config(), os.path.join(src_dir, "lbrynet.sqlite"))
    await self.storage.open()
    self.blob_manager = BlobFileManager(self.loop, src_dir, self.storage)
    dest_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(dest_dir))

    # write the plaintext and turn it into a stream
    source_path = os.path.join(src_dir, "test_file")
    with open(source_path, 'wb') as out:
        out.write(self.cleartext)
    created = await StreamDescriptor.create_stream(self.loop, src_dir, source_path, key=self.key)

    # mirror the sd blob and every content blob into the destination directory
    sd_hash = created.calculate_sd_hash()
    shutil.copy(os.path.join(src_dir, sd_hash), os.path.join(dest_dir, sd_hash))
    for info in created.blobs:
        if info.blob_hash:
            shutil.copy(os.path.join(src_dir, info.blob_hash),
                        os.path.join(dest_dir, info.blob_hash))
    downloader_storage = SQLiteStorage(
        Config(), os.path.join(dest_dir, "lbrynet.sqlite"))
    await downloader_storage.open()

    # a fresh blob manager plays the role of the downloader; reading the sd
    # blob registers the blobs (as a finished download would)
    downloader_blob_manager = BlobFileManager(self.loop, dest_dir, downloader_storage)
    recovered = await downloader_blob_manager.get_stream_descriptor(sd_hash)

    # reassemble the plaintext from the copied blobs and compare
    assembler = StreamAssembler(self.loop, downloader_blob_manager, recovered.sd_hash)
    await assembler.assemble_decrypted_stream(dest_dir)
    with open(os.path.join(dest_dir, "test_file"), "rb") as assembled:
        decrypted = assembled.read()
    self.assertEqual(decrypted, self.cleartext)
    self.assertEqual(True, self.blob_manager.get_blob(sd_hash).get_is_verified())
    await downloader_storage.close()
    await self.storage.close()
async def test_host_different_blobs_to_multiple_peers_at_once(self):
    """Serve two different blobs (a data blob and an sd blob) to two separate
    clients concurrently and verify the second client's download completes.
    """
    blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
    mock_blob_bytes = b'1' * ((2 * 2**20) - 1)
    sd_hash = "3e2706157a59aaa47ef52bc264fce488078b4026c0b9bab649a8f2fe1ecc5e5cad7182a2bb7722460f856831a1ac0f02"
    # canned sd blob contents matching sd_hash above
    mock_sd_blob_bytes = b"""{"blobs": [{"blob_hash": "6f53c72de100f6f007aa1b9720632e2d049cc6049e609ad790b556dba262159f739d5a14648d5701afc84b991254206a", "blob_num": 0, "iv": "3b6110c2d8e742bff66e4314863dee7e", "length": 2097152}, {"blob_hash": "18493bc7c5164b00596153859a0faffa45765e47a6c3f12198a4f7be4658111505b7f8a15ed0162306a0672c4a9b505d", "blob_num": 1, "iv": "df973fa64e73b4ff2677d682cdc32d3e", "length": 2097152}, {"blob_num": 2, "iv": "660d2dc2645da7c7d4540a466fcb0c60", "length": 0}], "key": "6465616462656566646561646265656664656164626565666465616462656566", "stream_hash": "22423c6786584974bd6b462af47ecb03e471da0ef372fe85a4e71a78bef7560c4afb0835c689f03916105404653b7bdf", "stream_name": "746573745f66696c65", "stream_type": "lbryfile", "suggested_file_name": "746573745f66696c65"}"""
    # a second, independent client pointing at the same server
    second_client_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, second_client_dir)
    second_client_storage = SQLiteStorage(
        Config(), os.path.join(second_client_dir, "lbrynet.sqlite"))
    second_client_blob_manager = BlobFileManager(self.loop, second_client_dir, second_client_storage)
    server_from_second_client = KademliaPeer(self.loop, "127.0.0.1", b'1' * 48, tcp_port=33333)
    await second_client_storage.open()
    await second_client_blob_manager.setup()
    # seed the server with both blobs
    await self._add_blob_to_server(blob_hash, mock_blob_bytes)
    await self._add_blob_to_server(sd_hash, mock_sd_blob_bytes)
    second_client_blob = self.client_blob_manager.get_blob(blob_hash)
    # run both downloads concurrently against the same server and wait for
    # the second client's blob to finish writing
    await asyncio.gather(
        request_blob(self.loop, second_client_blob, server_from_second_client.address,
                     server_from_second_client.tcp_port, 2, 3),
        self._test_transfer_blob(sd_hash),
        second_client_blob.finished_writing.wait())
    self.assertEqual(second_client_blob.get_is_verified(), True)
async def test_host_same_blob_to_multiple_peers_at_once(self):
    """Serve the same blob to two clients concurrently and verify the second
    client ends up with a verified copy."""
    blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
    mock_blob_bytes = b'1' * ((2 * 2**20) - 1)
    # a second, independent client pointing at the same server
    second_client_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, second_client_dir)
    second_client_storage = SQLiteStorage(
        Config(), os.path.join(second_client_dir, "lbrynet.sqlite"))
    second_client_blob_manager = BlobFileManager(self.loop, second_client_dir, second_client_storage)
    server_from_second_client = KademliaPeer(self.loop, "127.0.0.1", b'1' * 48, tcp_port=33333)
    await second_client_storage.open()
    await second_client_blob_manager.setup()
    await self._add_blob_to_server(blob_hash, mock_blob_bytes)
    second_client_blob = self.client_blob_manager.get_blob(blob_hash)

    # download the blob
    await asyncio.gather(
        request_blob(self.loop, second_client_blob, server_from_second_client.address,
                     server_from_second_client.tcp_port, 2, 3),
        self._test_transfer_blob(blob_hash))
    await second_client_blob.finished_writing.wait()
    self.assertEqual(second_client_blob.get_is_verified(), True)
async def asyncSetUp(self):
    """Give each test an in-memory storage and a throwaway blob directory."""
    loop = asyncio.get_event_loop()
    self.storage = SQLiteStorage(Config(), ':memory:')
    self.blob_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, self.blob_dir)
    self.blob_manager = BlobFileManager(loop, self.blob_dir, self.storage)
    await self.storage.open()
async def main(blob_hash: str, url: str):
    """Download a single blob from ``url`` (``host:port``) into the local
    blob directory, report the outcome, then delete the downloaded blob."""
    conf = Config()
    loop = asyncio.get_running_loop()
    host_url, port = url.split(":")
    # EAFP: treat the host as a literal IP first, fall back to DNS
    try:
        ipaddress.ip_address(host_url)
        host = host_url
    except ValueError:
        host = None
    if not host:
        records = await loop.getaddrinfo(
            host_url, 'https', proto=socket.IPPROTO_TCP,
        )
        host = records[0][4][0]

    storage = SQLiteStorage(conf, os.path.join(conf.data_dir, "lbrynet.sqlite"))
    blob_manager = BlobFileManager(loop, os.path.join(conf.data_dir, "blobfiles"), storage)
    await storage.open()
    await blob_manager.setup()

    blob = blob_manager.get_blob(blob_hash)
    success, keep = await request_blob(loop, blob, host, int(port), conf.peer_connect_timeout,
                                       conf.blob_download_timeout)
    print(
        f"{'downloaded' if success else 'failed to download'} {blob_hash} from {host}:{port}\n"
        f"keep connection: {keep}")
    # this script is a one-shot probe: throw the blob away once verified
    if blob.get_is_verified():
        await blob_manager.delete_blobs([blob.blob_hash])
        print(f"deleted {blob_hash}")
async def test_create_managed_stream_announces(self):
    """Creating a managed stream must queue exactly the sd blob and the
    first content blob for DHT announcement."""
    # setup a blob manager backed by in-memory storage
    storage = SQLiteStorage(Config(), ":memory:")
    await storage.open()
    blob_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(blob_dir))
    blob_manager = BlobFileManager(self.loop, blob_dir, storage)
    stream_manager = StreamManager(self.loop, Config(), blob_manager, None, storage, None)

    # create a tiny stream from an 8-byte file
    out_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(out_dir))
    source_path = os.path.join(out_dir, "test_file")
    with open(source_path, 'wb') as source:
        source.write(b'testtest')
    stream = await stream_manager.create_stream(source_path)

    expected = [stream.sd_hash, stream.descriptor.blobs[0].blob_hash]
    self.assertEqual(expected, await storage.get_blobs_to_announce())
async def asyncSetUp(self):
    """Build a stream descriptor over 20 MB of random data for each test.

    Exposes the descriptor three ways: the object itself (``self.descriptor``),
    its canonical sd hash (``self.sd_hash``), and its JSON form parsed back
    into a dict (``self.sd_dict``).
    """
    self.loop = asyncio.get_event_loop()
    self.key = b'deadbeef' * 4
    self.cleartext = os.urandom(20000000)
    self.tmp_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(self.tmp_dir))
    self.storage = SQLiteStorage(Config(), ":memory:")
    await self.storage.open()
    self.blob_manager = BlobFileManager(self.loop, self.tmp_dir, self.storage)
    self.file_path = os.path.join(self.tmp_dir, "test_file")
    with open(self.file_path, 'wb') as f:
        f.write(self.cleartext)
    self.descriptor = await StreamDescriptor.create_stream(self.loop, self.tmp_dir, self.file_path, key=self.key)
    self.sd_hash = self.descriptor.calculate_sd_hash()
    self.sd_dict = json.loads(self.descriptor.as_json())
async def test_sync_blob_manager_on_startup(self):
    """Check how the blob manager reconciles the blob directory with the
    database across restarts: pre-existing files are not auto-added,
    completed blobs survive a restart, and files deleted on disk are
    dropped (and marked 'pending' in the database) on the next setup."""
    loop = asyncio.get_event_loop()
    tmp_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(tmp_dir))
    storage = SQLiteStorage(Config(), os.path.join(tmp_dir, "lbrynet.sqlite"))
    blob_manager = BlobFileManager(loop, tmp_dir, storage)

    # add a blob file
    blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
    blob_bytes = b'1' * ((2 * 2**20) - 1)
    with open(os.path.join(blob_manager.blob_dir, blob_hash), 'wb') as f:
        f.write(blob_bytes)

    # it should not have been added automatically on startup
    await storage.open()
    await blob_manager.setup()
    self.assertSetEqual(blob_manager.completed_blob_hashes, set())

    # make sure we can add the blob
    await blob_manager.blob_completed(blob_manager.get_blob(blob_hash, len(blob_bytes)))
    self.assertSetEqual(blob_manager.completed_blob_hashes, {blob_hash})

    # stop the blob manager and restart it, make sure the blob is there
    blob_manager.stop()
    self.assertSetEqual(blob_manager.completed_blob_hashes, set())
    await blob_manager.setup()
    self.assertSetEqual(blob_manager.completed_blob_hashes, {blob_hash})

    # test that the blob is removed upon the next startup after the file being manually deleted
    blob_manager.stop()
    # manually delete the blob file and restart the blob manager
    os.remove(os.path.join(blob_manager.blob_dir, blob_hash))
    await blob_manager.setup()
    self.assertSetEqual(blob_manager.completed_blob_hashes, set())

    # check that the deleted blob was updated in the database
    self.assertEqual(
        'pending',
        (await storage.run_and_return_one_or_none('select status from blob where blob_hash=?', blob_hash)))
async def main(address: str):
    """Serve the local blob directory over TCP on 0.0.0.0:4444.

    ``address`` is the lbrycrd wallet address the server advertises; it is
    validated before anything else and the function returns 1 if invalid.
    Runs until cancelled; storage is closed on the way out.
    """
    try:
        decode_address(address)
    except Exception:
        # bug fix: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit during validation
        print(f"'{address}' is not a valid lbrycrd address")
        return 1
    loop = asyncio.get_running_loop()
    # NOTE(review): other call sites in this codebase construct
    # SQLiteStorage(Config(), path) — confirm this positional signature
    storage = SQLiteStorage(os.path.expanduser("~/.lbrynet/lbrynet.sqlite"))
    await storage.open()
    blob_manager = BlobFileManager(loop, os.path.expanduser("~/.lbrynet/blobfiles"), storage)
    await blob_manager.setup()

    server = await loop.create_server(
        lambda: BlobServer(loop, blob_manager, address),
        '0.0.0.0', 4444)
    try:
        async with server:
            await server.serve_forever()
    finally:
        await storage.close()
class TestStreamAssembler(AsyncioTestCase):
    """Round-trip tests for stream creation + reassembly.

    The driver test creates a stream from ``self.cleartext``, copies the
    blobs to a second directory, reassembles there, and compares bytes.
    The other tests rerun it with different cleartext sizes to exercise
    single-blob, multi-blob, and padding-boundary cases.
    """

    def setUp(self):
        self.loop = asyncio.get_event_loop()
        self.key = b'deadbeef' * 4
        # default tiny payload; individual tests override self.cleartext
        self.cleartext = b'test'

    async def test_create_and_decrypt_one_blob_stream(self):
        tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(tmp_dir))
        self.storage = SQLiteStorage(Config(), os.path.join(tmp_dir, "lbrynet.sqlite"))
        await self.storage.open()
        self.blob_manager = BlobFileManager(self.loop, tmp_dir, self.storage)
        download_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(download_dir))

        # create the stream
        file_path = os.path.join(tmp_dir, "test_file")
        with open(file_path, 'wb') as f:
            f.write(self.cleartext)
        sd = await StreamDescriptor.create_stream(self.loop, tmp_dir, file_path, key=self.key)

        # copy blob files
        sd_hash = sd.calculate_sd_hash()
        shutil.copy(os.path.join(tmp_dir, sd_hash), os.path.join(download_dir, sd_hash))
        for blob_info in sd.blobs:
            if blob_info.blob_hash:
                shutil.copy(os.path.join(tmp_dir, blob_info.blob_hash),
                            os.path.join(download_dir, blob_info.blob_hash))
        downloader_storage = SQLiteStorage(
            Config(), os.path.join(download_dir, "lbrynet.sqlite"))
        await downloader_storage.open()

        # add the blobs to the blob table (this would happen upon a blob download finishing)
        downloader_blob_manager = BlobFileManager(self.loop, download_dir, downloader_storage)
        descriptor = await downloader_blob_manager.get_stream_descriptor(sd_hash)

        # assemble the decrypted file
        assembler = StreamAssembler(self.loop, downloader_blob_manager, descriptor.sd_hash)
        await assembler.assemble_decrypted_stream(download_dir)

        with open(os.path.join(download_dir, "test_file"), "rb") as f:
            decrypted = f.read()
        self.assertEqual(decrypted, self.cleartext)
        self.assertEqual(True, self.blob_manager.get_blob(sd_hash).get_is_verified())
        await downloader_storage.close()
        await self.storage.close()

    async def test_create_and_decrypt_multi_blob_stream(self):
        # large enough to span multiple blobs
        self.cleartext = b'test\n' * 20000000
        await self.test_create_and_decrypt_one_blob_stream()

    async def test_create_and_decrypt_padding(self):
        # exercise sizes on both sides of the two-blob boundary
        for i in range(16):
            self.cleartext = os.urandom((MAX_BLOB_SIZE * 2) + i)
            await self.test_create_and_decrypt_one_blob_stream()
        for i in range(16):
            self.cleartext = os.urandom((MAX_BLOB_SIZE * 2) - i)
            await self.test_create_and_decrypt_one_blob_stream()

    async def test_create_and_decrypt_random(self):
        self.cleartext = os.urandom(20000000)
        await self.test_create_and_decrypt_one_blob_stream()
async def asyncSetUp(self):
    """Integration-test bootstrap: fund a wallet, start a Daemon with most
    network components skipped, and stand up a blob server plus reflector.

    Order matters throughout: the wallet must be funded and confirmed before
    the daemon initializes, and the blob server / reflector must be listening
    before any test runs.
    """
    await super().asyncSetUp()
    logging.getLogger('lbrynet.blob_exchange').setLevel(self.VERBOSITY)
    logging.getLogger('lbrynet.daemon').setLevel(self.VERBOSITY)
    logging.getLogger('lbrynet.stream').setLevel(self.VERBOSITY)

    # regtest daemon configuration pointed at local services
    conf = Config()
    conf.data_dir = self.wallet_node.data_path
    conf.wallet_dir = self.wallet_node.data_path
    conf.download_dir = self.wallet_node.data_path
    conf.share_usage_data = False
    conf.use_upnp = False
    conf.reflect_streams = True
    conf.blockchain_name = 'lbrycrd_regtest'
    conf.lbryum_servers = [('127.0.0.1', 50001)]
    conf.reflector_servers = [('127.0.0.1', 5566)]
    conf.known_dht_nodes = []

    # fund an address and confirm the transaction so the wallet has spendable coin
    await self.account.ensure_address_gap()
    address = (await self.account.receiving.get_addresses(limit=1, only_usable=True))[0]
    sendtxid = await self.blockchain.send_to_address(address, 10)
    await self.confirm_tx(sendtxid)
    await self.generate(5)

    def wallet_maker(component_manager):
        # inject the test's wallet manager in place of a real wallet component
        self.wallet_component = WalletComponent(component_manager)
        self.wallet_component.wallet_manager = self.manager
        self.wallet_component._running = True
        return self.wallet_component

    conf.components_to_skip = [
        DHT_COMPONENT, UPNP_COMPONENT, HASH_ANNOUNCER_COMPONENT,
        PEER_PROTOCOL_SERVER_COMPONENT
    ]
    self.daemon = Daemon(conf, ComponentManager(
        conf, skip_components=conf.components_to_skip, wallet=wallet_maker,
        exchange_rate_manager=ExchangeRateManagerComponent))
    await self.daemon.initialize()
    self.manager.old_db = self.daemon.storage

    # standalone blob server + reflector the daemon can reflect streams to
    server_tmp_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, server_tmp_dir)
    self.server_storage = SQLiteStorage(Config(), ':memory:')
    await self.server_storage.open()
    self.server_blob_manager = BlobFileManager(self.loop, server_tmp_dir, self.server_storage)
    self.server = BlobServer(self.loop, self.server_blob_manager, 'bQEaw42GXsgCAGio1nxFncJSyRmnztSCjP')
    self.server.start_server(5567, '127.0.0.1')
    await self.server.started_listening.wait()
    self.reflector = ReflectorServer(self.server_blob_manager)
    self.reflector.start_server(5566, '127.0.0.1')
    await self.reflector.started_listening.wait()
    self.addCleanup(self.reflector.stop_server)
class TestStreamAssembler(AsyncioTestCase):
    """Round-trip and corruption-handling tests for stream reassembly.

    The driver test creates a stream from ``self.cleartext``, copies the
    blobs to a second directory, reassembles there, and checks bytes plus
    verification/announce bookkeeping. With ``corrupt=True`` it truncates
    the full-size blob first and returns whatever was assembled.
    """

    def setUp(self):
        self.loop = asyncio.get_event_loop()
        self.key = b'deadbeef' * 4
        # default tiny payload; individual tests override self.cleartext
        self.cleartext = b'test'

    async def test_create_and_decrypt_one_blob_stream(self, corrupt=False):
        tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(tmp_dir))
        self.storage = SQLiteStorage(Config(), ":memory:")
        await self.storage.open()
        self.blob_manager = BlobFileManager(self.loop, tmp_dir, self.storage)
        download_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(download_dir))

        # create the stream
        file_path = os.path.join(tmp_dir, "test_file")
        with open(file_path, 'wb') as f:
            f.write(self.cleartext)
        sd = await StreamDescriptor.create_stream(self.loop, tmp_dir, file_path, key=self.key)

        # copy blob files
        sd_hash = sd.calculate_sd_hash()
        shutil.copy(os.path.join(tmp_dir, sd_hash), os.path.join(download_dir, sd_hash))
        for blob_info in sd.blobs:
            if blob_info.blob_hash:
                shutil.copy(os.path.join(tmp_dir, blob_info.blob_hash),
                            os.path.join(download_dir, blob_info.blob_hash))
                if corrupt and blob_info.length == MAX_BLOB_SIZE:
                    # simulate corruption: truncate the copied full-size blob to 0 bytes
                    with open(os.path.join(download_dir, blob_info.blob_hash), "rb+") as handle:
                        handle.truncate()
                        handle.flush()
        downloader_storage = SQLiteStorage(
            Config(), os.path.join(download_dir, "lbrynet.sqlite"))
        await downloader_storage.open()

        # add the blobs to the blob table (this would happen upon a blob download finishing)
        downloader_blob_manager = BlobFileManager(self.loop, download_dir, downloader_storage)
        descriptor = await downloader_blob_manager.get_stream_descriptor(sd_hash)

        # assemble the decrypted file
        assembler = StreamAssembler(self.loop, downloader_blob_manager, descriptor.sd_hash)
        await assembler.assemble_decrypted_stream(download_dir)
        with open(os.path.join(download_dir, "test_file"), "rb") as f:
            decrypted = f.read()
        if corrupt:
            # caller inspects the (partial) output itself
            return decrypted
        self.assertEqual(decrypted, self.cleartext)
        self.assertEqual(True, self.blob_manager.get_blob(sd_hash).get_is_verified())
        self.assertEqual(
            True,
            self.blob_manager.get_blob(descriptor.blobs[0].blob_hash).get_is_verified())
        # its all blobs + sd blob - last blob, which is the same size as descriptor.blobs
        self.assertEqual(
            len(descriptor.blobs),
            len(await downloader_storage.get_all_finished_blobs()))
        self.assertEqual([descriptor.sd_hash, descriptor.blobs[0].blob_hash],
                         await downloader_storage.get_blobs_to_announce())
        await downloader_storage.close()
        await self.storage.close()

    async def test_create_and_decrypt_multi_blob_stream(self):
        # large enough to span multiple blobs
        self.cleartext = b'test\n' * 20000000
        await self.test_create_and_decrypt_one_blob_stream()

    async def test_create_and_decrypt_padding(self):
        # exercise sizes on both sides of the two-blob boundary
        for i in range(16):
            self.cleartext = os.urandom((MAX_BLOB_SIZE * 2) + i)
            await self.test_create_and_decrypt_one_blob_stream()
        for i in range(16):
            self.cleartext = os.urandom((MAX_BLOB_SIZE * 2) - i)
            await self.test_create_and_decrypt_one_blob_stream()

    async def test_create_and_decrypt_random(self):
        self.cleartext = os.urandom(20000000)
        await self.test_create_and_decrypt_one_blob_stream()

    async def test_create_managed_stream_announces(self):
        # setup a blob manager
        storage = SQLiteStorage(Config(), ":memory:")
        await storage.open()
        tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(tmp_dir))
        blob_manager = BlobFileManager(self.loop, tmp_dir, storage)
        stream_manager = StreamManager(self.loop, Config(), blob_manager, None, storage, None)
        # create the stream
        download_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(download_dir))
        file_path = os.path.join(download_dir, "test_file")
        with open(file_path, 'wb') as f:
            f.write(b'testtest')
        stream = await stream_manager.create_stream(file_path)
        self.assertEqual([stream.sd_hash, stream.descriptor.blobs[0].blob_hash],
                         await storage.get_blobs_to_announce())

    async def test_create_truncate_and_handle_stream(self):
        self.cleartext = b'potato' * 1337 * 5279
        # The purpose of this test is just to make sure it can finish even if a blob is corrupt/truncated
        decrypted = await self.test_create_and_decrypt_one_blob_stream(corrupt=True)
        self.assertFalse(decrypted)