class TestStreamAssembler(AsyncioTestCase):
    """Integration test: create a stream locally, reflect it to a server,
    and check the server received every blob intact."""

    async def asyncSetUp(self):
        self.loop = asyncio.get_event_loop()
        self.key = b'deadbeef' * 4
        self.cleartext = os.urandom(20000000)

        # client-side storage/blob manager/stream manager
        tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(tmp_dir))
        self.storage = SQLiteStorage(Config(), os.path.join(tmp_dir, "lbrynet.sqlite"))
        await self.storage.open()
        self.blob_manager = BlobFileManager(self.loop, tmp_dir, self.storage)
        self.stream_manager = StreamManager(self.loop, Config(), self.blob_manager, None, self.storage, None)

        # server-side storage/blob manager (the reflector target)
        server_tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(server_tmp_dir))
        self.server_storage = SQLiteStorage(Config(), os.path.join(server_tmp_dir, "lbrynet.sqlite"))
        await self.server_storage.open()
        self.server_blob_manager = BlobFileManager(self.loop, server_tmp_dir, self.server_storage)

        download_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(download_dir))

        # create the stream
        file_path = os.path.join(tmp_dir, "test_file")
        with open(file_path, 'wb') as f:
            f.write(self.cleartext)

        self.stream = await self.stream_manager.create_stream(file_path)

    async def test_reflect_stream(self):
        reflector = ReflectorServer(self.server_blob_manager)
        reflector.start_server(5566, '127.0.0.1')
        await reflector.started_listening.wait()
        self.addCleanup(reflector.stop_server)

        sent = await self.stream.upload_to_reflector('127.0.0.1', 5566)
        # everything except the (empty) terminator blob, plus the sd blob, is sent
        self.assertSetEqual(
            set(sent),
            set(map(lambda b: b.blob_hash,
                    self.stream.descriptor.blobs[:-1] +
                    [self.blob_manager.get_blob(self.stream.sd_hash)])))

        server_sd_blob = self.server_blob_manager.get_blob(self.stream.sd_hash)
        self.assertTrue(server_sd_blob.get_is_verified())
        # BUG FIX: the original asserted server_sd_blob.length == server_sd_blob.length
        # (always true); compare against the client-side sd blob instead.
        sd_blob = self.blob_manager.get_blob(self.stream.sd_hash)
        self.assertEqual(server_sd_blob.length, sd_blob.length)

        for blob in self.stream.descriptor.blobs[:-1]:
            server_blob = self.server_blob_manager.get_blob(blob.blob_hash)
            self.assertTrue(server_blob.get_is_verified())
            self.assertEqual(server_blob.length, blob.length)

        # a second upload should find nothing left to send
        sent = await self.stream.upload_to_reflector('127.0.0.1', 5566)
        self.assertListEqual(sent, [])
async def test_old_key_sort_sd_blob(self):
    """A descriptor serialized with the legacy key ordering must still parse,
    hash to the expected value under the old sort, and hash differently
    under the current canonical sort."""
    event_loop = asyncio.get_event_loop()
    blob_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(blob_dir))
    mem_storage = SQLiteStorage(Config(), ":memory:")
    await mem_storage.open()
    manager = BlobFileManager(event_loop, blob_dir, mem_storage)

    # raw sd blob bytes captured with the old (non-canonical) key order
    sd_bytes = b'{"stream_name": "4f62616d6120446f6e6b65792d322e73746c", "blobs": [{"length": 1153488, "blob_num' \
               b'": 0, "blob_hash": "9fa32a249ce3f2d4e46b78599800f368b72f2a7f22b81df443c7f6bdbef496bd61b4c0079c7' \
               b'3d79c8bb9be9a6bf86592", "iv": "0bf348867244019c9e22196339016ea6"}, {"length": 0, "blob_num": 1,' \
               b' "iv": "9f36abae16955463919b07ed530a3d18"}], "stream_type": "lbryfile", "key": "a03742b87628aa7' \
               b'228e48f1dcd207e48", "suggested_file_name": "4f62616d6120446f6e6b65792d322e73746c", "stream_hash' \
               b'": "b43f4b1379780caf60d20aa06ac38fb144df61e514ebfa97537018ba73bce8fe37ae712f473ff0ba0be0eef44e1' \
               b'60207"}'
    expected_sd_hash = '9313d1807551186126acc3662e74d9de29cede78d4f133349ace846273ef116b9bb86be86c54509eb84840e4b032f6b2'
    expected_stream_hash = 'b43f4b1379780caf60d20aa06ac38fb144df61e514ebfa97537018ba73bce8fe37ae712f473ff0ba0be0eef44e160207'

    # write the raw bytes into a blob and wait for verification
    sd_blob = manager.get_blob(expected_sd_hash)
    sd_blob.set_length(len(sd_bytes))
    blob_writer = sd_blob.open_for_writing()
    blob_writer.write(sd_bytes)
    await sd_blob.verified.wait()

    parsed = await StreamDescriptor.from_stream_descriptor_blob(
        event_loop, manager.blob_dir, sd_blob)
    self.assertEqual(expected_stream_hash, parsed.get_stream_hash())
    self.assertEqual(expected_sd_hash, parsed.calculate_old_sort_sd_hash())
    self.assertNotEqual(expected_sd_hash, parsed.calculate_sd_hash())
async def test_create_blob(self):
    """Writing a complete blob should verify it and register its hash with
    the blob manager's completed set."""
    blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
    blob_bytes = b'1' * ((2 * 2**20) - 1)

    loop = asyncio.get_event_loop()
    tmp_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(tmp_dir))
    storage = SQLiteStorage(Config(), os.path.join(tmp_dir, "lbrynet.sqlite"))
    blob_manager = BlobFileManager(loop, tmp_dir, storage)
    await storage.open()
    await blob_manager.setup()

    # add the blob on the server; it starts out unverified and unknown
    blob = blob_manager.get_blob(blob_hash, len(blob_bytes))
    self.assertFalse(blob.get_is_verified())  # idiom: was assertEqual(..., False)
    self.assertNotIn(blob_hash, blob_manager.completed_blob_hashes)

    writer = blob.open_for_writing()
    writer.write(blob_bytes)
    await blob.finished_writing.wait()
    # BUG FIX: assertTrue's second positional argument is the failure *message*;
    # the original passed a stray `True` there, which did nothing.
    self.assertTrue(os.path.isfile(blob.file_path))
    self.assertTrue(blob.get_is_verified())
    self.assertIn(blob_hash, blob_manager.completed_blob_hashes)
async def test_sync_blob_manager_on_startup(self):
    """A blob file dropped into the blob directory is not picked up on
    startup by itself; once added explicitly it survives restarts, and a
    manually deleted file is reconciled on the next setup."""
    event_loop = asyncio.get_event_loop()
    work_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(work_dir))
    db = SQLiteStorage(Config(), os.path.join(work_dir, "lbrynet.sqlite"))
    manager = BlobFileManager(event_loop, work_dir, db)

    # add a blob file
    known_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
    payload = b'1' * ((2 * 2**20) - 1)
    with open(os.path.join(manager.blob_dir, known_hash), 'wb') as handle:
        handle.write(payload)

    # it should not have been added automatically on startup
    await db.open()
    await manager.setup()
    self.assertSetEqual(manager.completed_blob_hashes, set())

    # make sure we can add the blob
    await manager.blob_completed(manager.get_blob(known_hash, len(payload)))
    self.assertSetEqual(manager.completed_blob_hashes, {known_hash})

    # stop the blob manager and restart it, make sure the blob is there
    manager.stop()
    self.assertSetEqual(manager.completed_blob_hashes, set())
    await manager.setup()
    self.assertSetEqual(manager.completed_blob_hashes, {known_hash})

    # test that the blob is removed upon the next startup after the file being manually deleted
    manager.stop()
    # manually delete the blob file and restart the blob manager
    os.remove(os.path.join(manager.blob_dir, known_hash))
    await manager.setup()
    self.assertSetEqual(manager.completed_blob_hashes, set())

    # check that the deleted blob was updated in the database
    status = await db.run_and_return_one_or_none(
        'select status from blob where blob_hash=?', known_hash)
    self.assertEqual('pending', status)
async def main(blob_hash: str, url: str):
    """Download a single blob from ``url`` ("host:port"), print the outcome,
    and delete the blob again if it verified.

    The host part may be a literal IP address or a hostname; hostnames are
    resolved via the event loop's getaddrinfo.
    """
    conf = Config()
    loop = asyncio.get_running_loop()
    host_url, port = url.split(":")
    # EAFP: accept a literal IP directly, otherwise fall through to DNS.
    # (The original set host inside an `if` on the truthiness of the parsed
    # address object, which is always truthy — this form is equivalent and clearer.)
    try:
        ipaddress.ip_address(host_url)
        host = host_url
    except ValueError:
        host = None
    if not host:
        host_info = await loop.getaddrinfo(
            host_url, 'https',
            proto=socket.IPPROTO_TCP,
        )
        host = host_info[0][4][0]

    storage = SQLiteStorage(conf, os.path.join(conf.data_dir, "lbrynet.sqlite"))
    blob_manager = BlobFileManager(loop, os.path.join(conf.data_dir, "blobfiles"), storage)
    await storage.open()
    await blob_manager.setup()

    blob = blob_manager.get_blob(blob_hash)
    success, keep = await request_blob(loop, blob, host, int(port), conf.peer_connect_timeout,
                                       conf.blob_download_timeout)
    print(
        f"{'downloaded' if success else 'failed to download'} {blob_hash} from {host}:{port}\n"
        f"keep connection: {keep}")
    if blob.get_is_verified():
        # clean up: this tool only probes downloadability, it doesn't keep data
        await blob_manager.delete_blobs([blob.blob_hash])
        print(f"deleted {blob_hash}")
class TestStreamAssembler(AsyncioTestCase):
    """Round-trip tests: encrypt a file into a stream, copy the blobs to a
    second directory, and reassemble the original plaintext there."""

    def setUp(self):
        self.loop = asyncio.get_event_loop()
        self.key = b'deadbeef' * 4
        self.cleartext = b'test'

    async def test_create_and_decrypt_one_blob_stream(self):
        # workspace for the creator side
        creator_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(creator_dir))
        self.storage = SQLiteStorage(Config(), os.path.join(creator_dir, "lbrynet.sqlite"))
        await self.storage.open()
        self.blob_manager = BlobFileManager(self.loop, creator_dir, self.storage)

        # workspace for the downloader side
        dl_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(dl_dir))

        # write the plaintext out and turn it into an encrypted stream
        source_path = os.path.join(creator_dir, "test_file")
        with open(source_path, 'wb') as handle:
            handle.write(self.cleartext)

        created = await StreamDescriptor.create_stream(self.loop, creator_dir, source_path, key=self.key)

        # hand every blob file (sd blob included) to the downloader
        sd_hash = created.calculate_sd_hash()
        shutil.copy(os.path.join(creator_dir, sd_hash), os.path.join(dl_dir, sd_hash))
        for info in created.blobs:
            if info.blob_hash:
                shutil.copy(os.path.join(creator_dir, info.blob_hash),
                            os.path.join(dl_dir, info.blob_hash))

        dl_storage = SQLiteStorage(Config(), os.path.join(dl_dir, "lbrynet.sqlite"))
        await dl_storage.open()

        # add the blobs to the blob table (this would happen upon a blob download finishing)
        dl_blob_manager = BlobFileManager(self.loop, dl_dir, dl_storage)
        dl_descriptor = await dl_blob_manager.get_stream_descriptor(sd_hash)

        # assemble the decrypted file
        assembler = StreamAssembler(self.loop, dl_blob_manager, dl_descriptor.sd_hash)
        await assembler.assemble_decrypted_stream(dl_dir)
        with open(os.path.join(dl_dir, "test_file"), "rb") as handle:
            decrypted = handle.read()
        self.assertEqual(decrypted, self.cleartext)
        self.assertEqual(True, self.blob_manager.get_blob(sd_hash).get_is_verified())

        await dl_storage.close()
        await self.storage.close()

    async def test_create_and_decrypt_multi_blob_stream(self):
        self.cleartext = b'test\n' * 20000000
        await self.test_create_and_decrypt_one_blob_stream()

    async def test_create_and_decrypt_padding(self):
        # exercise plaintext sizes straddling the two-blob boundary
        for delta in range(16):
            self.cleartext = os.urandom((MAX_BLOB_SIZE * 2) + delta)
            await self.test_create_and_decrypt_one_blob_stream()
        for delta in range(16):
            self.cleartext = os.urandom((MAX_BLOB_SIZE * 2) - delta)
            await self.test_create_and_decrypt_one_blob_stream()

    async def test_create_and_decrypt_random(self):
        self.cleartext = os.urandom(20000000)
        await self.test_create_and_decrypt_one_blob_stream()
class TestStreamAssembler(AsyncioTestCase):
    """Stream create/assemble round-trips, including corrupted-blob handling
    and blob-announcement bookkeeping."""

    def setUp(self):
        self.loop = asyncio.get_event_loop()
        self.key = b'deadbeef' * 4
        self.cleartext = b'test'

    async def test_create_and_decrypt_one_blob_stream(self, corrupt=False):
        """Create a stream from ``self.cleartext``, copy its blobs to a fresh
        download directory and reassemble it there.

        When ``corrupt`` is True, every full-size blob copy is truncated to
        zero bytes and the (partial) decrypted output is returned instead of
        being asserted against.
        """
        tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(tmp_dir))
        self.storage = SQLiteStorage(Config(), ":memory:")
        await self.storage.open()
        self.blob_manager = BlobFileManager(self.loop, tmp_dir, self.storage)

        download_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(download_dir))

        # create the stream
        file_path = os.path.join(tmp_dir, "test_file")
        with open(file_path, 'wb') as f:
            f.write(self.cleartext)

        sd = await StreamDescriptor.create_stream(self.loop, tmp_dir, file_path, key=self.key)

        # copy blob files
        sd_hash = sd.calculate_sd_hash()
        shutil.copy(os.path.join(tmp_dir, sd_hash), os.path.join(download_dir, sd_hash))
        for blob_info in sd.blobs:
            if blob_info.blob_hash:
                shutil.copy(os.path.join(tmp_dir, blob_info.blob_hash),
                            os.path.join(download_dir, blob_info.blob_hash))
                if corrupt and blob_info.length == MAX_BLOB_SIZE:
                    # "rb+" opens at position 0, so truncate() empties the copy
                    with open(os.path.join(download_dir, blob_info.blob_hash), "rb+") as handle:
                        handle.truncate()
                        handle.flush()

        downloader_storage = SQLiteStorage(Config(), os.path.join(download_dir, "lbrynet.sqlite"))
        await downloader_storage.open()

        # add the blobs to the blob table (this would happen upon a blob download finishing)
        downloader_blob_manager = BlobFileManager(self.loop, download_dir, downloader_storage)
        descriptor = await downloader_blob_manager.get_stream_descriptor(sd_hash)

        # assemble the decrypted file
        assembler = StreamAssembler(self.loop, downloader_blob_manager, descriptor.sd_hash)
        await assembler.assemble_decrypted_stream(download_dir)
        with open(os.path.join(download_dir, "test_file"), "rb") as f:
            decrypted = f.read()

        if corrupt:
            # caller inspects the (partial) output instead of asserting here
            return decrypted

        self.assertEqual(decrypted, self.cleartext)
        # idiom fix: assertTrue(...) instead of assertEqual(True, ...)
        self.assertTrue(self.blob_manager.get_blob(sd_hash).get_is_verified())
        self.assertTrue(self.blob_manager.get_blob(descriptor.blobs[0].blob_hash).get_is_verified())
        # finished blobs = all content blobs + the sd blob - the empty terminator,
        # which works out to the same count as descriptor.blobs
        self.assertEqual(
            len(descriptor.blobs),
            len(await downloader_storage.get_all_finished_blobs()))
        self.assertEqual(
            [descriptor.sd_hash, descriptor.blobs[0].blob_hash],
            await downloader_storage.get_blobs_to_announce())

        await downloader_storage.close()
        await self.storage.close()

    async def test_create_and_decrypt_multi_blob_stream(self):
        self.cleartext = b'test\n' * 20000000
        await self.test_create_and_decrypt_one_blob_stream()

    async def test_create_and_decrypt_padding(self):
        # exercise plaintext sizes straddling the two-blob boundary
        for i in range(16):
            self.cleartext = os.urandom((MAX_BLOB_SIZE * 2) + i)
            await self.test_create_and_decrypt_one_blob_stream()
        for i in range(16):
            self.cleartext = os.urandom((MAX_BLOB_SIZE * 2) - i)
            await self.test_create_and_decrypt_one_blob_stream()

    async def test_create_and_decrypt_random(self):
        self.cleartext = os.urandom(20000000)
        await self.test_create_and_decrypt_one_blob_stream()

    async def test_create_managed_stream_announces(self):
        # setup a blob manager
        storage = SQLiteStorage(Config(), ":memory:")
        await storage.open()
        tmp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(tmp_dir))
        blob_manager = BlobFileManager(self.loop, tmp_dir, storage)
        stream_manager = StreamManager(self.loop, Config(), blob_manager, None, storage, None)

        # create the stream
        download_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: shutil.rmtree(download_dir))
        file_path = os.path.join(download_dir, "test_file")
        with open(file_path, 'wb') as f:
            f.write(b'testtest')
        stream = await stream_manager.create_stream(file_path)

        # both the sd blob and the first content blob should be queued for announcement
        self.assertEqual(
            [stream.sd_hash, stream.descriptor.blobs[0].blob_hash],
            await storage.get_blobs_to_announce())

    async def test_create_truncate_and_handle_stream(self):
        # The purpose of this test is just to make sure assembly can finish
        # even if a blob is corrupt/truncated.
        self.cleartext = b'potato' * 1337 * 5279
        decrypted = await self.test_create_and_decrypt_one_blob_stream(corrupt=True)
        self.assertFalse(decrypted)