def test_store_stream(self, stream_hash=None):
    """Store a fake stream (sd blob + two content blobs) and verify it can
    be read back, marked for announcement, and listed.

    :param stream_hash: optional hash so other tests (e.g. delete tests)
        can reuse this setup for a known stream hash.
    """
    stream_hash = stream_hash or random_lbry_hash()
    sd_hash = random_lbry_hash()
    blob1 = random_lbry_hash()
    blob2 = random_lbry_hash()

    yield self.store_fake_blob(sd_hash)
    yield self.store_fake_blob(blob1)
    yield self.store_fake_blob(blob2)
    yield self.store_fake_stream(stream_hash, sd_hash)
    yield self.store_fake_stream_blob(stream_hash, blob1, 1)
    yield self.store_fake_stream_blob(stream_hash, blob2, 2)

    # stream blobs come back in position order
    stream_blobs = yield self.storage.get_blobs_for_stream(stream_hash)
    stream_blob_hashes = [b.blob_hash for b in stream_blobs]
    self.assertListEqual(stream_blob_hashes, [blob1, blob2])

    blob_hashes = yield self.storage.get_all_blob_hashes()
    self.assertSetEqual(set(blob_hashes), {sd_hash, blob1, blob2})

    # NOTE(review): the original repeated the get_blobs_for_stream
    # assertions verbatim a second time here; the duplicate was removed.
    yield self.storage.set_should_announce(sd_hash, 1, 1)
    yield self.storage.set_should_announce(blob1, 1, 1)
    should_announce_count = yield self.storage.count_should_announce_blobs()
    self.assertEqual(should_announce_count, 2)
    should_announce_hashes = yield self.storage.get_blobs_to_announce(FakeAnnouncer())
    self.assertSetEqual(set(should_announce_hashes), {sd_hash, blob1})

    stream_hashes = yield self.storage.get_all_streams()
    self.assertListEqual(stream_hashes, [stream_hash])
def test_supports_storage(self):
    """Save two supports per claim, then verify single-claim and
    multi-claim reads return what was stored."""
    claim_ids = [random_lbry_hash() for _ in range(10)]
    random_supports = [
        {"txid": random_lbry_hash(), "nout": i, "address": "addr{}".format(i), "amount": i}
        for i in range(20)
    ]
    expected_supports = {}
    for idx, claim_id in enumerate(claim_ids):
        # each claim gets a consecutive pair of supports
        pair = random_supports[idx * 2:idx * 2 + 2]
        yield self.storage.save_supports(claim_id, pair)
        for support in pair:
            support['claim_id'] = claim_id
            expected_supports.setdefault(claim_id, []).append(support)

    # single-claim read
    supports = yield self.storage.get_supports(claim_ids[0])
    self.assertEqual(supports, expected_supports[claim_ids[0]])

    # multi-claim read: every returned support must be among the expected
    all_supports = yield self.storage.get_supports(*claim_ids)
    for support in all_supports:
        self.assertIn(support, expected_supports[support['claim_id']])
def make_and_store_fake_stream(self, blob_count=2, stream_hash=None, sd_hash=None):
    """Create and persist a fake stream with `blob_count` content blobs.

    Blobs are keyed by their 1-based stream position and stored in
    position order. Hashes are generated when not supplied.
    """
    stream_hash = stream_hash or random_lbry_hash()
    sd_hash = sd_hash or random_lbry_hash()
    blobs = {i + 1: random_lbry_hash() for i in range(blob_count)}
    yield self.store_fake_blob(sd_hash)
    # values()/items() work on both python 2 and 3, unlike the
    # itervalues()/iteritems() methods used previously (py2-only)
    for blob in blobs.values():
        yield self.store_fake_blob(blob)
    yield self.store_fake_stream(stream_hash, sd_hash)
    # keys are unique ints, so sorting the items sorts by position
    for pos, blob in sorted(blobs.items()):
        yield self.store_fake_stream_blob(stream_hash, blob, pos)
def test_database_operations(self):
    """Exercise EncryptedFileManager's database read/write helpers."""
    # minimal session stand-in: only db_dir is consulted here
    class MocSession(object):
        pass

    session = MocSession()
    session.db_dir = '.'
    stream_info_manager = None
    sd_identifier = None
    download_directory = '.'
    manager = EncryptedFileManager(session, stream_info_manager, sd_identifier,
                                   download_directory)
    yield manager._open_db()

    files = yield manager._get_all_lbry_files()
    self.assertEqual(len(files), 0)

    stream_hash = random_lbry_hash()
    blob_data_rate = 0
    saved_rowid = yield manager._save_lbry_file(stream_hash, blob_data_rate)
    rowid = yield manager._get_rowid_for_stream_hash(stream_hash)
    self.assertEqual(saved_rowid, rowid)

    files = yield manager._get_all_lbry_files()
    self.assertEqual(1, len(files))

    # flip the file status and read it back
    yield manager._change_file_status(rowid, ManagedEncryptedFileDownloader.STATUS_RUNNING)
    status = yield manager._get_lbry_file_status(rowid)
    self.assertEqual(status, ManagedEncryptedFileDownloader.STATUS_RUNNING)
def test_immediate_announce(self):
    """An immediately announced hash lands at the front of the queue."""
    self.announcer._announce_available_hashes()
    new_hash = random_lbry_hash()
    self.announcer.immediate_announce([new_hash])
    # queue now holds the concurrent announcers' hashes plus our new one
    self.assertEqual(self.announcer.hash_queue_size(),
                     self.announcer.CONCURRENT_ANNOUNCERS + 1)
    self.assertEqual(new_hash, self.announcer.hash_queue[0][0])
def test_database_operations(self):
    """Exercise EncryptedFileManager's DB helpers backed by a real
    DBEncryptedFileMetadataManager."""
    # minimal session stand-in: only db_dir is consulted here
    class MocSession(object):
        pass

    session = MocSession()
    session.db_dir = '.'
    stream_info_manager = DBEncryptedFileMetadataManager('.')
    sd_identifier = None
    download_directory = '.'
    manager = EncryptedFileManager(
        session, stream_info_manager, sd_identifier, download_directory)
    yield manager.stream_info_manager.setup()

    files = yield manager._get_all_lbry_files()
    self.assertEqual(len(files), 0)

    stream_hash = random_lbry_hash()
    blob_data_rate = 0
    saved_rowid = yield manager._save_lbry_file(stream_hash, blob_data_rate)
    rowid = yield manager._get_rowid_for_stream_hash(stream_hash)
    self.assertEqual(saved_rowid, rowid)

    files = yield manager._get_all_lbry_files()
    self.assertEqual(1, len(files))

    # flip the file status and read it back
    yield manager._change_file_status(rowid, ManagedEncryptedFileDownloader.STATUS_RUNNING)
    status = yield manager._get_lbry_file_status(rowid)
    self.assertEqual(status, ManagedEncryptedFileDownloader.STATUS_RUNNING)
def test_delete_blob(self):
    """A stored blob hash disappears from the database after deletion."""
    blob_hash = random_lbry_hash()
    yield self.store_fake_blob(blob_hash)
    stored_hashes = yield self.storage.get_all_blob_hashes()
    self.assertEqual(stored_hashes, [blob_hash])
    yield self.storage.delete_blobs_from_db(stored_hashes)
    remaining_hashes = yield self.storage.get_all_blob_hashes()
    self.assertEqual(remaining_hashes, [])
def test_too_much_write(self):
    """Writing more bytes than the blob's expected length must errback
    the finished deferred with InvalidDataError."""
    expected_length = 16
    content = bytearray('0' * 32)  # twice the expected length
    blob_hash = random_lbry_hash()
    blob_file = BlobFile(self.blob_dir, blob_hash, expected_length)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    writer.write(content)
    # the unused `out =` binding was dropped; only the failure matters
    yield self.assertFailure(finished_d, InvalidDataError)
def test_immediate_announce(self):
    """immediate_announce appends a new hash onto the pending queue."""
    announce_d = self.announcer.immediate_announce(self.blobs_to_announce)
    self.assertEqual(len(self.announcer.hash_queue), self.num_blobs)
    extra_hash = random_lbry_hash()
    self.announcer.immediate_announce([extra_hash])
    # the extra hash is queued at the end (the assertion checks [-1])
    self.assertEqual(len(self.announcer.hash_queue), self.num_blobs + 1)
    self.assertEqual(extra_hash, self.announcer.hash_queue[-1])
    yield announce_d
def test_too_much_write(self):
    """Writing more bytes than the blob's expected length must errback
    the finished deferred with InvalidDataError."""
    expected_length = 16
    content = bytearray('0' * 32)  # twice the expected length
    blob_hash = random_lbry_hash()
    blob_file = BlobFile(self.blob_dir, blob_hash, expected_length)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    writer.write(content)
    # the unused `out =` binding was dropped; only the failure matters
    yield self.assertFailure(finished_d, InvalidDataError)
def test_bad_hash(self):
    """A write whose content hash cannot match blob_hash must fail
    with InvalidDataError."""
    length = 64
    content = bytearray('0' * length)
    # random hash will not equal the hash of `content`
    blob_hash = random_lbry_hash()
    blob_file = BlobFile(self.blob_dir, blob_hash, length)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    writer.write(content)
    yield self.assertFailure(finished_d, InvalidDataError)
def test_bad_hash(self):
    """Writing content whose hash differs from the declared blob_hash
    must errback with InvalidDataError."""
    length = 64
    payload = bytearray('0' * length)
    # a random hash is (practically) guaranteed not to match the payload
    mismatched_hash = random_lbry_hash()
    blob_file = BlobFile(self.blob_dir, mismatched_hash, length)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    writer.write(payload)
    yield self.assertFailure(finished_d, InvalidDataError)
def test_delete_stream(self):
    """Deleting a stream removes the stream entry, its blob rows, and
    the blobs themselves."""
    stream_hash = random_lbry_hash()
    # reuse the store test to populate a stream under a known hash
    yield self.test_store_stream(stream_hash)
    yield self.storage.delete_stream(stream_hash)

    remaining_streams = yield self.storage.get_all_streams()
    self.assertListEqual(remaining_streams, [])
    remaining_stream_blobs = yield self.storage.get_blobs_for_stream(stream_hash)
    self.assertListEqual(remaining_stream_blobs, [])
    remaining_blobs = yield self.storage.get_all_blob_hashes()
    self.assertListEqual(remaining_blobs, [])
def setUp(self):
    """Build a mock DHT node, its clock, mock storage, and the announcer."""
    conf.initialize_settings(False)
    self.num_blobs = 10
    self.blobs_to_announce = [random_lbry_hash() for _ in range(self.num_blobs)]
    self.dht_node = MocDHTNode()
    self.clock = self.dht_node.clock
    # route delayed calls through the node's clock so the test controls time
    utils.call_later = self.clock.callLater
    self.storage = MocStorage(self.blobs_to_announce)
    self.announcer = DHTHashAnnouncer(self.dht_node, self.storage)
def setUp(self):
    """Create a mock DHT node driven by a manual clock, plus an announcer
    fed by a mock hash supplier."""
    self.num_blobs = 10
    self.blobs_to_announce = [random_lbry_hash() for _ in range(self.num_blobs)]
    self.clock = task.Clock()
    self.dht_node = MocDHTNode()
    # route delayed calls through the manual clock so the test controls time
    utils.call_later = self.clock.callLater
    from lbrynet.core.server.DHTHashAnnouncer import DHTHashAnnouncer
    self.announcer = DHTHashAnnouncer(self.dht_node, peer_port=3333)
    self.supplier = MocSupplier(self.blobs_to_announce)
    self.announcer.add_supplier(self.supplier)
def setUp(self):
    """Wire up a mock DHT node, a manual clock, and an announcer with a
    mock supplier of hashes to announce."""
    self.num_blobs = 10
    self.blobs_to_announce = []
    for _ in range(self.num_blobs):
        self.blobs_to_announce.append(random_lbry_hash())
    self.clock = task.Clock()
    self.dht_node = MocDHTNode()
    # delayed calls go through the manual clock so the test controls time
    utils.call_later = self.clock.callLater
    from lbrynet.core.server.DHTHashAnnouncer import DHTHashAnnouncer
    self.announcer = DHTHashAnnouncer(self.dht_node, peer_port=3333)
    self.supplier = MocSupplier(self.blobs_to_announce)
    self.announcer.add_supplier(self.supplier)
def test_basic(self):
    """End-to-end check of stream metadata storage: create, query,
    extend, sd-hash association, and deletion."""
    yield self.manager.setup()
    streams = yield self.manager.get_all_streams()
    self.assertEqual(len(streams), 0)

    stream_hash = random_lbry_hash()
    file_name = 'file_name'
    key = 'key'
    suggested_file_name = 'sug_file_name'
    blob1 = CryptBlobInfo(random_lbry_hash(), 0, 10, 1)
    blob2 = CryptBlobInfo(random_lbry_hash(), 0, 10, 1)

    # save the stream and read its info back
    yield self.manager.save_stream(
        stream_hash, file_name, key, suggested_file_name, [blob1, blob2])
    info = yield self.manager.get_stream_info(stream_hash)
    self.assertEqual(key, info[0])
    self.assertEqual(file_name, info[1])
    self.assertEqual(suggested_file_name, info[2])

    exists = yield self.manager.check_if_stream_exists(stream_hash)
    self.assertTrue(exists)
    stream_blobs = yield self.manager.get_blobs_for_stream(stream_hash)
    self.assertEqual(2, len(stream_blobs))
    streams = yield self.manager.get_all_streams()
    self.assertEqual(1, len(streams))

    # append a third blob to the stream
    blob3 = CryptBlobInfo(random_lbry_hash(), 0, 10, 1)
    yield self.manager.add_blobs_to_stream(stream_hash, [blob3])
    stream_blobs = yield self.manager.get_blobs_for_stream(stream_hash)
    self.assertEqual(3, len(stream_blobs))
    owning_stream = yield self.manager.get_stream_of_blob(blob3.blob_hash)
    self.assertEqual(stream_hash, owning_stream)

    # querying an unknown stream raises
    with self.assertRaises(NoSuchStreamHash):
        yield self.manager.get_stream_info(random_lbry_hash())

    # sd blob hash association round-trips both ways
    sd_blob_hash = random_lbry_hash()
    yield self.manager.save_sd_blob_hash_to_stream(stream_hash, sd_blob_hash)
    sd_hashes = yield self.manager.get_sd_blob_hashes_for_stream(stream_hash)
    self.assertEqual(1, len(sd_hashes))
    self.assertEqual(sd_blob_hash, sd_hashes[0])
    stream_for_sd = yield self.manager.get_stream_hash_for_sd_hash(sd_blob_hash)
    self.assertEqual(stream_hash, stream_for_sd)

    # deletion removes the stream
    yield self.manager.delete_stream(stream_hash)
    exists = yield self.manager.check_if_stream_exists(stream_hash)
    self.assertFalse(exists)
def test_store_file(self):
    """Publish a fake file and verify its rowid, listing, and status
    transitions (stopped -> running)."""
    session = MocSession(self.storage)
    session.db_dir = self.db_dir
    sd_identifier = StreamDescriptorIdentifier()
    download_directory = self.db_dir
    manager = EncryptedFileManager(session, sd_identifier)
    existing_files = yield manager.session.storage.get_all_lbry_files()
    self.assertEqual(len(existing_files), 0)

    stream_hash = random_lbry_hash()
    sd_hash = random_lbry_hash()
    blob1 = random_lbry_hash()
    blob2 = random_lbry_hash()
    yield self.store_fake_blob(sd_hash)
    yield self.store_fake_blob(blob1)
    yield self.store_fake_blob(blob2)
    yield self.store_fake_stream(stream_hash, sd_hash)
    yield self.store_fake_stream_blob(stream_hash, blob1, 1)
    yield self.store_fake_stream_blob(stream_hash, blob2, 2)

    blob_data_rate = 0
    file_name = "test file"
    saved_rowid = yield manager.session.storage.save_published_file(
        stream_hash, file_name, download_directory, blob_data_rate)
    rowid = yield manager.session.storage.get_rowid_for_stream_hash(stream_hash)
    self.assertEqual(saved_rowid, rowid)

    files = yield manager.session.storage.get_all_lbry_files()
    self.assertEqual(1, len(files))

    # newly published files start stopped; flip to running and re-check
    status = yield manager.session.storage.get_lbry_file_status(rowid)
    self.assertEqual(status, ManagedEncryptedFileDownloader.STATUS_STOPPED)
    yield manager.session.storage.change_file_status(
        rowid, ManagedEncryptedFileDownloader.STATUS_RUNNING)
    status = yield manager.session.storage.get_lbry_file_status(rowid)
    self.assertEqual(status, ManagedEncryptedFileDownloader.STATUS_RUNNING)
def test_basic(self):
    """Full lifecycle of stream metadata: save, query, append a blob,
    sd-hash round-trip, and delete."""
    yield self.manager.setup()
    all_streams = yield self.manager.get_all_streams()
    self.assertEqual(len(all_streams), 0)

    stream_hash = random_lbry_hash()
    file_name = 'file_name'
    key = 'key'
    suggested_file_name = 'sug_file_name'
    first_blob = CryptBlobInfo(random_lbry_hash(), 0, 10, 1)
    second_blob = CryptBlobInfo(random_lbry_hash(), 0, 10, 1)

    # save and read back the stream info tuple (key, name, suggested name)
    yield self.manager.save_stream(
        stream_hash, file_name, key, suggested_file_name, [first_blob, second_blob])
    stream_info = yield self.manager.get_stream_info(stream_hash)
    self.assertEqual(key, stream_info[0])
    self.assertEqual(file_name, stream_info[1])
    self.assertEqual(suggested_file_name, stream_info[2])

    stream_exists = yield self.manager.check_if_stream_exists(stream_hash)
    self.assertTrue(stream_exists)
    blobs_in_stream = yield self.manager.get_blobs_for_stream(stream_hash)
    self.assertEqual(2, len(blobs_in_stream))
    all_streams = yield self.manager.get_all_streams()
    self.assertEqual(1, len(all_streams))

    # append one more blob and confirm it belongs to the stream
    third_blob = CryptBlobInfo(random_lbry_hash(), 0, 10, 1)
    yield self.manager.add_blobs_to_stream(stream_hash, [third_blob])
    blobs_in_stream = yield self.manager.get_blobs_for_stream(stream_hash)
    self.assertEqual(3, len(blobs_in_stream))
    blob_stream = yield self.manager.get_stream_of_blob(third_blob.blob_hash)
    self.assertEqual(stream_hash, blob_stream)

    # a stream that was never saved raises NoSuchStreamHash
    with self.assertRaises(NoSuchStreamHash):
        yield self.manager.get_stream_info(random_lbry_hash())

    # the sd blob hash can be attached and resolved in both directions
    sd_blob_hash = random_lbry_hash()
    yield self.manager.save_sd_blob_hash_to_stream(stream_hash, sd_blob_hash)
    sd_hashes = yield self.manager.get_sd_blob_hashes_for_stream(stream_hash)
    self.assertEqual(1, len(sd_hashes))
    self.assertEqual(sd_blob_hash, sd_hashes[0])
    resolved_stream = yield self.manager.get_stream_hash_for_sd_hash(sd_blob_hash)
    self.assertEqual(stream_hash, resolved_stream)

    # deleting the stream makes it no longer exist
    yield self.manager.delete_stream(stream_hash)
    stream_exists = yield self.manager.check_if_stream_exists(stream_hash)
    self.assertFalse(stream_exists)
def setUp(self):
    """Create a real SQLiteStorage in a temp dir, seed it with completed
    blobs, and attach a DHTHashAnnouncer to a mock DHT node."""
    from lbrynet.conf import initialize_settings
    initialize_settings()
    self.num_blobs = 10
    self.blobs_to_announce = [random_lbry_hash() for _ in range(self.num_blobs)]
    self.dht_node = MocDHTNode()
    self.dht_node.peerPort = 3333
    self.dht_node.clock = reactor
    self.db_dir = tempfile.mkdtemp()
    self.storage = SQLiteStorage(self.db_dir)
    yield self.storage.setup()
    self.announcer = DHTHashAnnouncer(self.dht_node, self.storage, 10)
    # mark every test blob completed so it is eligible for announcement
    for blob_hash in self.blobs_to_announce:
        yield self.storage.add_completed_blob(blob_hash, 100, 0, 1)
def test_delete_blob(self):
    """Deleting a blob removes it from disk and the database; deleting
    an unknown hash is a harmless no-op."""
    # create blob
    blob_hash = yield self._create_and_add_blob()
    verified = yield self.bm.get_all_verified_blobs()
    self.assertEqual(len(verified), 1)

    # delete it and confirm it is gone everywhere
    yield self.bm.delete_blobs([blob_hash])
    self.assertFalse(os.path.isfile(os.path.join(self.blob_dir, blob_hash)))
    verified = yield self.bm.get_all_verified_blobs()
    self.assertEqual(len(verified), 0)
    all_hashes = yield self.bm._get_all_blob_hashes()
    self.assertEqual(len(all_hashes), 0)

    # deleting a blob that never existed must not raise
    missing_hash = random_lbry_hash()
    yield self.bm.delete_blobs([missing_hash])
def test_delete_blob(self):
    """Deleting a blob removes it from disk, the DB, and the in-memory
    map; repeated or unknown deletes are harmless no-ops."""
    # create blob
    blob_hash = yield self._create_and_add_blob()
    verified = yield self.bm.get_all_verified_blobs()
    self.assertEqual(len(verified), 1)

    # delete it and confirm it is gone everywhere
    yield self.bm.delete_blobs([blob_hash])
    self.assertFalse(os.path.isfile(os.path.join(self.blob_dir, blob_hash)))
    verified = yield self.bm.get_all_verified_blobs()
    self.assertEqual(len(verified), 0)
    all_hashes = yield self.bm._get_all_blob_hashes()
    self.assertEqual(len(all_hashes), 0)
    self.assertFalse(blob_hash in self.bm.blobs)

    # deleting the same blob a second time must not raise
    yield self.bm.delete_blobs([blob_hash])
    # nor must deleting a hash that never existed
    missing_hash = random_lbry_hash()
    yield self.bm.delete_blobs([missing_hash])
def test_store_content_claim(self):
    """Verify claim <-> stream association rules:

    - a claim can be attached to a published file's stream
    - an update that points at a *different* stream is rejected
    - an update containing the same stream is accepted
    - an update with a mismatched claim id is rejected, and the
      previously stored (valid) update remains in place
    """
    session = MocSession(self.storage)
    session.db_dir = self.db_dir
    sd_identifier = StreamDescriptorIdentifier()
    download_directory = self.db_dir
    manager = EncryptedFileManager(session, sd_identifier)
    existing_files = yield manager.session.storage.get_all_lbry_files()
    self.assertEqual(len(existing_files), 0)

    stream_hash = random_lbry_hash()
    # use the source hash from the fake claim so the claim and stream agree
    sd_hash = fake_claim_info['value']['stream']['source']['source']
    fake_outpoint = "%s:%i" % (fake_claim_info['txid'], fake_claim_info['nout'])
    yield self.make_and_store_fake_stream(blob_count=2, stream_hash=stream_hash,
                                          sd_hash=sd_hash)
    blob_data_rate = 0
    file_name = "test file"
    yield manager.session.storage.save_published_file(
        stream_hash, file_name, download_directory, blob_data_rate)
    yield self.storage.save_claim(fake_claim_info)
    yield self.storage.save_content_claim(stream_hash, fake_outpoint)
    stored_content_claim = yield self.storage.get_content_claim(stream_hash)
    self.assertDictEqual(stored_content_claim, fake_claim_info)

    stream_hashes = yield self.storage.get_old_stream_hashes_for_claim_id(
        fake_claim_info['claim_id'], stream_hash)
    self.assertListEqual(stream_hashes, [])

    # a claim update that points at a different stream must be rejected
    second_stream_hash, second_sd_hash = random_lbry_hash(), random_lbry_hash()
    yield self.make_and_store_fake_stream(blob_count=2, stream_hash=second_stream_hash,
                                          sd_hash=second_sd_hash)
    try:
        yield self.storage.save_content_claim(second_stream_hash, fake_outpoint)
    except Exception as err:
        # str(err) instead of the python2-only err.message attribute
        self.assertEqual(str(err), "stream mismatch")
    else:
        # previously a `raise Exception("test failed")` inside the try block
        # was swallowed by the except clause, producing a confusing
        # mismatched-message assertion instead of a clear failure
        self.fail("associating a claim with a mismatched stream should fail")

    # an update containing the same stream must be accepted
    update_info = deepcopy(fake_claim_info)
    update_info['txid'] = "beef0000" * 12
    update_info['nout'] = 0
    second_outpoint = "%s:%i" % (update_info['txid'], update_info['nout'])
    yield self.storage.save_claim(update_info)
    yield self.storage.save_content_claim(stream_hash, second_outpoint)
    update_info_result = yield self.storage.get_content_claim(stream_hash)
    self.assertDictEqual(update_info_result, update_info)

    # an update with a mismatching claim id must be rejected
    invalid_update_info = deepcopy(fake_claim_info)
    invalid_update_info['txid'] = "beef0001" * 12
    invalid_update_info['nout'] = 0
    invalid_update_info['claim_id'] = "beef0002" * 5
    invalid_update_outpoint = "%s:%i" % (invalid_update_info['txid'],
                                         invalid_update_info['nout'])
    yield self.storage.save_claim(invalid_update_info)
    try:
        yield self.storage.save_content_claim(stream_hash, invalid_update_outpoint)
    except Exception as err:
        self.assertEqual(str(err), "invalid stream update")
    else:
        self.fail("a claim update with a mismatched claim id should fail")

    # the stored claim should still be the previous (valid) update
    current_claim_info = yield self.storage.get_content_claim(stream_hash)
    self.assertDictEqual(current_claim_info, update_info)