def test_good_write_and_read(self):
    """A write of valid content verifies the blob and can be read back, both
    from the writing instance and from a freshly constructed one."""
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    self.assertFalse(blob_file.verified)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    writer.write(self.fake_content)
    writer.close()
    out = yield finished_d
    self.assertIsInstance(out, BlobFile)
    self.assertTrue(out.verified)
    self.assertEqual(self.fake_content_len, out.get_length())
    # read back through the same instance that performed the write
    reader = blob_file.open_for_reading()
    self.assertEqual(reader.read(), self.fake_content)
    self.assertFalse(out.is_downloading())
    # a brand-new instance for the same hash should already be verified on disk
    del blob_file
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    self.assertTrue(blob_file.verified)
    reader = blob_file.open_for_reading()
    self.assertEqual(1, blob_file.readers)
    self.assertEqual(reader.read(), self.fake_content)
    # closing the reader releases the reader count
    reader.close()
    self.assertEqual(0, blob_file.readers)
def test_too_much_write(self):
    """Writing more bytes than the declared blob length must fail the
    write deferred with InvalidDataError.

    Fix: drop the unused ``out =`` binding on the assertFailure result.
    """
    expected_length = 16
    content = b'0' * 32  # deliberately twice the declared length
    blob_hash = random_lbry_hash()
    blob_file = BlobFile(self.blob_dir, blob_hash, expected_length)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    writer.write(content)
    yield self.assertFailure(finished_d, InvalidDataError)
def test_delete(self):
    """A fully-written, verified blob can be deleted; a fresh instance for
    the same hash then reports unverified.

    Fix: remove the twice-assigned, never-read local ``out``.
    """
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    writer.write(self.fake_content)
    yield finished_d
    yield blob_file.delete()
    # a new instance should no longer find verified content on disk
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash)
    self.assertFalse(blob_file.verified)
def test_bad_hash(self):
    """A write whose content hashes to something other than the declared
    blob_hash must fail verification with InvalidDataError."""
    length = 64
    content = b'0' * length
    # random hash will not match the hash of `content`
    blob_hash = random_lbry_hash()
    blob_file = BlobFile(self.blob_dir, blob_hash, length)
    writer, verify_d = blob_file.open_for_writing(peer=1)
    writer.write(content)
    yield self.assertFailure(verify_d, InvalidDataError)
async def make_sd_blob(self):
    """Build the stream-descriptor blob for this descriptor and return it.

    If the blob already exists and is verified, it is returned as-is;
    otherwise the JSON serialization is written, verification is awaited,
    and the blob is closed before returning.
    """
    sd_hash = self.calculate_sd_hash()
    sd_data = self.as_json()
    sd_blob = BlobFile(self.loop, self.blob_dir, sd_hash, len(sd_data))
    # already on disk and verified: nothing to write
    if sd_blob.get_is_verified():
        return sd_blob
    writer = sd_blob.open_for_writing()
    writer.write(sd_data)
    # NOTE(review): assumes close() only happens on the freshly-written path,
    # as in the collapsed original — confirm against upstream formatting
    await sd_blob.verified.wait()
    await sd_blob.close()
    return sd_blob
def test_multiple_writers(self):
    """With two concurrent writers, the one that finishes first wins and the
    other's deferred fails with DownloadCanceledError.

    Fixes: drop the unused ``out_1`` binding and the pointless
    ``blob_hash`` alias of ``self.fake_content_hash``.
    """
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    writer_1, finished_d_1 = blob_file.open_for_writing(peer=1)
    # first writer stops half way through the content
    writer_1.write(self.fake_content[:self.fake_content_len // 2])
    writer_2, finished_d_2 = blob_file.open_for_writing(peer=2)
    # second writer delivers the full content and should complete the blob
    writer_2.write(self.fake_content)
    out_2 = yield finished_d_2
    yield self.assertFailure(finished_d_1, DownloadCanceledError)
    self.assertIsInstance(out_2, BlobFile)
    self.assertTrue(out_2.verified)
    self.assertEqual(self.fake_content_len, out_2.get_length())
    # the stored content must be exactly what writer 2 delivered
    f = blob_file.open_for_reading()
    c = f.read()
    self.assertEqual(self.fake_content_len, len(c))
    self.assertEqual(bytearray(c), self.fake_content)
def test_delete_fail(self):
    """delete() must fail with ValueError while the blob has an open writer,
    and again while it has an open (unclosed) reader."""
    # case 1: a writer is active -> delete is rejected
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    yield self.assertFailure(blob_file.delete(), ValueError)
    writer.write(self.fake_content)
    writer.close()
    # case 2: a reader is open and not closed -> delete is rejected
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    self.assertTrue(blob_file.verified)
    reader = blob_file.open_for_reading()
    yield self.assertFailure(blob_file.delete(), ValueError)
def test_delete_fail(self):
    """delete() must raise ValueError while a writer or a reader is open."""
    # an active writer blocks deletion
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    with self.assertRaises(ValueError):
        blob_file.delete()
    writer.write(self.fake_content)
    writer.close()
    # an open reader also blocks deletion
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    self.assertTrue(blob_file.verified)
    # the reader is held in a local so it is not garbage collected mid-test
    reader = blob_file.open_for_reading()
    with self.assertRaises(ValueError):
        blob_file.delete()
def test_multiple_writers_save_at_same_time(self):
    """When two writers try to save the same blob, the first save wins and the
    second fails with DownloadCanceledError."""
    blob_hash = self.fake_content_hash
    blob_file = BlobFile(self.blob_dir, blob_hash, self.fake_content_len)
    writer_1, finished_d_1 = blob_file.open_for_writing(peer=1)
    writer_2, finished_d_2 = blob_file.open_for_writing(peer=2)
    blob_file.save_verified_blob(writer_1)
    # the blob is already saved, so the second save must fail
    yield self.assertFailure(blob_file.save_verified_blob(writer_2), DownloadCanceledError)
    # swallow results and schedule closes so the reactor is left clean
    finished_d_1.addBoth(lambda _: None)
    finished_d_2.addBoth(lambda _: None)
    self.addCleanup(writer_1.close)
    self.addCleanup(writer_2.close)
def test_close_on_incomplete_write(self):
    """Closing a writer before all bytes arrive cancels the download, makes
    further writes raise IOError, and persists nothing to disk."""
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    # deliver everything except the final byte, then close early
    writer.write(self.fake_content[:self.fake_content_len - 1])
    writer.close()
    yield self.assertFailure(finished_d, DownloadCanceledError)
    # writing after close raises IOError
    with self.assertRaises(IOError):
        writer.write(self.fake_content)
    # a second close is a harmless no-op
    writer.close()
    # the incomplete write left nothing on disk, so there is nothing to read
    blob_file_2 = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    self.assertIsNone(blob_file_2.open_for_reading())