def test_good_write_and_read(self):
    """A complete write verifies the blob, and the content can be read back
    both from the writing instance and from a fresh BlobFile instance."""
    # test a write that should succeed
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    self.assertFalse(blob_file.verified)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    writer.write(self.fake_content)
    writer.close()
    # finished_d fires with the verified BlobFile once the write completes
    out = yield finished_d
    self.assertTrue(isinstance(out, BlobFile))
    self.assertTrue(out.verified)
    self.assertEqual(self.fake_content_len, out.get_length())

    # read from the instance used to write to, and verify content
    f = blob_file.open_for_reading()
    c = f.read()
    self.assertEqual(c, self.fake_content)
    self.assertFalse(out.is_downloading())

    # read from newly declared instance, and verify content
    del blob_file
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    # a fresh instance sees the verified blob already on disk
    self.assertTrue(blob_file.verified)
    f = blob_file.open_for_reading()
    # open_for_reading bumps the reader count
    self.assertEqual(1, blob_file.readers)
    c = f.read()
    self.assertEqual(c, self.fake_content)
    # close reader
    f.close()
    self.assertEqual(0, blob_file.readers)
def test_too_much_write(self):
    """Writing more bytes than the declared blob length must fail.

    Feeds 32 bytes to a blob declared as 16 bytes long and expects the
    finished deferred to errback with InvalidDataError.
    """
    # writing too much data should result in failure
    expected_length = 16
    content = bytearray('0' * 32)  # twice the declared length
    blob_hash = random_lbry_hash()
    blob_file = BlobFile(self.blob_dir, blob_hash, expected_length)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    writer.write(content)
    # just yield to consume the expected failure; the previous unused
    # `out =` binding has been dropped
    yield self.assertFailure(finished_d, InvalidDataError)
def test_multiple_writers_save_at_same_time(self):
    """Only one writer may save a verified blob; a second concurrent save
    must fail with DownloadCanceledError.

    Also registers cleanup for both writers and swallows the leftover
    deferred results, consistent with the fuller variant of this test,
    so the reactor is left clean.
    """
    blob_hash = self.fake_content_hash
    blob_file = BlobFile(self.blob_dir, blob_hash, self.fake_content_len)
    writer_1, finished_d_1 = blob_file.open_for_writing(peer=1)
    writer_2, finished_d_2 = blob_file.open_for_writing(peer=2)
    blob_file.save_verified_blob(writer_1)
    # second write should fail to save
    yield self.assertFailure(blob_file.save_verified_blob(writer_2), DownloadCanceledError)
    # schedule a close, just to leave the reactor clean
    finished_d_1.addBoth(lambda x: None)
    finished_d_2.addBoth(lambda x: None)
    self.addCleanup(writer_1.close)
    self.addCleanup(writer_2.close)
def test_too_much_write(self):
    """Writing more bytes than the declared blob length must fail.

    Feeds 32 bytes to a blob declared as 16 bytes long and expects the
    finished deferred to errback with InvalidDataError.
    """
    # writing too much data should result in failure
    expected_length = 16
    content = bytearray('0' * 32)  # twice the declared length
    blob_hash = random_lbry_hash()
    blob_file = BlobFile(self.blob_dir, blob_hash, expected_length)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    writer.write(content)
    # just yield to consume the expected failure; the previous unused
    # `out =` binding has been dropped
    yield self.assertFailure(finished_d, InvalidDataError)
def test_delete(self):
    """A fully written, verified blob can be deleted; a fresh BlobFile for
    the same hash then reports itself unverified."""
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    writer.write(self.fake_content)
    # wait for the write to finish and verify, then delete; the results
    # were previously bound to unused `out` locals
    yield finished_d
    yield blob_file.delete()
    # a new instance for the same hash must no longer be verified
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash)
    self.assertFalse(blob_file.verified)
def test_bad_hash(self):
    """A write whose content does not hash to the declared blob hash must
    errback the finished deferred with InvalidDataError."""
    payload_len = 64
    payload = bytearray('0' * payload_len)
    # a random hash will (virtually) never match 64 zero characters
    mismatched_hash = random_lbry_hash()
    blob = BlobFile(self.blob_dir, mismatched_hash, payload_len)
    writer, finished_d = blob.open_for_writing(peer=1)
    writer.write(payload)
    yield self.assertFailure(finished_d, InvalidDataError)
def test_bad_hash(self):
    """Content hashing to something other than blob_hash fails verification:
    the write's finished deferred errbacks with InvalidDataError."""
    size = 64
    data = bytearray('0' * size)
    wrong_hash = random_lbry_hash()  # will not match the zero-filled data
    target = BlobFile(self.blob_dir, wrong_hash, size)
    handle, done = target.open_for_writing(peer=1)
    handle.write(data)
    yield self.assertFailure(done, InvalidDataError)
def setUp(self):
    """Wire an HTTPBlobDownloader to mocked blob-manager and HTTP-client fixtures."""
    self.db_dir, self.blob_dir = mk_db_and_blob_dir()
    self.blob_manager = MagicMock()
    self.client = MagicMock()
    # 96-char hex blob hash (two adjacent string literals concatenated)
    self.blob_hash = ('d17272b17a1ad61c4316ac13a651c2b0952063214a81333e'
                      '838364b01b2f07edbd165bb7ec60d2fb2f337a2c02923852')
    self.blob = BlobFile(self.blob_dir, self.blob_hash)
    # the manager always hands back the single test blob
    self.blob_manager.get_blob.side_effect = lambda _: defer.succeed(self.blob)
    # fake a successful HTTP response carrying a 400-byte body
    self.response = MagicMock(code=200, length=400)
    self.client.get.side_effect = lambda uri: defer.succeed(self.response)
    self.downloader = HTTPBlobDownloader(self.blob_manager, [self.blob_hash], ['server1'], self.client)
    # no delay between retries so tests run fast
    self.downloader.interval = 0
def main(args=None):
    """Download a single blob from one peer, given on the command line.

    Usage: prog [--timeout SECONDS] peer blob_hash [directory]
    Runs the reactor until the timeout fires, then exits 0 or 1 depending
    on the SUCCESS flag.
    """
    conf.initialize_settings()
    parser = argparse.ArgumentParser()
    parser.add_argument('--timeout', type=int, default=30)
    parser.add_argument('peer')
    parser.add_argument('blob_hash')
    parser.add_argument('directory', type=str, default=os.getcwd())
    args = parser.parse_args(args)
    log_support.configure_console(level='DEBUG')

    # assemble the minimal single-blob download stack
    announcer = HashAnnouncer.DummyHashAnnouncer()
    blob_manager = MyBlobManager(announcer)
    blob = BlobFile(args.directory, args.blob_hash)
    download_manager = SingleBlobDownloadManager(blob)
    peer = Peer.Peer(*conf.server_port(args.peer))
    payment_rate_manager = DumbPaymentRateManager()
    wallet = getWallet()
    requester = SingleBlobRequester(peer, blob_manager, payment_rate_manager,
                                    wallet, download_manager)
    rate_limiter = RateLimiter.DummyRateLimiter()
    downloader = SingleBlobDownloader()
    connection_manager = ConnectionManager.ConnectionManager(
        downloader, rate_limiter, [requester], [wallet.get_info_exchanger()])
    # hard stop after the timeout whether or not the download finished
    reactor.callLater(args.timeout, reactor.stop)
    d = connection_manager.start()
    d.addErrback(log_support.failure, 'Something bad happened: %s')
    reactor.run()

    # NOTE(review): SUCCESS is presumably a module-level flag set elsewhere
    # by the download machinery -- confirm where it is assigned
    if SUCCESS:
        sys.exit(0)
    else:
        sys.exit(1)
def decrypt_blob(blob_file, key, iv, output):
    """Decrypt a single encrypted blob file and write the plaintext to *output*.

    :param blob_file: path to the encrypted blob on disk; its basename is
        taken to be the blob hash
    :param key: hex-encoded symmetric key
    :param iv: hex-encoded initialization vector
    :param output: path of the file to write the decrypted bytes to
    """
    filename = os.path.abspath(blob_file)
    length = os.path.getsize(filename)
    directory, blob_hash = os.path.split(filename)
    blob = BlobFile(directory, blob_hash, length)
    decryptor = CryptBlob.StreamBlobDecryptor(
        blob, binascii.unhexlify(key), binascii.unhexlify(iv), length)
    # decrypted output is raw bytes: open in binary mode ('w' text mode can
    # corrupt the data via newline translation on some platforms)
    with open(output, 'wb') as f:
        yield decryptor.decrypt(f.write)
def test_close_on_incomplete_write(self):
    """Closing a writer before all bytes arrive cancels the download and
    leaves no readable blob on disk."""
    # write all but 1 byte of data,
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    writer.write(self.fake_content[:self.fake_content_len-1])
    writer.close()
    yield self.assertFailure(finished_d, DownloadCanceledError)

    # writes after close will throw a IOError exception
    with self.assertRaises(IOError):
        writer.write(self.fake_content)

    # another call to close will do nothing
    writer.close()

    # file should not exist, since we did not finish write
    blob_file_2 = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    out = blob_file_2.open_for_reading()
    self.assertEqual(None, out)
def test_multiple_writers(self):
    """When two writers race, the one that completes first wins; the other's
    deferred errbacks with DownloadCanceledError and the blob verifies."""
    # start first writer and write half way, and then start second writer and write everything
    blob_hash = self.fake_content_hash
    blob_file = BlobFile(self.blob_dir, blob_hash, self.fake_content_len)
    writer_1, finished_d_1 = blob_file.open_for_writing(peer=1)
    # floor division: a slice index must be an int (plain `/` yields a
    # float under Python 3; `//` is identical for ints under Python 2)
    writer_1.write(self.fake_content[:self.fake_content_len // 2])
    writer_2, finished_d_2 = blob_file.open_for_writing(peer=2)
    writer_2.write(self.fake_content)
    out_2 = yield finished_d_2
    # the losing writer's deferred must errback; result is not needed
    yield self.assertFailure(finished_d_1, DownloadCanceledError)

    self.assertTrue(isinstance(out_2, BlobFile))
    self.assertTrue(out_2.verified)
    self.assertEqual(self.fake_content_len, out_2.get_length())

    f = blob_file.open_for_reading()
    c = f.read()
    self.assertEqual(self.fake_content_len, len(c))
    self.assertEqual(bytearray(c), self.fake_content)
def test_multiple_writers(self):
    """When two writers race, the one that completes first wins; the other's
    deferred errbacks with DownloadCanceledError and the blob verifies."""
    # start first writer and write half way, and then start second writer and write everything
    blob_hash = self.fake_content_hash
    blob_file = BlobFile(self.blob_dir, blob_hash, self.fake_content_len)
    writer_1, finished_d_1 = blob_file.open_for_writing(peer=1)
    # floor division: a slice index must be an int (plain `/` yields a
    # float under Python 3; `//` is identical for ints under Python 2)
    writer_1.write(self.fake_content[:self.fake_content_len // 2])
    writer_2, finished_d_2 = blob_file.open_for_writing(peer=2)
    writer_2.write(self.fake_content)
    out_2 = yield finished_d_2
    # the losing writer's deferred must errback; result is not needed
    yield self.assertFailure(finished_d_1, DownloadCanceledError)

    self.assertTrue(isinstance(out_2, BlobFile))
    self.assertTrue(out_2.verified)
    self.assertEqual(self.fake_content_len, out_2.get_length())

    f = blob_file.open_for_reading()
    c = f.read()
    self.assertEqual(self.fake_content_len, len(c))
    self.assertEqual(bytearray(c), self.fake_content)
def test_delete_fail(self):
    """delete() must be refused (ValueError) while the blob is being written
    to or while a reader is still open."""
    # deletes should fail if being written to
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    yield self.assertFailure(blob_file.delete(), ValueError)
    writer.write(self.fake_content)
    writer.close()

    # deletes should fail if being read and not closed
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    self.assertTrue(blob_file.verified)
    # keep the reader open so delete() is rejected
    f = blob_file.open_for_reading()
    yield self.assertFailure(blob_file.delete(), ValueError)
def test_multiple_writers_save_at_same_time(self):
    """Only the first concurrent save of a verified blob succeeds; the
    second must fail with DownloadCanceledError."""
    blob_hash = self.fake_content_hash
    blob_file = BlobFile(self.blob_dir, blob_hash, self.fake_content_len)
    writer_1, finished_d_1 = blob_file.open_for_writing(peer=1)
    writer_2, finished_d_2 = blob_file.open_for_writing(peer=2)
    blob_file.save_verified_blob(writer_1)
    # second write should fail to save
    yield self.assertFailure(blob_file.save_verified_blob(writer_2), DownloadCanceledError)

    # schedule a close, just to leave the reactor clean
    finished_d_1.addBoth(lambda x: None)
    finished_d_2.addBoth(lambda x: None)
    self.addCleanup(writer_1.close)
    self.addCleanup(writer_2.close)
def test_close_on_incomplete_write(self):
    """Closing a writer before the final byte cancels the download and
    leaves no readable blob behind."""
    # write all but 1 byte of data,
    blob_file = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    writer, finished_d = blob_file.open_for_writing(peer=1)
    writer.write(self.fake_content[:self.fake_content_len - 1])
    writer.close()
    yield self.assertFailure(finished_d, DownloadCanceledError)

    # writes after close will throw a IOError exception
    with self.assertRaises(IOError):
        writer.write(self.fake_content)

    # another call to close will do nothing
    writer.close()

    # file should not exist, since we did not finish write
    blob_file_2 = BlobFile(self.blob_dir, self.fake_content_hash, self.fake_content_len)
    out = blob_file_2.open_for_reading()
    self.assertEqual(None, out)
def __init__(self, blob_dir, blob_hash, timeout):
    """BlobFile variant that exposes a completion deferred and self-cancels.

    :param blob_dir: directory the blob lives in
    :param blob_hash: hash identifying the blob
    :param timeout: seconds before _cancel is invoked if nothing completed
    """
    BlobFile.__init__(self, blob_dir, blob_hash)
    # fired with True once a verified blob is saved (see save_verified_blob)
    self.callback = defer.Deferred()
    # give up after `timeout` seconds if the blob never arrives
    reactor.callLater(timeout, self._cancel)
def save_verified_blob(self, writer):
    """Delegate to BlobFile.save_verified_blob and fire the completion
    callback the first time a verified blob is saved.

    Returns whatever the base-class save returns.
    """
    outcome = BlobFile.save_verified_blob(self, writer)
    # a Deferred may only be fired once, so guard against re-entry
    already_fired = self.callback.called
    if not already_fired:
        self.callback.callback(True)
    return outcome
class HTTPBlobDownloaderTest(unittest.TestCase):
    """Exercise HTTPBlobDownloader against a mocked HTTP client and blob manager."""

    def setUp(self):
        # fresh db/blob directories plus fully mocked collaborators
        self.db_dir, self.blob_dir = mk_db_and_blob_dir()
        self.blob_manager = MagicMock()
        self.client = MagicMock()
        # 96-char hex blob hash (two adjacent string literals concatenated)
        self.blob_hash = ('d17272b17a1ad61c4316ac13a651c2b0952063214a81333e'
                          '838364b01b2f07edbd165bb7ec60d2fb2f337a2c02923852')
        self.blob = BlobFile(self.blob_dir, self.blob_hash)
        self.blob_manager.get_blob.side_effect = lambda _: defer.succeed(self.blob)
        # fake a successful HTTP response carrying a 400-byte body
        self.response = MagicMock(code=200, length=400)
        self.client.get.side_effect = lambda uri: defer.succeed(self.response)
        self.downloader = HTTPBlobDownloader(self.blob_manager, [self.blob_hash], ['server1'], self.client)
        # no delay between retries so tests run fast
        self.downloader.interval = 0

    def tearDown(self):
        rm_db_and_blob_dir(self.db_dir, self.blob_dir)

    @defer.inlineCallbacks
    def test_download_successful(self):
        """Happy path: blob is fetched, verified, and all writers released."""
        self.client.collect.side_effect = collect
        yield self.downloader.start()
        self.blob_manager.get_blob.assert_called_with(self.blob_hash)
        self.client.get.assert_called_with('http://{}/{}'.format('server1', self.blob_hash))
        self.client.collect.assert_called()
        self.assertEqual(self.blob.get_length(), self.response.length)
        self.assertEqual(self.blob.get_is_verified(), True)
        self.assertEqual(self.blob.writers, {})

    @defer.inlineCallbacks
    def test_download_invalid_content(self):
        """Bad payload: length is recorded but the blob never verifies."""
        self.client.collect.side_effect = bad_collect
        yield self.downloader.start()
        self.assertEqual(self.blob.get_length(), self.response.length)
        self.assertEqual(self.blob.get_is_verified(), False)
        self.assertEqual(self.blob.writers, {})

    @defer.inlineCallbacks
    def test_peer_finished_first_causing_a_write_on_closed_handle(self):
        """A write on an already-closed handle (IOError) must not leave writers behind."""
        self.client.collect.side_effect = lambda response, write: defer.fail(IOError('I/O operation on closed file'))
        yield self.downloader.start()
        self.blob_manager.get_blob.assert_called_with(self.blob_hash)
        self.client.get.assert_called_with('http://{}/{}'.format('server1', self.blob_hash))
        self.client.collect.assert_called()
        self.assertEqual(self.blob.get_length(), self.response.length)
        self.assertEqual(self.blob.writers, {})

    @defer.inlineCallbacks
    def test_download_transfer_failed(self):
        """Transfer errors are retried up to max_failures, then given up on."""
        self.client.collect.side_effect = lambda response, write: defer.fail(Exception())
        yield self.downloader.start()
        # one collect attempt per allowed failure
        self.assertEqual(len(self.client.collect.mock_calls), self.downloader.max_failures)
        self.blob_manager.get_blob.assert_called_with(self.blob_hash)
        self.assertEqual(self.blob.get_length(), self.response.length)
        self.assertEqual(self.blob.get_is_verified(), False)
        self.assertEqual(self.blob.writers, {})

    @defer.inlineCallbacks
    def test_blob_not_found(self):
        """A 404 means the body is never collected and the blob stays unverified."""
        self.response.code = 404
        yield self.downloader.start()
        self.blob_manager.get_blob.assert_called_with(self.blob_hash)
        self.client.get.assert_called_with('http://{}/{}'.format('server1', self.blob_hash))
        self.client.collect.assert_not_called()
        self.assertEqual(self.blob.get_is_verified(), False)
        self.assertEqual(self.blob.writers, {})

    @defer.inlineCallbacks
    def test_stop(self):
        """Stopping mid-transfer aborts cleanly and releases all writers."""
        self.client.collect.side_effect = lambda response, write: defer.Deferred()
        self.downloader.start()  # hangs if yielded, as intended, to simulate a long ongoing write while we call stop
        yield self.downloader.stop()
        self.blob_manager.get_blob.assert_called_with(self.blob_hash)
        self.client.get.assert_called_with('http://{}/{}'.format('server1', self.blob_hash))
        self.client.collect.assert_called()
        self.assertEqual(self.blob.get_length(), self.response.length)
        self.assertEqual(self.blob.get_is_verified(), False)
        self.assertEqual(self.blob.writers, {})