def add_downloaded_file(self, stream_hash, sd_hash, download_directory, payment_rate_manager=None, blob_data_rate=None, status=None, file_name=None):
    """Record a downloaded stream in storage and create its file manager entry.

    Builds a ManagedEncryptedFileDownloader for an already-downloaded stream:
    persists a row for it, restores its status, registers its claim-info
    callback, and appends it to self.lbry_files.

    :param stream_hash: hash identifying the stream (looked up in storage)
    :param sd_hash: hash of the stream descriptor blob
    :param download_directory: directory the file will be saved into
    :param payment_rate_manager: optional; defaults to the session's manager
    :param blob_data_rate: optional; defaults to the manager's min blob rate
    :param status: optional; defaults to STATUS_STOPPED
    :param file_name: optional hex-encoded name; defaults to the descriptor's
        suggested_file_name
    :return: (via defer.returnValue) the created ManagedEncryptedFileDownloader
    """
    # fall back to defaults for everything the caller left unset
    status = status or ManagedEncryptedFileDownloader.STATUS_STOPPED
    payment_rate_manager = payment_rate_manager or self.session.payment_rate_manager
    blob_data_rate = blob_data_rate or payment_rate_manager.min_blob_data_payment_rate
    # fetch the stream descriptor metadata (blobs not needed here)
    stream_metadata = yield get_sd_info(self.session.storage, stream_hash, include_blobs=False)
    key = stream_metadata['key']
    stream_name = stream_metadata['stream_name']
    file_name = file_name or stream_metadata['suggested_file_name']
    # when we save the file we'll atomic touch the nearest file to the suggested file name
    # that doesn't yet exist in the download directory
    # NOTE(review): file_name is hex-encoded (Python 2 'hex' codec) — it is
    # decoded, reduced to its basename to strip any path components, then
    # re-encoded before being stored
    rowid = yield self.storage.save_downloaded_file(
        stream_hash, os.path.basename(file_name.decode('hex')).encode('hex'), download_directory, blob_data_rate
    )
    # re-read the (possibly de-conflicted) filename actually saved for this row
    file_name = yield self.session.storage.get_filename_for_rowid(rowid)
    # NOTE(review): this method mixes self.storage and self.session.storage —
    # presumably they refer to the same storage object; confirm against __init__
    lbry_file = self._get_lbry_file(
        rowid, stream_hash, payment_rate_manager, sd_hash, key, stream_name, file_name, download_directory,
        stream_metadata['suggested_file_name']
    )
    # restore raises if status is unknown
    lbry_file.restore(status)
    yield lbry_file.get_claim_info(include_supports=False)
    # keep claim info fresh when the content claim for this stream changes
    self.storage.content_claim_callbacks[stream_hash] = lbry_file.get_claim_info
    self.lbry_files.append(lbry_file)
    defer.returnValue(lbry_file)
def test_can_create_file(self):
    """Create a stream file and verify its hashes, blobs, and announce counts."""
    expected_stream_hash = (
        "41e6b247d923d191b154fb6f1b8529d6ddd6a73d65c35"
        "7b1acb742dd83151fb66393a7709e9f346260a4f4db6de10c25"
    )
    expected_sd_hash = (
        "db043b44384c149126685990f6bb6563aa565ae331303d522"
        "c8728fe0534dd06fbcacae92b0891787ad9b68ffc8d20c1"
    )
    lbry_file = yield self.create_file('test.file')
    sd_hash = yield self.storage.get_sd_blob_hash_for_stream(lbry_file.stream_hash)
    # parse the sd blob directly from the blob manager
    reader = BlobStreamDescriptorReader(self.blob_manager.blobs[sd_hash])
    info_from_blob = yield reader.get_info()
    # the database copy; the blobs it returns are sorted
    info_from_db = yield get_sd_info(self.storage, lbry_file.stream_hash, include_blobs=True)
    self.assertDictEqual(info_from_db, info_from_blob)
    self.assertListEqual(info_from_db['blobs'], info_from_blob['blobs'])
    self.assertEqual(info_from_db['stream_hash'], expected_stream_hash)
    self.assertEqual(len(info_from_db['blobs']), 3)
    # first two blobs carry data; the final blob is the zero-length terminator
    self.assertNotEqual(info_from_db['blobs'][0]['length'], 0)
    self.assertNotEqual(info_from_db['blobs'][1]['length'], 0)
    self.assertEqual(info_from_db['blobs'][2]['length'], 0)
    self.assertEqual(expected_stream_hash, lbry_file.stream_hash)
    self.assertEqual(sd_hash, lbry_file.sd_hash)
    self.assertEqual(sd_hash, expected_sd_hash)
    verified_blobs = yield self.blob_manager.get_all_verified_blobs()
    self.assertEqual(3, len(verified_blobs))
    announce_count = yield self.blob_manager.count_should_announce_blobs()
    self.assertEqual(2, announce_count)
def verify_stream_descriptor_file(stream_hash):
    # remember the hash, then fetch the full descriptor (include blobs) and
    # run the verification and sd-blob-hash-saving callbacks on the result
    self.stream_hash = stream_hash
    return get_sd_info(
        self.lbry_file_manager.session.storage, stream_hash, True
    ).addCallback(verify_equal, stream_hash).addCallback(save_sd_blob_hash)
def _start_lbry_file(self, file_info, payment_rate_manager):
    """Build, validate, and start a single lbry file from its stored row.

    Creates the downloader from the row data, validates the stream
    descriptor (deleting the stream data if invalid), then restores the
    file's saved status and registers its claim-info callback.

    :param file_info: dict row from storage ('row_id', 'stream_hash',
        'sd_hash', 'key', 'stream_name', 'file_name', 'download_directory',
        'suggested_file_name', 'status')
    :param payment_rate_manager: rate manager passed to the downloader
    """
    lbry_file = self._get_lbry_file(
        file_info['row_id'], file_info['stream_hash'], payment_rate_manager,
        file_info['sd_hash'], file_info['key'], file_info['stream_name'],
        file_info['file_name'], file_info['download_directory'],
        file_info['suggested_file_name'])
    yield lbry_file.get_claim_info()
    try:
        # verify the stream is valid (we might have downloaded an invalid stream
        # in the past when the validation check didn't work)
        stream_info = yield get_sd_info(self.storage, file_info['stream_hash'],
                                        include_blobs=True)
        validate_descriptor(stream_info)
    except InvalidStreamDescriptorError as err:
        log.warning("Stream for descriptor %s is invalid (%s), cleaning it up",
                    lbry_file.sd_hash, err.message)
        yield lbry_file.delete_data()
        yield self.session.storage.delete_stream(lbry_file.stream_hash)
    else:
        try:
            # restore will raise an Exception if status is unknown
            lbry_file.restore(file_info['status'])
            self.storage.content_claim_callbacks[lbry_file.stream_hash] = \
                lbry_file.get_claim_info
            self.lbry_files.append(lbry_file)
            if len(self.lbry_files) % 500 == 0:
                log.info("Started %i files", len(self.lbry_files))
        except Exception:
            # fix: the row id key is 'row_id' (used above), not 'rowid' — the
            # old .get('rowid') returned None and '%i' % None raised a
            # TypeError inside this handler, masking the real failure; use %s
            # so the message is safe even if the key is ever missing
            log.warning("Failed to start %s", file_info.get('row_id'))
def add_published_file(self, stream_hash, sd_hash, download_directory,
                       payment_rate_manager, blob_data_rate):
    """Register a locally published stream as a finished managed file.

    Persists a published-file row, creates its downloader in the FINISHED
    state, registers the claim-info callback, and returns the downloader
    via defer.returnValue.
    """
    status = ManagedEncryptedFileDownloader.STATUS_FINISHED
    # descriptor metadata only; the blob list is not needed here
    sd_info = yield get_sd_info(self.storage, stream_hash, include_blobs=False)
    suggested_name = sd_info['suggested_file_name']
    rowid = yield self.storage.save_published_file(
        stream_hash, suggested_name, download_directory, blob_data_rate, status)
    lbry_file = self._get_lbry_file(
        rowid, stream_hash, payment_rate_manager, sd_hash, sd_info['key'],
        sd_info['stream_name'], suggested_name, download_directory,
        suggested_name, download_mirrors=None)
    lbry_file.restore(status)
    yield lbry_file.get_claim_info()
    # keep claim info fresh when the content claim for this stream changes
    self.storage.content_claim_callbacks[stream_hash] = lbry_file.get_claim_info
    self.lbry_files.append(lbry_file)
    defer.returnValue(lbry_file)
def _start_lbry_files(self):
    """Load every stored lbry file row, validate and start each one, then
    kick off the reflector loop if auto re-reflect is enabled.

    Invalid streams (descriptor fails validation) are deleted; files whose
    saved status cannot be restored are skipped with a warning.
    """
    files = yield self.session.storage.get_all_lbry_files()
    b_prm = self.session.base_payment_rate_manager
    payment_rate_manager = NegotiatedPaymentRateManager(
        b_prm, self.session.blob_tracker)
    log.info("Trying to start %i files", len(files))
    for i, file_info in enumerate(files):
        # periodic progress logging for large file sets
        if len(files) > 500 and i % 500 == 0:
            log.info("Started %i/%i files", i, len(files))
        lbry_file = self._get_lbry_file(
            file_info['row_id'], file_info['stream_hash'], payment_rate_manager,
            file_info['sd_hash'], file_info['key'], file_info['stream_name'],
            file_info['file_name'], file_info['download_directory'],
            file_info['suggested_file_name'])
        yield lbry_file.get_claim_info()
        try:
            # verify the stream is valid (we might have downloaded an invalid stream
            # in the past when the validation check didn't work)
            stream_info = yield get_sd_info(self.storage, file_info['stream_hash'],
                                            include_blobs=True)
            validate_descriptor(stream_info)
        except InvalidStreamDescriptorError as err:
            log.warning("Stream for descriptor %s is invalid (%s), cleaning it up",
                        lbry_file.sd_hash, err.message)
            yield lbry_file.delete_data()
            yield self.session.storage.delete_stream(lbry_file.stream_hash)
        else:
            try:
                # restore will raise an Exception if status is unknown
                lbry_file.restore(file_info['status'])
                self.storage.content_claim_callbacks[lbry_file.stream_hash] = \
                    lbry_file.get_claim_info
                self.lbry_files.append(lbry_file)
            except Exception:
                # fix: the row id key is 'row_id' (used above), not 'rowid' —
                # the old .get('rowid') returned None and '%i' % None raised a
                # TypeError inside this handler, masking the real failure; use
                # %s so the message is safe even if the key is ever missing
                log.warning("Failed to start %s", file_info.get('row_id'))
    log.info("Started %i lbry files", len(self.lbry_files))
    # 'is True' kept deliberately: only an explicit boolean True enables the loop
    if self.auto_re_reflect is True:
        safe_start_looping_call(self.lbry_file_reflector,
                                self.auto_re_reflect_interval)
def test_can_create_file(self):
    """Create a stream file and verify hashes, blobs, and announce counts,
    comparing the database descriptor to the sd blob via a JSON round-trip."""
    expected_stream_hash = (
        "41e6b247d923d191b154fb6f1b8529d6ddd6a73d65c35"
        "7b1acb742dd83151fb66393a7709e9f346260a4f4db6de10c25"
    )
    expected_sd_hash = (
        "40c485432daec586c1a2d247e6c08d137640a5af6e81f3f652"
        "3e62e81a2e8945b0db7c94f1852e70e371d917b994352c"
    )
    lbry_file = yield self.create_file('test.file')
    sd_hash = yield self.storage.get_sd_blob_hash_for_stream(lbry_file.stream_hash)
    # parse the sd blob directly from the blob manager
    reader = BlobStreamDescriptorReader(self.blob_manager.blobs[sd_hash])
    info_from_blob = yield reader.get_info()
    # the database copy; the blobs it returns are sorted
    info_from_db = yield get_sd_info(self.storage, lbry_file.stream_hash, include_blobs=True)
    self.maxDiff = None
    # round-trip through JSON so bytes become unicode before comparing dicts
    unicode_info = json.loads(
        json.dumps(info_from_db, sort_keys=True, cls=JSONBytesEncoder))
    self.assertDictEqual(unicode_info, info_from_blob)
    self.assertEqual(info_from_db['stream_hash'], expected_stream_hash)
    self.assertEqual(len(info_from_db['blobs']), 3)
    # first two blobs carry data; the final blob is the zero-length terminator
    self.assertNotEqual(info_from_db['blobs'][0]['length'], 0)
    self.assertNotEqual(info_from_db['blobs'][1]['length'], 0)
    self.assertEqual(info_from_db['blobs'][2]['length'], 0)
    self.assertEqual(expected_stream_hash, lbry_file.stream_hash)
    self.assertEqual(sd_hash, lbry_file.sd_hash)
    self.assertEqual(sd_hash, expected_sd_hash)
    verified_blobs = yield self.blob_manager.get_all_verified_blobs()
    self.assertEqual(3, len(verified_blobs))
    announce_count = yield self.blob_manager.count_should_announce_blobs()
    self.assertEqual(2, announce_count)
def verify_stream_descriptor_file(stream_hash):
    # fetch the full descriptor (include blobs) and hand it to verify_equal
    return get_sd_info(
        self.session.storage, stream_hash, True
    ).addCallback(verify_equal)