def test_create_and_combine_stream(self):
    """Create an encrypted stream, restart it, and verify the reassembled
    file's MD5 against the known-good digest."""
    wallet = FakeWallet()
    peer_manager = PeerManager()
    peer_finder = FakePeerFinder(5553, peer_manager, 2)
    hash_announcer = FakeAnnouncer()
    rate_limiter = DummyRateLimiter()
    sd_identifier = StreamDescriptorIdentifier()
    self.session = Session(
        conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, node_id="abcd",
        peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=self.blob_dir,
        peer_port=5553, dht_node_class=mocks.Node, use_upnp=False,
        rate_limiter=rate_limiter, wallet=wallet,
        blob_tracker_class=DummyBlobAvailabilityTracker, external_ip="127.0.0.1"
    )
    self.lbry_file_manager = EncryptedFileManager(self.session, sd_identifier)

    @defer.inlineCallbacks
    def create_stream():
        test_file = GenFile(53209343, b''.join([chr(i + 5) for i in xrange(0, 64, 6)]))
        lbry_file = yield create_lbry_file(self.session, self.lbry_file_manager, "test_file",
                                           test_file)
        sd_hash = yield self.session.storage.get_sd_blob_hash_for_stream(lbry_file.stream_hash)
        # FIX: assertTrue(a, b) treats b as a failure *message* and never compares
        # the two values; assertEqual performs the intended comparison.
        self.assertEqual(lbry_file.sd_hash, sd_hash)
        yield lbry_file.start()
        # FIX: close the file handle deterministically instead of leaking it.
        with open('test_file') as f:
            hashsum = MD5.new()
            hashsum.update(f.read())
        self.assertEqual(hashsum.hexdigest(), "68959747edc73df45e45db6379dd7b3b")

    d = self.session.setup()
    d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier))
    d.addCallback(lambda _: self.lbry_file_manager.setup())
    d.addCallback(lambda _: create_stream())
    return d
def start(self):
    """Run the fake uploader node: build a Session backed by test doubles,
    seed it with deterministic randomness, and spin the Twisted reactor.

    NOTE(review): appears intended to run inside a child process (Random.atfork
    re-seeds PyCrypto after fork) — confirm against the spawning caller.
    """
    use_epoll_on_linux()
    # Import the reactor late so the epoll selection above takes effect first.
    from twisted.internet import reactor
    self.reactor = reactor
    logging.debug("Starting the uploader")
    # Re-seed PyCrypto's RNG after fork; the plain random.Random seed makes
    # the uploader's behavior reproducible across runs.
    Random.atfork()
    r = random.Random()
    r.seed("start_lbry_uploader")
    # All peers/announcers/rate limiters are test doubles; no real network discovery.
    wallet = FakeWallet()
    peer_manager = PeerManager()
    peer_finder = FakePeerFinder(5553, peer_manager, 1)
    hash_announcer = FakeAnnouncer()
    rate_limiter = RateLimiter()
    self.sd_identifier = StreamDescriptorIdentifier()
    db_dir = "server"
    os.mkdir(db_dir)
    self.session = Session(
        conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd",
        peer_finder=peer_finder, hash_announcer=hash_announcer, peer_port=5553,
        use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
        blob_tracker_class=DummyBlobAvailabilityTracker,
        dht_node_class=Node, is_generous=self.is_generous)
    stream_info_manager = TempEncryptedFileMetadataManager()
    self.lbry_file_manager = EncryptedFileManager(
        self.session, stream_info_manager, self.sd_identifier)
    # Optional upload throttle for bandwidth-limited test scenarios.
    if self.ul_rate_limit is not None:
        self.session.rate_limiter.set_ul_limit(self.ul_rate_limit)
    # Defer the real startup until the reactor is spinning.
    reactor.callLater(1, self.start_all)
    if not reactor.running:
        reactor.run()
def test_create_and_combine_stream(self):
    """Create an encrypted stream, publish its descriptor blob, re-add it to the
    file manager ("combine"), and verify the reassembled file's MD5."""
    wallet = FakeWallet()
    peer_manager = PeerManager()
    peer_finder = FakePeerFinder(5553, peer_manager, 2)
    hash_announcer = FakeAnnouncer()
    rate_limiter = DummyRateLimiter()
    sd_identifier = StreamDescriptorIdentifier()
    db_dir = "client"
    blob_dir = os.path.join(db_dir, "blobfiles")
    os.mkdir(db_dir)
    os.mkdir(blob_dir)
    self.session = Session(
        conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="abcd",
        peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=blob_dir,
        peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
        blob_tracker_class=DummyBlobAvailabilityTracker, external_ip="127.0.0.1"
    )
    self.stream_info_manager = DBEncryptedFileMetadataManager(self.session.db_dir)
    self.lbry_file_manager = EncryptedFileManager(
        self.session, self.stream_info_manager, sd_identifier)

    def start_lbry_file(lbry_file):
        logging.debug("Calling lbry_file.start()")
        d = lbry_file.start()
        return d

    def combine_stream(stream_hash):
        # Re-register the stream with the manager and start it, then verify
        # the output file's content.
        prm = self.session.payment_rate_manager
        d = self.lbry_file_manager.add_lbry_file(stream_hash, prm)
        d.addCallback(start_lbry_file)

        def check_md5_sum():
            # FIX: close the file handle deterministically instead of leaking it.
            with open('test_file') as f:
                hashsum = MD5.new()
                hashsum.update(f.read())
            self.assertEqual(hashsum.hexdigest(), "68959747edc73df45e45db6379dd7b3b")

        d.addCallback(lambda _: check_md5_sum())
        return d

    @defer.inlineCallbacks
    def create_stream():
        test_file = GenFile(53209343, b''.join([chr(i + 5) for i in xrange(0, 64, 6)]))
        stream_hash = yield create_lbry_file(self.session, self.lbry_file_manager,
                                             "test_file", test_file,
                                             suggested_file_name="test_file")
        yield publish_sd_blob(self.stream_info_manager, self.session.blob_manager,
                              stream_hash)
        defer.returnValue(stream_hash)

    d = self.session.setup()
    d.addCallback(lambda _: self.stream_info_manager.setup())
    d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier))
    d.addCallback(lambda _: self.lbry_file_manager.setup())
    d.addCallback(lambda _: create_stream())
    d.addCallback(combine_stream)
    return d
def test_create_stream(self):
    """Create an encrypted stream using a fixed key and a deterministic IV
    sequence, then check its stream descriptor against the known-good
    reference (``test_create_stream_sd_file``)."""
    wallet = FakeWallet()
    peer_manager = PeerManager()
    peer_finder = FakePeerFinder(5553, peer_manager, 2)
    hash_announcer = FakeAnnouncer()
    rate_limiter = DummyRateLimiter()
    sd_identifier = StreamDescriptorIdentifier()
    self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir,
                           node_id="abcd", peer_finder=peer_finder,
                           hash_announcer=hash_announcer, blob_dir=self.blob_dir,
                           peer_port=5553, use_upnp=False, rate_limiter=rate_limiter,
                           wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker,
                           is_generous=self.is_generous, external_ip="127.0.0.1",
                           dht_node_class=mocks.Node)
    self.lbry_file_manager = EncryptedFileManager(self.session, sd_identifier)

    def deterministic_ivs():
        # Predictable IVs make the resulting descriptor reproducible.
        counter = 0
        while True:
            counter += 1
            yield "%016d" % counter

    def make_stream(_):
        source = GenFile(5209343, b''.join([chr(i + 3) for i in xrange(0, 64, 6)]))
        stream_d = create_lbry_file(self.session, self.lbry_file_manager, "test_file",
                                    source, key="0123456701234567",
                                    iv_generator=deterministic_ivs())
        stream_d.addCallback(lambda lbry_file: lbry_file.stream_hash)
        return stream_d

    def verify_descriptor(stream_hash):
        info_d = get_sd_info(self.session.storage, stream_hash, True)
        info_d.addCallback(lambda sd_info: self.assertEqual(sd_info,
                                                            test_create_stream_sd_file))
        return info_d

    setup_d = self.session.setup()
    setup_d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier))
    setup_d.addCallback(lambda _: self.lbry_file_manager.setup())
    setup_d.addCallback(make_stream)
    setup_d.addCallback(verify_descriptor)
    return setup_d
def __init__(self, peer_port, dht_node_port, known_dht_nodes, fake_wallet,
             lbrycrd_conf, lbrycrd_dir, use_upnp, data_dir, created_data_dir,
             lbrycrdd_path):
    """
    @param peer_port: the network port on which to listen for peers

    @param dht_node_port: the network port on which to listen for dht node requests

    @param known_dht_nodes: a list of (ip_address, dht_port) which will be used to join the DHT network

    @param fake_wallet: whether to use a fake wallet instead of a real one

    @param lbrycrd_conf: path to lbrycrd.conf; derived from lbrycrd_dir when falsy

    @param lbrycrd_dir: lbrycrd data directory; platform default is used when falsy

    @param use_upnp: whether to attempt UPnP port mapping

    @param data_dir: application data directory (db, blobfiles, plugins live here)

    @param created_data_dir: whether data_dir was freshly created by the caller

    @param lbrycrdd_path: path to the lbrycrdd executable
    """
    self.peer_port = peer_port
    self.dht_node_port = dht_node_port
    self.known_dht_nodes = known_dht_nodes
    self.fake_wallet = fake_wallet
    self.lbrycrd_conf = lbrycrd_conf
    self.lbrycrd_dir = lbrycrd_dir
    # Fall back to the platform-default lbrycrd data directory when not given.
    if not self.lbrycrd_dir:
        if sys.platform == "darwin":
            self.lbrycrd_dir = os.path.join(
                os.path.expanduser("~"), "Library/Application Support/lbrycrd")
        else:
            self.lbrycrd_dir = os.path.join(os.path.expanduser("~"), ".lbrycrd")
    # The conf fallback uses the (possibly just-defaulted) lbrycrd_dir,
    # so it must come after the directory fallback above.
    if not self.lbrycrd_conf:
        self.lbrycrd_conf = os.path.join(self.lbrycrd_dir, "lbrycrd.conf")
    self.lbrycrdd_path = lbrycrdd_path
    self.use_upnp = use_upnp
    # Populated later during startup; None until then.
    self.lbry_server_port = None
    self.session = None
    self.lbry_file_metadata_manager = None
    self.lbry_file_manager = None
    self.db_dir = data_dir
    self.current_db_revision = 1
    self.blobfile_dir = os.path.join(self.db_dir, "blobfiles")
    self.created_data_dir = created_data_dir
    # Plugins are discovered both next to the data dir and next to this module.
    self.plugin_manager = PluginManager()
    self.plugin_manager.setPluginPlaces([
        os.path.join(self.db_dir, "plugins"),
        os.path.join(os.path.dirname(__file__), "plugins"),
    ])
    self.command_handlers = []
    self.query_handlers = {}
    self.settings = Settings(self.db_dir)
    self.blob_request_payment_rate_manager = None
    self.lbryid = None
    self.sd_identifier = StreamDescriptorIdentifier()
    self.plugin_objects = []
    self.db_migration_revisions = None
def setUp(self):
    """Build an EncryptedFileManager on top of temp dirs and test doubles,
    returning a Deferred that fires once storage and the manager are set up."""
    mocks.mock_conf_settings(self)
    self.tmp_db_dir, self.tmp_blob_dir = mk_db_and_blob_dir()
    self.wallet = FakeWallet()
    self.peer_manager = PeerManager()
    self.peer_finder = FakePeerFinder(5553, self.peer_manager, 2)
    self.rate_limiter = DummyRateLimiter()
    self.sd_identifier = StreamDescriptorIdentifier()
    self.storage = SQLiteStorage(self.tmp_db_dir)
    self.blob_manager = DiskBlobManager(self.tmp_blob_dir, self.storage)
    self.prm = OnlyFreePaymentsManager()
    self.lbry_file_manager = EncryptedFileManager(
        self.peer_finder, self.rate_limiter, self.blob_manager, self.wallet, self.prm,
        self.storage, self.sd_identifier)
    # Storage must be ready before the file manager can set itself up.
    d = self.storage.setup()
    d.addCallback(lambda _: self.lbry_file_manager.setup())
    return d
def setup(self):
    """Bring up the uploader side: storage, blob manager, file manager, and a
    local TCP blob server on port 5553; create a test stream and hand back its
    sd hash via defer.returnValue.

    NOTE(review): this is a generator using ``yield``/``defer.returnValue`` —
    presumably decorated with @defer.inlineCallbacks outside this view; confirm.
    """
    init_conf_windows()
    self.db_dir, self.blob_dir = mk_db_and_blob_dir()
    self.wallet = FakeWallet()
    self.peer_manager = PeerManager()
    self.rate_limiter = RateLimiter()
    # Optional upload throttle for bandwidth-limited test scenarios.
    if self.ul_rate_limit is not None:
        self.rate_limiter.set_ul_limit(self.ul_rate_limit)
    self.prm = OnlyFreePaymentsManager()
    self.storage = SQLiteStorage(self.db_dir)
    self.blob_manager = DiskBlobManager(self.blob_dir, self.storage)
    self.lbry_file_manager = EncryptedFileManager(
        FakePeerFinder(5553, self.peer_manager, 1), self.rate_limiter, self.blob_manager,
        self.wallet, self.prm, self.storage, StreamDescriptorIdentifier())
    yield self.storage.setup()
    yield self.blob_manager.setup()
    yield self.lbry_file_manager.setup()
    # Wire up the three query handlers the blob protocol expects.
    query_handler_factories = {
        1: BlobAvailabilityHandlerFactory(self.blob_manager),
        2: BlobRequestHandlerFactory(self.blob_manager, self.wallet, self.prm, None),
        3: self.wallet.get_wallet_info_query_handler_factory(),
    }
    server_factory = ServerProtocolFactory(self.rate_limiter, query_handler_factories,
                                           self.peer_manager)
    self.server_port = reactor.listenTCP(5553, server_factory, interface="localhost")
    # Deterministic pseudo-content: same bytes every run, sized by self.file_size.
    test_file = GenFile(self.file_size, b''.join([chr(i) for i in xrange(0, 64, 6)]))
    lbry_file = yield create_lbry_file(self.blob_manager, self.storage, self.prm,
                                       self.lbry_file_manager, "test_file", test_file)
    defer.returnValue(lbry_file.sd_hash)
def setUp(self):
    """Build the downloader-side stack and start the uploader, capturing the
    sd hash of the stream the uploader published.

    NOTE(review): generator body using ``yield`` — presumably decorated with
    @defer.inlineCallbacks outside this view; confirm.
    """
    mocks.mock_conf_settings(self)
    self.db_dir, self.blob_dir = mk_db_and_blob_dir()
    self.wallet = FakeWallet()
    self.peer_manager = PeerManager()
    self.peer_finder = FakePeerFinder(5553, self.peer_manager, 1)
    self.rate_limiter = RateLimiter()
    self.prm = OnlyFreePaymentsManager()
    self.storage = SQLiteStorage(self.db_dir)
    self.blob_manager = DiskBlobManager(self.blob_dir, self.storage)
    self.sd_identifier = StreamDescriptorIdentifier()
    self.lbry_file_manager = EncryptedFileManager(
        self.peer_finder, self.rate_limiter, self.blob_manager, self.wallet, self.prm,
        self.storage, self.sd_identifier)
    # The uploader serves a 5209343-byte stream; its setup yields the sd hash
    # this test will download.
    self.uploader = LbryUploader(5209343)
    self.sd_hash = yield self.uploader.setup()
    yield self.storage.setup()
    yield self.blob_manager.setup()
    yield self.lbry_file_manager.setup()
    yield add_lbry_file_to_sd_identifier(self.sd_identifier)
def test_store_file(self):
    """Exercise the storage round-trip for a published file: save it, look up
    its rowid, list it, and flip its status STOPPED -> RUNNING."""
    session = MocSession(self.storage)
    session.db_dir = self.db_dir
    sd_identifier = StreamDescriptorIdentifier()
    download_directory = self.db_dir
    manager = EncryptedFileManager(session, sd_identifier)
    # Storage starts empty.
    out = yield manager.session.storage.get_all_lbry_files()
    self.assertEqual(len(out), 0)
    stream_hash = random_lbry_hash()
    sd_hash = random_lbry_hash()
    blob1 = random_lbry_hash()
    blob2 = random_lbry_hash()
    # Fabricate the blobs and stream rows the published file will reference.
    yield self.store_fake_blob(sd_hash)
    yield self.store_fake_blob(blob1)
    yield self.store_fake_blob(blob2)
    yield self.store_fake_stream(stream_hash, sd_hash)
    yield self.store_fake_stream_blob(stream_hash, blob1, 1)
    yield self.store_fake_stream_blob(stream_hash, blob2, 2)
    blob_data_rate = 0
    file_name = "test file"
    # save_published_file returns the new row id.
    out = yield manager.session.storage.save_published_file(
        stream_hash, file_name, download_directory, blob_data_rate)
    rowid = yield manager.session.storage.get_rowid_for_stream_hash(stream_hash)
    self.assertEqual(out, rowid)
    files = yield manager.session.storage.get_all_lbry_files()
    self.assertEqual(1, len(files))
    # New files start stopped; change_file_status flips them to running.
    status = yield manager.session.storage.get_lbry_file_status(rowid)
    self.assertEqual(status, ManagedEncryptedFileDownloader.STATUS_STOPPED)
    running = ManagedEncryptedFileDownloader.STATUS_RUNNING
    yield manager.session.storage.change_file_status(rowid, running)
    status = yield manager.session.storage.get_lbry_file_status(rowid)
    self.assertEqual(status, ManagedEncryptedFileDownloader.STATUS_RUNNING)
def start(self):
    """Run the fake uploader node (newer Session API: node_id/external_ip),
    then spin the Twisted reactor."""
    use_epoll_on_linux()
    init_conf_windows()
    # Import the reactor late so the epoll selection above takes effect first.
    from twisted.internet import reactor
    self.reactor = reactor
    logging.debug("Starting the uploader")
    # All peers/announcers/rate limiters are test doubles; no real discovery.
    wallet = FakeWallet()
    peer_manager = PeerManager()
    peer_finder = FakePeerFinder(5553, peer_manager, 1)
    hash_announcer = FakeAnnouncer()
    rate_limiter = RateLimiter()
    self.sd_identifier = StreamDescriptorIdentifier()
    self.db_dir, self.blob_dir = mk_db_and_blob_dir()
    self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir,
                           blob_dir=self.blob_dir, node_id="abcd", peer_finder=peer_finder,
                           hash_announcer=hash_announcer, peer_port=5553, dht_node_port=4445,
                           use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
                           blob_tracker_class=DummyBlobAvailabilityTracker,
                           dht_node_class=FakeNode, is_generous=self.is_generous,
                           external_ip="127.0.0.1")
    self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier)
    # Optional upload throttle for bandwidth-limited test scenarios.
    if self.ul_rate_limit is not None:
        self.session.rate_limiter.set_ul_limit(self.ul_rate_limit)
    # Defer the real startup until the reactor is spinning.
    reactor.callLater(1, self.start_all)
    if not reactor.running:
        reactor.run()
def setUp(self):
    """Build an EncryptedFileManager over a temp dir, a fake DHT node, and
    test doubles; returns a Deferred that fires when setup completes."""
    mocks.mock_conf_settings(self)
    self.session = None
    self.lbry_file_manager = None
    self.is_generous = True
    self.db_dir = tempfile.mkdtemp()
    self.blob_dir = os.path.join(self.db_dir, "blobfiles")
    os.mkdir(self.blob_dir)
    self.dht_node = FakeNode()
    self.wallet = FakeWallet()
    self.peer_manager = PeerManager()
    self.peer_finder = FakePeerFinder(5553, self.peer_manager, 2)
    self.rate_limiter = DummyRateLimiter()
    self.sd_identifier = StreamDescriptorIdentifier()
    self.storage = SQLiteStorage(self.db_dir)
    # The blob manager shares the fake node's datastore so announcements are visible.
    self.blob_manager = DiskBlobManager(self.blob_dir, self.storage, self.dht_node._dataStore)
    self.prm = OnlyFreePaymentsManager()
    self.lbry_file_manager = EncryptedFileManager(
        self.peer_finder, self.rate_limiter, self.blob_manager, self.wallet, self.prm,
        self.storage, self.sd_identifier)
    # Storage must be ready before the file manager can set itself up.
    d = self.storage.setup()
    d.addCallback(lambda _: self.lbry_file_manager.setup())
    return d
def test_store_content_claim(self):
    """Exercise content-claim bookkeeping: associate a claim with a stream,
    reject updates that point at a different stream or claim id, and accept a
    valid update to the same stream.

    NOTE(review): the raise-inside-try pattern below works because a successful
    save raises "test failed", which the except clause then fails to match;
    ``err.message`` is Python-2-only — confirm the file is py2 before porting.
    """
    session = MocSession(self.storage)
    session.db_dir = self.db_dir
    sd_identifier = StreamDescriptorIdentifier()
    download_directory = self.db_dir
    manager = EncryptedFileManager(session, sd_identifier)
    out = yield manager.session.storage.get_all_lbry_files()
    self.assertEqual(len(out), 0)
    stream_hash = random_lbry_hash()
    sd_hash = fake_claim_info['value']['stream']['source']['source']
    # test that we can associate a content claim to a file
    # use the generated sd hash in the fake claim
    fake_outpoint = "%s:%i" % (fake_claim_info['txid'], fake_claim_info['nout'])
    yield self.make_and_store_fake_stream(blob_count=2, stream_hash=stream_hash,
                                          sd_hash=sd_hash)
    blob_data_rate = 0
    file_name = "test file"
    yield manager.session.storage.save_published_file(
        stream_hash, file_name, download_directory, blob_data_rate)
    yield self.storage.save_claim(fake_claim_info)
    yield self.storage.save_content_claim(stream_hash, fake_outpoint)
    stored_content_claim = yield self.storage.get_content_claim(stream_hash)
    self.assertDictEqual(stored_content_claim, fake_claim_info)
    # No older streams should be recorded for this claim yet.
    stream_hashes = yield self.storage.get_old_stream_hashes_for_claim_id(
        fake_claim_info['claim_id'], stream_hash)
    self.assertListEqual(stream_hashes, [])
    # test that we can't associate a claim update with a new stream to the file
    second_stream_hash, second_sd_hash = random_lbry_hash(), random_lbry_hash()
    yield self.make_and_store_fake_stream(blob_count=2, stream_hash=second_stream_hash,
                                          sd_hash=second_sd_hash)
    try:
        yield self.storage.save_content_claim(second_stream_hash, fake_outpoint)
        raise Exception("test failed")
    except Exception as err:
        self.assertTrue(err.message == "stream mismatch")
    # test that we can associate a new claim update containing the same stream to the file
    update_info = deepcopy(fake_claim_info)
    update_info['txid'] = "beef0000" * 12
    update_info['nout'] = 0
    second_outpoint = "%s:%i" % (update_info['txid'], update_info['nout'])
    yield self.storage.save_claim(update_info)
    yield self.storage.save_content_claim(stream_hash, second_outpoint)
    update_info_result = yield self.storage.get_content_claim(stream_hash)
    self.assertDictEqual(update_info_result, update_info)
    # test that we can't associate an update with a mismatching claim id
    invalid_update_info = deepcopy(fake_claim_info)
    invalid_update_info['txid'] = "beef0001" * 12
    invalid_update_info['nout'] = 0
    invalid_update_info['claim_id'] = "beef0002" * 5
    invalid_update_outpoint = "%s:%i" % (invalid_update_info['txid'],
                                         invalid_update_info['nout'])
    yield self.storage.save_claim(invalid_update_info)
    try:
        yield self.storage.save_content_claim(stream_hash, invalid_update_outpoint)
        raise Exception("test failed")
    except Exception as err:
        self.assertTrue(err.message == "invalid stream update")
    current_claim_info = yield self.storage.get_content_claim(stream_hash)
    # this should still be the previous update
    self.assertDictEqual(current_claim_info, update_info)
def test_multiple_uploaders(self):
    """Download one stream from several uploader processes at once: spawn one
    primary uploader, then (after its sd hash arrives) spawn reuploaders,
    download the file, verify its MD5, and finally signal all processes to die.
    """
    sd_hash_queue = Queue()
    num_uploaders = 3
    # Cross-process coordination: one kill switch, one dead-event per uploader,
    # and a ready-event per *additional* reuploader (hence range(1, n)).
    kill_event = Event()
    dead_events = [Event() for _ in range(num_uploaders)]
    ready_events = [Event() for _ in range(1, num_uploaders)]
    lbry_uploader = LbryUploader(sd_hash_queue, kill_event, dead_events[0],
                                 5209343, 9373419, 2**22)
    uploader = Process(target=lbry_uploader.start)
    uploader.start()
    self.server_processes.append(uploader)
    logging.debug("Testing multiple uploaders")
    wallet = FakeWallet()
    peer_manager = PeerManager()
    peer_finder = FakePeerFinder(5553, peer_manager, num_uploaders)
    hash_announcer = FakeAnnouncer()
    rate_limiter = DummyRateLimiter()
    sd_identifier = StreamDescriptorIdentifier()
    db_dir = "client"
    blob_dir = os.path.join(db_dir, "blobfiles")
    os.mkdir(db_dir)
    os.mkdir(blob_dir)
    self.session = Session(
        conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd",
        peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=None,
        peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
        blob_tracker_class=DummyBlobAvailabilityTracker,
        is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1])
    self.stream_info_manager = TempEncryptedFileMetadataManager()
    self.lbry_file_manager = EncryptedFileManager(self.session, self.stream_info_manager,
                                                  sd_identifier)

    def start_additional_uploaders(sd_hash):
        # Reuploaders first download the stream themselves, then serve it.
        for i in range(1, num_uploaders):
            uploader = Process(target=start_lbry_reuploader,
                               args=(sd_hash, kill_event, dead_events[i],
                                     ready_events[i - 1], i, 2**10))
            uploader.start()
            self.server_processes.append(uploader)
        return defer.succeed(True)

    def wait_for_ready_events():
        return defer.DeferredList([
            self.wait_for_event(ready_event, 60) for ready_event in ready_events
        ])

    def make_downloader(metadata, prm):
        # Accept every option's default when constructing the downloader.
        info_validator = metadata.validator
        options = metadata.options
        factories = metadata.factories
        chosen_options = [
            o.default_value
            for o in options.get_downloader_options(info_validator, prm)
        ]
        return factories[0].make_downloader(metadata, chosen_options, prm)

    def download_file(sd_hash):
        prm = self.session.payment_rate_manager
        d = download_sd_blob(self.session, sd_hash, prm)
        d.addCallback(sd_identifier.get_metadata_for_sd_blob)
        d.addCallback(make_downloader, prm)
        d.addCallback(lambda downloader: downloader.start())
        return d

    def check_md5_sum():
        f = open('test_file')
        hashsum = MD5.new()
        hashsum.update(f.read())
        self.assertEqual(hashsum.hexdigest(), "e5941d615f53312fd66638239c1f90d5")

    def start_transfer(sd_hash):
        logging.debug("Starting the transfer")
        d = start_additional_uploaders(sd_hash)
        d.addCallback(lambda _: wait_for_ready_events())
        d.addCallback(lambda _: self.session.setup())
        d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier))
        d.addCallback(lambda _: self.lbry_file_manager.setup())
        d.addCallback(lambda _: download_file(sd_hash))
        d.addCallback(lambda _: check_md5_sum())
        return d

    def stop(arg):
        # Runs on success or failure (addBoth); always tears the uploaders down
        # and passes the original result/failure through.
        if isinstance(arg, Failure):
            logging.debug("Client is stopping due to an error. Error: %s",
                          arg.getTraceback())
        else:
            logging.debug("Client is stopping normally.")
        kill_event.set()
        logging.debug("Set the kill event")
        d = defer.DeferredList([
            self.wait_for_event(dead_event, 15) for dead_event in dead_events
        ])

        def print_shutting_down():
            logging.info("Client is shutting down")

        d.addCallback(lambda _: print_shutting_down())
        d.addCallback(lambda _: arg)
        return d

    # The primary uploader pushes its sd hash through the queue once published.
    d = self.wait_for_hash_from_queue(sd_hash_queue)
    d.addCallback(start_transfer)
    d.addBoth(stop)
    return d
def test_double_download(self):
    """Download the same stream twice from one uploader process, verify the
    file's MD5 and status report, then delete the lbry files and shut down."""
    sd_hash_queue = Queue()
    kill_event = Event()
    dead_event = Event()
    lbry_uploader = LbryUploader(sd_hash_queue, kill_event, dead_event, 5209343)
    uploader = Process(target=lbry_uploader.start)
    uploader.start()
    self.server_processes.append(uploader)
    logging.debug("Testing double download")
    wallet = FakeWallet()
    peer_manager = PeerManager()
    peer_finder = FakePeerFinder(5553, peer_manager, 1)
    hash_announcer = FakeAnnouncer()
    rate_limiter = DummyRateLimiter()
    sd_identifier = StreamDescriptorIdentifier()
    # Both downloads are collected here so later checks can reference them.
    downloaders = []
    db_dir = "client"
    blob_dir = os.path.join(db_dir, "blobfiles")
    os.mkdir(db_dir)
    os.mkdir(blob_dir)
    self.session = Session(
        conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd",
        peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=blob_dir,
        peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
        blob_tracker_class=DummyBlobAvailabilityTracker,
        is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1])
    self.stream_info_manager = DBEncryptedFileMetadataManager(self.session.db_dir)
    self.lbry_file_manager = EncryptedFileManager(self.session, self.stream_info_manager,
                                                  sd_identifier)

    @defer.inlineCallbacks
    def make_downloader(metadata, prm):
        # Accept every option's default when constructing the downloader.
        info_validator = metadata.validator
        options = metadata.options
        factories = metadata.factories
        chosen_options = [
            o.default_value
            for o in options.get_downloader_options(info_validator, prm)
        ]
        downloader = yield factories[0].make_downloader(metadata, chosen_options, prm)
        defer.returnValue(downloader)

    def append_downloader(downloader):
        downloaders.append(downloader)
        return downloader

    @defer.inlineCallbacks
    def download_file(sd_hash):
        prm = self.session.payment_rate_manager
        sd_blob = yield download_sd_blob(self.session, sd_hash, prm)
        metadata = yield sd_identifier.get_metadata_for_sd_blob(sd_blob)
        downloader = yield make_downloader(metadata, prm)
        downloaders.append(downloader)
        finished_value = yield downloader.start()
        defer.returnValue(finished_value)

    def check_md5_sum():
        f = open('test_file')
        hashsum = MD5.new()
        hashsum.update(f.read())
        self.assertEqual(hashsum.hexdigest(), "4ca2aafb4101c1e42235aad24fbb83be")

    def delete_lbry_file():
        logging.debug("deleting the file")
        d = self.lbry_file_manager.delete_lbry_file(downloaders[0])
        # Only delete the underlying stream once no lbry file references it.
        d.addCallback(lambda _: self.lbry_file_manager.get_count_for_stream_hash(
            downloaders[0].stream_hash))
        d.addCallback(lambda c: self.stream_info_manager.delete_stream(
            downloaders[1].stream_hash) if c == 0 else True)
        return d

    def check_lbry_file():
        # Query status twice; the second download should report all 3 blobs done.
        d = downloaders[1].status()
        d.addCallback(lambda _: downloaders[1].status())

        def check_status_report(status_report):
            self.assertEqual(status_report.num_known, status_report.num_completed)
            self.assertEqual(status_report.num_known, 3)

        d.addCallback(check_status_report)
        return d

    @defer.inlineCallbacks
    def start_transfer(sd_hash):
        logging.debug("Starting the transfer")
        yield self.session.setup()
        yield self.stream_info_manager.setup()
        yield add_lbry_file_to_sd_identifier(sd_identifier)
        yield self.lbry_file_manager.setup()
        yield download_file(sd_hash)
        yield check_md5_sum()
        yield download_file(sd_hash)
        yield check_lbry_file()
        yield delete_lbry_file()

    def stop(arg):
        # Runs on success or failure (addBoth); always signals the uploader
        # to die and passes the original result/failure through.
        if isinstance(arg, Failure):
            logging.debug("Client is stopping due to an error. Error: %s",
                          arg.getTraceback())
        else:
            logging.debug("Client is stopping normally.")
        kill_event.set()
        logging.debug("Set the kill event")
        d = self.wait_for_event(dead_event, 15)

        def print_shutting_down():
            logging.info("Client is shutting down")

        d.addCallback(lambda _: print_shutting_down())
        d.addCallback(lambda _: arg)
        return d

    d = self.wait_for_hash_from_queue(sd_hash_queue)
    d.addCallback(start_transfer)
    d.addBoth(stop)
    return d
def start_lbry_reuploader(sd_hash, kill_event, dead_event, ready_event, n,
                          ul_rate_limit=None, is_generous=False):
    """Child-process entry point: download the stream identified by sd_hash,
    then serve it back on port 5553+n until kill_event is set.

    kill_event/dead_event/ready_event coordinate with the parent test process;
    n makes ports, ids and directories unique per reuploader.
    """
    use_epoll_on_linux()
    # Import the reactor late so the epoll selection above takes effect first.
    from twisted.internet import reactor
    logging.debug("Starting the uploader")
    # Re-seed PyCrypto after fork; the seeded random.Random keeps runs reproducible.
    Random.atfork()
    r = random.Random()
    r.seed("start_lbry_reuploader")
    wallet = FakeWallet()
    peer_port = 5553 + n
    peer_manager = PeerManager()
    peer_finder = FakePeerFinder(5553, peer_manager, 1)
    hash_announcer = FakeAnnouncer()
    rate_limiter = RateLimiter()
    sd_identifier = StreamDescriptorIdentifier()
    db_dir = "server_" + str(n)
    blob_dir = os.path.join(db_dir, "blobfiles")
    os.mkdir(db_dir)
    os.mkdir(blob_dir)
    session = Session(
        conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd" + str(n),
        peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=None,
        peer_port=peer_port, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
        blob_tracker_class=DummyBlobAvailabilityTracker,
        is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1])
    stream_info_manager = TempEncryptedFileMetadataManager()
    lbry_file_manager = EncryptedFileManager(session, stream_info_manager, sd_identifier)
    if ul_rate_limit is not None:
        session.rate_limiter.set_ul_limit(ul_rate_limit)

    def make_downloader(metadata, prm):
        # Accept every option's default when constructing the downloader.
        info_validator = metadata.validator
        options = metadata.options
        factories = metadata.factories
        chosen_options = [
            o.default_value
            for o in options.get_downloader_options(info_validator, prm)
        ]
        return factories[0].make_downloader(metadata, chosen_options, prm)

    def download_file():
        prm = session.payment_rate_manager
        d = download_sd_blob(session, sd_hash, prm)
        d.addCallback(sd_identifier.get_metadata_for_sd_blob)
        d.addCallback(make_downloader, prm)
        d.addCallback(lambda downloader: downloader.start())
        return d

    def start_transfer():
        logging.debug("Starting the transfer")
        d = session.setup()
        d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier))
        d.addCallback(lambda _: lbry_file_manager.setup())
        d.addCallback(lambda _: download_file())
        return d

    def start_server():
        server_port = None
        query_handler_factories = {
            1: BlobAvailabilityHandlerFactory(session.blob_manager),
            2: BlobRequestHandlerFactory(session.blob_manager, session.wallet,
                                         session.payment_rate_manager, None),
            3: session.wallet.get_wallet_info_query_handler_factory(),
        }
        server_factory = ServerProtocolFactory(session.rate_limiter,
                                               query_handler_factories,
                                               session.peer_manager)
        server_port = reactor.listenTCP(peer_port, server_factory)
        logging.debug("Started listening")

        def kill_server():
            # Tear everything down, signal the parent via dead_event, then
            # stop the reactor once all shutdown Deferreds fire.
            ds = []
            ds.append(session.shut_down())
            ds.append(lbry_file_manager.stop())
            if server_port:
                ds.append(server_port.stopListening())
            kill_check.stop()
            dead_event.set()
            dl = defer.DeferredList(ds)
            dl.addCallback(lambda _: reactor.stop())
            return dl

        def check_for_kill():
            if kill_event.is_set():
                kill_server()

        # Poll the cross-process kill switch once per second.
        kill_check = task.LoopingCall(check_for_kill)
        kill_check.start(1.0)
        ready_event.set()
        logging.debug("set the ready event")

    d = task.deferLater(reactor, 1.0, start_transfer)
    d.addCallback(lambda _: start_server())
    if not reactor.running:
        reactor.run()
def __init__(self, component_manager):
    # Register with the component manager, then create the identifier used
    # to recognize stream descriptor blobs.
    Component.__init__(self, component_manager)
    self.sd_identifier = StreamDescriptorIdentifier()
def test_create_stream(self):
    """Create an encrypted stream using a fixed key and a deterministic IV
    sequence, then check its stream descriptor against the known-good
    reference (``test_create_stream_sd_file``)."""
    wallet = FakeWallet()
    peer_manager = PeerManager()
    peer_finder = FakePeerFinder(5553, peer_manager, 2)
    hash_announcer = FakeAnnouncer()
    rate_limiter = DummyRateLimiter()
    sd_identifier = StreamDescriptorIdentifier()
    db_dir = "client"
    blob_dir = os.path.join(db_dir, "blobfiles")
    os.mkdir(db_dir)
    os.mkdir(blob_dir)
    self.session = Session(settings.data_rate, db_dir=db_dir, lbryid="abcd",
                           peer_finder=peer_finder, hash_announcer=hash_announcer,
                           blob_dir=blob_dir, peer_port=5553, use_upnp=False,
                           rate_limiter=rate_limiter, wallet=wallet,
                           blob_tracker_class=DummyBlobAvailabilityTracker,
                           is_generous=self.is_generous)
    self.stream_info_manager = TempEncryptedFileMetadataManager()
    self.lbry_file_manager = EncryptedFileManager(self.session, self.stream_info_manager,
                                                  sd_identifier)

    def deterministic_ivs():
        # Predictable IVs make the resulting descriptor reproducible.
        counter = 0
        while True:
            counter += 1
            yield "%016d" % counter

    def make_stream(_):
        source = GenFile(5209343, b''.join([chr(i + 3) for i in xrange(0, 64, 6)]))
        return create_lbry_file(self.session, self.lbry_file_manager, "test_file",
                                source, key="0123456701234567",
                                iv_generator=deterministic_ivs())

    def verify_descriptor(stream_hash):
        info_d = get_sd_info(self.lbry_file_manager.stream_info_manager, stream_hash, True)
        info_d.addCallback(lambda sd_info: self.assertEqual(sd_info,
                                                            test_create_stream_sd_file))
        return info_d

    setup_d = self.session.setup()
    setup_d.addCallback(lambda _: self.stream_info_manager.setup())
    setup_d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier))
    setup_d.addCallback(lambda _: self.lbry_file_manager.setup())
    setup_d.addCallback(make_stream)
    setup_d.addCallback(verify_descriptor)
    return setup_d
def start_lbry_reuploader(sd_hash, kill_event, dead_event, ready_event, n,
                          ul_rate_limit=None, is_generous=False):
    """Child-process entry point (newer Session API): download the stream
    identified by sd_hash, then serve it back on port 5553+n until kill_event
    is set.

    kill_event/dead_event/ready_event coordinate with the parent test process;
    n makes ports, ids and directories unique per reuploader.
    """
    use_epoll_on_linux()
    init_conf_windows()
    # Import the reactor late so the epoll selection above takes effect first.
    from twisted.internet import reactor
    logging.debug("Starting the uploader")
    wallet = FakeWallet()
    peer_port = 5553 + n
    peer_manager = PeerManager()
    peer_finder = FakePeerFinder(5553, peer_manager, 1)
    hash_announcer = FakeAnnouncer()
    rate_limiter = RateLimiter()
    sd_identifier = StreamDescriptorIdentifier()
    db_dir, blob_dir = mk_db_and_blob_dir()
    session = Session(
        conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="abcd" + str(n),
        dht_node_port=4446, dht_node_class=FakeNode, peer_finder=peer_finder,
        hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=peer_port,
        use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
        blob_tracker_class=DummyBlobAvailabilityTracker,
        is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1],
        external_ip="127.0.0.1")
    lbry_file_manager = EncryptedFileManager(session, sd_identifier)
    if ul_rate_limit is not None:
        session.rate_limiter.set_ul_limit(ul_rate_limit)

    def make_downloader(metadata, prm, download_directory):
        factories = metadata.factories
        return factories[0].make_downloader(metadata, prm.min_blob_data_payment_rate,
                                            prm, download_directory)

    def download_file():
        prm = session.payment_rate_manager
        d = download_sd_blob(session, sd_hash, prm)
        d.addCallback(sd_identifier.get_metadata_for_sd_blob)
        d.addCallback(make_downloader, prm, db_dir)
        d.addCallback(lambda downloader: downloader.start())
        return d

    def start_transfer():
        logging.debug("Starting the transfer")
        d = session.setup()
        d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier))
        d.addCallback(lambda _: lbry_file_manager.setup())
        d.addCallback(lambda _: download_file())
        return d

    def start_server():
        server_port = None
        query_handler_factories = {
            1: BlobAvailabilityHandlerFactory(session.blob_manager),
            2: BlobRequestHandlerFactory(session.blob_manager, session.wallet,
                                         session.payment_rate_manager, None),
            3: session.wallet.get_wallet_info_query_handler_factory(),
        }
        server_factory = ServerProtocolFactory(session.rate_limiter,
                                               query_handler_factories,
                                               session.peer_manager)
        server_port = reactor.listenTCP(peer_port, server_factory)
        logging.debug("Started listening")

        def kill_server():
            # Tear everything down (including temp dirs), signal the parent via
            # dead_event, then stop the reactor once all Deferreds fire.
            ds = []
            ds.append(session.shut_down())
            ds.append(lbry_file_manager.stop())
            if server_port:
                ds.append(server_port.stopListening())
            ds.append(rm_db_and_blob_dir(db_dir, blob_dir))
            kill_check.stop()
            dead_event.set()
            dl = defer.DeferredList(ds)
            dl.addCallback(lambda _: reactor.stop())
            return dl

        def check_for_kill():
            if kill_event.is_set():
                kill_server()

        # Poll the cross-process kill switch once per second.
        kill_check = task.LoopingCall(check_for_kill)
        kill_check.start(1.0)
        ready_event.set()
        logging.debug("set the ready event")

    d = task.deferLater(reactor, 1.0, start_transfer)
    d.addCallback(lambda _: start_server())
    if not reactor.running:
        reactor.run()
def test_double_download(self):
    """Download a stream, verify it, delete it, then download it again.

    Spawns an uploader in a child process, waits for it to publish an sd
    hash, performs the download/verify/delete cycle twice against it, and
    finally signals the uploader to shut down and cleans up the temp dirs.
    """
    sd_hash_queue = Queue()
    kill_event = Event()
    dead_event = Event()
    lbry_uploader = LbryUploader(sd_hash_queue, kill_event, dead_event, 5209343)
    uploader = Process(target=lbry_uploader.start)
    uploader.start()
    self.server_processes.append(uploader)

    logging.debug("Testing double download")

    wallet = FakeWallet()
    peer_manager = PeerManager()
    peer_finder = FakePeerFinder(5553, peer_manager, 1)
    hash_announcer = FakeAnnouncer()
    rate_limiter = DummyRateLimiter()
    sd_identifier = StreamDescriptorIdentifier()

    downloaders = []

    db_dir, blob_dir = mk_db_and_blob_dir()
    self.session = Session(
        conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="abcd",
        peer_finder=peer_finder, dht_node_port=4445, dht_node_class=FakeNode,
        hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=5553,
        use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
        blob_tracker_class=DummyBlobAvailabilityTracker,
        is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1],
        external_ip="127.0.0.1")

    self.lbry_file_manager = EncryptedFileManager(self.session, sd_identifier)

    @defer.inlineCallbacks
    def make_downloader(metadata, prm):
        # Use the first registered factory for this stream type.
        factories = metadata.factories
        downloader = yield factories[0].make_downloader(
            metadata, prm.min_blob_data_payment_rate, prm, db_dir)
        defer.returnValue(downloader)

    @defer.inlineCallbacks
    def download_file(sd_hash):
        prm = self.session.payment_rate_manager
        sd_blob = yield download_sd_blob(self.session, sd_hash, prm)
        metadata = yield sd_identifier.get_metadata_for_sd_blob(sd_blob)
        downloader = yield make_downloader(metadata, prm)
        downloaders.append(downloader)
        yield downloader.start()
        defer.returnValue(downloader)

    def check_md5_sum():
        # FIX: use a context manager so the file handle is always closed
        # (the original leaked the open file object).
        with open(os.path.join(db_dir, 'test_file')) as f:
            hashsum = md5()
            hashsum.update(f.read())
        self.assertEqual(hashsum.hexdigest(), "4ca2aafb4101c1e42235aad24fbb83be")

    def delete_lbry_file(downloader):
        logging.debug("deleting the file")
        return self.lbry_file_manager.delete_lbry_file(downloader)

    def check_lbry_file(downloader):
        d = downloader.status()

        def check_status_report(status_report):
            self.assertEqual(status_report.num_known, status_report.num_completed)
            self.assertEqual(status_report.num_known, 3)

        d.addCallback(check_status_report)
        return d

    @defer.inlineCallbacks
    def start_transfer(sd_hash):
        # download a file, delete it, and download it again
        logging.debug("Starting the transfer")
        yield self.session.setup()
        yield add_lbry_file_to_sd_identifier(sd_identifier)
        yield self.lbry_file_manager.setup()
        downloader = yield download_file(sd_hash)
        yield check_md5_sum()
        yield check_lbry_file(downloader)
        yield delete_lbry_file(downloader)
        downloader = yield download_file(sd_hash)
        yield check_lbry_file(downloader)
        yield check_md5_sum()
        yield delete_lbry_file(downloader)

    def stop(arg):
        # Runs on both success and failure (addBoth): tell the uploader to
        # die, wait for it, clean up, and re-raise/return the original result.
        if isinstance(arg, Failure):
            logging.debug("Client is stopping due to an error. Error: %s", arg.getTraceback())
        else:
            logging.debug("Client is stopping normally.")
        kill_event.set()
        logging.debug("Set the kill event")
        d = self.wait_for_event(dead_event, 15)

        def print_shutting_down():
            logging.info("Client is shutting down")

        d.addCallback(lambda _: print_shutting_down())
        d.addCallback(lambda _: rm_db_and_blob_dir(db_dir, blob_dir))
        d.addCallback(lambda _: arg)
        return d

    d = self.wait_for_hash_from_queue(sd_hash_queue)
    d.addCallback(start_transfer)
    d.addBoth(stop)
    return d
def test_lbry_transfer(self):
    """Download a stream published by a child-process uploader and verify
    its MD5 checksum, then signal the uploader to shut down.
    """
    sd_hash_queue = Queue()
    kill_event = Event()
    dead_event = Event()
    lbry_uploader = LbryUploader(sd_hash_queue, kill_event, dead_event, 5209343)
    uploader = Process(target=lbry_uploader.start)
    uploader.start()
    self.server_processes.append(uploader)

    logging.debug("Testing transfer")

    wallet = FakeWallet()
    peer_manager = PeerManager()
    peer_finder = FakePeerFinder(5553, peer_manager, 1)
    hash_announcer = FakeAnnouncer()
    rate_limiter = DummyRateLimiter()
    sd_identifier = StreamDescriptorIdentifier()

    db_dir, blob_dir = mk_db_and_blob_dir()
    self.session = Session(
        conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd",
        peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=blob_dir,
        peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
        blob_tracker_class=DummyBlobAvailabilityTracker, dht_node_class=Node,
        is_generous=self.is_generous)

    self.stream_info_manager = TempEncryptedFileMetadataManager()

    self.lbry_file_manager = EncryptedFileManager(
        self.session, self.stream_info_manager, sd_identifier)

    def make_downloader(metadata, prm):
        # Build the downloader from the first factory, using each option's
        # default value.
        info_validator = metadata.validator
        options = metadata.options
        factories = metadata.factories
        chosen_options = [
            o.default_value for o in options.get_downloader_options(info_validator, prm)
        ]
        return factories[0].make_downloader(metadata, chosen_options, prm)

    def download_file(sd_hash):
        prm = self.session.payment_rate_manager
        d = download_sd_blob(self.session, sd_hash, prm)
        d.addCallback(sd_identifier.get_metadata_for_sd_blob)
        d.addCallback(make_downloader, prm)
        d.addCallback(lambda downloader: downloader.start())
        return d

    def check_md5_sum():
        # FIX: use a context manager so the file handle is always closed
        # (the original leaked the open file object).
        with open('test_file') as f:
            hashsum = MD5.new()
            hashsum.update(f.read())
        self.assertEqual(hashsum.hexdigest(), "4ca2aafb4101c1e42235aad24fbb83be")

    @defer.inlineCallbacks
    def start_transfer(sd_hash):
        logging.debug("Starting the transfer")
        yield self.session.setup()
        yield add_lbry_file_to_sd_identifier(sd_identifier)
        yield self.lbry_file_manager.setup()
        yield download_file(sd_hash)
        yield check_md5_sum()

    def stop(arg):
        # Runs on both success and failure (addBoth): tell the uploader to
        # die, wait for it, clean up, and propagate the original result.
        if isinstance(arg, Failure):
            logging.debug("Client is stopping due to an error. Error: %s", arg.getTraceback())
        else:
            logging.debug("Client is stopping normally.")
        kill_event.set()
        logging.debug("Set the kill event")
        d = self.wait_for_event(dead_event, 15)

        def print_shutting_down():
            logging.info("Client is shutting down")

        d.addCallback(lambda _: print_shutting_down())
        d.addCallback(lambda _: rm_db_and_blob_dir(db_dir, blob_dir))
        d.addCallback(lambda _: arg)
        return d

    d = self.wait_for_hash_from_queue(sd_hash_queue)
    d.addCallback(start_transfer)
    d.addBoth(stop)
    return d
def test_live_transfer(self):
    """Download a live stream served by `start_live_server` in a child
    process and verify its MD5 checksum, then signal the server to stop.
    """
    sd_hash_queue = Queue()
    kill_event = Event()
    dead_event = Event()
    server_args = (sd_hash_queue, kill_event, dead_event)
    server = Process(target=start_live_server, args=server_args)
    server.start()
    self.server_processes.append(server)

    wallet = FakeWallet()
    peer_manager = PeerManager()
    peer_finder = FakePeerFinder(5553, peer_manager, 1)
    hash_announcer = FakeAnnouncer()
    rate_limiter = DummyRateLimiter()
    sd_identifier = StreamDescriptorIdentifier()

    db_dir = "client"
    os.mkdir(db_dir)

    self.session = Session(
        conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd",
        peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=None,
        peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
        blob_tracker_class=DummyBlobAvailabilityTracker, dht_node_class=Node)

    self.stream_info_manager = TempLiveStreamMetadataManager(hash_announcer)

    d = self.wait_for_hash_from_queue(sd_hash_queue)

    def create_downloader(metadata, prm):
        # Build the downloader from the first factory, using each option's
        # default value.
        info_validator = metadata.validator
        options = metadata.options
        factories = metadata.factories
        chosen_options = [
            o.default_value for o in options.get_downloader_options(info_validator, prm)
        ]
        return factories[0].make_downloader(metadata, chosen_options, prm)

    def start_lbry_file(lbry_file):
        # FIX: dropped the original's no-op `lbry_file = lbry_file`
        # self-assignment.
        return lbry_file.start()

    def download_stream(sd_blob_hash):
        prm = self.session.payment_rate_manager
        d = download_sd_blob(self.session, sd_blob_hash, prm)
        d.addCallback(sd_identifier.get_metadata_for_sd_blob)
        d.addCallback(create_downloader, prm)
        d.addCallback(start_lbry_file)
        return d

    def do_download(sd_blob_hash):
        logging.debug("Starting the download")
        d = self.session.setup()
        d.addCallback(lambda _: enable_live_stream())
        d.addCallback(lambda _: download_stream(sd_blob_hash))
        return d

    def enable_live_stream():
        add_live_stream_to_sd_identifier(sd_identifier, self.session.payment_rate_manager)
        add_full_live_stream_downloader_to_sd_identifier(
            self.session, self.stream_info_manager, sd_identifier,
            self.session.payment_rate_manager)

    d.addCallback(do_download)

    def check_md5_sum():
        # FIX: use a context manager so the file handle is always closed
        # (the original leaked the open file object).
        with open('test_file') as f:
            hashsum = MD5.new()
            hashsum.update(f.read())
        self.assertEqual(hashsum.hexdigest(), "215b177db8eed86d028b37e5cbad55c7")

    d.addCallback(lambda _: check_md5_sum())

    def stop(arg):
        # Runs on both success and failure (addBoth): tell the server to
        # die, wait for it, and propagate the original result.
        if isinstance(arg, Failure):
            logging.debug("Client is stopping due to an error. Error: %s", arg.getTraceback())
        else:
            logging.debug("Client is stopping normally.")
        kill_event.set()
        logging.debug("Set the kill event")
        d = self.wait_for_event(dead_event, 15)

        def print_shutting_down():
            logging.info("Client is shutting down")

        d.addCallback(lambda _: print_shutting_down())
        d.addCallback(lambda _: arg)
        return d

    d.addBoth(stop)
    return d
def start_live_server(sd_hash_queue, kill_event, dead_event):
    """Run (in a child process) a server that creates a live stream and
    serves it until `kill_event` is set.

    Args:
        sd_hash_queue: queue onto which the published sd hash is put so the
            client process knows what to download.
        kill_event: multiprocessing event polled once per second; when set,
            the server shuts down.
        dead_event: set by this process once shutdown has completed.
    """
    use_epoll_on_linux()
    # Imported here (not at module level) so the reactor is created in the
    # child process, after fork.
    from twisted.internet import reactor
    logging.debug("In start_server.")

    Random.atfork()
    r = random.Random()
    r.seed("start_live_server")

    wallet = FakeWallet()
    peer_manager = PeerManager()
    peer_finder = FakePeerFinder(5553, peer_manager, 1)
    hash_announcer = FakeAnnouncer()
    rate_limiter = DummyRateLimiter()
    sd_identifier = StreamDescriptorIdentifier()

    db_dir = "server"
    os.mkdir(db_dir)

    session = Session(
        conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd",
        peer_finder=peer_finder, hash_announcer=hash_announcer, peer_port=5553,
        use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
        blob_tracker_class=DummyBlobAvailabilityTracker,
        is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1])
    stream_info_manager = DBLiveStreamMetadataManager(session.db_dir, hash_announcer)

    logging.debug("Created the session")

    # Single-element list used so start_listening / shut_down can share the
    # listening port without a nonlocal/global declaration.
    server_port = []

    def start_listening():
        logging.debug("Starting the server protocol")
        query_handler_factories = {
            1: CryptBlobInfoQueryHandlerFactory(stream_info_manager, session.wallet,
                                                session.payment_rate_manager),
            2: BlobRequestHandlerFactory(session.blob_manager, session.wallet,
                                         session.payment_rate_manager,
                                         analytics.Track()),
            3: session.wallet.get_wallet_info_query_handler_factory()
        }

        server_factory = ServerProtocolFactory(session.rate_limiter,
                                               query_handler_factories,
                                               session.peer_manager)
        server_port.append(reactor.listenTCP(5553, server_factory))
        logging.debug("Server protocol has started")

    def create_stream():
        # Publish a generated test file as a live stream; the sd hash is
        # handed to the client via the queue before streaming starts.
        logging.debug("Making the live stream")
        test_file = GenFile(5209343, b''.join([chr(i + 2) for i in xrange(0, 64, 6)]))
        stream_creator_helper = FileLiveStreamCreator(session.blob_manager, stream_info_manager,
                                                      "test_file", test_file)
        d = stream_creator_helper.setup()
        d.addCallback(lambda _: stream_creator_helper.publish_stream_descriptor())
        d.addCallback(put_sd_hash_on_queue)
        d.addCallback(lambda _: stream_creator_helper.start_streaming())
        return d

    def put_sd_hash_on_queue(sd_hash):
        logging.debug("Telling the client to start running. Stream hash: %s", str(sd_hash))
        sd_hash_queue.put(sd_hash)
        logging.debug("sd hash has been added to the queue")

    def set_dead_event():
        logging.debug("Setting the dead event")
        dead_event.set()

    def print_error(err):
        logging.debug("An error occurred during shutdown: %s", err.getTraceback())

    def stop_reactor():
        logging.debug("Server is stopping its reactor")
        reactor.stop()

    def shut_down(arg):
        # Runs on both success and failure (addBoth). Each shutdown step is
        # followed by an errback so one failure doesn't skip the rest.
        logging.debug("Shutting down")
        if isinstance(arg, Failure):
            logging.error("Shut down is due to an error: %s", arg.getTraceback())
        d = defer.maybeDeferred(server_port[0].stopListening)
        d.addErrback(print_error)
        d.addCallback(lambda _: session.shut_down())
        d.addCallback(lambda _: stream_info_manager.stop())
        d.addErrback(print_error)
        d.addCallback(lambda _: set_dead_event())
        d.addErrback(print_error)
        d.addCallback(lambda _: reactor.callLater(0, stop_reactor))
        d.addErrback(print_error)
        return d

    def wait_for_kill_event():
        # Deferred that fires once kill_event is observed set (polled at 1s).
        d = defer.Deferred()

        def check_for_kill():
            if kill_event.is_set():
                logging.debug("Kill event has been found set")
                kill_check.stop()
                d.callback(True)

        kill_check = task.LoopingCall(check_for_kill)
        kill_check.start(1.0)

        return d

    def enable_live_stream():
        add_live_stream_to_sd_identifier(sd_identifier, session.base_payment_rate_manager)
        add_full_live_stream_downloader_to_sd_identifier(session, stream_info_manager,
                                                         sd_identifier,
                                                         session.base_payment_rate_manager)

    def run_server():
        d = session.setup()
        d.addCallback(lambda _: stream_info_manager.setup())
        d.addCallback(lambda _: enable_live_stream())
        d.addCallback(lambda _: start_listening())
        d.addCallback(lambda _: create_stream())
        d.addCallback(lambda _: wait_for_kill_event())
        d.addBoth(shut_down)
        return d

    reactor.callLater(1, run_server)
    if not reactor.running:
        reactor.run()