class LBRYStdoutDownloader():
    """This class downloads a live stream from the network and outputs it to standard out."""

    def __init__(self, dht_node_port, known_dht_nodes,
                 stream_info_manager_class=DBLiveStreamMetadataManager,
                 blob_manager_class=TempBlobManager):
        """
        @param dht_node_port: the network port on which to listen for DHT node requests

        @param known_dht_nodes: a list of (ip_address, dht_port) which will be used to join the DHT network
        """
        self.session = LBRYSession(blob_manager_class=blob_manager_class,
                                   stream_info_manager_class=stream_info_manager_class,
                                   dht_node_class=Node, dht_node_port=dht_node_port,
                                   known_dht_nodes=known_dht_nodes, use_upnp=False)
        self.payment_rate_manager = BaseLiveStreamPaymentRateManager()

    def start(self):
        """Initialize the session"""
        d = self.session.setup()
        return d

    def read_sd_file(self, sd_blob):
        reader = BlobStreamDescriptorReader(sd_blob)
        return save_sd_info(self.stream_info_manager, reader, ignore_duplicate=True)

    def download_sd_file_from_hash(self, sd_hash):
        downloader = StandaloneBlobDownloader(sd_hash, self.session.blob_manager,
                                              self.session.peer_finder, self.session.rate_limiter,
                                              self.session.wallet)
        d = downloader.download()
        return d

    def start_download(self, sd_hash):
        """Start downloading the stream from the network and outputting it to standard out"""
        d = self.download_sd_file_from_hash(sd_hash)
        d.addCallbacks(self.read_sd_file)

        def start_stream(stream_hash):
            consumer = LBRYLiveStreamDownloader(stream_hash, self.session.peer_finder,
                                                self.session.rate_limiter, self.session.blob_manager,
                                                self.stream_info_manager, self.payment_rate_manager,
                                                self.session.wallet)
            return consumer.start()

        d.addCallback(start_stream)
        return d

    def shut_down(self):
        """End the session"""
        d = self.session.shut_down()
        return d
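
# Illustrative usage sketch, not part of the original module: one way to drive
# LBRYStdoutDownloader from the Twisted reactor. The DHT port, the bootstrap
# node address and the sd_hash argument are placeholders, not values taken
# from this code.
def example_run_stdout_downloader(sd_hash):
    from twisted.internet import reactor

    downloader = LBRYStdoutDownloader(4444, [('127.0.0.1', 4000)])

    def begin():
        d = downloader.start()                                        # set up the session
        d.addCallback(lambda _: downloader.start_download(sd_hash))   # fetch the sd blob, then stream to stdout
        d.addErrback(lambda err: err.printTraceback())
        d.addBoth(lambda _: downloader.shut_down())
        d.addBoth(lambda _: reactor.stop())

    reactor.callWhenRunning(begin)
    reactor.run()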
class LBRYDaemon(xmlrpc.XMLRPC): """ LBRYnet daemon """ def setup(self): def _set_vars(): self.fetcher = None self.current_db_revision = 1 self.run_server = True self.session = None self.known_dht_nodes = [('104.236.42.182', 4000)] self.db_dir = os.path.join(os.path.expanduser("~"), ".lbrynet") self.blobfile_dir = os.path.join(self.db_dir, "blobfiles") self.peer_port = 3333 self.dht_node_port = 4444 self.first_run = False if os.name == "nt": from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle self.download_directory = get_path(FOLDERID.Downloads, UserHandle.current) self.wallet_dir = os.path.join(get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbrycrd") elif sys.platform == "darwin": self.download_directory = os.path.join(os.path.expanduser("~"), 'Downloads') self.wallet_dir = os.path.join(os.path.expanduser("~"), "Library/Application Support/lbrycrd") else: self.wallet_dir = os.path.join(os.path.expanduser("~"), ".lbrycrd") self.download_directory = os.path.join(os.path.expanduser("~"), 'Downloads') self.wallet_conf = os.path.join(self.wallet_dir, "lbrycrd.conf") self.wallet_user = None self.wallet_password = None self.sd_identifier = StreamDescriptorIdentifier() self.stream_info_manager = TempLBRYFileMetadataManager() self.wallet_rpc_port = 8332 self.downloads = [] self.stream_frames = [] self.default_blob_data_payment_rate = MIN_BLOB_DATA_PAYMENT_RATE self.use_upnp = True self.start_lbrycrdd = True if os.name == "nt": self.lbrycrdd_path = "lbrycrdd.exe" else: self.lbrycrdd_path = "./lbrycrdd" self.delete_blobs_on_remove = True self.blob_request_payment_rate_manager = None self.lbry_file_metadata_manager = None self.lbry_file_manager = None self.settings = LBRYSettings(self.db_dir) self.wallet_type = "lbrycrd" self.lbrycrd_conf = os.path.join(self.wallet_dir, "lbrycrd.conf") self.autofetcher_conf = os.path.join(self.wallet_dir, "autofetcher.conf") self.rpc_conn = None self.files = [] self.created_data_dir = False if not os.path.exists(self.db_dir): os.mkdir(self.db_dir) self.created_data_dir = True self.session_settings = None self.data_rate = 0.5 self.max_key_fee = 100.0 return defer.succeed(None) def _disp_startup(): print "Started LBRYnet daemon" print "The daemon can be shut down by running 'stop-lbrynet-daemon' in a terminal" return defer.succeed(None) d = defer.Deferred() d.addCallback(lambda _: _set_vars()) d.addCallback(lambda _: threads.deferToThread(self._setup_data_directory)) d.addCallback(lambda _: self._check_db_migration()) d.addCallback(lambda _: self._get_settings()) d.addCallback(lambda _: self._get_lbrycrdd_path()) d.addCallback(lambda _: self._get_session()) d.addCallback(lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier)) d.addCallback(lambda _: self._setup_stream_identifier()) d.addCallback(lambda _: self._setup_lbry_file_manager()) d.addCallback(lambda _: self._setup_lbry_file_opener()) d.addCallback(lambda _: self._setup_fetcher()) d.addCallback(lambda _: _disp_startup()) d.callback(None) return defer.succeed(None) def _shutdown(self): print 'Closing lbrynet session' if self.session is not None: d = self.session.shut_down() else: d = defer.succeed(True) return d def _update_settings(self): self.data_rate = self.session_settings['data_rate'] self.max_key_fee = self.session_settings['max_key_fee'] def _setup_fetcher(self): self.fetcher = FetcherDaemon(self.session, self.lbry_file_manager, self.lbry_file_metadata_manager, self.session.wallet, self.sd_identifier, self.autofetcher_conf) return defer.succeed(None) def 
_setup_data_directory(self): print "Loading databases..." if self.created_data_dir: db_revision = open(os.path.join(self.db_dir, "db_revision"), mode='w') db_revision.write(str(self.current_db_revision)) db_revision.close() log.debug("Created the db revision file: %s", str(os.path.join(self.db_dir, "db_revision"))) if not os.path.exists(self.blobfile_dir): os.mkdir(self.blobfile_dir) log.debug("Created the blobfile directory: %s", str(self.blobfile_dir)) def _check_db_migration(self): old_revision = 0 db_revision_file = os.path.join(self.db_dir, "db_revision") if os.path.exists(db_revision_file): old_revision = int(open(db_revision_file).read().strip()) if old_revision < self.current_db_revision: from lbrynet.db_migrator import dbmigrator print "Upgrading your databases..." d = threads.deferToThread(dbmigrator.migrate_db, self.db_dir, old_revision, self.current_db_revision) def print_success(old_dirs): success_string = "Finished upgrading the databases. It is now safe to delete the" success_string += " following directories, if you feel like it. It won't make any" success_string += " difference.\nAnyway here they are: " for i, old_dir in enumerate(old_dirs): success_string += old_dir if i + 1 < len(old_dir): success_string += ", " print success_string d.addCallback(print_success) return d return defer.succeed(True) def _get_settings(self): d = self.settings.start() d.addCallback(lambda _: self.settings.get_lbryid()) d.addCallback(self.set_lbryid) d.addCallback(lambda _: self._get_lbrycrdd_path()) return d def set_lbryid(self, lbryid): if lbryid is None: return self._make_lbryid() else: self.lbryid = lbryid def _make_lbryid(self): self.lbryid = generate_id() d = self.settings.save_lbryid(self.lbryid) return d def _setup_lbry_file_manager(self): self.lbry_file_metadata_manager = DBLBRYFileMetadataManager(self.db_dir) d = self.lbry_file_metadata_manager.setup() def set_lbry_file_manager(): self.lbry_file_manager = LBRYFileManager(self.session, self.lbry_file_metadata_manager, self.sd_identifier) return self.lbry_file_manager.setup() d.addCallback(lambda _: set_lbry_file_manager()) return d def _get_session(self): def get_default_data_rate(): d = self.settings.get_default_data_payment_rate() d.addCallback(lambda rate: {"default_data_payment_rate": rate if rate is not None else MIN_BLOB_DATA_PAYMENT_RATE}) return d def get_wallet(): if self.wallet_type == "lbrycrd": lbrycrdd_path = None if self.start_lbrycrdd is True: lbrycrdd_path = self.lbrycrdd_path if not lbrycrdd_path: lbrycrdd_path = self.default_lbrycrdd_path d = defer.succeed(LBRYcrdWallet(self.db_dir, wallet_dir=self.wallet_dir, wallet_conf=self.lbrycrd_conf, lbrycrdd_path=lbrycrdd_path)) else: d = defer.succeed(PTCWallet(self.db_dir)) d.addCallback(lambda wallet: {"wallet": wallet}) return d d1 = get_default_data_rate() d2 = get_wallet() def combine_results(results): r = {} for success, result in results: if success is True: r.update(result) return r def create_session(results): self.session = LBRYSession(results['default_data_payment_rate'], db_dir=self.db_dir, lbryid=self.lbryid, blob_dir=self.blobfile_dir, dht_node_port=self.dht_node_port, known_dht_nodes=self.known_dht_nodes, peer_port=self.peer_port, use_upnp=self.use_upnp, wallet=results['wallet']) self.rpc_conn = self.session.wallet.get_rpc_conn_x() dl = defer.DeferredList([d1, d2], fireOnOneErrback=True) dl.addCallback(combine_results) dl.addCallback(create_session) dl.addCallback(lambda _: self.session.setup()) return dl def _get_lbrycrdd_path(self): def 
get_lbrycrdd_path_conf_file(): lbrycrdd_path_conf_path = os.path.join(os.path.expanduser("~"), ".lbrycrddpath.conf") if not os.path.exists(lbrycrdd_path_conf_path): return "" lbrycrdd_path_conf = open(lbrycrdd_path_conf_path) lines = lbrycrdd_path_conf.readlines() return lines d = threads.deferToThread(get_lbrycrdd_path_conf_file) def load_lbrycrdd_path(conf): for line in conf: if len(line.strip()) and line.strip()[0] != "#": self.lbrycrdd_path = line.strip() print self.lbrycrdd_path d.addCallback(load_lbrycrdd_path) return d def _setup_stream_identifier(self): file_saver_factory = LBRYFileSaverFactory(self.session.peer_finder, self.session.rate_limiter, self.session.blob_manager, self.stream_info_manager, self.session.wallet, self.download_directory) self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, file_saver_factory) file_opener_factory = LBRYFileOpenerFactory(self.session.peer_finder, self.session.rate_limiter, self.session.blob_manager, self.stream_info_manager, self.session.wallet) self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, file_opener_factory) return defer.succeed(None) def _setup_lbry_file_manager(self): self.lbry_file_metadata_manager = DBLBRYFileMetadataManager(self.db_dir) d = self.lbry_file_metadata_manager.setup() def set_lbry_file_manager(): self.lbry_file_manager = LBRYFileManager(self.session, self.lbry_file_metadata_manager, self.sd_identifier) return self.lbry_file_manager.setup() d.addCallback(lambda _: set_lbry_file_manager()) return d def _setup_lbry_file_opener(self): downloader_factory = LBRYFileOpenerFactory(self.session.peer_finder, self.session.rate_limiter, self.session.blob_manager, self.stream_info_manager, self.session.wallet) self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, downloader_factory) return defer.succeed(True) def _download_name(self, name): def _disp_file(file): print '[' + str(datetime.now()) + ']' + ' Already downloaded: ' + str(file.stream_hash) d = self._path_from_lbry_file(file) return d def _get_stream(name): def _disp(stream): print '[' + str(datetime.now()) + ']' + ' Start stream: ' + stream['stream_hash'] return stream d = self.session.wallet.get_stream_info_for_name(name) stream = GetStream(self.sd_identifier, self.session, self.session.wallet, self.lbry_file_manager, max_key_fee=self.max_key_fee, data_rate=self.data_rate) d.addCallback(_disp) d.addCallback(lambda stream_info: stream.start(stream_info)) d.addCallback(lambda _: self._path_from_name(name)) return d d = self._check_history(name) d.addCallback(lambda lbry_file: _get_stream(name) if not lbry_file else _disp_file(lbry_file)) d.addCallback(lambda _: self._check_history(name)) d.addCallback(lambda lbry_file: self._path_from_lbry_file(lbry_file) if lbry_file else 'Not found') d.addErrback(lambda err: str(err)) return d def _resolve_name(self, name): d = defer.Deferred() d.addCallback(lambda _: self.session.wallet.get_stream_info_for_name(name)) d.addErrback(lambda _: defer.fail(UnknownNameError)) return d def _resolve_name_wc(self, name): d = defer.Deferred() d.addCallback(lambda _: self.session.wallet.get_stream_info_for_name(name)) d.addErrback(lambda _: defer.fail(UnknownNameError)) d.callback(None) return d def _check_history(self, name): def _get_lbry_file(path): f = open(path, 'r') l = json.loads(f.read()) f.close() file_name = l['stream_name'].decode('hex') lbry_file = [file for file in self.lbry_file_manager.lbry_files if file.stream_name == file_name][0] return lbry_file def _check(info): stream_hash = 
info['stream_hash'] path = os.path.join(self.blobfile_dir, stream_hash) if os.path.isfile(path): print "[" + str(datetime.now()) + "] Search for lbry_file, returning: " + stream_hash return defer.succeed(_get_lbry_file(path)) else: print "[" + str(datetime.now()) + "] Search for lbry_file didn't return anything" return defer.succeed(False) d = self._resolve_name(name) d.addCallbacks(_check, lambda _: False) d.callback(None) return d def _delete_lbry_file(self, lbry_file): d = self.lbry_file_manager.delete_lbry_file(lbry_file) def finish_deletion(lbry_file): d = lbry_file.delete_data() d.addCallback(lambda _: _delete_stream_data(lbry_file)) return d def _delete_stream_data(lbry_file): s_h = lbry_file.stream_hash d = self.lbry_file_manager.get_count_for_stream_hash(s_h) # TODO: could possibly be a timing issue here d.addCallback(lambda c: self.stream_info_manager.delete_stream(s_h) if c == 0 else True) return d d.addCallback(lambda _: finish_deletion(lbry_file)) return d def _path_from_name(self, name): d = self._check_history(name) d.addCallback(lambda lbry_file: {'stream_hash': lbry_file.stream_hash, 'path': os.path.join(self.download_directory, lbry_file.file_name)} if lbry_file else defer.fail(UnknownNameError)) return d def _path_from_lbry_file(self, lbry_file): if lbry_file: r = {'stream_hash': lbry_file.stream_hash, 'path': os.path.join(self.download_directory, lbry_file.file_name)} return defer.succeed(r) else: return defer.fail(UnknownNameError) def xmlrpc_get_settings(self): """ Get LBRY payment settings """ if not self.session_settings: self.session_settings = {'data_rate': self.data_rate, 'max_key_fee': self.max_key_fee} print '[' + str(datetime.now()) + '] Get daemon settings' return self.session_settings def xmlrpc_set_settings(self, settings): self.session_settings = settings self._update_settings() print '[' + str(datetime.now()) + '] Set daemon settings' return 'Set' def xmlrpc_start_fetcher(self): """ Start autofetcher """ self.fetcher.start() print '[' + str(datetime.now()) + '] Start autofetcher' return str('Started autofetching') def xmlrpc_stop_fetcher(self): """ Start autofetcher """ self.fetcher.stop() print '[' + str(datetime.now()) + '] Stop autofetcher' return str('Started autofetching') def xmlrpc_fetcher_status(self): """ Start autofetcher """ print '[' + str(datetime.now()) + '] Get fetcher status' return str(self.fetcher.check_if_running()) def xmlrpc_get_balance(self): """ Get LBC balance """ print '[' + str(datetime.now()) + '] Get balance' return str(self.session.wallet.wallet_balance) def xmlrpc_stop(self): """ Stop the reactor """ def _disp_shutdown(): print 'Shutting down lbrynet daemon' d = self._shutdown() d.addCallback(lambda _: _disp_shutdown()) d.addCallback(lambda _: reactor.stop()) d.callback(None) return d def xmlrpc_get_lbry_files(self): """ Get LBRY files @return: Managed LBRY files """ r = [] for f in self.lbry_file_manager.lbry_files: if f.key: t = {'completed': f.completed, 'file_name': f.file_name, 'key': binascii.b2a_hex(f.key), 'points_paid': f.points_paid, 'stopped': f.stopped, 'stream_hash': f.stream_hash, 'stream_name': f.stream_name, 'suggested_file_name': f.suggested_file_name, 'upload_allowed': f.upload_allowed} else: t = {'completed': f.completed, 'file_name': f.file_name, 'key': None, 'points_paid': f.points_paid, 'stopped': f.stopped, 'stream_hash': f.stream_hash, 'stream_name': f.stream_name, 'suggested_file_name': f.suggested_file_name, 'upload_allowed': f.upload_allowed} r.append(json.dumps(t)) print '[' + str(datetime.now()) + 
'] Get LBRY files' return r def xmlrpc_resolve_name(self, name): """ Resolve stream info from a LBRY uri @param: name """ def _disp(info): log.debug('[' + str(datetime.now()) + ']' + ' Resolved info: ' + str(info['stream_hash'])) print '[' + str(datetime.now()) + ']' + ' Resolved info: ' + str(info['stream_hash']) return info d = self._resolve_name(name) d.addCallbacks(_disp, lambda _: str('UnknownNameError')) d.callback(None) return d def xmlrpc_get(self, name): """ Download stream from a LBRY uri @param: name """ d = self._download_name(name) return d def xmlrpc_stop_lbry_file(self, stream_hash): try: lbry_file = [f for f in self.lbry_file_manager.lbry_files if f.stream_hash == stream_hash][0] except IndexError: return defer.fail(UnknownNameError) if not lbry_file.stopped: d = self.lbry_file_manager.toggle_lbry_file_running(lbry_file) d.addCallback(lambda _: 'Stream has been stopped') d.addErrback(lambda err: str(err)) return d else: return defer.succeed('Stream was already stopped') def xmlrpc_start_lbry_file(self, stream_hash): try: lbry_file = [f for f in self.lbry_file_manager.lbry_files if f.stream_hash == stream_hash][0] except IndexError: return defer.fail(UnknownNameError) if lbry_file.stopped: d = self.lbry_file_manager.toggle_lbry_file_running(lbry_file) d.callback(None) return defer.succeed('Stream started') else: return defer.succeed('Stream was already running') def xmlrpc_render_html(self, html): def _make_file(html, path): f = open(path, 'w') f.write(html) f.close() return defer.succeed(None) def _disp_err(err): print str(err.getTraceback()) return err path = os.path.join(self.download_directory, 'lbry.html') d = defer.Deferred() d.addCallback(lambda _: _make_file(html, path)) d.addCallback(lambda _: webbrowser.open('file://' + path)) d.addErrback(_disp_err) d.callback(None) return d def xmlrpc_render_gui(self): def _disp_err(err): print str(err.getTraceback()) return err d = defer.Deferred() d.addCallback(lambda _: webbrowser.open(os.path.join(self.download_directory, "lbryio/view/page/gui.html"))) d.addErrback(_disp_err) d.callback(None) return d def xmlrpc_search_nametrie(self, search): def _return_d(x): d = defer.Deferred() d.addCallback(lambda _: x) d.callback(None) return d def _clean(n): t = [] for i in n: if i[0]: if i[1][0][0] and i[1][1][0]: i[1][0][1]['value'] = str(i[1][0][1]['value']) t.append([i[1][0][1], i[1][1][1]]) return t def _parse(results): f = [] for chain, meta in results: t = {} if 'name' in chain.keys(): t['name'] = chain['name'] if 'thumbnail' in meta.keys(): t['img'] = meta['thumbnail'] if 'name' in meta.keys(): t['title'] = meta['name'] if 'description' in meta.keys(): t['description'] = meta['description'] if 'key_fee' in meta.keys(): t['cost_est'] = meta['key_fee'] else: t['cost_est'] = 0.0 f.append(t) return f def _disp(results): print '[' + str(datetime.now()) + '] Found ' + str(len(results)) + ' results' return results print '[' + str(datetime.now()) + '] Search nametrie: ' + search filtered_results = [n for n in self.rpc_conn.getnametrie() if n['name'].startswith(search)] filtered_results = [n for n in filtered_results if 'txid' in n.keys()] resolved_results = [defer.DeferredList([_return_d(n), self._resolve_name_wc(n['name'])]) for n in filtered_results] d = defer.DeferredList(resolved_results) d.addCallback(_clean) d.addCallback(_parse) d.addCallback(_disp) return d def xmlrpc_delete_lbry_file(self, file_name): def _disp(file_name): print '[' + str(datetime.now()) + '] Deleted: ' + file_name return defer.succeed(str('Deleted: ' + 
file_name)) lbry_files = [self._delete_lbry_file(f) for f in self.lbry_file_manager.lbry_files if file_name == f.file_name] d = defer.DeferredList(lbry_files) d.addCallback(lambda _: _disp(file_name)) return d def xmlrpc_check(self, name): d = self._check_history(name) d.addCallback(lambda lbry_file: self._path_from_lbry_file(lbry_file) if lbry_file else 'Not found') d.addErrback(lambda err: str(err)) return d def xmlrpc_publish(self, metadata): metadata = json.loads(metadata) required = ['name', 'file_path', 'bid'] for r in required: if not r in metadata.keys(): return defer.fail() # if not os.path.isfile(metadata['file_path']): # return defer.fail() if not type(metadata['bid']) is float and metadata['bid'] > 0.0: return defer.fail() name = metadata['name'] file_path = metadata['file_path'] bid = metadata['bid'] if 'title' in metadata.keys(): title = metadata['title'] else: title = None if 'description' in metadata.keys(): description = metadata['description'] else: description = None if 'thumbnail' in metadata.keys(): thumbnail = metadata['thumbnail'] else: thumbnail = None if 'key_fee' in metadata.keys(): if not 'key_fee_address' in metadata.keys(): return defer.fail() key_fee = metadata['key_fee'] else: key_fee = None if 'key_fee_address' in metadata.keys(): key_fee_address = metadata['key_fee_address'] else: key_fee_address = None p = Publisher(self.session, self.lbry_file_manager, self.session.wallet) d = p.start(name, file_path, bid, title, description, thumbnail, key_fee, key_fee_address) return d
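
# Client-side sketch, assumption only: the daemon above is assumed to be exposed
# over XML-RPC at the placeholder URL below (no listening port is set up in this
# file). twisted.web.xmlrpc dispatches a call named "foo" to the xmlrpc_foo
# method, so the proxy method names map onto the methods of LBRYDaemon with the
# "xmlrpc_" prefix stripped. The name and metadata values are placeholders.
def example_daemon_client(daemon_url="http://localhost:7080/"):
    import json
    import xmlrpclib

    proxy = xmlrpclib.ServerProxy(daemon_url)
    print proxy.get_balance()                    # xmlrpc_get_balance
    print proxy.get_settings()                   # xmlrpc_get_settings
    print proxy.resolve_name("some-lbry-name")   # xmlrpc_resolve_name
    # xmlrpc_publish expects a JSON-encoded dict with at least name, file_path and bid
    metadata = {"name": "example", "file_path": "/tmp/example.mp4", "bid": 1.0}
    print proxy.publish(json.dumps(metadata))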
class LBRYConsole(): """A class which can upload and download file streams to and from the network""" def __init__(self, peer_port, dht_node_port, known_dht_nodes, fake_wallet, lbrycrd_conf, lbrycrd_dir, use_upnp, data_dir, created_data_dir, lbrycrdd_path): """ @param peer_port: the network port on which to listen for peers @param dht_node_port: the network port on which to listen for dht node requests @param known_dht_nodes: a list of (ip_address, dht_port) which will be used to join the DHT network """ self.peer_port = peer_port self.dht_node_port = dht_node_port self.known_dht_nodes = known_dht_nodes self.fake_wallet = fake_wallet self.lbrycrd_conf = lbrycrd_conf self.lbrycrd_dir = lbrycrd_dir if not self.lbrycrd_dir: if sys.platform == "darwin": self.lbrycrd_dir = os.path.join(os.path.expanduser("~"), "Library/Application Support/lbrycrd") else: self.lbrycrd_dir = os.path.join(os.path.expanduser("~"), ".lbrycrd") if not self.lbrycrd_conf: self.lbrycrd_conf = os.path.join(self.lbrycrd_dir, "lbrycrd.conf") self.lbrycrdd_path = lbrycrdd_path self.use_upnp = use_upnp self.lbry_server_port = None self.session = None self.lbry_file_metadata_manager = None self.lbry_file_manager = None self.db_dir = data_dir self.current_db_revision = 1 self.blobfile_dir = os.path.join(self.db_dir, "blobfiles") self.created_data_dir = created_data_dir self.plugin_manager = PluginManager() self.plugin_manager.setPluginPlaces([ os.path.join(self.db_dir, "plugins"), os.path.join(os.path.dirname(__file__), "plugins"), ]) self.command_handlers = [] self.query_handlers = {} self.settings = LBRYSettings(self.db_dir) self.blob_request_payment_rate_manager = None self.lbryid = None self.sd_identifier = StreamDescriptorIdentifier() self.plugin_objects = [] self.db_migration_revisions = None def start(self): """Initialize the session and restore everything to its saved state""" d = self._setup_controller() d.addCallback(lambda _: threads.deferToThread(self._setup_data_directory)) d.addCallback(lambda _: self._check_db_migration()) d.addCallback(lambda _: self._get_settings()) d.addCallback(lambda _: self._get_session()) d.addCallback(lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier)) d.addCallback(lambda _: self._setup_lbry_file_manager()) d.addCallback(lambda _: self._setup_lbry_file_opener()) d.addCallback(lambda _: self._setup_control_handlers()) d.addCallback(lambda _: self._setup_query_handlers()) d.addCallback(lambda _: self._load_plugins()) d.addCallback(lambda _: self._setup_server()) d.addCallback(lambda _: self._start_controller()) d.addErrback(self._show_start_error) return d def _show_start_error(self, error): print error.getTraceback() log.error("An error occurred during start up: %s", error.getTraceback()) return error def _show_shutdown_error(self, error): print error.getErrorMessage() log.error("An error occurred during shutdown: %s", error.getTraceback()) return error def shut_down(self): """Stop the session, all currently running streams, and stop the server""" d = self._shut_down() if self.session is not None: d.addCallback(lambda _: self.session.shut_down()) d.addErrback(self._show_shutdown_error) return d def add_control_handlers(self, control_handlers): for control_handler in control_handlers: self.command_handlers.append(control_handler) def add_query_handlers(self, query_handlers): def _set_query_handlers(statuses): from future_builtins import zip for handler, (success, status) in zip(query_handlers, statuses): if success is True: self.query_handlers[handler] = status ds = [] for 
handler in query_handlers: ds.append(self.settings.get_query_handler_status(handler.get_primary_query_identifier())) dl = defer.DeferredList(ds) dl.addCallback(_set_query_handlers) return dl def _setup_data_directory(self): alert.info("Loading databases...") if self.created_data_dir: db_revision = open(os.path.join(self.db_dir, "db_revision"), mode='w') db_revision.write(str(self.current_db_revision)) db_revision.close() log.debug("Created the db revision file: %s", str(os.path.join(self.db_dir, "db_revision"))) if not os.path.exists(self.blobfile_dir): os.mkdir(self.blobfile_dir) log.debug("Created the blobfile directory: %s", str(self.blobfile_dir)) def _check_db_migration(self): old_revision = 1 db_revision_file = os.path.join(self.db_dir, "db_revision") if os.path.exists(db_revision_file): old_revision = int(open(db_revision_file).read().strip()) if old_revision < self.current_db_revision: from lbrynet.db_migrator import dbmigrator print "Upgrading your databases..." d = threads.deferToThread(dbmigrator.migrate_db, self.db_dir, old_revision, self.current_db_revision) def print_success(old_dirs): success_string = "Finished upgrading the databases. It is now safe to delete the" success_string += " following directories, if you feel like it. It won't make any" success_string += " difference.\nAnyway here they are: " for i, old_dir in enumerate(old_dirs): success_string += old_dir if i + 1 < len(old_dir): success_string += ", " print success_string d.addCallback(print_success) return d return defer.succeed(True) def _get_settings(self): d = self.settings.start() d.addCallback(lambda _: self.settings.get_lbryid()) d.addCallback(self.set_lbryid) return d def set_lbryid(self, lbryid): if lbryid is None: return self._make_lbryid() else: self.lbryid = lbryid def _make_lbryid(self): self.lbryid = generate_id() d = self.settings.save_lbryid(self.lbryid) return d def _get_session(self): def get_default_data_rate(): d = self.settings.get_default_data_payment_rate() d.addCallback(lambda rate: {"default_data_payment_rate": rate if rate is not None else MIN_BLOB_DATA_PAYMENT_RATE}) return d def get_wallet(): if self.fake_wallet: d = defer.succeed(PTCWallet(self.db_dir)) elif self.lbrycrdd_path is not None: d = defer.succeed(LBRYcrdWallet(self.db_dir, wallet_dir=self.lbrycrd_dir, wallet_conf=self.lbrycrd_conf, lbrycrdd_path=self.lbrycrdd_path)) else: d = defer.succeed(LBRYumWallet(self.db_dir)) d.addCallback(lambda wallet: {"wallet": wallet}) return d d1 = get_default_data_rate() d2 = get_wallet() def combine_results(results): r = {} for success, result in results: if success is True: r.update(result) return r def create_session(results): alert.info("Databases loaded.") self.session = LBRYSession(results['default_data_payment_rate'], db_dir=self.db_dir, lbryid=self.lbryid, blob_dir=self.blobfile_dir, dht_node_port=self.dht_node_port, known_dht_nodes=self.known_dht_nodes, peer_port=self.peer_port, use_upnp=self.use_upnp, wallet=results['wallet']) dl = defer.DeferredList([d1, d2], fireOnOneErrback=True) dl.addCallback(combine_results) dl.addCallback(create_session) dl.addCallback(lambda _: self.session.setup()) dl.addCallback(lambda _: self.check_first_run()) dl.addCallback(self._show_first_run_result) return dl def check_first_run(self): d = self.session.wallet.is_first_run() d.addCallback(lambda is_first_run: self._do_first_run() if is_first_run else 0.0) return d def _do_first_run(self): d = self.session.wallet.get_new_address() def send_request(url, data): r = requests.post(url, json=data) if 
r.status_code == 200: return r.json()['credits_sent'] return 0.0 def log_error(err): log.warning("unable to request free credits. %s", err.getErrorMessage()) return 0.0 def request_credits(address): url = "http://credreq.lbry.io/requestcredits" data = {"address": address} d = threads.deferToThread(send_request, url, data) d.addErrback(log_error) return d d.addCallback(request_credits) return d @staticmethod def _show_first_run_result(credits_received): if credits_received != 0.0: points_string = locale.format_string("%.2f LBC", (round(credits_received, 2),), grouping=True) alert.info("\n\nThank you for testing the alpha version of LBRY!\n\n" "You have been given %s for free because we love you.\n" "Please give them a few minutes to show up while you\n" "catch up with our blockchain.\n", points_string) def _setup_lbry_file_manager(self): self.lbry_file_metadata_manager = DBLBRYFileMetadataManager(self.db_dir) d = self.lbry_file_metadata_manager.setup() def set_lbry_file_manager(): self.lbry_file_manager = LBRYFileManager(self.session, self.lbry_file_metadata_manager, self.sd_identifier) return self.lbry_file_manager.setup() d.addCallback(lambda _: set_lbry_file_manager()) return d def _setup_lbry_file_opener(self): stream_info_manager = TempLBRYFileMetadataManager() downloader_factory = LBRYFileOpenerFactory(self.session.peer_finder, self.session.rate_limiter, self.session.blob_manager, stream_info_manager, self.session.wallet) self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, downloader_factory) return defer.succeed(True) def _setup_control_handlers(self): handlers = [ ApplicationStatusFactory(self.session.rate_limiter, self.session.dht_node), GetWalletBalancesFactory(self.session.wallet), ModifyApplicationDefaultsFactory(self), ShutDownFactory(self), PeerStatsAndSettingsChooserFactory(self.session.peer_manager), LBRYFileStatusFactory(self.lbry_file_manager), AddStreamFromSDFactory(self.sd_identifier, self.session.base_payment_rate_manager, self.session.wallet), DeleteLBRYFileChooserFactory(self.lbry_file_metadata_manager, self.session.blob_manager, self.lbry_file_manager), ToggleLBRYFileRunningChooserFactory(self.lbry_file_manager), CreateLBRYFileFactory(self.session, self.lbry_file_manager), PublishStreamDescriptorChooserFactory(self.lbry_file_metadata_manager, self.session.blob_manager), ShowPublishedSDHashesChooserFactory(self.lbry_file_metadata_manager, self.lbry_file_manager), CreatePlainStreamDescriptorChooserFactory(self.lbry_file_manager), ShowLBRYFileStreamHashChooserFactory(self.lbry_file_manager), ModifyLBRYFileOptionsChooserFactory(self.lbry_file_manager), AddStreamFromHashFactory(self.sd_identifier, self.session, self.session.wallet), StatusFactory(self, self.session.rate_limiter, self.lbry_file_manager, self.session.blob_manager, self.session.wallet if not self.fake_wallet else None), ImmediateAnnounceAllBlobsFactory(self.session.blob_manager) ] self.add_control_handlers(handlers) if not self.fake_wallet: lbrycrd_handlers = [ AddStreamFromLBRYcrdNameFactory(self.sd_identifier, self.session, self.session.wallet), ClaimNameFactory(self.session.wallet, self.lbry_file_manager, self.session.blob_manager), GetNewWalletAddressFactory(self.session.wallet), PublishFactory(self.session, self.lbry_file_manager, self.session.wallet), BlockchainStatusFactory(self.session.wallet) ] self.add_control_handlers(lbrycrd_handlers) if self.peer_port is not None: server_handlers = [ ShowServerStatusFactory(self), ModifyServerSettingsFactory(self), ] 
self.add_control_handlers(server_handlers) def _setup_query_handlers(self): handlers = [ #CryptBlobInfoQueryHandlerFactory(self.lbry_file_metadata_manager, self.session.wallet, # self._server_payment_rate_manager), BlobAvailabilityHandlerFactory(self.session.blob_manager), #BlobRequestHandlerFactory(self.session.blob_manager, self.session.wallet, # self._server_payment_rate_manager), self.session.wallet.get_wallet_info_query_handler_factory(), ] def get_blob_request_handler_factory(rate): self.blob_request_payment_rate_manager = PaymentRateManager( self.session.base_payment_rate_manager, rate ) handlers.append(BlobRequestHandlerFactory(self.session.blob_manager, self.session.wallet, self.blob_request_payment_rate_manager)) d1 = self.settings.get_server_data_payment_rate() d1.addCallback(get_blob_request_handler_factory) dl = defer.DeferredList([d1]) dl.addCallback(lambda _: self.add_query_handlers(handlers)) return dl def _load_plugins(self): d = threads.deferToThread(self.plugin_manager.collectPlugins) def setup_plugins(): ds = [] for plugin in self.plugin_manager.getAllPlugins(): self.plugin_objects.append(plugin.plugin_object) ds.append(plugin.plugin_object.setup(self)) return defer.DeferredList(ds) d.addCallback(lambda _: setup_plugins()) return d def _stop_plugins(self): ds = [] for plugin_object in self.plugin_objects: ds.append(defer.maybeDeferred(plugin_object.stop)) return defer.DeferredList(ds) def _setup_server(self): def restore_running_status(running): if running is True: return self.start_server() return defer.succeed(True) dl = self.settings.get_server_running_status() dl.addCallback(restore_running_status) return dl def start_server(self): if self.peer_port is not None: server_factory = ServerProtocolFactory(self.session.rate_limiter, self.query_handlers, self.session.peer_manager) from twisted.internet import reactor try: self.lbry_server_port = reactor.listenTCP(self.peer_port, server_factory) except error.CannotListenError as e: import traceback log.error("Couldn't bind to port %d. %s", self.peer_port, traceback.format_exc()) raise ValueError("%s lbrynet may already be running on your computer.", str(e)) return defer.succeed(True) def stop_server(self): if self.lbry_server_port is not None: self.lbry_server_port, p = None, self.lbry_server_port return defer.maybeDeferred(p.stopListening) else: return defer.succeed(True) def _setup_controller(self): self.controller = ConsoleControl() stdio.StandardIO(self.controller) logger = logging.getLogger() formatter = logging.Formatter("%(message)s") alert_handler = logging.StreamHandler(self.controller) alert_handler.setFormatter(formatter) alert_handler.addFilter(logging.Filter("lbryalert")) alert_handler.setLevel(logging.DEBUG) logger.addHandler(alert_handler) return defer.succeed(True) def _start_controller(self): return self.controller.start(self.command_handlers) def _shut_down(self): self.plugin_manager = None ds = [] if self.lbry_file_metadata_manager is not None: d = self.lbry_file_metadata_manager.stop() d.addCallback(lambda _: self.lbry_file_manager.stop()) ds.append(d) ds.append(self.stop_server()) ds.append(self._stop_plugins()) dl = defer.DeferredList(ds) return dl
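
# Launch sketch, assumption only: roughly how an entry-point script might wire
# LBRYConsole up. The peer/DHT ports mirror the daemon defaults elsewhere in
# this file and the bootstrap node appears above; everything else is a
# placeholder, not the actual console entry point.
def example_launch_console():
    import os
    from twisted.internet import reactor

    data_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
    created_data_dir = not os.path.exists(data_dir)
    if created_data_dir:
        os.mkdir(data_dir)

    console = LBRYConsole(peer_port=3333, dht_node_port=4444,
                          known_dht_nodes=[('104.236.42.182', 4000)],
                          fake_wallet=False, lbrycrd_conf=None, lbrycrd_dir=None,
                          use_upnp=True, data_dir=data_dir,
                          created_data_dir=created_data_dir, lbrycrdd_path=None)

    def begin():
        d = console.start()
        d.addErrback(lambda _: reactor.stop())

    reactor.callWhenRunning(begin)
    reactor.addSystemEventTrigger('before', 'shutdown', console.shut_down)
    reactor.run()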
class LBRYStdinUploader():
    """This class reads from standard in, creates a stream, and makes it available on the network."""

    def __init__(self, peer_port, dht_node_port, known_dht_nodes,
                 stream_info_manager_class=DBLiveStreamMetadataManager,
                 blob_manager_class=TempBlobManager):
        """
        @param peer_port: the network port on which to listen for peers

        @param dht_node_port: the network port on which to listen for nodes in the DHT

        @param known_dht_nodes: a list of (ip_address, dht_port) which will be used to join the DHT network
        """
        self.peer_port = peer_port
        self.lbry_server_port = None
        self.session = LBRYSession(blob_manager_class=blob_manager_class,
                                   stream_info_manager_class=stream_info_manager_class,
                                   dht_node_class=Node, dht_node_port=dht_node_port,
                                   known_dht_nodes=known_dht_nodes,
                                   peer_port=self.peer_port, use_upnp=False)
        self.payment_rate_manager = BaseLiveStreamPaymentRateManager()

    def start(self):
        """Initialize the session and start listening on the peer port"""
        d = self.session.setup()
        d.addCallback(lambda _: self._start())
        return d

    def _start(self):
        self._start_server()
        return True

    def _start_server(self):
        query_handler_factories = [
            CryptBlobInfoQueryHandlerFactory(self.stream_info_manager, self.session.wallet,
                                             self.payment_rate_manager),
            BlobAvailabilityHandlerFactory(self.session.blob_manager),
            BlobRequestHandlerFactory(self.session.blob_manager, self.session.wallet,
                                      self.payment_rate_manager),
            self.session.wallet.get_wallet_info_query_handler_factory()
        ]
        self.server_factory = ServerProtocolFactory(self.session.rate_limiter,
                                                    query_handler_factories,
                                                    self.session.peer_manager)
        from twisted.internet import reactor
        self.lbry_server_port = reactor.listenTCP(self.peer_port, self.server_factory)

    def start_live_stream(self, stream_name):
        """Create the stream and start reading from stdin

        @param stream_name: a string, the suggested name of this stream
        """
        stream_creator_helper = StdOutLiveStreamCreator(stream_name, self.session.blob_manager,
                                                        self.stream_info_manager)
        d = stream_creator_helper.create_and_publish_stream_descriptor()

        def print_sd_hash(sd_hash):
            print "Stream descriptor hash:", sd_hash

        d.addCallback(print_sd_hash)
        d.addCallback(lambda _: stream_creator_helper.start_streaming())
        return d

    def shut_down(self):
        """End the session and stop listening on the server port"""
        d = self.session.shut_down()
        d.addCallback(lambda _: self._shut_down())
        return d

    def _shut_down(self):
        if self.lbry_server_port is not None:
            d = defer.maybeDeferred(self.lbry_server_port.stopListening)
        else:
            d = defer.succeed(True)
        return d
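
# Usage sketch, not part of the original module: publish whatever arrives on
# standard in as a live stream, e.g. `tail -f some.log | python thisscript.py`.
# The ports, the bootstrap node and the stream name are placeholders.
def example_run_stdin_uploader():
    from twisted.internet import reactor

    uploader = LBRYStdinUploader(5566, 5567, [('127.0.0.1', 4000)])

    def begin():
        d = uploader.start()                 # session setup + listen on the peer port
        d.addCallback(lambda _: uploader.start_live_stream("example-live-stream"))
        d.addErrback(lambda err: err.printTraceback())
        d.addBoth(lambda _: uploader.shut_down())
        d.addBoth(lambda _: reactor.stop())

    reactor.callWhenRunning(begin)
    reactor.run()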
class LBRYDaemon(xmlrpc.XMLRPC): """ LBRYnet daemon """ def setup(self): def _set_vars(): self.fetcher = None self.current_db_revision = 1 self.run_server = True self.session = None self.known_dht_nodes = [('104.236.42.182', 4000)] self.db_dir = os.path.join(os.path.expanduser("~"), ".lbrynet") self.blobfile_dir = os.path.join(self.db_dir, "blobfiles") self.peer_port = 3333 self.dht_node_port = 4444 self.first_run = False self.current_db_revision = 1 if os.name == "nt": from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle self.download_directory = get_path(FOLDERID.Downloads, UserHandle.current) self.wallet_dir = os.path.join(get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbrycrd") elif sys.platform == "darwin": self.download_directory = os.path.join(os.path.expanduser("~"), 'Downloads') self.wallet_dir = os.path.join(os.path.expanduser("~"), "Library/Application Support/lbrycrd") else: self.wallet_dir = os.path.join(os.path.expanduser("~"), ".lbrycrd") self.download_directory = os.path.join(os.path.expanduser("~"), 'Downloads') self.wallet_conf = os.path.join(self.wallet_dir, "lbrycrd.conf") self.wallet_user = None self.wallet_password = None self.sd_identifier = StreamDescriptorIdentifier() self.stream_info_manager = TempLBRYFileMetadataManager() self.wallet_rpc_port = 8332 self.downloads = [] self.stream_frames = [] self.default_blob_data_payment_rate = MIN_BLOB_DATA_PAYMENT_RATE self.use_upnp = True self.start_lbrycrdd = True if os.name == "nt": self.lbrycrdd_path = "lbrycrdd.exe" else: self.lbrycrdd_path = "./lbrycrdd" self.delete_blobs_on_remove = True self.blob_request_payment_rate_manager = None self.lbry_file_metadata_manager = None self.lbry_file_manager = None self.settings = LBRYSettings(self.db_dir) self.wallet_type = "lbrycrd" self.lbrycrd_conf = os.path.join(self.wallet_dir, "lbrycrd.conf") self.autofetcher_conf = os.path.join(self.wallet_dir, "autofetcher.conf") self.rpc_conn = None self.files = [] self.created_data_dir = False if not os.path.exists(self.db_dir): os.mkdir(self.db_dir) self.created_data_dir = True self.session_settings = None self.data_rate = 0.5 self.max_key_fee = 100.0 return defer.succeed(None) d = defer.Deferred() d.addCallback(lambda _: _set_vars()) d.addCallback(lambda _: threads.deferToThread(self._setup_data_directory)) d.addCallback(lambda _: self._check_db_migration()) d.addCallback(lambda _: self._get_settings()) d.addCallback(lambda _: self.get_lbrycrdd_path()) d.addCallback(lambda _: self._get_session()) d.addCallback(lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier)) d.addCallback(lambda _: self._setup_stream_identifier()) d.addCallback(lambda _: self._setup_lbry_file_manager()) d.addCallback(lambda _: self._setup_lbry_file_opener()) d.addCallback(lambda _: self._setup_fetcher()) d.callback(None) return defer.succeed(None) def _update_settings(self): self.data_rate = self.session_settings['data_rate'] self.max_key_fee = self.session_settings['max_key_fee'] def _setup_fetcher(self): self.fetcher = FetcherDaemon(self.session, self.lbry_file_manager, self.lbry_file_metadata_manager, self.session.wallet, self.sd_identifier, self.autofetcher_conf) return defer.succeed(None) def _setup_data_directory(self): print "Loading databases..." 
if self.created_data_dir: db_revision = open(os.path.join(self.db_dir, "db_revision"), mode='w') db_revision.write(str(self.current_db_revision)) db_revision.close() log.debug("Created the db revision file: %s", str(os.path.join(self.db_dir, "db_revision"))) if not os.path.exists(self.blobfile_dir): os.mkdir(self.blobfile_dir) log.debug("Created the blobfile directory: %s", str(self.blobfile_dir)) def _check_db_migration(self): old_revision = 0 db_revision_file = os.path.join(self.db_dir, "db_revision") if os.path.exists(db_revision_file): old_revision = int(open(db_revision_file).read().strip()) if old_revision < self.current_db_revision: from lbrynet.db_migrator import dbmigrator print "Upgrading your databases..." d = threads.deferToThread(dbmigrator.migrate_db, self.db_dir, old_revision, self.current_db_revision) def print_success(old_dirs): success_string = "Finished upgrading the databases. It is now safe to delete the" success_string += " following directories, if you feel like it. It won't make any" success_string += " difference.\nAnyway here they are: " for i, old_dir in enumerate(old_dirs): success_string += old_dir if i + 1 < len(old_dir): success_string += ", " print success_string d.addCallback(print_success) return d return defer.succeed(True) def _get_settings(self): d = self.settings.start() d.addCallback(lambda _: self.settings.get_lbryid()) d.addCallback(self.set_lbryid) d.addCallback(lambda _: self.get_lbrycrdd_path()) return d def set_lbryid(self, lbryid): if lbryid is None: return self._make_lbryid() else: self.lbryid = lbryid def _make_lbryid(self): self.lbryid = generate_id() d = self.settings.save_lbryid(self.lbryid) return d def _setup_lbry_file_manager(self): self.lbry_file_metadata_manager = DBLBRYFileMetadataManager(self.db_dir) d = self.lbry_file_metadata_manager.setup() def set_lbry_file_manager(): self.lbry_file_manager = LBRYFileManager(self.session, self.lbry_file_metadata_manager, self.sd_identifier) return self.lbry_file_manager.setup() d.addCallback(lambda _: set_lbry_file_manager()) return d def _get_session(self): def get_default_data_rate(): d = self.settings.get_default_data_payment_rate() d.addCallback(lambda rate: {"default_data_payment_rate": rate if rate is not None else MIN_BLOB_DATA_PAYMENT_RATE}) return d def get_wallet(): if self.wallet_type == "lbrycrd": lbrycrdd_path = None if self.start_lbrycrdd is True: lbrycrdd_path = self.lbrycrdd_path if not lbrycrdd_path: lbrycrdd_path = self.default_lbrycrdd_path d = defer.succeed(LBRYcrdWallet(self.db_dir, wallet_dir=self.wallet_dir, wallet_conf=self.lbrycrd_conf, lbrycrdd_path=lbrycrdd_path)) else: d = defer.succeed(PTCWallet(self.db_dir)) d.addCallback(lambda wallet: {"wallet": wallet}) return d d1 = get_default_data_rate() d2 = get_wallet() def combine_results(results): r = {} for success, result in results: if success is True: r.update(result) return r def create_session(results): self.session = LBRYSession(results['default_data_payment_rate'], db_dir=self.db_dir, lbryid=self.lbryid, blob_dir=self.blobfile_dir, dht_node_port=self.dht_node_port, known_dht_nodes=self.known_dht_nodes, peer_port=self.peer_port, use_upnp=self.use_upnp, wallet=results['wallet']) self.rpc_conn = self.session.wallet.get_rpc_conn_x() dl = defer.DeferredList([d1, d2], fireOnOneErrback=True) dl.addCallback(combine_results) dl.addCallback(create_session) dl.addCallback(lambda _: self.session.setup()) return dl def get_lbrycrdd_path(self): def get_lbrycrdd_path_conf_file(): lbrycrdd_path_conf_path = 
os.path.join(os.path.expanduser("~"), ".lbrycrddpath.conf") if not os.path.exists(lbrycrdd_path_conf_path): return "" lbrycrdd_path_conf = open(lbrycrdd_path_conf_path) lines = lbrycrdd_path_conf.readlines() return lines d = threads.deferToThread(get_lbrycrdd_path_conf_file) def load_lbrycrdd_path(conf): for line in conf: if len(line.strip()) and line.strip()[0] != "#": self.lbrycrdd_path = line.strip() print self.lbrycrdd_path d.addCallback(load_lbrycrdd_path) return d def _setup_stream_identifier(self): file_saver_factory = LBRYFileSaverFactory(self.session.peer_finder, self.session.rate_limiter, self.session.blob_manager, self.stream_info_manager, self.session.wallet, self.download_directory) self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, file_saver_factory) file_opener_factory = LBRYFileOpenerFactory(self.session.peer_finder, self.session.rate_limiter, self.session.blob_manager, self.stream_info_manager, self.session.wallet) self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, file_opener_factory) return defer.succeed(None) def _setup_lbry_file_manager(self): self.lbry_file_metadata_manager = DBLBRYFileMetadataManager(self.db_dir) d = self.lbry_file_metadata_manager.setup() def set_lbry_file_manager(): self.lbry_file_manager = LBRYFileManager(self.session, self.lbry_file_metadata_manager, self.sd_identifier) return self.lbry_file_manager.setup() d.addCallback(lambda _: set_lbry_file_manager()) return d def _setup_lbry_file_opener(self): downloader_factory = LBRYFileOpenerFactory(self.session.peer_finder, self.session.rate_limiter, self.session.blob_manager, self.stream_info_manager, self.session.wallet) self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, downloader_factory) return defer.succeed(True) def _download_name(self, name): def _disp(stream): print '[' + str(datetime.now()) + ']' + ' Downloading: ' + str(stream.stream_hash) log.debug('[' + str(datetime.now()) + ']' + ' Downloading: ' + str(stream.stream_hash)) return defer.succeed(None) stream = GetStream(self.sd_identifier, self.session, self.session.wallet, self.lbry_file_manager, max_key_fee=self.max_key_fee, data_rate=self.data_rate) self.downloads.append(stream) d = self.session.wallet.get_stream_info_for_name(name) d.addCallback(lambda stream_info: stream.start(stream_info)) d.addCallback(lambda _: _disp(stream)) d.addCallback(lambda _: {'ts': datetime.now(),'name': name}) d.addErrback(lambda err: str(err.getTraceback())) return d def _path_from_name(self, name): d = self.session.wallet.get_stream_info_for_name(name) d.addCallback(lambda stream_info: stream_info['stream_hash']) d.addCallback(lambda stream_hash: [{'stream_hash': stream.stream_hash, 'path': os.path.join(stream.downloader.download_directory, stream.downloader.file_name)} for stream in self.downloads if stream.stream_hash == stream_hash][0]) d.addErrback(lambda _: 'UnknownNameError') return d def _get_downloads(self): downloads = [] for stream in self.downloads: try: downloads.append({'stream_hash': stream.stream_hash, 'path': os.path.join(stream.downloader.download_directory, stream.downloader.file_name)}) except: pass return downloads def _resolve_name(self, name): d = defer.Deferred() d.addCallback(lambda _: self.session.wallet.get_stream_info_for_name(name)) d.addErrback(lambda _: 'UnknownNameError') d.callback(None) return d def xmlrpc_get_settings(self): """ Get LBRY payment settings """ if not self.session_settings: self.session_settings = {'data_rate': self.data_rate, 'max_key_fee': 
self.max_key_fee} return self.session_settings def xmlrpc_set_settings(self, settings): self.session_settings = settings self._update_settings() return 'Set' def xmlrpc_start_fetcher(self): """ Start autofetcher """ self.fetcher.start() return str('Started autofetching') def xmlrpc_stop_fetcher(self): """ Start autofetcher """ self.fetcher.stop() return str('Started autofetching') def xmlrpc_fetcher_status(self): """ Start autofetcher """ return str(self.fetcher.check_if_running()) def xmlrpc_get_balance(self): """ Get LBC balance """ return str(self.session.wallet.wallet_balance) def xmlrpc_stop(self): """ Stop the reactor """ reactor.stop() return defer.succeed('Stopping') def xmlrpc_get_lbry_files(self): """ Get LBRY files @return: Managed LBRY files """ return [[str(i), str(dir(i))] for i in self.lbry_file_manager.lbry_files] def xmlrpc_resolve_name(self, name): """ Resolve stream info from a LBRY uri @param: name """ def _disp(info): log.debug('[' + str(datetime.now()) + ']' + ' Resolved info: ' + str(info)) print '[' + str(datetime.now()) + ']' + ' Resolved info: ' + str(info) return info d = defer.Deferred() d.addCallback(lambda _: self.session.wallet.get_stream_info_for_name(name)) d.addErrback(lambda _: 'UnknownNameError') d.addCallback(_disp) d.callback(None) return d def xmlrpc_get_downloads(self): """ Get files downloaded in this session @return: [{stream_hash, path}] """ downloads = [] for stream in self.downloads: try: downloads.append({'stream_hash': stream.stream_hash, 'path': os.path.join(stream.downloader.download_directory, stream.downloader.file_name)}) except: pass return downloads def xmlrpc_download_name(self, name): """ Download stream from a LBRY uri @param: name """ def _disp(stream): print '[' + str(datetime.now()) + ']' + ' Downloading: ' + str(stream.stream_hash) log.debug('[' + str(datetime.now()) + ']' + ' Downloading: ' + str(stream.stream_hash)) return defer.succeed(None) stream = GetStream(self.sd_identifier, self.session, self.session.wallet, self.lbry_file_manager, max_key_fee=self.max_key_fee, data_rate=self.data_rate) self.downloads.append(stream) d = self.session.wallet.get_stream_info_for_name(name) d.addCallback(lambda stream_info: stream.start(stream_info)) d.addCallback(lambda _: _disp(stream)) d.addCallback(lambda _: {'ts': datetime.now(),'name': name}) d.addErrback(lambda err: str(err.getTraceback())) return d def xmlrpc_path_from_name(self, name): """ Get file path for a downloaded name @param: name @return: {stream_hash, path}: """ d = self.session.wallet.get_stream_info_for_name(name) d.addCallback(lambda stream_info: stream_info['stream_hash']) d.addCallback(lambda stream_hash: [{'stream_hash': stream.stream_hash, 'path': os.path.join(stream.downloader.download_directory, stream.downloader.file_name)} for stream in self.downloads if stream.stream_hash == stream_hash][0]) d.addErrback(lambda _: 'UnknownNameError') return d def xmlrpc_get(self, name): """ Download a name and return the path of the resulting file @param: name: @return: {stream_hash, path}: """ d = defer.Deferred(None) d.addCallback(lambda _: self._download_name(name)) d.addCallback(lambda _: self._path_from_name(name)) d.callback(None) return d
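
# Side note as a small sketch: several methods above build an unfired Deferred,
# attach callbacks, and then fire it with d.callback(None). That is equivalent
# to starting the chain from defer.succeed(None); for example, _resolve_name
# could be written like this (illustrative rewrite, not the original code):
def example_resolve_name(session, name):
    d = defer.succeed(None)
    d.addCallback(lambda _: session.wallet.get_stream_info_for_name(name))
    d.addErrback(lambda _: 'UnknownNameError')
    return d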
class TestStreamify(TestCase):

    def setUp(self):
        self.session = None
        self.stream_info_manager = None
        self.lbry_file_manager = None
        self.addCleanup(self.take_down_env)

    def take_down_env(self):
        d = defer.succeed(True)
        if self.lbry_file_manager is not None:
            d.addCallback(lambda _: self.lbry_file_manager.stop())
        if self.session is not None:
            d.addCallback(lambda _: self.session.shut_down())
        if self.stream_info_manager is not None:
            d.addCallback(lambda _: self.stream_info_manager.stop())

        def delete_test_env():
            shutil.rmtree('client')

        d.addCallback(lambda _: threads.deferToThread(delete_test_env))
        return d

    def test_create_stream(self):
        wallet = FakeWallet()
        peer_manager = PeerManager()
        peer_finder = FakeTwoPeerFinder(5553, peer_manager)
        hash_announcer = FakeAnnouncer()
        rate_limiter = DummyRateLimiter()
        sd_identifier = StreamDescriptorIdentifier()

        db_dir = "client"
        blob_dir = os.path.join(db_dir, "blobfiles")
        os.mkdir(db_dir)
        os.mkdir(blob_dir)

        self.session = LBRYSession(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd",
                                   peer_finder=peer_finder, hash_announcer=hash_announcer,
                                   blob_dir=blob_dir, peer_port=5553, use_upnp=False,
                                   rate_limiter=rate_limiter, wallet=wallet)

        self.stream_info_manager = TempLBRYFileMetadataManager()

        self.lbry_file_manager = LBRYFileManager(self.session, self.stream_info_manager, sd_identifier)

        d = self.session.setup()
        d.addCallback(lambda _: self.stream_info_manager.setup())
        d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier))
        d.addCallback(lambda _: self.lbry_file_manager.setup())

        def verify_equal(sd_info):
            self.assertEqual(sd_info, test_create_stream_sd_file)

        def verify_stream_descriptor_file(stream_hash):
            d = get_sd_info(self.lbry_file_manager.stream_info_manager, stream_hash, True)
            d.addCallback(verify_equal)
            return d

        def iv_generator():
            iv = 0
            while 1:
                iv += 1
                yield "%016d" % iv

        def create_stream():
            test_file = GenFile(5209343, b''.join([chr(i + 3) for i in xrange(0, 64, 6)]))
            d = create_lbry_file(self.session, self.lbry_file_manager, "test_file", test_file,
                                 key="0123456701234567", iv_generator=iv_generator())
            return d

        d.addCallback(lambda _: create_stream())
        d.addCallback(verify_stream_descriptor_file)
        return d

    def test_create_and_combine_stream(self):
        wallet = FakeWallet()
        peer_manager = PeerManager()
        peer_finder = FakeTwoPeerFinder(5553, peer_manager)
        hash_announcer = FakeAnnouncer()
        rate_limiter = DummyRateLimiter()
        sd_identifier = StreamDescriptorIdentifier()

        db_dir = "client"
        blob_dir = os.path.join(db_dir, "blobfiles")
        os.mkdir(db_dir)
        os.mkdir(blob_dir)

        self.session = LBRYSession(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd",
                                   peer_finder=peer_finder, hash_announcer=hash_announcer,
                                   blob_dir=blob_dir, peer_port=5553, use_upnp=False,
                                   rate_limiter=rate_limiter, wallet=wallet)

        self.stream_info_manager = DBLBRYFileMetadataManager(self.session.db_dir)

        self.lbry_file_manager = LBRYFileManager(self.session, self.stream_info_manager, sd_identifier)

        def start_lbry_file(lbry_file):
            logging.debug("Calling lbry_file.start()")
            d = lbry_file.start()
            return d

        def combine_stream(stream_hash):
            prm = PaymentRateManager(self.session.base_payment_rate_manager)
            d = self.lbry_file_manager.add_lbry_file(stream_hash, prm)
            d.addCallback(start_lbry_file)

            def check_md5_sum():
                f = open('test_file')
                hashsum = MD5.new()
                hashsum.update(f.read())
                self.assertEqual(hashsum.hexdigest(), "68959747edc73df45e45db6379dd7b3b")

            d.addCallback(lambda _: check_md5_sum())
            return d

        def create_stream():
            test_file = GenFile(53209343, b''.join([chr(i + 5) for i in xrange(0, 64, 6)]))
            return create_lbry_file(self.session, self.lbry_file_manager, "test_file", test_file,
                                    suggested_file_name="test_file")

        d = self.session.setup()
        d.addCallback(lambda _: self.stream_info_manager.setup())
        d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier))
        d.addCallback(lambda _: self.lbry_file_manager.setup())
        d.addCallback(lambda _: create_stream())
        d.addCallback(combine_stream)
        return d
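
# Helper sketch pulled out of the inline checks above for clarity (assumption:
# MD5 here is Crypto.Hash.MD5, which is what the MD5.new() calls suggest). The
# same pattern recurs in the transfer tests below: write a file out through the
# stack under test, then compare it against a known digest.
def example_file_md5_matches(path, expected_hexdigest):
    f = open(path)
    try:
        hashsum = MD5.new()
        hashsum.update(f.read())
    finally:
        f.close()
    return hashsum.hexdigest() == expected_hexdigest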
class TestTransfer(TestCase): def setUp(self): self.server_processes = [] self.session = None self.stream_info_manager = None self.lbry_file_manager = None self.addCleanup(self.take_down_env) def take_down_env(self): d = defer.succeed(True) if self.lbry_file_manager is not None: d.addCallback(lambda _: self.lbry_file_manager.stop()) if self.session is not None: d.addCallback(lambda _: self.session.shut_down()) if self.stream_info_manager is not None: d.addCallback(lambda _: self.stream_info_manager.stop()) def delete_test_env(): dirs = ['server', 'server1', 'server2', 'client'] files = ['test_file'] for di in dirs: if os.path.exists(di): shutil.rmtree(di) for f in files: if os.path.exists(f): os.remove(f) for p in self.server_processes: p.terminate() return True d.addCallback(lambda _: threads.deferToThread(delete_test_env)) return d @staticmethod def wait_for_dead_event(dead_event): from twisted.internet import reactor d = defer.Deferred() def stop(): dead_check.stop() if stop_call.active(): stop_call.cancel() d.callback(True) def check_if_dead_event_set(): if dead_event.is_set(): logging.debug("Dead event has been found set") stop() def done_waiting(): logging.warning("Dead event has not been found set and timeout has expired") stop() dead_check = task.LoopingCall(check_if_dead_event_set) dead_check.start(.1) stop_call = reactor.callLater(15, done_waiting) return d @staticmethod def wait_for_hash_from_queue(hash_queue): logging.debug("Waiting for the sd_hash to come through the queue") d = defer.Deferred() def check_for_start(): if hash_queue.empty() is False: logging.debug("Client start event has been found set") start_check.stop() d.callback(hash_queue.get(False)) else: logging.debug("Client start event has NOT been found set") start_check = task.LoopingCall(check_for_start) start_check.start(1.0) return d def test_lbry_transfer(self): sd_hash_queue = Queue() kill_event = Event() dead_event = Event() uploader = Process(target=start_lbry_uploader, args=(sd_hash_queue, kill_event, dead_event)) uploader.start() self.server_processes.append(uploader) logging.debug("Testing transfer") wallet = FakeWallet() peer_manager = PeerManager() peer_finder = FakePeerFinder(5553, peer_manager) hash_announcer = FakeAnnouncer() rate_limiter = DummyRateLimiter() sd_identifier = StreamDescriptorIdentifier() db_dir = "client" blob_dir = os.path.join(db_dir, "blobfiles") os.mkdir(db_dir) os.mkdir(blob_dir) self.session = LBRYSession(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd", peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet) self.stream_info_manager = TempLBRYFileMetadataManager() self.lbry_file_manager = LBRYFileManager(self.session, self.stream_info_manager, sd_identifier) def make_downloader(metadata, prm): info_validator = metadata.validator options = metadata.options factories = metadata.factories chosen_options = [o.default_value for o in options.get_downloader_options(info_validator, prm)] return factories[0].make_downloader(metadata, chosen_options, prm) def download_file(sd_hash): prm = PaymentRateManager(self.session.base_payment_rate_manager) d = download_sd_blob(self.session, sd_hash, prm) d.addCallback(sd_identifier.get_metadata_for_sd_blob) d.addCallback(make_downloader, prm) d.addCallback(lambda downloader: downloader.start()) return d def check_md5_sum(): f = open('test_file') hashsum = MD5.new() hashsum.update(f.read()) self.assertEqual(hashsum.hexdigest(), 
"4ca2aafb4101c1e42235aad24fbb83be") def start_transfer(sd_hash): logging.debug("Starting the transfer") d = self.session.setup() d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier)) d.addCallback(lambda _: self.lbry_file_manager.setup()) d.addCallback(lambda _: download_file(sd_hash)) d.addCallback(lambda _: check_md5_sum()) return d def stop(arg): if isinstance(arg, Failure): logging.debug("Client is stopping due to an error. Error: %s", arg.getTraceback()) else: logging.debug("Client is stopping normally.") kill_event.set() logging.debug("Set the kill event") d = self.wait_for_dead_event(dead_event) def print_shutting_down(): logging.info("Client is shutting down") d.addCallback(lambda _: print_shutting_down()) d.addCallback(lambda _: arg) return d d = self.wait_for_hash_from_queue(sd_hash_queue) d.addCallback(start_transfer) d.addBoth(stop) return d def test_live_transfer(self): sd_hash_queue = Queue() kill_event = Event() dead_event = Event() server_args = (sd_hash_queue, kill_event, dead_event) server = Process(target=start_live_server, args=server_args) server.start() self.server_processes.append(server) wallet = FakeWallet() peer_manager = PeerManager() peer_finder = FakePeerFinder(5553, peer_manager) hash_announcer = FakeAnnouncer() rate_limiter = DummyRateLimiter() sd_identifier = StreamDescriptorIdentifier() db_dir = "client" os.mkdir(db_dir) self.session = LBRYSession(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd", peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=None, peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet) self.stream_info_manager = TempLiveStreamMetadataManager(hash_announcer) d = self.wait_for_hash_from_queue(sd_hash_queue) def create_downloader(metadata, prm): info_validator = metadata.validator options = metadata.options factories = metadata.factories chosen_options = [o.default_value for o in options.get_downloader_options(info_validator, prm)] return factories[0].make_downloader(metadata, chosen_options, prm) def start_lbry_file(lbry_file): lbry_file = lbry_file logging.debug("Calling lbry_file.start()") return lbry_file.start() def download_stream(sd_blob_hash): logging.debug("Downloaded the sd blob. Reading it now") prm = PaymentRateManager(self.session.base_payment_rate_manager) d = download_sd_blob(self.session, sd_blob_hash, prm) d.addCallback(sd_identifier.get_metadata_for_sd_blob) d.addCallback(create_downloader, prm) d.addCallback(start_lbry_file) return d def do_download(sd_blob_hash): logging.debug("Starting the download") d = self.session.setup() d.addCallback(lambda _: enable_live_stream()) d.addCallback(lambda _: download_stream(sd_blob_hash)) return d def enable_live_stream(): base_live_stream_payment_rate_manager = BaseLiveStreamPaymentRateManager( MIN_BLOB_INFO_PAYMENT_RATE ) add_live_stream_to_sd_identifier(sd_identifier, base_live_stream_payment_rate_manager) add_full_live_stream_downloader_to_sd_identifier(self.session, self.stream_info_manager, sd_identifier, base_live_stream_payment_rate_manager) d.addCallback(do_download) def check_md5_sum(): f = open('test_file') hashsum = MD5.new() hashsum.update(f.read()) self.assertEqual(hashsum.hexdigest(), "215b177db8eed86d028b37e5cbad55c7") d.addCallback(lambda _: check_md5_sum()) def stop(arg): if isinstance(arg, Failure): logging.debug("Client is stopping due to an error. 
Error: %s", arg.getTraceback()) else: logging.debug("Client is stopping normally.") kill_event.set() logging.debug("Set the kill event") d = self.wait_for_dead_event(dead_event) def print_shutting_down(): logging.info("Client is shutting down") d.addCallback(lambda _: print_shutting_down()) d.addCallback(lambda _: arg) return d d.addBoth(stop) return d def test_last_blob_retrieval(self): kill_event = Event() dead_event_1 = Event() blob_hash_queue_1 = Queue() blob_hash_queue_2 = Queue() fast_uploader = Process(target=start_blob_uploader, args=(blob_hash_queue_1, kill_event, dead_event_1, False)) fast_uploader.start() self.server_processes.append(fast_uploader) dead_event_2 = Event() slow_uploader = Process(target=start_blob_uploader, args=(blob_hash_queue_2, kill_event, dead_event_2, True)) slow_uploader.start() self.server_processes.append(slow_uploader) logging.debug("Testing transfer") wallet = FakeWallet() peer_manager = PeerManager() peer_finder = FakeTwoPeerFinder(5553, peer_manager) hash_announcer = FakeAnnouncer() rate_limiter = DummyRateLimiter() db_dir = "client" blob_dir = os.path.join(db_dir, "blobfiles") os.mkdir(db_dir) os.mkdir(blob_dir) self.session = LBRYSession(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd", peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet) d1 = self.wait_for_hash_from_queue(blob_hash_queue_1) d2 = self.wait_for_hash_from_queue(blob_hash_queue_2) d = defer.DeferredList([d1, d2], fireOnOneErrback=True) def get_blob_hash(results): self.assertEqual(results[0][1], results[1][1]) return results[0][1] d.addCallback(get_blob_hash) def download_blob(blob_hash): prm = PaymentRateManager(self.session.base_payment_rate_manager) downloader = StandaloneBlobDownloader(blob_hash, self.session.blob_manager, peer_finder, rate_limiter, prm, wallet) d = downloader.download() return d def start_transfer(blob_hash): logging.debug("Starting the transfer") d = self.session.setup() d.addCallback(lambda _: download_blob(blob_hash)) return d d.addCallback(start_transfer) def stop(arg): if isinstance(arg, Failure): logging.debug("Client is stopping due to an error. Error: %s", arg.getTraceback()) else: logging.debug("Client is stopping normally.") kill_event.set() logging.debug("Set the kill event") d1 = self.wait_for_dead_event(dead_event_1) d2 = self.wait_for_dead_event(dead_event_2) dl = defer.DeferredList([d1, d2]) def print_shutting_down(): logging.info("Client is shutting down") dl.addCallback(lambda _: print_shutting_down()) dl.addCallback(lambda _: arg) return dl d.addBoth(stop) return d
class LBRYDownloader(object): def __init__(self): self.session = None self.known_dht_nodes = [('104.236.42.182', 4000)] self.db_dir = os.path.join(os.path.expanduser("~"), ".lbrydownloader") self.blobfile_dir = os.path.join(self.db_dir, "blobfiles") self.peer_port = 3333 self.dht_node_port = 4444 self.run_server = True self.first_run = False self.current_db_revision = 1 if os.name == "nt": from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle self.download_directory = get_path(FOLDERID.Downloads, UserHandle.current) self.wallet_dir = os.path.join(get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbrycrd") else: if sys.platform == 'darwin': self.download_directory = os.path.join(os.path.expanduser("~"), "Downloads") self.wallet_dir = os.path.join(os.path.expanduser("~"), "Library/Application Support/lbrycrd") else: self.download_directory = os.getcwd() self.wallet_dir = os.path.join(os.path.expanduser("~"), ".lbrycrd") self.wallet_conf = os.path.join(self.wallet_dir, "lbrycrd.conf") self.wallet_user = None self.wallet_password = None self.sd_identifier = StreamDescriptorIdentifier() self.wallet_rpc_port = 8332 self.download_deferreds = [] self.stream_frames = [] self.default_blob_data_payment_rate = MIN_BLOB_DATA_PAYMENT_RATE self.use_upnp = True self.start_lbrycrdd = True if os.name == "nt": self.lbrycrdd_path = "lbrycrdd.exe" else: self.lbrycrdd_path = None self.default_lbrycrdd_path = "./lbrycrdd" self.delete_blobs_on_remove = True self.blob_request_payment_rate_manager = None def start(self): d = self._load_conf_options() d.addCallback(lambda _: threads.deferToThread(self._create_directory)) d.addCallback(lambda _: self._check_db_migration()) d.addCallback(lambda _: self._get_session()) d.addCallback(lambda _: self._setup_stream_info_manager()) d.addCallback(lambda _: self._setup_stream_identifier()) d.addCallback(lambda _: self.start_server()) return d def stop(self): dl = defer.DeferredList(self.download_deferreds) for stream_frame in self.stream_frames: stream_frame.cancel_func() if self.session is not None: dl.addBoth(lambda _: self.stop_server()) dl.addBoth(lambda _: self.session.shut_down()) return dl def get_new_address(self): return self.session.wallet.get_new_address() def _check_db_migration(self): old_revision = 0 db_revision_file = os.path.join(self.db_dir, "db_revision") if os.path.exists(db_revision_file): old_revision = int(open(db_revision_file).read().strip()) if old_revision < self.current_db_revision: if os.name == "nt": import subprocess import sys def run_migrator(): migrator_exe = os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), "dbmigrator", "migrator.exe") print "trying to find the migrator at", migrator_exe si = subprocess.STARTUPINFO si.dwFlags = subprocess.STARTF_USESHOWWINDOW si.wShowWindow = subprocess.SW_HIDE print "trying to run the migrator" migrator_proc = subprocess.Popen([migrator_exe, self.db_dir, str(old_revision), str(self.current_db_revision)], startupinfo=si) print "started the migrator" migrator_proc.wait() print "migrator has returned" return threads.deferToThread(run_migrator) else: from lbrynet.db_migrator import dbmigrator return threads.deferToThread(dbmigrator.migrate_db, self.db_dir, old_revision, self.current_db_revision) return defer.succeed(True) def _load_conf_options(self): def get_lbrycrdd_path_conf_file(): if os.name == "nt": return "" lbrycrdd_path_conf_path = os.path.join(os.path.expanduser("~"), ".lbrycrddpath.conf") if not os.path.exists(lbrycrdd_path_conf_path): return "" lbrycrdd_path_conf = 
open(lbrycrdd_path_conf_path) lines = lbrycrdd_path_conf.readlines() return lines d = threads.deferToThread(get_lbrycrdd_path_conf_file) def load_lbrycrdd_path(conf): for line in conf: if len(line.strip()) and line.strip()[0] != "#": self.lbrycrdd_path = line.strip() d.addCallback(load_lbrycrdd_path) def get_configuration_file(): if os.name == "nt": lbry_conf_path = "lbry.conf" if not os.path.exists(lbry_conf_path): log.debug("Could not read lbry.conf") return "" else: lbry_conf_path = os.path.join(os.path.expanduser("~"), ".lbrynetgui.conf") if not os.path.exists(lbry_conf_path): clean_conf_path = os.path.join(os.path.dirname(__file__), "lbry.conf") shutil.copy(clean_conf_path, lbry_conf_path) lbry_conf = open(lbry_conf_path) log.debug("Loading configuration options from %s", lbry_conf_path) lines = lbry_conf.readlines() log.debug("%s file contents:\n%s", lbry_conf_path, str(lines)) return lines d.addCallback(lambda _: threads.deferToThread(get_configuration_file)) def load_configuration_file(conf): for line in conf: if len(line.strip()) and line.strip()[0] != "#": try: field_name, field_value = map(lambda x: x.strip(), line.strip().split("=", 1)) field_name = field_name.lower() except ValueError: raise ValueError("Invalid configuration line: %s" % line) if field_name == "known_dht_nodes": known_nodes = [] nodes = field_value.split(",") for n in nodes: if n.strip(): try: ip_address, port_string = map(lambda x: x.strip(), n.split(":")) ip_numbers = ip_address.split(".") assert len(ip_numbers) == 4 for ip_num in ip_numbers: num = int(ip_num) assert 0 <= num <= 255 known_nodes.append((ip_address, int(port_string))) except (ValueError, AssertionError): raise ValueError("Expected known nodes in format 192.168.1.1:4000,192.168.1.2:4001. Got %s" % str(field_value)) log.debug("Setting known_dht_nodes to %s", str(known_nodes)) self.known_dht_nodes = known_nodes elif field_name == "run_server": if field_value.lower() == "true": run_server = True elif field_value.lower() == "false": run_server = False else: raise ValueError("run_server must be set to True or False. Got %s" % field_value) log.debug("Setting run_server to %s", str(run_server)) self.run_server = run_server elif field_name == "data_dir": log.debug("Setting data_dir to %s", str(field_value)) self.db_dir = field_value self.blobfile_dir = os.path.join(self.db_dir, "blobfiles") elif field_name == "wallet_dir": log.debug("Setting wallet_dir to %s", str(field_value)) self.wallet_dir = field_value elif field_name == "wallet_conf": log.debug("Setting wallet_conf to %s", str(field_value)) self.wallet_conf = field_value elif field_name == "peer_port": try: peer_port = int(field_value) assert 0 <= peer_port <= 65535 log.debug("Setting peer_port to %s", str(peer_port)) self.peer_port = peer_port except (ValueError, AssertionError): raise ValueError("peer_port must be set to an integer between 1 and 65535. Got %s" % field_value) elif field_name == "dht_port": try: dht_port = int(field_value) assert 0 <= dht_port <= 65535 log.debug("Setting dht_node_port to %s", str(dht_port)) self.dht_node_port = dht_port except (ValueError, AssertionError): raise ValueError("dht_port must be set to an integer between 1 and 65535. Got %s" % field_value) elif field_name == "use_upnp": if field_value.lower() == "true": use_upnp = True elif field_value.lower() == "false": use_upnp = False else: raise ValueError("use_upnp must be set to True or False. 
Got %s" % str(field_value)) log.debug("Setting use_upnp to %s", str(use_upnp)) self.use_upnp = use_upnp elif field_name == "default_blob_data_payment_rate": try: rate = float(field_value) assert rate >= 0.0 log.debug("Setting default_blob_data_payment_rate to %s", str(rate)) self.default_blob_data_payment_rate = rate except (ValueError, AssertionError): raise ValueError("default_blob_data_payment_rate must be a positive floating point number, e.g. 0.5. Got %s" % str(field_value)) elif field_name == "start_lbrycrdd": if field_value.lower() == "true": start_lbrycrdd = True elif field_value.lower() == "false": start_lbrycrdd = False else: raise ValueError("start_lbrycrdd must be set to True or False. Got %s" % field_value) log.debug("Setting start_lbrycrdd to %s", str(start_lbrycrdd)) self.start_lbrycrdd = start_lbrycrdd elif field_name == "lbrycrdd_path": self.lbrycrdd_path = field_value elif field_name == "download_directory": log.debug("Setting download_directory to %s", str(field_value)) self.download_directory = field_value elif field_name == "delete_blobs_on_stream_remove": if field_value.lower() == "true": self.delete_blobs_on_remove = True elif field_value.lower() == "false": self.delete_blobs_on_remove = False else: raise ValueError("delete_blobs_on_stream_remove must be set to True or False") else: log.warning("Got unknown configuration field: %s", field_name) d.addCallback(load_configuration_file) return d def _create_directory(self): if not os.path.exists(self.db_dir): os.makedirs(self.db_dir) db_revision = open(os.path.join(self.db_dir, "db_revision"), mode='w') db_revision.write(str(self.current_db_revision)) db_revision.close() log.debug("Created the configuration directory: %s", str(self.db_dir)) if not os.path.exists(self.blobfile_dir): os.makedirs(self.blobfile_dir) log.debug("Created the data directory: %s", str(self.blobfile_dir)) if os.name == "nt": if not os.path.exists(self.wallet_dir): os.makedirs(self.wallet_dir) if not os.path.exists(self.wallet_conf): lbrycrd_conf = open(self.wallet_conf, mode='w') self.wallet_user = "******" lbrycrd_conf.write("rpcuser=%s\n" % self.wallet_user) self.wallet_password = binascii.hexlify(Random.new().read(20)) lbrycrd_conf.write("rpcpassword=%s\n" % self.wallet_password) lbrycrd_conf.write("server=1\n") lbrycrd_conf.close() else: lbrycrd_conf = open(self.wallet_conf) for l in lbrycrd_conf: if l.startswith("rpcuser="******"rpcpassword="******"rpcport="): self.wallet_rpc_port = int(l[8:-1].rstrip('\n')) def _get_session(self): lbrycrdd_path = None if self.start_lbrycrdd is True: lbrycrdd_path = self.lbrycrdd_path if not lbrycrdd_path: lbrycrdd_path = self.default_lbrycrdd_path if sys.platform == 'darwin': os.chdir("/Applications/LBRY.app/Contents/Resources") wallet = LBRYcrdWallet(self.db_dir, wallet_dir=self.wallet_dir, wallet_conf=self.wallet_conf, lbrycrdd_path=lbrycrdd_path) peer_port = None if self.run_server: peer_port = self.peer_port self.session = LBRYSession(self.default_blob_data_payment_rate, db_dir=self.db_dir, blob_dir=self.blobfile_dir, use_upnp=self.use_upnp, wallet=wallet, known_dht_nodes=self.known_dht_nodes, dht_node_port=self.dht_node_port, peer_port=peer_port) return self.session.setup() def _setup_stream_info_manager(self): self.stream_info_manager = TempLBRYFileMetadataManager() return defer.succeed(True) def start_server(self): if self.run_server: self.blob_request_payment_rate_manager = PaymentRateManager( self.session.base_payment_rate_manager, self.default_blob_data_payment_rate ) handlers = [ 
BlobAvailabilityHandlerFactory(self.session.blob_manager), self.session.wallet.get_wallet_info_query_handler_factory(), BlobRequestHandlerFactory(self.session.blob_manager, self.session.wallet, self.blob_request_payment_rate_manager) ] server_factory = ServerProtocolFactory(self.session.rate_limiter, handlers, self.session.peer_manager) from twisted.internet import reactor self.lbry_server_port = reactor.listenTCP(self.peer_port, server_factory) return defer.succeed(True) def stop_server(self): if self.lbry_server_port is not None: self.lbry_server_port, p = None, self.lbry_server_port return defer.maybeDeferred(p.stopListening) else: return defer.succeed(True) def _setup_stream_identifier(self): add_lbry_file_to_sd_identifier(self.sd_identifier) file_saver_factory = LBRYFileSaverFactory(self.session.peer_finder, self.session.rate_limiter, self.session.blob_manager, self.stream_info_manager, self.session.wallet, self.download_directory) self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, file_saver_factory) file_opener_factory = LBRYFileOpenerFactory(self.session.peer_finder, self.session.rate_limiter, self.session.blob_manager, self.stream_info_manager, self.session.wallet) self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, file_opener_factory) def check_first_run(self): d = self.session.wallet.check_first_run() d.addCallback(lambda is_first_run: self._do_first_run() if is_first_run else 0.0) return d def _do_first_run(self): d = self.session.wallet.get_new_address() def send_request(url, data): r = requests.post(url, json=data) if r.status_code == 200: return r.json()['credits_sent'] return 0.0 def log_error(err): log.warning("unable to request free credits. %s", err.getErrorMessage()) return 0.0 def request_credits(address): url = "http://credreq.lbry.io/requestcredits" data = {"address": address} d = threads.deferToThread(send_request, url, data) d.addErrback(log_error) return d d.addCallback(request_credits) return d def _resolve_name(self, uri): return self.session.wallet.get_stream_info_for_name(uri) def download_stream(self, stream_frame, uri): resolve_d = self._resolve_name(uri) stream_frame.show_metadata_status("resolving name...") stream_frame.cancel_func = resolve_d.cancel payment_rate_manager = PaymentRateManager(self.session.base_payment_rate_manager) def update_stream_name(value): if 'name' in value: stream_frame.show_name(value['name']) if 'description' in value: stream_frame.show_description(value['description']) if 'thumbnail' in value: stream_frame.show_thumbnail(value['thumbnail']) return value def get_sd_hash(value): if 'stream_hash' in value: return value['stream_hash'] raise UnknownNameError(uri) def get_sd_blob(sd_hash): stream_frame.show_metadata_status("name resolved, fetching metadata...") get_sd_d = StreamDescriptor.download_sd_blob(self.session, sd_hash, payment_rate_manager) get_sd_d.addCallback(self.sd_identifier.get_metadata_for_sd_blob) get_sd_d.addCallbacks(choose_download_factory, bad_sd_blob) return get_sd_d def get_info_from_validator(info_validator): stream_name = None stream_size = None for field, val in info_validator.info_to_show(): if field == "suggested_file_name": stream_name = val elif field == "stream_name" and stream_name is None: stream_name = val elif field == "stream_size": stream_size = int(val) if stream_size is None: stream_size = "unknown" if stream_name is None: stream_name = "unknown" return stream_name, stream_size def choose_download_factory(metadata): #info_validator, options, factories = 
info_and_factories stream_name, stream_size = get_info_from_validator(metadata.validator) if isinstance(stream_size, (int, long)): price = payment_rate_manager.get_effective_min_blob_data_payment_rate() estimated_cost = stream_size * 1.0 / 2**20 * price else: estimated_cost = "unknown" stream_frame.show_stream_metadata(stream_name, stream_size, estimated_cost) available_options = metadata.options.get_downloader_options(metadata.validator, payment_rate_manager) stream_frame.show_download_options(available_options) get_downloader_d = defer.Deferred() def create_downloader(f, chosen_options): def fire_get_downloader_d(downloader): if not get_downloader_d.called: get_downloader_d.callback(downloader) stream_frame.disable_download_buttons() d = f.make_downloader(metadata, chosen_options, payment_rate_manager) d.addCallback(fire_get_downloader_d) for factory in metadata.factories: def choose_factory(f=factory): chosen_options = stream_frame.get_chosen_options() create_downloader(f, chosen_options) stream_frame.add_download_factory(factory, choose_factory) get_downloader_d.addCallback(start_download) return get_downloader_d def show_stream_status(downloader): total_bytes = downloader.get_total_bytes_cached() bytes_left_to_download = downloader.get_bytes_left_to_download() points_paid = payment_rate_manager.points_paid payment_rate = payment_rate_manager.get_effective_min_blob_data_payment_rate() points_remaining = 1.0 * bytes_left_to_download * payment_rate / 2**20 stream_frame.show_progress(total_bytes, bytes_left_to_download, points_paid, points_remaining) def show_finished(arg, downloader): show_stream_status(downloader) stream_frame.show_download_done(payment_rate_manager.points_paid) return arg def start_download(downloader): stream_frame.stream_hash = downloader.stream_hash l = task.LoopingCall(show_stream_status, downloader) l.start(1) d = downloader.start() stream_frame.cancel_func = downloader.stop def stop_looping_call(arg): l.stop() stream_frame.cancel_func = resolve_d.cancel return arg d.addBoth(stop_looping_call) d.addCallback(show_finished, downloader) return d def lookup_failed(err): stream_frame.show_metadata_status("name lookup failed") return err def bad_sd_blob(err): stream_frame.show_metadata_status("Unknown type or badly formed metadata") return err resolve_d.addCallback(update_stream_name) resolve_d.addCallback(get_sd_hash) resolve_d.addCallbacks(get_sd_blob, lookup_failed) def show_err(err): tkMessageBox.showerror(title="Download Error", message=err.getErrorMessage()) log.error(err.getErrorMessage()) stream_frame.show_download_done(payment_rate_manager.points_paid) resolve_d.addErrback(lambda err: err.trap(defer.CancelledError, UnknownNameError, UnknownStreamTypeError, InvalidStreamDescriptorError, InvalidStreamInfoError)) resolve_d.addErrback(show_err) def delete_associated_blobs(): if stream_frame.stream_hash is None or self.delete_blobs_on_remove is False: return defer.succeed(True) d1 = self.stream_info_manager.get_blobs_for_stream(stream_frame.stream_hash) def get_blob_hashes(blob_infos): return [b[0] for b in blob_infos if b[0] is not None] d1.addCallback(get_blob_hashes) d2 = self.stream_info_manager.get_sd_blob_hashes_for_stream(stream_frame.stream_hash) def combine_blob_hashes(results): blob_hashes = [] for success, result in results: if success is True: blob_hashes.extend(result) return blob_hashes def delete_blobs(blob_hashes): return self.session.blob_manager.delete_blobs(blob_hashes) dl = defer.DeferredList([d1, d2], fireOnOneErrback=True) 
dl.addCallback(combine_blob_hashes) dl.addCallback(delete_blobs) return dl resolve_d.addCallback(lambda _: delete_associated_blobs()) self._add_download_deferred(resolve_d, stream_frame) def _add_download_deferred(self, d, stream_frame): self.download_deferreds.append(d) self.stream_frames.append(stream_frame) def remove_from_list(): self.download_deferreds.remove(d) self.stream_frames.remove(stream_frame) d.addBoth(lambda _: remove_from_list())
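# _load_conf_options above accepts a simple "key = value" file in which lines starting
# with '#' are ignored. A sketch that writes an illustrative configuration accepted by
# that parser; the field names come from the parser itself, while every value below is
# an example only, not a recommended or default setting, and the path assumes the
# non-Windows ~/.lbrynetgui.conf location used above:
import os

EXAMPLE_GUI_CONF = "\n".join([
    "# example ~/.lbrynetgui.conf",
    "known_dht_nodes = 104.236.42.182:4000",
    "run_server = True",
    "use_upnp = True",
    "peer_port = 3333",
    "dht_port = 4444",
    "default_blob_data_payment_rate = 0.5",
    "start_lbrycrdd = True",
    "delete_blobs_on_stream_remove = True",
]) + "\n"

def write_example_conf(path=os.path.join(os.path.expanduser("~"), ".lbrynetgui.conf")):
    # only create the file if one does not already exist
    if not os.path.exists(path):
        with open(path, "w") as conf_file:
            conf_file.write(EXAMPLE_GUI_CONF)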
class LBRYConsole: """A class which can upload and download file streams to and from the network""" def __init__( self, peer_port, dht_node_port, known_dht_nodes, wallet_type, lbrycrd_conf, lbrycrd_dir, use_upnp, data_dir, created_data_dir, lbrycrdd_path, start_lbrycrdd, ): """ @param peer_port: the network port on which to listen for peers @param dht_node_port: the network port on which to listen for dht node requests @param known_dht_nodes: a list of (ip_address, dht_port) which will be used to join the DHT network """ self.peer_port = peer_port self.dht_node_port = dht_node_port self.known_dht_nodes = known_dht_nodes self.wallet_type = wallet_type self.lbrycrd_conf = lbrycrd_conf self.lbrycrd_dir = lbrycrd_dir if not self.lbrycrd_dir: self.lbrycrd_dir = os.path.join(os.path.expanduser("~"), ".lbrycrd") if not self.lbrycrd_conf: self.lbrycrd_conf = os.path.join(self.lbrycrd_dir, "lbrycrd.conf") self.lbrycrdd_path = lbrycrdd_path self.default_lbrycrdd_path = "./lbrycrdd" self.start_lbrycrdd = start_lbrycrdd self.use_upnp = use_upnp self.lbry_server_port = None self.session = None self.lbry_file_metadata_manager = None self.lbry_file_manager = None self.db_dir = data_dir self.current_db_revision = 1 self.blobfile_dir = os.path.join(self.db_dir, "blobfiles") self.created_data_dir = created_data_dir self.plugin_manager = PluginManager() self.plugin_manager.setPluginPlaces( [os.path.join(self.db_dir, "plugins"), os.path.join(os.path.dirname(__file__), "plugins")] ) self.command_handlers = [] self.query_handlers = {} self.settings = LBRYSettings(self.db_dir) self.blob_request_payment_rate_manager = None self.lbryid = None self.sd_identifier = StreamDescriptorIdentifier() self.plugin_objects = [] self.db_migration_revisions = None def start(self): """Initialize the session and restore everything to its saved state""" d = self._setup_controller() d.addCallback(lambda _: threads.deferToThread(self._setup_data_directory)) d.addCallback(lambda _: self._check_db_migration()) d.addCallback(lambda _: self._get_settings()) d.addCallback(lambda _: self._get_session()) d.addCallback(lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier)) d.addCallback(lambda _: self._setup_lbry_file_manager()) d.addCallback(lambda _: self._setup_lbry_file_opener()) d.addCallback(lambda _: self._setup_control_handlers()) d.addCallback(lambda _: self._setup_query_handlers()) d.addCallback(lambda _: self._load_plugins()) d.addCallback(lambda _: self._setup_server()) d.addCallback(lambda _: self._start_controller()) d.addErrback(self._show_start_error) return d def _show_start_error(self, error): print error.getErrorMessage() log.error("An error occurred during start up: %s", error.getTraceback()) return error def _show_shutdown_error(self, error): print error.getErrorMessage() log.error("An error occurred during shutdown: %s", error.getTraceback()) return error def shut_down(self): """Stop the session, all currently running streams, and stop the server""" d = self._shut_down() if self.session is not None: d.addCallback(lambda _: self.session.shut_down()) d.addErrback(self._show_shutdown_error) return d def add_control_handlers(self, control_handlers): for control_handler in control_handlers: self.command_handlers.append(control_handler) def add_query_handlers(self, query_handlers): def _set_query_handlers(statuses): from future_builtins import zip for handler, (success, status) in zip(query_handlers, statuses): if success is True: self.query_handlers[handler] = status ds = [] for handler in query_handlers: 
ds.append(self.settings.get_query_handler_status(handler.get_primary_query_identifier())) dl = defer.DeferredList(ds) dl.addCallback(_set_query_handlers) return dl def _setup_data_directory(self): alert.info("Loading databases...") if self.created_data_dir: db_revision = open(os.path.join(self.db_dir, "db_revision"), mode="w") db_revision.write(str(self.current_db_revision)) db_revision.close() log.debug("Created the db revision file: %s", str(os.path.join(self.db_dir, "db_revision"))) if not os.path.exists(self.blobfile_dir): os.mkdir(self.blobfile_dir) log.debug("Created the blobfile directory: %s", str(self.blobfile_dir)) def _check_db_migration(self): old_revision = 0 db_revision_file = os.path.join(self.db_dir, "db_revision") if os.path.exists(db_revision_file): old_revision = int(open(db_revision_file).read().strip()) if old_revision < self.current_db_revision: from lbrynet.db_migrator import dbmigrator print "Upgrading your databases..." d = threads.deferToThread(dbmigrator.migrate_db, self.db_dir, old_revision, self.current_db_revision) def print_success(old_dirs): success_string = "Finished upgrading the databases. It is now safe to delete the" success_string += " following directories, if you feel like it. It won't make any" success_string += " difference.\nAnyway here they are: " for i, old_dir in enumerate(old_dirs): success_string += old_dir if i + 1 < len(old_dirs): success_string += ", " print success_string d.addCallback(print_success) return d return defer.succeed(True) def _get_settings(self): d = self.settings.start() d.addCallback(lambda _: self.settings.get_lbryid()) d.addCallback(self.set_lbryid) d.addCallback(lambda _: self.get_lbrycrdd_path()) return d def get_lbrycrdd_path(self): if not self.start_lbrycrdd: return defer.succeed(None) def get_lbrycrdd_path_conf_file(): lbrycrdd_path_conf_path = os.path.join(os.path.expanduser("~"), ".lbrycrddpath.conf") if not os.path.exists(lbrycrdd_path_conf_path): return "" lbrycrdd_path_conf = open(lbrycrdd_path_conf_path) lines = lbrycrdd_path_conf.readlines() return lines d = threads.deferToThread(get_lbrycrdd_path_conf_file) def load_lbrycrdd_path(conf): for line in conf: if len(line.strip()) and line.strip()[0] != "#": self.lbrycrdd_path = line.strip() d.addCallback(load_lbrycrdd_path) return d def set_lbryid(self, lbryid): if lbryid is None: return self._make_lbryid() else: self.lbryid = lbryid def _make_lbryid(self): self.lbryid = generate_id() d = self.settings.save_lbryid(self.lbryid) return d def _get_session(self): def get_default_data_rate(): d = self.settings.get_default_data_payment_rate() d.addCallback( lambda rate: {"default_data_payment_rate": rate if rate is not None else MIN_BLOB_DATA_PAYMENT_RATE} ) return d def get_wallet(): if self.wallet_type == "lbrycrd": lbrycrdd_path = None if self.start_lbrycrdd is True: lbrycrdd_path = self.lbrycrdd_path if not lbrycrdd_path: lbrycrdd_path = self.default_lbrycrdd_path d = defer.succeed( LBRYcrdWallet( self.db_dir, wallet_dir=self.lbrycrd_dir, wallet_conf=self.lbrycrd_conf, lbrycrdd_path=lbrycrdd_path, ) ) else: d = defer.succeed(PTCWallet(self.db_dir)) d.addCallback(lambda wallet: {"wallet": wallet}) return d d1 = get_default_data_rate() d2 = get_wallet() def combine_results(results): r = {} for success, result in results: if success is True: r.update(result) return r def create_session(results): alert.info("Databases loaded.") self.session = LBRYSession( results["default_data_payment_rate"], db_dir=self.db_dir, lbryid=self.lbryid, blob_dir=self.blobfile_dir, 
dht_node_port=self.dht_node_port, known_dht_nodes=self.known_dht_nodes, peer_port=self.peer_port, use_upnp=self.use_upnp, wallet=results["wallet"], ) dl = defer.DeferredList([d1, d2], fireOnOneErrback=True) dl.addCallback(combine_results) dl.addCallback(create_session) dl.addCallback(lambda _: self.session.setup()) dl.addCallback(lambda _: self.check_first_run()) dl.addCallback(self._show_first_run_result) return dl def check_first_run(self): d = self.session.wallet.check_first_run() d.addCallback(lambda is_first_run: self._do_first_run() if is_first_run else 0.0) return d def _do_first_run(self): d = self.session.wallet.get_new_address() def send_request(url, data): r = requests.post(url, json=data) if r.status_code == 200: return r.json()["credits_sent"] return 0.0 def log_error(err): log.warning("unable to request free credits. %s", err.getErrorMessage()) return 0.0 def request_credits(address): url = "http://credreq.lbry.io/requestcredits" data = {"address": address} d = threads.deferToThread(send_request, url, data) d.addErrback(log_error) return d d.addCallback(request_credits) return d @staticmethod def _show_first_run_result(credits_received): if credits_received != 0.0: points_string = locale.format_string("%.2f LBC", (round(credits_received, 2),), grouping=True) alert.info( "Thank you for using LBRY! You have been given %s for free because we " "love you. Please give them a few minutes to show up while you catch up " "with our blockchain.\nTo check whether you've caught up with the blockchain, " "use the command 'get-blockchain-status'.\nDownloading some files " "may not work until you have downloaded the LBC blockchain.", points_string, ) def _setup_lbry_file_manager(self): self.lbry_file_metadata_manager = DBLBRYFileMetadataManager(self.db_dir) d = self.lbry_file_metadata_manager.setup() def set_lbry_file_manager(): self.lbry_file_manager = LBRYFileManager(self.session, self.lbry_file_metadata_manager, self.sd_identifier) return self.lbry_file_manager.setup() d.addCallback(lambda _: set_lbry_file_manager()) return d def _setup_lbry_file_opener(self): stream_info_manager = TempLBRYFileMetadataManager() downloader_factory = LBRYFileOpenerFactory( self.session.peer_finder, self.session.rate_limiter, self.session.blob_manager, stream_info_manager, self.session.wallet, ) self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, downloader_factory) return defer.succeed(True) def _setup_control_handlers(self): handlers = [ ApplicationStatusFactory(self.session.rate_limiter, self.session.dht_node), GetWalletBalancesFactory(self.session.wallet), ModifyApplicationDefaultsFactory(self), ShutDownFactory(self), PeerStatsAndSettingsChooserFactory(self.session.peer_manager), LBRYFileStatusFactory(self.lbry_file_manager), AddStreamFromSDFactory(self.sd_identifier, self.session.base_payment_rate_manager), DeleteLBRYFileChooserFactory( self.lbry_file_metadata_manager, self.session.blob_manager, self.lbry_file_manager ), ToggleLBRYFileRunningChooserFactory(self.lbry_file_manager), CreateLBRYFileFactory(self.session, self.lbry_file_manager), PublishStreamDescriptorChooserFactory(self.lbry_file_metadata_manager, self.session.blob_manager), ShowPublishedSDHashesChooserFactory(self.lbry_file_metadata_manager, self.lbry_file_manager), CreatePlainStreamDescriptorChooserFactory(self.lbry_file_manager), ShowLBRYFileStreamHashChooserFactory(self.lbry_file_manager), ModifyLBRYFileOptionsChooserFactory(self.lbry_file_manager), AddStreamFromHashFactory(self.sd_identifier, self.session), 
StatusFactory( self, self.session.rate_limiter, self.lbry_file_manager, self.session.blob_manager, self.session.wallet if self.wallet_type == "lbrycrd" else None, ), ] self.add_control_handlers(handlers) if self.wallet_type == "lbrycrd": lbrycrd_handlers = [ AddStreamFromLBRYcrdNameFactory(self.sd_identifier, self.session, self.session.wallet), ClaimNameFactory(self.session.wallet, self.lbry_file_manager, self.session.blob_manager), GetNewWalletAddressFactory(self.session.wallet), PublishFactory(self.session, self.lbry_file_manager, self.session.wallet), BlockchainStatusFactory(self.session.wallet), ] self.add_control_handlers(lbrycrd_handlers) if self.peer_port is not None: server_handlers = [ShowServerStatusFactory(self), ModifyServerSettingsFactory(self)] self.add_control_handlers(server_handlers) def _setup_query_handlers(self): handlers = [ # CryptBlobInfoQueryHandlerFactory(self.lbry_file_metadata_manager, self.session.wallet, # self._server_payment_rate_manager), BlobAvailabilityHandlerFactory(self.session.blob_manager), # BlobRequestHandlerFactory(self.session.blob_manager, self.session.wallet, # self._server_payment_rate_manager), self.session.wallet.get_wallet_info_query_handler_factory(), ] def get_blob_request_handler_factory(rate): self.blob_request_payment_rate_manager = PaymentRateManager(self.session.base_payment_rate_manager, rate) handlers.append( BlobRequestHandlerFactory( self.session.blob_manager, self.session.wallet, self.blob_request_payment_rate_manager ) ) d1 = self.settings.get_server_data_payment_rate() d1.addCallback(get_blob_request_handler_factory) dl = defer.DeferredList([d1]) dl.addCallback(lambda _: self.add_query_handlers(handlers)) return dl def _load_plugins(self): d = threads.deferToThread(self.plugin_manager.collectPlugins) def setup_plugins(): ds = [] for plugin in self.plugin_manager.getAllPlugins(): self.plugin_objects.append(plugin.plugin_object) ds.append(plugin.plugin_object.setup(self)) return defer.DeferredList(ds) d.addCallback(lambda _: setup_plugins()) return d def _stop_plugins(self): ds = [] for plugin_object in self.plugin_objects: ds.append(defer.maybeDeferred(plugin_object.stop)) return defer.DeferredList(ds) def _setup_server(self): def restore_running_status(running): if running is True: return self.start_server() return defer.succeed(True) dl = self.settings.get_server_running_status() dl.addCallback(restore_running_status) return dl def start_server(self): if self.peer_port is not None: server_factory = ServerProtocolFactory( self.session.rate_limiter, self.query_handlers, self.session.peer_manager ) from twisted.internet import reactor try: self.lbry_server_port = reactor.listenTCP(self.peer_port, server_factory) except error.CannotListenError as e: import traceback log.error("Couldn't bind to port %d. 
%s", self.peer_port, traceback.format_exc()) raise ValueError("%s lbrynet may already be running on your computer.", str(e)) return defer.succeed(True) def stop_server(self): if self.lbry_server_port is not None: self.lbry_server_port, p = None, self.lbry_server_port return defer.maybeDeferred(p.stopListening) else: return defer.succeed(True) def _setup_controller(self): self.controller = ConsoleControl() stdio.StandardIO(self.controller) logger = logging.getLogger() formatter = logging.Formatter("%(message)s") alert_handler = logging.StreamHandler(self.controller) alert_handler.setFormatter(formatter) alert_handler.addFilter(logging.Filter("lbryalert")) alert_handler.setLevel(logging.DEBUG) logger.addHandler(alert_handler) return defer.succeed(True) def _start_controller(self): return self.controller.start(self.command_handlers) def _shut_down(self): self.plugin_manager = None ds = [] if self.lbry_file_metadata_manager is not None: d = self.lbry_file_metadata_manager.stop() d.addCallback(lambda _: self.lbry_file_manager.stop()) ds.append(d) ds.append(self.stop_server()) ds.append(self._stop_plugins()) dl = defer.DeferredList(ds) return dl