def load_pickled_state_file(filename):
    """Loads a file from the config directory, attempting backup if original fails to load.

    Candidates are tried in order: current state file, its '.bak' backup, then
    the legacy location directly under the config dir. The first one that
    unpickles successfully wins.

    Params:
        filename (str): Filename to be loaded from config

    Returns:
        state: the unpickled state, or None implicitly if every candidate fails
    """
    from deluge.configmanager import get_config_dir
    filepath = os.path.join(get_config_dir(), 'gtkui_state', filename)
    filepath_bak = filepath + '.bak'
    # Pre-gtkui_state layout kept the file directly in the config dir.
    old_data_filepath = os.path.join(get_config_dir(), filename)
    for _filepath in (filepath, filepath_bak, old_data_filepath):
        log.info('Opening %s for load: %s', filename, _filepath)
        try:
            with open(_filepath, 'rb') as _file:
                # NOTE: pickle.load on these files assumes they are trusted
                # (written by save_pickled_state_file in the same config dir).
                state = pickle.load(_file)
        except (IOError, pickle.UnpicklingError) as ex:
            # Fall through to the next candidate path.
            log.warning('Unable to load %s: %s', _filepath, ex)
        else:
            log.info('Successfully loaded %s: %s', filename, _filepath)
            return state
def __init__(self):
    # Core configuration; also fixes the config dir used for the state paths below.
    self.config = ConfigManager("core.conf")
    # Location of the Deluge 0.5.x state file.
    self.state05_location = os.path.join(get_config_dir(), "persistent.state")
    # Location of the Deluge 1.0 state file.
    self.state10_location = os.path.join(get_config_dir(), "state", "torrents.state")
    if os.path.exists(self.state05_location) and not os.path.exists(self.state10_location):
        # If the 0.5 state file exists and the 1.0 doesn't, then let's upgrade it
        self.upgrade05()
def __init__(self):
    # Core configuration; also fixes the config dir used for the state paths below.
    self.config = ConfigManager("core.conf")
    # Location of the Deluge 0.5.x state file.
    self.state05_location = os.path.join(get_config_dir(), "persistent.state")
    # Location of the Deluge 1.0 state file.
    self.state10_location = os.path.join(get_config_dir(), "state", "torrents.state")
    if os.path.exists(self.state05_location) and not os.path.exists(
            self.state10_location):
        # If the 0.5 state file exists and the 1.0 doesn't, then let's upgrade it
        self.upgrade05()
def getContext(self):
    """Creates an SSL context.

    Returns:
        SSL.Context: context configured with the web UI's private key and
            certificate chain.
    """
    # SECURITY: SSLv3_METHOD is vulnerable (POODLE). Use SSLv23 so the best
    # mutually-supported protocol is negotiated, and explicitly forbid the
    # broken SSLv2/SSLv3 protocols — same approach as start_ssl elsewhere
    # in this codebase.
    ctx = SSL.Context(SSL.SSLv23_METHOD)
    ctx.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
    deluge_web = component.get("DelugeWeb")
    log.debug("Enabling SSL using:")
    log.debug("Pkey: %s", deluge_web.pkey)
    log.debug("Cert: %s", deluge_web.cert)
    ctx.use_privatekey_file(configmanager.get_config_dir(deluge_web.pkey))
    ctx.use_certificate_chain_file(configmanager.get_config_dir(deluge_web.cert))
    return ctx
def getContext(self):
    """Creates an SSL context.

    Returns:
        SSL.Context: context configured with the web UI's private key and
            certificate chain.
    """
    # SECURITY: SSLv3_METHOD is vulnerable (POODLE). Use SSLv23 so the best
    # mutually-supported protocol is negotiated, and explicitly forbid the
    # broken SSLv2/SSLv3 protocols — same approach as start_ssl elsewhere
    # in this codebase.
    ctx = SSL.Context(SSL.SSLv23_METHOD)
    ctx.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
    deluge_web = component.get("DelugeWeb")
    log.debug("Enabling SSL using:")
    log.debug("Pkey: %s", deluge_web.pkey)
    log.debug("Cert: %s", deluge_web.cert)
    ctx.use_privatekey_file(configmanager.get_config_dir(deluge_web.pkey))
    ctx.use_certificate_chain_file(
        configmanager.get_config_dir(deluge_web.cert))
    return ctx
def start_ssl(self):
    """Start listening for HTTPS connections using the configured key/cert."""
    log.debug("Enabling SSL with PKey: %s, Cert: %s", self.pkey, self.cert)
    # Generates the key/cert pair first if they are missing.
    check_ssl_keys()
    # Load the PEM-encoded certificate and private key into pyOpenSSL objects.
    with open(configmanager.get_config_dir(self.cert)) as cert:
        certificate = Certificate.loadPEM(cert.read()).original
    with open(configmanager.get_config_dir(self.pkey)) as pkey:
        private_key = KeyPair.load(pkey.read(), FILETYPE_PEM).original
    options = CertificateOptions(privateKey=private_key, certificate=certificate, method=SSL.SSLv23_METHOD)
    # SSLv23 negotiates the protocol; explicitly disable insecure SSLv2/SSLv3.
    options.getContext().set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
    # Listens on all interfaces (no interface argument given).
    self.socket = reactor.listenSSL(self.port, self.site, options)
    log.info("Serving on %s:%s view at https://127.0.0.1:%s", "0.0.0.0", self.port, self.port)
def start_ssl(self):
    """Start listening for HTTPS connections on self.interface with the configured key/cert."""
    log.debug("Enabling SSL with PKey: %s, Cert: %s", self.pkey, self.cert)
    # Generates the key/cert pair first if they are missing.
    check_ssl_keys()
    # Load the PEM-encoded certificate and private key into pyOpenSSL objects.
    with open(configmanager.get_config_dir(self.cert)) as cert:
        certificate = Certificate.loadPEM(cert.read()).original
    with open(configmanager.get_config_dir(self.pkey)) as pkey:
        private_key = KeyPair.load(pkey.read(), FILETYPE_PEM).original
    options = CertificateOptions(privateKey=private_key, certificate=certificate, method=SSL.SSLv23_METHOD)
    ctx = options.getContext()
    # SSLv23 negotiates the protocol; explicitly disable insecure SSLv2/SSLv3.
    ctx.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
    # Also install any intermediate certificates present in the cert file.
    ctx.use_certificate_chain_file(configmanager.get_config_dir(self.cert))
    self.socket = reactor.listenSSL(self.port, self.site, options, interface=self.interface)
    log.info("Serving on %s:%s view at https://%s:%s", self.interface, self.port, self.interface, self.port)
def on_button_plugin_install(self):
    """Prompt for a plugin egg, install it locally and (if remote) upload it to the daemon."""
    filter = "%s (%s);;%s (%s)" % (_("Plugin Eggs"), "*.egg", _("All files"), "*")
    filename = QtGui.QFileDialog.getOpenFileName(self, _("Select the Plugin"), None, filter)
    if filename:
        import shutil
        import os.path
        filename = QtCore.QDir.toNativeSeparators(filename)
        basename = os.path.basename(filename)
        # Copy the egg into the local plugins directory.
        shutil.copyfile(
            filename,
            os.path.join(configmanager.get_config_dir(), "plugins", basename))
        component.get("PluginManager").scan_for_plugins()

        if not client.is_localhost():
            # We need to send this plugin to the daemon
            import base64
            with open(filename, 'rb') as f:
                # base64.encodestring is deprecated (removed in Python 3.9);
                # b64encode produces equivalent data without line breaks and
                # matches the newer upload_plugin implementation.
                filedump = base64.b64encode(f.read())
            client.core.upload_plugin(basename, filedump)

        client.core.rescan_plugins()
        self._update_plugins()
def write_auth_file(self):
    """Write the accounts to the 'auth' file atomically, keeping a backup.

    Flow: back up the existing file, write to a '.tmp' file, fsync, then move
    the tmp file over the real one. On write failure the backup is restored.
    """
    filename = 'auth'
    filepath = os.path.join(configmanager.get_config_dir(), filename)
    filepath_bak = filepath + '.bak'
    filepath_tmp = filepath + '.tmp'
    try:
        if os.path.isfile(filepath):
            log.debug('Creating backup of %s at: %s', filename, filepath_bak)
            shutil.copy2(filepath, filepath_bak)
    except IOError as ex:
        # Backup failed: don't risk overwriting the only good copy.
        log.error('Unable to backup %s to %s: %s', filepath, filepath_bak, ex)
    else:
        log.info('Saving the %s at: %s', filename, filepath)
        try:
            with open(filepath_tmp, 'w', encoding='utf8') as _file:
                # One 'username:password:authlevel_int' line per account.
                for account in self.__auth.values():
                    _file.write(
                        '%(username)s:%(password)s:%(authlevel_int)s\n' % account.data())
                # Flush + fsync so the move below publishes complete data.
                _file.flush()
                os.fsync(_file.fileno())
            shutil.move(filepath_tmp, filepath)
        except IOError as ex:
            log.error('Unable to save %s: %s', filename, ex)
            if os.path.isfile(filepath_bak):
                log.info('Restoring backup of %s from: %s', filename, filepath_bak)
                shutil.move(filepath_bak, filepath)
    # Re-read whatever ended up on disk so in-memory state matches the file.
    self.__load_auth_file()
def write_auth_file(self):
    """Write the accounts to the 'auth' file atomically, keeping a backup.

    Flow: back up the existing file, write to a '.tmp' file, fsync, then move
    the tmp file over the real one. On write failure the backup is restored.
    """
    filename = 'auth'
    filepath = os.path.join(configmanager.get_config_dir(), filename)
    filepath_bak = filepath + '.bak'
    filepath_tmp = filepath + '.tmp'
    try:
        if os.path.isfile(filepath):
            log.debug('Creating backup of %s at: %s', filename, filepath_bak)
            shutil.copy2(filepath, filepath_bak)
    except IOError as ex:
        # Backup failed: don't risk overwriting the only good copy.
        log.error('Unable to backup %s to %s: %s', filepath, filepath_bak, ex)
    else:
        log.info('Saving the %s at: %s', filename, filepath)
        try:
            with open(filepath_tmp, 'w', encoding='utf8') as _file:
                # One 'username:password:authlevel_int' line per account.
                for account in self.__auth.values():
                    _file.write('%(username)s:%(password)s:%(authlevel_int)s\n' % account.data())
                # Flush + fsync so the move below publishes complete data.
                _file.flush()
                os.fsync(_file.fileno())
            shutil.move(filepath_tmp, filepath)
        except IOError as ex:
            log.error('Unable to save %s: %s', filename, ex)
            if os.path.isfile(filepath_bak):
                log.info('Restoring backup of %s from: %s', filename, filepath_bak)
                shutil.move(filepath_bak, filepath)
    # Re-read whatever ended up on disk so in-memory state matches the file.
    self.__load_auth_file()
def tweak_logging_levels():
    """This function allows tweaking the logging levels for all or some loggers.

    This is mostly usefull for developing purposes hence the contents of the
    file are NOT like regular deluge config file's.

    To use is, create a file named "logging.conf" on your Deluge's config dir
    with contents like for example:
        deluge:warn
        deluge.core:debug
        deluge.plugin:error

    What the above mean is the logger "deluge" will be set to the WARN level,
    the "deluge.core" logger will be set to the DEBUG level and the
    "deluge.plugin" will be set to the ERROR level.

    Remember, one rule per line and this WILL override the setting passed from
    the command line.
    """
    from deluge import configmanager
    logging_config_file = os.path.join(configmanager.get_config_dir(), 'logging.conf')
    if not os.path.isfile(logging_config_file):
        return
    log = logging.getLogger(__name__)
    # log.warn is a deprecated alias for log.warning.
    log.warning("logging.conf found! tweaking logging levels from %s",
                logging_config_file)
    # Use a context manager so the file handle is always closed.
    with open(logging_config_file, 'r') as _file:
        for line in _file:
            line = line.strip()
            # Skip blank lines and comments; previously a blank line or a line
            # without a ':' crashed the tuple unpacking below.
            if not line or line.startswith("#") or ':' not in line:
                continue
            # Split on the first ':' only, so extra colons land in `level`
            # and are rejected by the levels check.
            name, level = line.split(':', 1)
            if level not in levels:
                continue
            log.warning("Setting logger \"%s\" to logging level \"%s\"", name, level)
            setLoggerLevel(level, name)
def _save_session_state(self):
    """Saves the libtorrent session state

    Uses the backup/tmp/fsync/move dance so a crash mid-write never leaves a
    truncated session.state behind.
    """
    filename = 'session.state'
    filepath = get_config_dir(filename)
    filepath_bak = filepath + '.bak'
    filepath_tmp = filepath + '.tmp'
    try:
        if os.path.isfile(filepath):
            log.debug('Creating backup of %s at: %s', filename, filepath_bak)
            shutil.copy2(filepath, filepath_bak)
    except IOError as ex:
        # Backup failed: don't risk overwriting the only good copy.
        log.error('Unable to backup %s to %s: %s', filepath, filepath_bak, ex)
    else:
        log.info('Saving the %s at: %s', filename, filepath)
        try:
            with open(filepath_tmp, 'wb') as _file:
                # Bencoded dump of the libtorrent session state.
                _file.write(lt.bencode(self.session.save_state()))
                _file.flush()
                os.fsync(_file.fileno())
            shutil.move(filepath_tmp, filepath)
        except (IOError, EOFError) as ex:
            log.error('Unable to save %s: %s', filename, ex)
            if os.path.isfile(filepath_bak):
                log.info('Restoring backup of %s from: %s', filename, filepath_bak)
                shutil.move(filepath_bak, filepath)
def save_resume_data_file(self, resume_data=None):
    """
    Saves the resume data file with the contents of self.resume_data.  If
    `resume_data` is None, then we grab the resume_data from the file on
    disk, else, we update `resume_data` with self.resume_data and save
    that to disk.

    :param resume_data: the current resume_data, this will be loaded from disk
        if not provided
    :type resume_data: dict
    """
    # Check to see if we're waiting on more resume data
    if self.num_resume_data or not self.resume_data:
        return

    path = os.path.join(get_config_dir(), "state", "torrents.fastresume")

    # First step is to load the existing file and update the dictionary
    if resume_data is None:
        resume_data = self.load_resume_data_file()

    resume_data.update(self.resume_data)
    self.resume_data = {}

    try:
        log.debug("Saving fastresume file: %s", path)
        # Context manager guarantees the file is closed even if the write
        # raises; the original leaked the handle on the error path.
        with open(path, "wb") as fastresume_file:
            fastresume_file.write(lt.bencode(resume_data))
            fastresume_file.flush()
            os.fsync(fastresume_file.fileno())
    except IOError:
        log.warning("Error trying to save fastresume file")
def upload_plugin(self, filename, path):
    """Upload a plugin to config.

    Copies the plugin into the local plugins dir, then — when connected to a
    remote daemon — also ships it (base64-encoded) to the daemon.

    NOTE(review): returns True synchronously for localhost but a Deferred for
    the remote case; callers appear to handle both — confirm.
    """
    main_deferred = Deferred()
    shutil.copyfile(path, os.path.join(get_config_dir(), 'plugins', filename))
    component.get('Web.PluginManager').scan_for_plugins()
    if client.is_localhost():
        # Local daemon shares the plugins dir; no upload needed.
        client.core.rescan_plugins()
        return True
    with open(path, 'rb') as _file:
        plugin_data = b64encode(_file.read())

    def on_upload_complete(*args):
        client.core.rescan_plugins()
        component.get('Web.PluginManager').scan_for_plugins()
        main_deferred.callback(True)

    def on_upload_error(*args):
        main_deferred.callback(False)

    d = client.core.upload_plugin(filename, plugin_data)
    d.addCallback(on_upload_complete)
    d.addErrback(on_upload_error)
    return main_deferred
def save_pickled_state_file(filename, state):
    """Save a file in the config directory and creates a backup

    Uses the backup/tmp/fsync/move sequence so a crash mid-write never leaves
    a truncated state file behind; a failed write restores the backup.

    Params:
        filename (str): Filename to be saved to config
        state (state): The data to be pickled and written to file
    """
    from deluge.configmanager import get_config_dir
    filepath = os.path.join(get_config_dir(), 'gtkui_state', filename)
    filepath_bak = filepath + '.bak'
    filepath_tmp = filepath + '.tmp'

    try:
        if os.path.isfile(filepath):
            log.debug('Creating backup of %s at: %s', filename, filepath_bak)
            shutil.copy2(filepath, filepath_bak)
    except IOError as ex:
        log.error('Unable to backup %s to %s: %s', filepath, filepath_bak, ex)
    else:
        log.info('Saving the %s at: %s', filename, filepath)
        try:
            with open(filepath_tmp, 'wb') as _file:
                # Pickle the state object. protocol=2 is used for consistency
                # with the other save_pickled_state_file implementation, so the
                # file stays loadable across Python 2/3.
                pickle.dump(state, _file, protocol=2)
                _file.flush()
                os.fsync(_file.fileno())
            shutil.move(filepath_tmp, filepath)
        except (IOError, EOFError, pickle.PicklingError) as ex:
            log.error('Unable to save %s: %s', filename, ex)
            if os.path.isfile(filepath_bak):
                log.info('Restoring backup of %s from: %s', filename, filepath_bak)
                shutil.move(filepath_bak, filepath)
def start_ssl(self):
    """Start listening for HTTPS connections on self.interface."""
    # Generates the key/cert pair first if they are missing.
    check_ssl_keys()
    log.debug('Enabling SSL with PKey: %s, Cert: %s', self.pkey, self.cert)
    cert = configmanager.get_config_dir(self.cert)
    pkey = configmanager.get_config_dir(self.pkey)
    self.socket = reactor.listenSSL(
        self.port,
        self.site,
        get_context_factory(cert, pkey),
        interface=self.interface,
    )
    ip = self.socket.getHost().host
    # Bracket IPv6 literals so the logged URL is valid.
    ip = '[%s]' % ip if is_ipv6(ip) else ip
    log.info('Serving at https://%s:%s%s', ip, self.port, self.base)
def save_resume_data_file(self, resume_data=None):
    """
    Saves the resume data file with the contents of self.resume_data.  If
    `resume_data` is None, then we grab the resume_data from the file on
    disk, else, we update `resume_data` with self.resume_data and save
    that to disk.

    :param resume_data: the current resume_data, this will be loaded from disk
        if not provided
    :type resume_data: dict
    """
    # Check to see if we're waiting on more resume data
    if self.num_resume_data or not self.resume_data:
        return

    filepath = os.path.join(get_config_dir(), "state", "torrents.fastresume")
    filepath_tmp = filepath + ".tmp"
    filepath_bak = filepath + ".bak"

    # First step is to load the existing file and update the dictionary
    if resume_data is None:
        resume_data = self.load_resume_data_file()

    resume_data.update(self.resume_data)
    self.resume_data = {}

    # Drop any stale backup before rotating the current file into its place.
    try:
        os.remove(filepath_bak)
    except OSError:
        pass
    try:
        log.debug("Creating backup of fastresume at: %s", filepath_bak)
        os.rename(filepath, filepath_bak)
    except OSError as ex:
        # Python 3 compatible exception syntax (was `except OSError, ex`).
        log.error("Unable to backup %s to %s: %s", filepath, filepath_bak, ex)
def get_localhost_auth():
    """Grabs the localclient auth line from the 'auth' file and creates a localhost uri.

    Returns:
        tuple: With the username and password to login as.
            Returns None implicitly if no 'localclient' entry is found.
    """
    from deluge.configmanager import get_config_dir
    auth_file = get_config_dir('auth')
    if not os.path.exists(auth_file):
        # First run: generate the localclient account/auth file.
        from deluge.common import create_localclient_account
        create_localclient_account()

    with open(auth_file) as auth:
        for line in auth:
            line = line.strip()
            if line.startswith('#') or not line:
                # This is a comment or blank line
                continue

            lsplit = line.split(':')

            # Lines are 'username:password' or 'username:password:authlevel'.
            if len(lsplit) == 2:
                username, password = lsplit
            elif len(lsplit) == 3:
                username, password, level = lsplit
            else:
                log.error(
                    'Your auth file is malformed: Incorrect number of fields!')
                continue

            if username == 'localclient':
                return (username, password)
def save_pickled_state_file(filename, state):
    """Save a file in the config directory and creates a backup

    Uses the backup/tmp/fsync/move sequence so a crash mid-write never leaves
    a truncated state file behind; a failed write restores the backup.

    Params:
        filename (str): Filename to be saved to config
        state (state): The data to be pickled and written to file
    """
    from deluge.configmanager import get_config_dir
    filepath = os.path.join(get_config_dir(), 'gtkui_state', filename)
    filepath_bak = filepath + '.bak'
    filepath_tmp = filepath + '.tmp'

    try:
        if os.path.isfile(filepath):
            log.debug('Creating backup of %s at: %s', filename, filepath_bak)
            shutil.copy2(filepath, filepath_bak)
    except IOError as ex:
        log.error('Unable to backup %s to %s: %s', filepath, filepath_bak, ex)
    else:
        log.info('Saving the %s at: %s', filename, filepath)
        try:
            with open(filepath_tmp, 'wb') as _file:
                # Pickle the state object (protocol 2 keeps the file loadable
                # by both Python 2 and Python 3).
                pickle.dump(state, _file, protocol=2)
                _file.flush()
                os.fsync(_file.fileno())
            shutil.move(filepath_tmp, filepath)
        except (IOError, EOFError, pickle.PicklingError) as ex:
            log.error('Unable to save %s: %s', filename, ex)
            if os.path.isfile(filepath_bak):
                log.info('Restoring backup of %s from: %s', filename, filepath_bak)
                shutil.move(filepath_bak, filepath)
def on_button_connect_clicked(self, widget=None):
    """Button handler for connect to or disconnect from daemon."""
    model, row = self.treeview.get_selection().get_selected()
    if not row:
        # Nothing selected in the host list.
        return
    # Columns: id, host, port, user, password, status, version (unused ones
    # are discarded into __).
    host_id, host, port, __, __, status, __ = model[row]

    # If status is connected then connect button disconnects instead.
    if status == 'Connected':
        def on_disconnect(reason):
            self._update_host_status()
        return client.disconnect().addCallback(on_disconnect)

    try_counter = 0
    auto_start = self.builder.get_object('chk_autostart').get_active()
    if auto_start and host in LOCALHOST and status == 'Offline':
        # Start the local daemon and then connect with retries set.
        if self.start_daemon(port, get_config_dir()):
            try_counter = 6
        else:
            # Don't attempt to connect to offline daemon.
            return
    self._connect(host_id, try_counter=try_counter)
def upload_plugin(self, filename, path):
    """Upload a plugin to config.

    Copies the plugin into the local plugins dir, then — when connected to a
    remote daemon — also ships it (base64-encoded) to the daemon.

    Returns True synchronously for localhost, otherwise a Deferred firing
    True/False on upload success/failure.
    """
    main_deferred = Deferred()
    shutil.copyfile(path, os.path.join(get_config_dir(), "plugins", filename))
    component.get("Web.PluginManager").scan_for_plugins()
    if client.is_localhost():
        # Local daemon shares the plugins dir; no upload needed.
        client.core.rescan_plugins()
        return True

    # Context manager closes the handle (original leaked it), and b64encode
    # replaces the deprecated base64.encodestring (removed in Python 3.9).
    with open(path, "rb") as _file:
        plugin_data = base64.b64encode(_file.read())

    def on_upload_complete(*args):
        client.core.rescan_plugins()
        component.get("Web.PluginManager").scan_for_plugins()
        main_deferred.callback(True)

    def on_upload_error(*args):
        main_deferred.callback(False)

    d = client.core.upload_plugin(filename, plugin_data)
    d.addCallback(on_upload_complete)
    d.addErrback(on_upload_error)
    return main_deferred
def __init__(self, icon_dir=None, no_icon=None):
    """
    Initialises a new TrackerIcons object

    :param icon_dir: the (optional) directory of where to store the icons
    :type icon_dir: string
    :param no_icon: the (optional) path name of the icon to show when no icon
                    can be fetched
    :type no_icon: string
    """
    Component.__init__(self, "TrackerIcons")
    if not icon_dir:
        icon_dir = get_config_dir("icons")
    self.dir = icon_dir
    if not os.path.isdir(self.dir):
        os.makedirs(self.dir)

    # Cache of host -> TrackerIcon, pre-populated from the icon directory.
    self.icons = {}
    for icon in os.listdir(self.dir):
        # NOTE(review): `icon` is a bare filename while `no_icon` is a path —
        # this comparison presumably relies on no_icon living outside self.dir;
        # confirm.
        if icon != no_icon:
            host = icon_name_to_host(icon)
            try:
                self.icons[host] = TrackerIcon(os.path.join(self.dir, icon))
            except KeyError:
                log.warning("invalid icon %s", icon)
    # None/empty-host lookups map to the fallback icon (or nothing).
    if no_icon:
        self.icons[None] = TrackerIcon(no_icon)
    else:
        self.icons[None] = None
    self.icons[''] = self.icons[None]

    # In-flight fetches and host redirects.
    self.pending = {}
    self.redirects = {}
def get_localhost_auth():
    """Grabs the localclient auth line from the 'auth' file and creates a localhost uri.

    Returns:
        tuple: With the username and password to login as.
            Returns None implicitly if no 'localclient' entry is found.
    """
    from deluge.configmanager import get_config_dir
    auth_file = get_config_dir('auth')
    if not os.path.exists(auth_file):
        # First run: generate the localclient account/auth file.
        from deluge.common import create_localclient_account
        create_localclient_account()

    with open(auth_file) as auth:
        for line in auth:
            line = line.strip()
            if line.startswith('#') or not line:
                # This is a comment or blank line
                continue

            lsplit = line.split(':')

            # Lines are 'username:password' or 'username:password:authlevel'.
            if len(lsplit) == 2:
                username, password = lsplit
            elif len(lsplit) == 3:
                username, password, level = lsplit
            else:
                log.error('Your auth file is malformed: Incorrect number of fields!')
                continue

            if username == 'localclient':
                return (username, password)
def __init__(self, icon_dir=None, no_icon=None):
    """
    Initialises a new TrackerIcons object

    :param icon_dir: the (optional) directory of where to store the icons
    :type icon_dir: string
    :param no_icon: the (optional) path name of the icon to show when no icon
                    can be fetched
    :type no_icon: string
    """
    Component.__init__(self, 'TrackerIcons')
    if not icon_dir:
        icon_dir = get_config_dir('icons')
    self.dir = icon_dir
    if not os.path.isdir(self.dir):
        os.makedirs(self.dir)

    # Cache of host -> TrackerIcon, pre-populated from the icon directory.
    self.icons = {}
    for icon in os.listdir(self.dir):
        # NOTE(review): `icon` is a bare filename while `no_icon` is a path —
        # this comparison presumably relies on no_icon living outside self.dir;
        # confirm.
        if icon != no_icon:
            host = icon_name_to_host(icon)
            try:
                self.icons[host] = TrackerIcon(os.path.join(
                    self.dir, icon))
            except KeyError:
                log.warning('invalid icon %s', icon)
    # None/empty-host lookups map to the fallback icon (or nothing).
    if no_icon:
        self.icons[None] = TrackerIcon(no_icon)
    else:
        self.icons[None] = None
    self.icons[''] = self.icons[None]

    # In-flight fetches and host redirects.
    self.pending = {}
    self.redirects = {}
def __init__(self, listen_interface=None, interface=None, port=None, standalone=False,
             read_only_config_keys=None):
    """
    Args:
        listen_interface (str, optional): The IP address to listen to
            bittorrent connections on.
        interface (str, optional): The IP address the daemon will
            listen for UI connections on.
        port (int, optional): The port the daemon will listen for UI
            connections on.
        standalone (bool, optional): If True the client is in Standalone mode
            otherwise, if False, start the daemon as separate process.
        read_only_config_keys (list of str, optional): A list of config
            keys that will not be altered by core.set_config() RPC method.

    Raises:
        DaemonRunningError: If a daemon is already running for this config dir.
    """
    self.standalone = standalone
    self.pid_file = get_config_dir('deluged.pid')
    log.info('Deluge daemon %s', get_version())
    # Refuse to start a second daemon on the same config directory.
    if is_daemon_running(self.pid_file):
        raise DaemonRunningError(
            'Deluge daemon already running with this config directory!')

    # Twisted catches signals to terminate, so just have it call the shutdown method.
    reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)

    # Catch some Windows specific signals
    if windows_check():
        def win_handler(ctrl_type):
            """Handle the Windows shutdown or close events."""
            log.debug('windows handler ctrl_type: %s', ctrl_type)
            if ctrl_type == CTRL_CLOSE_EVENT or ctrl_type == CTRL_SHUTDOWN_EVENT:
                self._shutdown()
                return 1
        SetConsoleCtrlHandler(win_handler)

    # Start the core as a thread and join it until it's done
    self.core = Core(listen_interface=listen_interface, read_only_config_keys=read_only_config_keys)
    # Fall back to the configured daemon port when none was requested.
    if port is None:
        port = self.core.config['daemon_port']
    self.port = port

    # The UI interface must be a plain IP address; anything else is ignored.
    if interface and not is_ip(interface):
        log.error('Invalid UI interface (must be IP Address): %s', interface)
        interface = None

    self.rpcserver = RPCServer(
        port=port,
        allow_remote=self.core.config['allow_remote'],
        listen=not standalone,
        interface=interface)

    log.debug('Listening to UI on: %s:%s and bittorrent on: %s', interface, port, listen_interface)
def legacy_delete_resume_data(self, torrent_id):
    """Deletes the .fastresume file

    :param torrent_id: the torrent whose per-torrent fastresume file to remove
    """
    path = os.path.join(get_config_dir(), "state", torrent_id + ".fastresume")
    log.debug("Deleting fastresume file: %s", path)
    try:
        os.remove(path)
    except OSError as e:
        # Python 3 compatible syntax (was `except Exception, e`); os.remove
        # only raises OSError, so catch exactly that and keep best-effort
        # warn-and-continue behaviour.
        log.warning("Unable to delete the fastresume file: %s", e)
def start_ssl(self):
    """Start listening for HTTPS connections on self.interface with the configured key/cert."""
    # Generates the key/cert pair first if they are missing.
    check_ssl_keys()
    log.debug('Enabling SSL with PKey: %s, Cert: %s', self.pkey, self.cert)
    # Load the PEM-encoded certificate and private key into pyOpenSSL objects.
    with open(configmanager.get_config_dir(self.cert)) as cert:
        certificate = Certificate.loadPEM(cert.read()).original
    with open(configmanager.get_config_dir(self.pkey)) as pkey:
        private_key = KeyPair.load(pkey.read(), FILETYPE_PEM).original
    options = CertificateOptions(privateKey=private_key, certificate=certificate, method=SSL.SSLv23_METHOD)
    ctx = options.getContext()
    # SSLv23 negotiates the protocol; explicitly disable insecure SSLv2/SSLv3.
    ctx.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
    # Also install any intermediate certificates present in the cert file.
    ctx.use_certificate_chain_file(configmanager.get_config_dir(self.cert))
    self.socket = reactor.listenSSL(self.port, self.site, options, interface=self.interface)
    ip = self.socket.getHost().host
    # Bracket IPv6 literals so the logged URL is valid.
    ip = '[%s]' % ip if is_ipv6(ip) else ip
    log.info('Serving at https://%s:%s%s', ip, self.port, self.base)
def migrate_hostlist(old_filename, new_filename):
    """Check for old hostlist filename and save details to new filename"""
    legacy_path = get_config_dir(old_filename)
    if not os.path.isfile(legacy_path):
        # Nothing to migrate.
        return

    # Re-save the legacy hostlist under the new filename.
    legacy_config = Config(old_filename, config_dir=get_config_dir())
    legacy_config.save(get_config_dir(new_filename))
    del legacy_config

    # Keep the old file around as '.old' for reference.
    try:
        os.rename(legacy_path, legacy_path + '.old')
    except OSError as ex:
        log.exception(ex)

    # Best-effort removal of the stale backup file.
    try:
        os.remove(legacy_path + '.bak')
    except OSError:
        pass
def delete_torrentfile(self):
    """Deletes the .torrent file in the state"""
    # os.path.join instead of manual '/' concatenation (portable, identical
    # result on POSIX).
    path = os.path.join(get_config_dir(), "state", self.torrent_id + ".torrent")
    log.debug("Deleting torrent file: %s", path)
    try:
        os.remove(path)
    except OSError as e:
        # Python 3 compatible syntax (was `except Exception, e`); os.remove
        # only raises OSError, so catch exactly that and keep best-effort
        # warn-and-continue behaviour.
        log.warning("Unable to delete the torrent file: %s", e)
def __init__(self):
    # Move any pre-1.3 hostlist file to the current name before loading.
    migrate_hostlist('hostlist.conf.1.2', 'hostlist.conf')
    self.config = Config('hostlist.conf', default_hostlist(), config_dir=get_config_dir(), file_version=3)
    # Upgrade any v1/v2 entries to the v3 on-disk format.
    self.config.run_converter((1, 2), 3, migrate_config_2_to_3)
    self.config.save()
def save_state(self):
    """Save the state of the TorrentManager to the torrents.state file

    Returns:
        bool: always True (failures are only logged).
    """
    state = TorrentManagerState()
    # Create the state for each Torrent and append to the list
    for torrent in self.torrents.values():
        paused = torrent.state == "Paused"

        torrent_status = torrent.get_status(
            ["total_uploaded", "last_seen_complete"], update=True)

        torrent_state = TorrentState(
            torrent.torrent_id,
            torrent.filename,
            torrent_status["total_uploaded"],
            torrent.trackers,
            torrent.options["compact_allocation"],
            paused,
            torrent.options["download_location"],
            torrent.options["max_connections"],
            torrent.options["max_upload_slots"],
            torrent.options["max_upload_speed"],
            torrent.options["max_download_speed"],
            torrent.options["prioritize_first_last_pieces"],
            torrent.options["sequential_download"],
            torrent.options["file_priorities"],
            torrent.get_queue_position(),
            torrent.options["auto_managed"],
            torrent.is_finished,
            torrent.options["stop_ratio"],
            torrent.options["stop_at_ratio"],
            torrent.options["remove_at_ratio"],
            torrent.options["move_completed"],
            torrent.options["move_completed_path"],
            torrent.magnet,
            torrent.time_added,
            torrent_status["last_seen_complete"],
            torrent.owner,
            torrent.options["shared"],
        )
        state.torrents.append(torrent_state)

    # Pickle the TorrentManagerState object
    try:
        log.debug("Saving torrent state file.")
        # Context manager closes the handle even if dump/flush raises
        # (the original leaked it on the error path). Python 3 compatible
        # except syntax (was `except IOError, e`).
        with open(os.path.join(get_config_dir(), "state", "torrents.state.new"), "wb") as state_file:
            cPickle.dump(state, state_file)
            state_file.flush()
            os.fsync(state_file.fileno())
    except IOError as e:
        log.warning("Unable to save state file: %s", e)

    return True
def legacy_get_resume_data_from_file(self, torrent_id):
    """Returns an entry with the resume data or None

    :param torrent_id: the torrent whose per-torrent fastresume file to read
    """
    fastresume = ""
    try:
        # Context manager closes the handle even if read() raises
        # (the original leaked it). Python 3 compatible except syntax.
        with open(os.path.join(get_config_dir(), "state", torrent_id + ".fastresume"), "rb") as _file:
            fastresume = _file.read()
    except IOError as e:
        log.debug("Unable to load .fastresume: %s", e)
    # The docstring promises a return value; the original fell off the end
    # returning None unconditionally, discarding the data it just read.
    return fastresume
def on_button_startdaemon_clicked(self, widget):
    """Start or stop the daemon for the selected localhost entry."""
    log.debug('on_button_startdaemon_clicked')
    if not self.liststore.iter_n_children(None):
        # There is nothing in the list, so lets create a localhost entry
        try:
            self.hostlist.add_default_host()
        except ValueError as ex:
            log.error('Error adding default host: %s', ex)
        else:
            self.start_daemon(DEFAULT_PORT, get_config_dir())
        finally:
            # Always bail out after handling the empty-list case.
            # NOTE(review): `return` in `finally` also swallows any in-flight
            # exception from the branches above — confirm this is intended.
            return

    paths = self.treeview.get_selection().get_selected_rows()[1]
    if len(paths):
        # Columns: id, host, port, user, password, status, version.
        __, host, port, user, password, status, __ = self.liststore[
            paths[0]]
    else:
        return

    if host not in LOCALHOST:
        # Only local daemons can be started/stopped from here.
        return

    def on_daemon_status_change(result):
        """Daemon start/stop callback"""
        # Small delay gives the daemon time to change state before refresh.
        reactor.callLater(0.7, self._update_host_status)

    if status in ('Online', 'Connected'):
        # Button will stop the daemon if status is online or connected.
        def on_connect(d, c):
            """Client callback to call daemon shutdown"""
            c.daemon.shutdown().addCallback(on_daemon_status_change)

        if client.connected() and (host, port, user) == client.connection_info():
            # Already connected to this daemon: shut it down directly.
            client.daemon.shutdown().addCallback(on_daemon_status_change)
        elif user and password:
            # Not connected: open a temporary client just to send shutdown.
            c = Client()
            c.connect(host, port, user, password).addCallback(on_connect, c)
    else:
        # Otherwise button will start the daemon.
        self.start_daemon(port, get_config_dir())
def load_resume_data_file(self):
    """Load the bdecoded fastresume dict from disk.

    Returns:
        dict: the decoded resume data, or {} when the file is missing/corrupt.
    """
    resume_data = {}
    try:
        log.debug("Opening torrents fastresume file for load.")
        # Context manager closes the handle even if read/bdecode raises
        # (the original leaked it). Python 3 compatible except syntax.
        with open(os.path.join(get_config_dir(), "state", "torrents.fastresume"), "rb") as fastresume_file:
            resume_data = lt.bdecode(fastresume_file.read())
    except Exception as e:
        # The original tuple (EOFError, IOError, Exception) reduces to
        # Exception; behaviour is unchanged.
        log.warning("Unable to load fastresume file: %s", e)
    # Callers (save_resume_data_file) call .update() on the result, so the
    # dict must be returned; the original fell off the end returning None.
    return resume_data
def save_state(self):
    """Save the state of the TorrentManager to the torrents.state file"""
    state = TorrentManagerState()
    # Create the state for each Torrent and append to the list
    for torrent in self.torrents.values():
        # Determine the paused flag: a paused session pauses every handle,
        # a forced error remembers the pre-error flag, otherwise use the
        # torrent's own state.
        if self.session.is_paused():
            paused = torrent.handle.is_paused()
        elif torrent.forced_error:
            paused = torrent.forced_error.was_paused
        elif torrent.state == "Paused":
            paused = True
        else:
            paused = False

        torrent_state = TorrentState(
            torrent.torrent_id,
            torrent.filename,
            torrent.get_status(["total_uploaded"])["total_uploaded"],
            torrent.trackers,
            torrent.options["compact_allocation"],
            paused,
            torrent.options["download_location"],
            torrent.options["max_connections"],
            torrent.options["max_upload_slots"],
            torrent.options["max_upload_speed"],
            torrent.options["max_download_speed"],
            torrent.options["prioritize_first_last_pieces"],
            torrent.options["file_priorities"],
            torrent.get_queue_position(),
            torrent.options["auto_managed"],
            torrent.is_finished,
            torrent.options["stop_ratio"],
            torrent.options["stop_at_ratio"],
            torrent.options["remove_at_ratio"],
            torrent.options["move_completed"],
            torrent.options["move_completed_path"],
            torrent.magnet,
            torrent.time_added,
        )
        state.torrents.append(torrent_state)

    # Pickle the TorrentManagerState object
    filepath = os.path.join(get_config_dir(), "state", "torrents.state")
    filepath_tmp = filepath + ".tmp"  # used by the subsequent write step
    filepath_bak = filepath + ".bak"

    # Drop any stale backup before rotating the current file into its place.
    try:
        os.remove(filepath_bak)
    except OSError:
        pass
    try:
        log.debug("Creating backup of state at: %s", filepath_bak)
        os.rename(filepath, filepath_bak)
    except OSError as ex:
        # Python 3 compatible exception syntax (was `except OSError, ex`).
        log.error("Unable to backup %s to %s: %s", filepath, filepath_bak, ex)
def load_resume_data_file(self):
    """Load the bdecoded fastresume dict from disk.

    Returns:
        dict: the decoded resume data, or {} when the file is missing/corrupt.
    """
    resume_data = {}
    try:
        log.debug("Opening torrents fastresume file for load.")
        # Context manager closes the handle even if read/bdecode raises
        # (the original leaked it). Python 3 compatible except syntax.
        with open(
                os.path.join(get_config_dir(), "state", "torrents.fastresume"),
                "rb") as fastresume_file:
            resume_data = lt.bdecode(fastresume_file.read())
    except Exception as e:
        # The original tuple (EOFError, IOError, Exception) reduces to
        # Exception; behaviour is unchanged.
        log.warning("Unable to load fastresume file: %s", e)
    # Callers (save_resume_data_file) call .update() on the result, so the
    # dict must be returned; the original fell off the end returning None.
    return resume_data
def load_state(self):
    """Load the state of the TorrentManager from the torrents.state file"""
    # Fresh default state; only replaced if unpickling succeeds.
    state = TorrentManagerState()

    try:
        log.debug("Opening torrent state file for load.")
        # Context manager closes the handle even if load raises
        # (the original leaked it). Python 3 compatible except syntax.
        with open(
                os.path.join(get_config_dir(), "state", "torrents.state"),
                "rb") as state_file:
            state = cPickle.load(state_file)
    except Exception as e:
        # The original tuple (EOFError, IOError, Exception, UnpicklingError)
        # reduces to Exception; behaviour is unchanged.
        log.warning("Unable to load state file: %s", e)
def load_torrent(self, torrent_id):
    """Load a torrent file from state and return it's torrent info"""
    filedump = None
    # Get the torrent data from the torrent file
    try:
        log.debug("Attempting to open %s for add.", torrent_id)
        # Context manager closes the handle even if read/bdecode raises
        # (the original leaked it). Python 3 compatible except syntax.
        with open(
                os.path.join(get_config_dir(), "state", torrent_id + ".torrent"),
                "rb") as _file:
            filedump = lt.bdecode(_file.read())
    except (IOError, RuntimeError) as e:
        log.warning("Unable to open %s: %s", torrent_id, e)
        return False
    # TODO(review): the success path falls through without returning the
    # decoded data, despite the docstring — confirm against the caller.
def update(self):
    """Reload the auth file when it has changed on disk, recreating it if missing."""
    auth_file = configmanager.get_config_dir('auth')

    # Check for auth file and create if necessary
    if not os.path.isfile(auth_file):
        log.info('Authfile not found, recreating it.')
        self.__load_auth_file()
        return

    # Only reload when the file's mtime has moved since the last load.
    current_mtime = os.stat(auth_file).st_mtime
    if self.__auth_modification_time != current_mtime:
        log.info('Auth file changed, reloading it!')
        self.__load_auth_file()
def on_button_startdaemon_clicked(self, widget):
    """Start or stop the daemon for the selected localhost entry."""
    log.debug('on_button_startdaemon_clicked')
    if not self.liststore.iter_n_children(None):
        # There is nothing in the list, so lets create a localhost entry
        try:
            self.hostlist.add_default_host()
        except ValueError as ex:
            log.error('Error adding default host: %s', ex)
        else:
            self.start_daemon(DEFAULT_PORT, get_config_dir())
        finally:
            # Always bail out after handling the empty-list case.
            # NOTE(review): `return` in `finally` also swallows any in-flight
            # exception from the branches above — confirm this is intended.
            return

    paths = self.treeview.get_selection().get_selected_rows()[1]
    if len(paths):
        # Columns: id, host, port, user, password, status, version.
        __, host, port, user, password, status, __ = self.liststore[paths[0]]
    else:
        return

    if host not in LOCALHOST:
        # Only local daemons can be started/stopped from here.
        return

    def on_daemon_status_change(result):
        """Daemon start/stop callback"""
        # Small delay gives the daemon time to change state before refresh.
        reactor.callLater(0.7, self._update_host_status)

    if status in ('Online', 'Connected'):
        # Button will stop the daemon if status is online or connected.
        def on_connect(d, c):
            """Client callback to call daemon shutdown"""
            c.daemon.shutdown().addCallback(on_daemon_status_change)

        if client.connected() and (host, port, user) == client.connection_info():
            # Already connected to this daemon: shut it down directly.
            client.daemon.shutdown().addCallback(on_daemon_status_change)
        elif user and password:
            # Not connected: open a temporary client just to send shutdown.
            c = Client()
            c.connect(host, port, user, password).addCallback(on_connect, c)
    else:
        # Otherwise button will start the daemon.
        self.start_daemon(port, get_config_dir())
def archive_files(arc_name, filepaths, message=None, rotate=10):
    """Compress a list of filepaths into timestamped tarball in config dir.

    The archiving config directory is 'archive'.

    Args:
        arc_name (str): The archive output filename (appended with timestamp).
        filepaths (list): A list of the files to be archived into tarball.
        message (str, optional): Text stored inside the tarball as
            'archive_message.txt'.
        rotate (int, optional): Max number of archives kept per arc_name;
            the oldest is deleted when exceeded.

    Returns:
        str: The full archive filepath, or False if archiving failed.
    """
    from deluge.configmanager import get_config_dir

    # Set archive compression to lzma with bz2 fallback.
    arc_comp = 'xz' if not PY2 else 'bz2'

    archive_dir = os.path.join(get_config_dir(), 'archive')
    # ':' is replaced so the timestamp is filename-safe on all platforms.
    timestamp = datetime.now().replace(microsecond=0).isoformat().replace(
        ':', '-')
    arc_filepath = os.path.join(
        archive_dir, arc_name + '-' + timestamp + '.tar.' + arc_comp)

    if not os.path.exists(archive_dir):
        os.makedirs(archive_dir)
    else:
        # Rotate out the oldest archive (lexicographic sort works because the
        # timestamp is ISO formatted).
        all_arcs = glob.glob(os.path.join(archive_dir, arc_name) + '*')
        if len(all_arcs) >= rotate:
            log.warning(
                'Too many existing archives for %s. Deleting oldest archive.',
                arc_name)
            os.remove(sorted(all_arcs)[0])

    try:
        with tarfile.open(arc_filepath, 'w:' + arc_comp) as tar:
            for filepath in filepaths:
                # Silently skip anything that is not a regular file.
                if not os.path.isfile(filepath):
                    continue
                tar.add(filepath, arcname=os.path.basename(filepath))
            if message:
                # Embed the message as an in-memory member of the tarball.
                with closing(BytesIO(message.encode('utf8'))) as fobj:
                    tarinfo = tarfile.TarInfo('archive_message.txt')
                    tarinfo.size = len(fobj.getvalue())
                    tarinfo.mtime = time.time()
                    tar.addfile(tarinfo, fileobj=fobj)
    except OSError:
        log.error('Problem occurred archiving filepaths: %s', filepaths)
        return False
    else:
        return arc_filepath
def __init__(self, listen_interface=None, interface=None, port=None, standalone=False,
             read_only_config_keys=None):
    """Initialise the daemon: core, RPC server and shutdown handlers.

    Args:
        listen_interface (str, optional): The IP address to listen to
            bittorrent connections on.
        interface (str, optional): The IP address the daemon will listen
            for UI connections on.
        port (int, optional): The port the daemon will listen for UI
            connections on.
        standalone (bool, optional): If True the client is in Standalone
            mode otherwise, if False, start the daemon as separate process.
        read_only_config_keys (list of str, optional): A list of config
            keys that will not be altered by core.set_config() RPC method.

    Raises:
        DaemonRunningError: If a daemon is already running against this
            config directory (detected via the pid file).
    """
    self.standalone = standalone
    self.pid_file = get_config_dir('deluged.pid')
    log.info('Deluge daemon %s', get_version())
    # Refuse to start two daemons on the same config directory.
    if is_daemon_running(self.pid_file):
        raise DaemonRunningError('Deluge daemon already running with this config directory!')

    # Twisted catches signals to terminate, so just have it call the shutdown method.
    reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)

    # Catch some Windows specific signals
    if windows_check():
        def win_handler(ctrl_type):
            """Handle the Windows shutdown or close events."""
            log.debug('windows handler ctrl_type: %s', ctrl_type)
            if ctrl_type == CTRL_CLOSE_EVENT or ctrl_type == CTRL_SHUTDOWN_EVENT:
                self._shutdown()
                # Returning 1 tells Windows the event was handled.
                return 1
        SetConsoleCtrlHandler(win_handler)

    # Start the core as a thread and join it until it's done
    self.core = Core(listen_interface=listen_interface,
                     read_only_config_keys=read_only_config_keys)
    # Fall back to the configured daemon port when none was given.
    if port is None:
        port = self.core.config['daemon_port']
    self.port = port

    # An invalid UI interface is dropped rather than aborting startup.
    if interface and not is_ip(interface):
        log.error('Invalid UI interface (must be IP Address): %s', interface)
        interface = None

    # In standalone mode the RPC server is created but does not listen.
    self.rpcserver = RPCServer(
        port=port,
        allow_remote=self.core.config['allow_remote'],
        listen=not standalone,
        interface=interface
    )

    log.debug('Listening to UI on: %s:%s and bittorrent on: %s',
              interface, port, listen_interface)
def write_torrentfile(self):
    """Writes the torrent file to '<config>/state/<torrent_id>.torrent'."""
    path = os.path.join(get_config_dir(), "state", "%s.torrent" % self.torrent_id)
    log.debug("Writing torrent file: %s", path)
    try:
        self.torrent_info = self.handle.get_torrent_info()
        # Regenerate the file priorities
        self.set_file_priorities([])
        md = lt.bdecode(self.torrent_info.metadata())
        torrent_file = {"info": md}
        # Context manager closes the handle even if bencode/write fails;
        # the original `open(path, "wb").write(...)` leaked it.
        with open(path, "wb") as _file:
            _file.write(lt.bencode(torrent_file))
    except Exception as e:
        # Best-effort: a failed save is logged, never fatal.
        log.warning("Unable to save torrent file: %s", e)
def __init__(self):
    """Set up the icons directory and index any icons already on disk."""
    component.Component.__init__(self, "TrackerIcons")
    # All cached tracker icons live under '<config>/icons'.
    icon_dir = os.path.join(configmanager.get_config_dir(), "icons")
    self.image_dir = icon_dir
    if not os.path.exists(icon_dir):
        os.mkdir(icon_dir)
    # Map tracker host -> icon filepath for every recognised icon on disk.
    self.images = {}
    for entry in os.listdir(icon_dir):
        root, extension = os.path.splitext(entry)
        if extension not in ICON_EXTENSIONS:
            continue
        self.images[root] = os.path.join(icon_dir, entry)
    # Pending icon requests keyed while a fetch is in flight.
    self._waiting = {}
def load_resume_data_file(self):
    """Loads the fastresume data for all torrents from disk.

    Tries 'torrents.fastresume' first, then its '.bak' backup.

    Returns:
        The bdecoded resume data, or None if neither file could be read.
        (The original discarded the loaded data; returning it is
        backward-compatible since the implicit return was None.)
    """
    filepath = os.path.join(get_config_dir(), "state", "torrents.fastresume")
    log.debug("Opening torrents fastresume file for load.")
    resume_data = None
    for _filepath in (filepath, filepath + ".bak"):
        try:
            # 'with' closes the handle even when read/bdecode fails;
            # the original leaked it on those paths.
            with open(_filepath, "rb") as fastresume_file:
                resume_data = lt.bdecode(fastresume_file.read())
        except Exception as e:
            # Exception already subsumed the original's EOFError/IOError.
            log.warning("Unable to load fastresume file: %s", e)
            resume_data = None
        else:
            log.info("Successfully loaded fastresume file: %s", _filepath)
            break
    return resume_data
def load_state(self):
    """Load the state of the TorrentManager from the torrents.state file.

    Tries 'torrents.state' first, then its '.bak' backup.

    Returns:
        TorrentManagerState: The unpickled state, or a fresh empty state
        if neither file could be read. (The original discarded the loaded
        state; returning it is backward-compatible since the implicit
        return was None.)
    """
    filepath = os.path.join(get_config_dir(), "state", "torrents.state")
    log.debug("Opening torrent state file for load.")
    state = TorrentManagerState()
    for _filepath in (filepath, filepath + ".bak"):
        try:
            # 'with' closes the handle even when unpickling fails;
            # the original leaked it on that path.
            with open(_filepath, "rb") as state_file:
                state = cPickle.load(state_file)
        except Exception as e:
            # Exception already subsumed EOFError/IOError/UnpicklingError.
            log.warning("Unable to load state file: %s", e)
            state = TorrentManagerState()
        else:
            log.info("Successfully loaded state file: %s", _filepath)
            break
    return state
def save_resume_data_file(self):
    """
    Saves the resume data file with the contents of self.resume_data.

    The write is flushed and fsynced so the data reaches disk; failures
    are logged and never fatal.
    """
    path = os.path.join(get_config_dir(), "state", "torrents.fastresume")
    try:
        log.debug("Saving fastresume file: %s", path)
        # 'with' closes the handle even if bencode/write/fsync raises;
        # the original leaked it on any error after open().
        with open(path, "wb") as fastresume_file:
            fastresume_file.write(lt.bencode(self.resume_data))
            fastresume_file.flush()
            # Push the data to disk, not just the OS cache.
            os.fsync(fastresume_file.fileno())
    # OSError added: os.fsync raises OSError, which the original's
    # `except IOError` did not catch on Python 2.
    except (IOError, OSError):
        log.warning("Error trying to save fastresume file")
def upload_plugin(self, filename, filedump):
    """This method is used to upload new plugins to the daemon. It is used when
    connecting to the daemon remotely and installing a new plugin on the client
    side.

    Args:
        filename (str): The plugin file's name.
        filedump: Base64-encoded file data, ie, base64 of plugin_file.read().

    Returns None (and logs) if the base64 payload cannot be decoded.
    """
    try:
        # base64.decodestring was deprecated and removed in Python 3.9;
        # b64decode (default validate=False) likewise discards
        # non-alphabet characters, so behaviour is unchanged.
        filedump = base64.b64decode(filedump)
    except Exception as ex:
        log.error('There was an error decoding the filedump string!')
        log.exception(ex)
        return
    with open(os.path.join(get_config_dir(), 'plugins', filename), 'wb') as _file:
        _file.write(filedump)
    # Make the freshly uploaded plugin visible to the plugin manager.
    component.get('CorePluginManager').scan_for_plugins()