def set_custom(self, key, value=None):
    """ Set a custom value. C{key} might have the form "key=value" when value is C{None}. """
    # Accept a combined "key=value" argument when no explicit value is given
    if value is None:
        try:
            key, value = key.split('=', 1)
        except (ValueError, TypeError) as exc:
            raise error.UserError(
                "Bad custom field assignment %r, probably missing a '=' (%s)"
                % (key, exc))

    # Validate the field name and choose the matching XMLRPC setter
    if not key:
        raise error.UserError("Custom field name cannot be empty!")
    if len(key) == 1 and key in "12345":
        # Numbered custom slots have dedicated setters taking just the value
        method = "custom" + key + ".set"
        args = (value, )
    elif key[0].isalpha() and key.replace("_", "").isalnum():
        method = "custom.set"
        args = (key, value)
    else:
        raise error.UserError(
            "Bad custom field name %r (must only contain a-z, A-Z, 0-9 and _)"
            % (key, ))

    # Perform the assignment and mirror it in the local field cache
    self._make_it_so("setting custom_%s = %r on" % (key, value),
                     [method], *args)
    self._fields["custom_" + key] = value
def validate_field_list(fields, allow_fmt_specs=False, name_filter=None):
    """ Make sure the fields in the given list exist.

        @param fields: List of fields (comma-/space-separated if a string).
        @type fields: list or str
        @return: validated field names.
        @rtype: list
    """
    known_formats = [name[4:] for name in globals() if name.startswith("fmt_")]

    # Normalize a comma- or space-separated string into a list of names
    if hasattr(fields, "replace"):
        fields = [part.strip() for part in fields.replace(',', ' ').split()]

    if name_filter:
        fields = [name_filter(name) for name in fields]

    # Check each name, plus any trailing ".fmt" chain when allowed
    for name in fields:
        if allow_fmt_specs and '.' in name:
            fullname = name
            name, fmtspecs = name.split('.', 1)
            for fmtspec in fmtspecs.split('.'):
                if fmtspec != "raw" and fmtspec not in known_formats:
                    raise error.UserError(
                        "Unknown format specification %r in %r" %
                        (fmtspec, fullname))

        if name not in engine.FieldDefinition.FIELDS \
                and not engine.TorrentProxy.add_manifold_attribute(name):
            raise error.UserError("Unknown field name %r" % (name, ))

    return fields
def load_config(self, namespace=None, rcfile=None):
    """ Load file given in "rcfile".

        Parses the rTorrent configuration for SCGI connection settings
        and stores the resulting URL into ``namespace.scgi_url``,
        preferring a UNIX domain socket over a TCP socket.

        @param namespace: Config namespace to update (defaults to the
            global ``config`` module).
        @param rcfile: Path to the rtorrent.rc file; defaults to
            ``config.rtorrent_rc``.
        @raise error.UserError: If no config file is defined or it
            doesn't exist.
    """
    if namespace is None:
        namespace = config
    if namespace.scgi_url:
        return  # already have the connection to rTorrent

    # Get and check config file name
    if not rcfile:
        rcfile = getattr(config, "rtorrent_rc", None)
    if not rcfile:
        raise error.UserError(
            "No 'rtorrent_rc' path defined in configuration!")
    if not os.path.isfile(rcfile):
        raise error.UserError("Config file %r doesn't exist!" % (rcfile, ))

    # Parse the file
    self.LOG.debug("Loading rtorrent config from %r" % (rcfile, ))
    rc_vals = Bunch(scgi_local='', scgi_port='')
    with open(rcfile) as handle:
        continued = False
        for line in handle.readlines():
            # Skip comments, continuations, and empty lines
            line = line.strip()
            # A trailing backslash means the NEXT physical line continues
            # this one, so that next line must be skipped as well
            continued, was_continued = line.endswith('\\'), continued
            if not line or was_continued or line.startswith("#"):
                continue

            # Be lenient about errors, after all it's not our own config file
            try:
                key, val = line.split("=", 1)
            except ValueError:
                self.LOG.warning("Ignored invalid line %r in %r!" %
                                 (line, rcfile))
                continue
            key, val = key.strip(), val.strip()
            key = self.RTORRENT_RC_ALIASES.get(key, key).replace('.', '_')

            # Copy values we're interested in
            if key in self.RTORRENT_RC_KEYS:
                self.LOG.debug("rtorrent.rc: %s = %s" % (key, val))
                rc_vals[key] = val

    # Validate fields
    if rc_vals.scgi_local:
        rc_vals.scgi_local = os.path.expanduser(rc_vals.scgi_local)
        # Absolute socket paths get the scgi:// scheme prepended
        if rc_vals.scgi_local.startswith('/'):
            rc_vals.scgi_local = "scgi://" + rc_vals.scgi_local
    if rc_vals.scgi_port and not rc_vals.scgi_port.startswith("scgi://"):
        rc_vals.scgi_port = "scgi://" + rc_vals.scgi_port

    # Prefer UNIX domain sockets over TCP sockets
    namespace.scgi_url = rc_vals.scgi_local or rc_vals.scgi_port
def open(self): """ Open connection. """ # Only connect once if self._rpc is not None: return self._rpc # Get connection URL from rtorrent.rc self.load_config() # Reading abilities are on the downfall, so... if not config.scgi_url: raise error.UserError( "You need to configure a XMLRPC connection, read" " https://pyrocore.readthedocs.io/en/latest/setup.html") # Connect and get instance ID (also ensures we're connectable) self._rpc = xmlrpc.RTorrentProxy(config.scgi_url) self.versions, self.version_info = self._rpc._set_mappings() self.engine_id = self._rpc.session.name() time_usec = self._rpc.system.time_usec() # Make sure xmlrpc-c works as expected if time_usec < 2**32: self.LOG.warn( "Your xmlrpc-c is broken (64 bit integer support missing," " %r returned instead)" % (type(time_usec), )) # Get other manifest values self.engine_software = "rTorrent %s/%s" % self.versions if "+ssh:" in config.scgi_url: self.startup = int(self._rpc.system.startup_time() or time.time()) else: self._session_dir = self._rpc.session.path() if not self._session_dir: raise error.UserError( "You need a session directory, read" " https://pyrocore.readthedocs.io/en/latest/setup.html") if not os.path.exists(self._session_dir): raise error.UserError("Non-existing session directory %r" % self._session_dir) self._download_dir = os.path.expanduser( self._rpc.directory.default()) if not os.path.exists(self._download_dir): raise error.UserError("Non-existing download directory %r" % self._download_dir) self.startup = os.path.getmtime( os.path.join(self._session_dir, "rtorrent.lock")) # Return connection self.LOG.debug(repr(self)) return self._rpc
def __getitem__(self, key): """ Return object attribute named C{key}. Additional formatting is provided by adding modifiers like ".sz" (byte size formatting) to the normal field name. If the wrapped object is None, the upper-case C{key} (without any modifiers) is returned instead, to allow the formatting of a header line. """ # Check for formatter specifications formatter = None have_raw = False if '.' in key: key, formats = key.split('.', 1) formats = formats.split('.') have_raw = formats[0] == "raw" if have_raw: formats = formats[1:] for fmtname in formats: try: fmtfunc = globals()["fmt_" + fmtname] except KeyError: raise error.UserError("Unknown formatting spec %r for %r" % (fmtname, key)) else: formatter = (lambda val, f=fmtfunc, k=formatter: f(k(val)) ) if formatter else fmtfunc # Check for a field formatter try: field = engine.FieldDefinition.FIELDS[key] except KeyError: if key not in self.defaults and not engine.TorrentProxy.add_manifold_attribute( key): raise error.UserError("Unknown field %r" % (key, )) else: if field._formatter and not have_raw: formatter = (lambda val, f=formatter: f(field._formatter(val)) ) if formatter else field._formatter if self.obj is None: # Return column name return '%' if key == "pc" else key.upper() else: # Return formatted value val = super(OutputMapping, self).__getitem__(key) try: return formatter(val) if formatter else val except (TypeError, ValueError, KeyError, IndexError, AttributeError) as exc: raise error.LoggableError("While formatting %s=%r: %s" % (key, val, exc))
def __init__(self, config=None):
    """ Set up the tree watcher job.

        @param config: Job configuration; a Bunch-style mapping offering
            both item and attribute access (must provide at least
            "path" and "job_name").
        @raise error.UserError: On a missing path, bad path, or
            duplicate custom command definitions.
    """
    self.config = config or {}
    self.LOG = pymagic.get_class_logger(self)
    if 'log_level' in self.config:
        # NOTE(review): attribute access on the raw "config" argument here
        # assumes it is a Bunch-like object, not a plain dict — confirm
        self.LOG.setLevel(config.log_level)
    self.LOG.debug("Tree watcher created with config %r" % self.config)

    self.manager = None
    self.handler = None
    self.notifier = None

    # Helper to read a boolean job parameter from the config
    bool_param = lambda key, default: matching.truth(
        self.config.get(key, default), "job.%s.%s" %
        (self.config.job_name, key))

    if not self.config.path:
        raise error.UserError(
            "You need to set 'job.%s.path' in the configuration!" %
            self.config.job_name)

    self.config.quiet = bool_param("quiet", False)
    self.config.queued = bool_param("queued", False)
    self.config.trace_inotify = bool_param("trace_inotify", False)

    # "path" may hold several os.pathsep-separated directories
    self.config.path = set([
        os.path.abspath(os.path.expanduser(path.strip()).rstrip(os.sep))
        for path in self.config.path.split(os.pathsep)
    ])
    for path in self.config.path:
        if not os.path.isdir(path):
            raise error.UserError("Path '%s' is not a directory!" % path)

    # Assemble custom commands ("cmd.<name>" keys, pre-parsed as templates)
    self.custom_cmds = {}
    for key, val in self.config.items():
        if key.startswith("cmd."):
            _, key = key.split('.', 1)
            if key in self.custom_cmds:
                raise error.UserError(
                    "Duplicate custom command definition '%s'"
                    " (%r already registered, you also added %r)!" %
                    (key, self.custom_cmds[key], val))
            self.custom_cmds[key] = formatting.preparse(val)
    self.LOG.debug("custom commands = %r" % self.custom_cmds)

    # Get client proxy
    self.proxy = xmlrpc.RTorrentProxy(configuration.scgi_url)
    self.proxy._set_mappings()  # pylint: disable=W0212

    if self.config.active:
        self.setup()
def set_throttle(self, name):
    """ Assign to throttle group. """
    # Normalize the special pseudo group names
    lowered = name.lower()
    if lowered == "null":
        name = "NULL"
    elif lowered == "none":
        name = ''

    if name not in self._engine.known_throttle_names:
        # Probe the client: an unknown throttle reports -1 for both limits
        rpc = self._engine._rpc
        if (rpc.throttle.up.max(xmlrpc.NOHASH, name) == -1
                and rpc.throttle.down.max(xmlrpc.NOHASH, name) == -1):
            raise error.UserError(
                "Unknown throttle name '{}'".format(name))
        self._engine.known_throttle_names.add(name)

    if (name or "NONE") == self.throttle:
        self._engine.LOG.debug("Keeping throttle %r on torrent #%s"
                               % (self.throttle, self._fields["hash"]))
        return

    # An active item must be stopped while its throttle changes
    was_active = self.is_active
    if was_active:
        self._engine.LOG.debug("Torrent #%s stopped for throttling"
                               % (self._fields["hash"], ))
        self.stop()
    self._make_it_so("setting throttle %r on" % (name, ),
                     ["throttle_name.set"], name)
    if was_active:
        self._engine.LOG.debug("Torrent #%s restarted after throttling"
                               % (self._fields["hash"], ))
        self.start()
def execute(self, commands):
    """ Execute XMLRPC command(s).

        @param commands: Either a single string of commands separated by
            ' ; ', or an iterable of command strings. Each command has
            the form "method=arg1,arg2,..."; a '>' prefix prints the
            call's result.
        @raise error.UserError: For a command missing its '=' separator.
    """
    try:
        commands = [i.strip() for i in commands.split(' ; ')]
    except (TypeError, AttributeError):
        pass  # assume an iterable

    for command in commands:
        try:
            method, args = command.split('=', 1)
            args = tuple(CommaLexer(args))
        # Use "as" binding (valid since Python 2.6) — the old
        # "except X, exc" form is a syntax error on Python 3
        except (ValueError, TypeError) as exc:
            raise error.UserError(
                "Bad command %r, probably missing a '=' (%s)" % (command, exc))

        def print_result(data):
            "Helper to print the observed XMLRPC call result"
            args_list = ''
            if args:
                args_list = '"' + '","'.join(args) + '"'
            # Default to the 'd.' (download item) namespace for plain names
            namespace = '' if method[:2].endswith('.') else 'd.'
            print('%s\t%s\t%s%s=%s' %
                  (self._fields["hash"], data, namespace, method, args_list))

        observer = print_result if method.startswith('>') else None
        method = method.lstrip('>')
        self._make_it_so("executing command on", [method], *args,
                         observer=observer)
def load(self, optional_cfg_files=None):
    """ Actually load the configuation from either the default location
        or the given directory.

        @param optional_cfg_files: Extra INI files to load when present
            (relative names are resolved against the config directory).
        @raise RuntimeError: When called twice on the same loader.
        @raise error.UserError: On INI parsing errors.
    """
    optional_cfg_files = optional_cfg_files or []

    # Guard against coding errors
    if self._loaded:
        raise RuntimeError(
            "INTERNAL ERROR: Attempt to load configuration twice!")

    try:
        # Load configuration
        namespace = {}
        self._set_defaults(namespace, optional_cfg_files)

        self._load_ini(namespace,
                       os.path.join(self.config_dir, self.CONFIG_INI))

        for cfg_file in optional_cfg_files:
            if not os.path.isabs(cfg_file):
                cfg_file = os.path.join(self.config_dir, cfg_file)

            if os.path.exists(cfg_file):
                self._load_ini(namespace, cfg_file)

        self._validate_namespace(namespace)
        self._load_py(namespace, namespace["config_script"])
        self._validate_namespace(namespace)

        for callback in namespace["config_validator_callbacks"]:
            callback()
    # Use "as" binding (valid since Python 2.6) — the old
    # "except X, exc" form is a syntax error on Python 3
    except ConfigParser.ParsingError as exc:
        raise error.UserError(exc)
def add_manifold_attribute(cls, name):
    """ Register a manifold engine attribute.

        @return: field definition object, or None if "name" isn't a manifold attribute.
    """
    if name.startswith("custom_"):
        # Reuse an already registered field, else create and attach one
        if name in FieldDefinition.FIELDS:
            return FieldDefinition.FIELDS[name]
        field = OnDemandField(str, name,
                              "custom attribute %r" % name.split('_', 1)[1],
                              matcher=matching.PatternFilter)
        setattr(cls, name, field)  # add field to all proxy objects
        return field

    if name.startswith("kind_") and name[5:].isdigit():
        if name in FieldDefinition.FIELDS:
            return FieldDefinition.FIELDS[name]
        limit = int(name[5:].lstrip('0') or '0', 10)
        if limit > 100:
            raise error.UserError("kind_N: N > 100 in %r" % name)
        field = OnDemandField(
            set, name,
            "kinds of files that make up more than %d%% of this item's size"
            % limit,
            matcher=matching.TaggedAsFilter, formatter=_fmt_tags,
            engine_name="kind_%d" % limit)
        setattr(cls, name, field)
        return field
def assign_fields(meta, assignments):
    """ Takes a list of C{key=value} strings and assigns them to the
        given metafile. If you want to set nested keys (e.g. "info.source"),
        you have to use a dot as a separator. For exotic keys *containing*
        a dot, double that dot ("dotted..key").

        Numeric values starting with "+" or "-" are converted to integers.

        If just a key name is given (no '='), the field is removed.

        @param meta: Metafile dict, modified in place.
        @param assignments: Iterable of "key=value" (or bare "key") strings.
        @return: The modified C{meta} dict.
        @raise error.UserError: On malformed assignments, or removal of a
            missing key.
    """
    for assignment in assignments:
        try:
            if '=' in assignment:
                field, val = assignment.split('=', 1)
            else:
                field, val = assignment, None
            if val and val[0] in "+-" and val[1:].isdigit():
                val = int(val, 10)

            # TODO: Allow numerical indices, and "+" for append
            # Walk the dotted key path, creating missing dicts as we go
            # (a doubled dot escapes a literal dot inside a key)
            namespace = meta
            keypath = [
                i.replace('\0', '.')
                for i in field.replace('..', '\0').split('.')
            ]
            for key in keypath[:-1]:
                namespace = namespace.setdefault(key, {})

            if val is None:
                # Bare key name: remove the field. Previously this was an
                # unguarded "del" outside the try, leaking a raw KeyError
                # for a nonexistent key instead of a friendly UserError.
                try:
                    del namespace[keypath[-1]]
                except KeyError as exc:
                    raise error.UserError(
                        "Bad assignment %r (%s)!" % (assignment, exc))
            else:
                namespace[keypath[-1]] = val
        except (IndexError, TypeError, ValueError) as exc:
            raise error.UserError("Bad assignment %r (%s)!" %
                                  (assignment, exc))

    return meta
def set_throttle(self, name):
    """ Assign to throttle group. """
    # Normalize the special pseudo group names
    lowered = name.lower()
    if lowered == "null":
        name = "NULL"
    elif lowered == "none":
        name = ""

    # Only throttle groups announced in the config are valid targets
    display_name = name or "NONE"
    if display_name not in config.throttle_names:
        raise error.UserError("Unknown throttle name %r" % (display_name, ))
    if display_name == self.throttle:
        self._engine.LOG.debug("Keeping throttle %r on torrent #%s"
                               % (self.throttle, self._fields["hash"]))
        return

    # An active item must be stopped while its throttle changes
    was_active = self.is_active
    if was_active:
        self._engine.LOG.debug("Torrent #%s stopped for throttling"
                               % (self._fields["hash"], ))
        self.stop()
    self._make_it_so("setting throttle %r on" % (name, ),
                     ["set_throttle_name"], name)
    if was_active:
        self._engine.LOG.debug("Torrent #%s restarted after throttling"
                               % (self._fields["hash"], ))
        self.start()
def set_custom(self, key, value=None):
    """ Set a custom value. C{key} might have the form "key=value" when value is C{None}. """
    # Split combined key/value
    if value is None:
        try:
            key, value = key.split('=', 1)
        # Use "as" binding (valid since Python 2.6) — the old
        # "except X, exc" form is a syntax error on Python 3
        except (ValueError, TypeError) as exc:
            raise error.UserError(
                "Bad custom field assignment %r, probably missing a '=' (%s)"
                % (key, exc))
def add_manifold_attribute(cls, name):
    """ Register a manifold engine attribute.

        @return: field definition object, or None if "name" isn't a manifold attribute.
    """
    if name.startswith("custom_"):
        # Reuse an already registered field, else create and attach one
        if name in FieldDefinition.FIELDS:
            return FieldDefinition.FIELDS[name]
        field = OnDemandField(fmt.to_unicode, name,
                              "custom attribute %r" % name.split('_', 1)[1],
                              matcher=matching.PatternFilter)
        setattr(cls, name, field)  # add field to all proxy objects
        return field

    if name.startswith("kind_") and name[5:].isdigit():
        if name in FieldDefinition.FIELDS:
            return FieldDefinition.FIELDS[name]
        limit = int(name[5:].lstrip('0') or '0', 10)
        if limit > 100:
            raise error.UserError("kind_N: N > 100 in %r" % name)
        field = OnDemandField(
            set, name,
            "kinds of files that make up more than %d%% of this item's size"
            % limit,
            matcher=matching.TaggedAsFilter, formatter=_fmt_tags,
            engine_name="kind_%d" % limit)
        setattr(cls, name, field)
        return field

    if name.startswith("d_"):
        # Generic access to any "d.*" XMLRPC getter
        if name in FieldDefinition.FIELDS:
            return FieldDefinition.FIELDS[name]
        method = 'd.' + name[2:]
        # TODO check for valid method names,
        # and map dotted ones from their underscore version
        #if method not in methods:
        #    raise error.UserError("{}: Unknown XMLRPC getter method".format(method))
        field = OnDemandField(
            fmt.to_unicode, name,
            "Download item {} XMLRPC value".format(method),
            matcher=matching.PatternFilter, engine_name=method)
        setattr(cls, name, field)
        return field
def read_blob(arg):
    """Read a BLOB from given ``@arg``."""
    # "@-" means: read the whole of stdin
    if arg == '@-':
        return sys.stdin.read()

    # "@<scheme>://..." means: fetch over the network (needs 'requests')
    if arg.startswith(('@http://', '@https://', '@ftp://', '@file://')):
        if not requests:
            raise error.UserError("You must 'pip install requests' to support @URL arguments.")
        try:
            response = requests.get(arg[1:])
            response.raise_for_status()
            return response.content
        except requests.RequestException as exc:
            raise error.UserError(str(exc))

    # Anything else is a local file path (with "~" expansion)
    try:
        with open(os.path.expanduser(arg[1:]), 'rb') as handle:
            return handle.read()
    except IOError as exc:
        raise error.UserError('While reading @blob argument: {}'.format(exc))
def preparse(output_format):
    """ Do any special processing of a template, and return the result.

        @param output_format: Template text to pre-parse.
        @return: The pre-parsed template object.
        @raise error.UserError: When the Tempita package is required but
            not installed; other import errors are re-raised unchanged.
    """
    try:
        return templating.preparse(
            output_format,
            lambda path: os.path.join(config.config_dir, "templates", path))
    # Use "as" binding (valid since Python 2.6) — the old
    # "except X, exc" form is a syntax error on Python 3
    except ImportError as exc:
        if "tempita" in str(exc):
            raise error.UserError(
                "To be able to use Tempita templates, install the 'tempita' package (%s)\n"
                " Possibly USING THE FOLLOWING COMMAND:\n"
                " %s/easy_install tempita" % (exc, os.path.dirname(sys.executable)))
        raise
def replace_fields(meta, patterns):
    """ Replace regex patterns in metafile fields.

        Each pattern has the form "field<delim>regex<delim>subst<delim>",
        where the LAST character of the pattern acts as the delimiter.
        Nested fields use a dot separator; a doubled dot escapes a
        literal dot inside a key.

        @param meta: Metafile dict, modified in place.
        @param patterns: Iterable of substitution patterns.
        @raise error.UserError: On malformed patterns or missing fields.
    """
    for pattern in patterns:
        try:
            # The pattern's last character doubles as its delimiter
            field, regex, subst, _ = pattern.split(pattern[-1])

            # TODO: Allow numerical indices, and "+" for append
            namespace = meta
            keypath = [
                i.replace('\0', '.')
                for i in field.replace('..', '\0').split('.')
            ]
            for key in keypath[:-1]:
                namespace = namespace[key]

            namespace[keypath[-1]] = re.sub(regex, subst,
                                            namespace[keypath[-1]])
        # Use "as" binding (valid since Python 2.6) — the old
        # "except X, exc" form is a syntax error on Python 3
        except (KeyError, IndexError, TypeError, ValueError) as exc:
            raise error.UserError("Bad substitution '%s' (%s)!" %
                                  (pattern, exc))
def mainloop(self): """ Manage category views. """ # Get client state proxy = config.engine.open() views = [ x for x in sorted(proxy.view.list()) if x.startswith(self.PREFIX) ] current_view = real_current_view = proxy.ui.current_view() if current_view not in views: if views: current_view = views[0] else: raise error.UserError( "There are no '{}*' views defined at all!".format( self.PREFIX)) # Check options if self.options.list: for name in sorted(views): print("{} {:5d} {}".format( '*' if name == real_current_view else ' ', proxy.view.size(xmlrpc.NOHASH, name), name[self.PREFIX_LEN:])) elif self.options.next or self.options.prev or self.options.update: # Determine next in line if self.options.update: new_view = current_view else: new_view = (views * 2)[views.index(current_view) + (1 if self.options.next else -1)] self.LOG.info("{} category view '{}'.".format( "Updating" if self.options.update else "Switching to", new_view)) # Update and switch to filtered view proxy.pyro.category.update(xmlrpc.NOHASH, new_view[self.PREFIX_LEN:]) proxy.ui.current_view.set(new_view) else: self.LOG.info("Current category view is '{}'.".format( current_view[self.PREFIX_LEN:])) self.LOG.info("Use '--help' to get usage information.")
def get_options(self):
    """ Get program options.

        Loads the configuration from the configured directory plus any
        optional config files, enables debug mode if requested, and
        applies any "-D key=value" overrides to the global config.

        @raise error.UserError: For a malformed "key=value" override.
    """
    super(ScriptBaseWithConfig, self).get_options()
    load_config.ConfigLoader(
        self.options.config_dir).load(self.OPTIONAL_CFG_FILES +
                                      self.options.config_file)
    if self.options.debug:
        config.debug = True
    for key_val in self.options.defines:
        try:
            key, val = key_val.split('=', 1)
        # Use "as" binding (valid since Python 2.6) — the old
        # "except X, exc" form is a syntax error on Python 3
        except ValueError as exc:
            raise error.UserError("Bad config override %r (%s)" %
                                  (key_val, exc))
        else:
            setattr(config, key, load_config.validate(key, val))
def execute(self, commands):
    """ Execute XMLRPC command(s). """
    # Accept a single " ; "-separated string, or any iterable of commands
    if hasattr(commands, 'split'):
        commands = [i.strip() for i in commands.split(' ; ')]

    results = {}
    for command in commands:
        try:
            method, args = command.split('=', 1)
            args = tuple(CommaLexer(args))
        except (ValueError, TypeError) as exc:
            raise error.UserError(
                "Bad command %r, probably missing a '=' (%s)" %
                (command, exc))

        # NOTE: these closures deliberately read "method" late, so they
        # see the final (prefix-stripped, namespaced) method name
        def print_result(data):
            "Helper to print XMLRPC call results"
            args_list = ''
            if args:
                args_list = '"' + '","'.join(args) + '"'
            print('%s\t%s\t%s=%s' % (self._fields["hash"], data,
                                     method.lstrip(':'), args_list))

        def store_result(data):
            "Helper to collect XMLRPC call results"
            results[method.lstrip(':')] = data

        # A '>' prefix prints results, a '!' prefix collects them
        observer = None
        if method.startswith('>'):
            observer, method = print_result, method.lstrip('>')
        elif method.startswith('!'):
            observer, method = store_result, method.lstrip('!')

        # Default to the download item namespace for plain method names
        if not (method.startswith(':') or method[:2].endswith('.')):
            method = 'd.' + method

        self._make_it_so("executing command on", [method], *args,
                         observer=observer)

    return results
def get_options(self):
    """ Get program options.

        Determines the configuration directory (--config-dir option,
        then the PYRO_CONFIG_DIR environment variable, then the class
        default), loads the configuration, and applies any "-D
        key=value" overrides to the global config.

        @raise error.UserError: For a malformed "key=value" override.
    """
    super(ScriptBaseWithConfig, self).get_options()
    self.config_dir = os.path.abspath(os.path.expanduser(
        self.options.config_dir
        or os.environ.get('PYRO_CONFIG_DIR', None)
        or self.CONFIG_DIR_DEFAULT))
    load_config.ConfigLoader(self.config_dir).load(
        self.OPTIONAL_CFG_FILES + self.options.config_file)
    if self.options.debug:
        config.debug = True
    for key_val in self.options.defines:
        try:
            key, val = key_val.split('=', 1)
        # Use "as" binding (valid since Python 2.6) — the old
        # "except X, exc" form is a syntax error on Python 3
        except ValueError as exc:
            raise error.UserError("Bad config override %r (%s)" %
                                  (key_val, exc))
        else:
            setattr(config, key, load_config.validate(key, val))
def setup(self):
    """ Set up inotify manager.

        See https://github.com/seb-m/pyinotify/.
    """
    if not pyinotify.WatchManager:
        raise error.UserError("You need to install 'pyinotify' to use %s (%s)!" % (
            self.__class__.__name__, pyinotify._import_error))  # pylint: disable=E1101, W0212

    self.manager = pyinotify.WatchManager()
    self.handler = TreeWatchHandler(job=self)
    self.notifier = pyinotify.AsyncNotifier(self.manager, self.handler)

    # Watch everything when debugging, else only completed writes and moves
    if self.LOG.isEnabledFor(logging.DEBUG):
        event_mask = pyinotify.ALL_EVENTS
    else:
        event_mask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO  # bogus pylint: disable=E1101

    # Add all configured base dirs (recursively, auto-tracking new subdirs)
    for base_dir in self.config.path:
        self.manager.add_watch(base_dir.strip(), event_mask,
                               rec=True, auto_add=True)
def _load_rules(self): """Load rule definitions from config.""" for ruleset in self.active_rulesets: section_name = 'sweep_rules_' + ruleset.lower() try: ruledefs = getattr(self.config, section_name) except AttributeError: raise error.UserError( "There is no [{}] section in your configuration".format( section_name.upper())) for ruledef, filtercond in ruledefs.items(): if ruledef.endswith('.filter'): rulename = ruledef.rsplit('.', 1)[0] rule = SweepRule( ruleset, rulename, int(ruledefs.get(rulename + '.prio', '999')), ruledefs.get(rulename + '.order', self.default_order), parse_cond(filtercond)) self.rules.append(rule) self.rules.sort(key=lambda x: (x.prio, x.name)) return self.rules
def mainloop(self):
    """ The main loop.

        Validates the CLI options, derives the announce URL filter
        prefix for --reannounce, and resolves tracker aliases into
        real announce URLs.
    """
    if not self.args:
        self.parser.error("No metafiles given, nothing to do!")

    if 1 < sum(bool(i) for i in (self.options.no_ssl, self.options.reannounce, self.options.reannounce_all)):
        self.parser.error("Conflicting options --no-ssl, --reannounce and --reannounce-all!")

    # Set filter criteria for metafiles
    filter_url_prefix = None
    if self.options.reannounce:
        # <scheme>://<netloc>/<path>?<query>
        filter_url_prefix = urlparse.urlsplit(self.options.reannounce, allow_fragments=False)
        filter_url_prefix = urlparse.urlunsplit((
            filter_url_prefix.scheme, filter_url_prefix.netloc, '/', '', ''  # bogus pylint: disable=E1103
        ))
        self.LOG.info("Filtering for metafiles with announce URL prefix %r..." % filter_url_prefix)

    if self.options.reannounce_all:
        self.options.reannounce = self.options.reannounce_all
    else:
        # When changing the announce URL w/o changing the domain, don't change the info hash!
        self.options.no_cross_seed = True

    # Resolve tracker alias, if URL doesn't look like an URL
    if self.options.reannounce and not urlparse.urlparse(self.options.reannounce).scheme:
        tracker_alias, idx = self.options.reannounce, "0"
        if '.' in tracker_alias:
            tracker_alias, idx = tracker_alias.split('.', 1)
        try:
            idx = int(idx, 10)
            _, tracker_url = config.lookup_announce_alias(tracker_alias)
            self.options.reannounce = tracker_url[idx]
        # Use "as" binding (valid since Python 2.6) — the old
        # "except X, exc" form is a syntax error on Python 3
        except (KeyError, IndexError, TypeError, ValueError) as exc:
            raise error.UserError("Unknown tracker alias or bogus URL %r (%s)!" % (
                self.options.reannounce, exc))
def set_custom(self, key, value=None): """ Set a custom value. C{key} might have the form "key=value" when value is C{None}. """ # Split combined key/value if value is None: try: key, value = key.split('=', 1) except (ValueError, TypeError), exc: raise error.UserError( "Bad custom field assignment %r, probably missing a '=' (%s)" % (key, exc)) # Check identifier rules if not key: raise error.UserError("Custom field name cannot be empty!") elif len(key) == 1 and key in "12345": method, args = "set_custom" + key, (value, ) elif not (key[0].isalpha() and key.replace("_", "").isalnum()): raise error.UserError( "Bad custom field name %r (must only contain a-z, A-Z, 0-9 and _)" % (key, )) else: method, args = "set_custom", (key, value) # Make the assignment self._make_it_so("setting custom_%s = %r on" % (key, value), [method], *args) self._fields["custom_" + key] = value def hash_check(self):
def create(self, datapath, tracker_urls, comment=None, root_name=None,
           created_by=None, private=False, no_date=False, progress=None,
           callback=None, chunk_min=0, chunk_max=0):
    """ Create a metafile with the path given on object creation.
        Returns the last metafile dict that was written (as an object, not bencoded).

        @param datapath: Path of the data to hash (overrides the stored path when set).
        @param tracker_urls: A single announce URL, or an iterable of them;
            values without a URL scheme are looked up as tracker aliases.
        @param comment: Optional "comment" field.
        @param root_name: Optional override for the metafile's root name.
        @param created_by: Optional "created by" field.
        @param private: Set the "private" flag in the info dict.
        @param no_date: Skip writing a "creation date" field.
        @param progress: Optional progress callback passed to hashing.
        @param callback: Optional callable to post-process each meta dict.
        @param chunk_min: Minimum piece size (0 = automatic).
        @param chunk_max: Maximum piece size (0 = automatic).
        @raise error.UserError: On a bad tracker URL or unknown alias.
    """
    if datapath:
        self.datapath = datapath
    try:
        # String concatenation raises TypeError for non-string iterables,
        # distinguishing a single URL from a list of URLs
        tracker_urls = ['' + tracker_urls]
    except TypeError:
        tracker_urls = list(tracker_urls)
    multi_mode = len(tracker_urls) > 1

    # TODO add optimization so the hashing happens only once for multiple URLs!
    for tracker_url in tracker_urls:
        # Lookup announce URLs from config file
        try:
            if urlparse.urlparse(tracker_url).scheme:
                # Use the 2nd-level domain of the announce URL as the alias
                tracker_alias = urlparse.urlparse(
                    tracker_url).netloc.split(':')[0].split('.')
                tracker_alias = tracker_alias[-2 if len(tracker_alias
                                                        ) > 1 else 0]
            else:
                tracker_alias, tracker_url = config.lookup_announce_alias(
                    tracker_url)
                tracker_url = tracker_url[0]
        except (KeyError, IndexError):
            raise error.UserError("Bad tracker URL %r, or unknown alias!"
                                  % (tracker_url, ))

        # Determine metafile name
        output_name = self.filename
        if multi_mode:
            # Add 2nd level of announce URL domain to metafile name
            output_name = list(os.path.splitext(output_name))
            try:
                output_name[1:1] = '-' + tracker_alias
            except (IndexError, ):
                self.LOG.error("Malformed announce URL %r, skipping!" %
                               (tracker_url, ))
                continue
            output_name = ''.join(output_name)

        # Hash the data
        self.LOG.info("Creating %r for %s %r..." % (
            output_name,
            "filenames read from" if self._fifo else "data in",
            self.datapath,
        ))
        meta, _ = self._make_meta(tracker_url, root_name, private,
                                  progress, chunk_min, chunk_max)

        # Add optional fields
        if comment:
            meta["comment"] = comment
        if created_by:
            meta["created by"] = created_by
        if not no_date:
            meta["creation date"] = int(time.time())
        if callback:
            callback(meta)

        # Write metafile to disk
        self.LOG.debug("Writing %r..." % (output_name, ))
        bencode.bwrite(output_name, meta)

    return meta
self.LOG.info("Would call command(s) %r" % (cmds, )) else: for cmd in cmds: if self.options.call: logged_cmd = cmd[0] else: logged_cmd = '"%s"' % ('" "'.join(cmd), ) if self.options.verbose: self.LOG.info("Calling: %s" % (logged_cmd, )) try: if self.options.call: subprocess.check_call(cmd[0], shell=True) else: subprocess.check_call(cmd) except subprocess.CalledProcessError, exc: raise error.UserError("Command failed: %s" % (exc, )) except OSError, exc: raise error.UserError("Command failed (%s): %s" % ( logged_cmd, exc, )) # Dump as JSON array? elif self.options.json: self.json_dump(matches) # Show via template? elif self.options.output_template: output_template = self.options.output_template if not output_template.startswith("file:"): output_template = "file:" + output_template
def load_config(self, namespace=None, rtorrent_rc=None):
    """ Load file given in "rtorrent_rc".

        Scans the rTorrent configuration for throttle definitions and
        the SCGI connection keys, copies them into the given namespace
        (default: the global ``config`` module), and finally derives
        ``config.scgi_url`` from them, preferring a UNIX domain socket
        over a TCP socket.

        @raise error.UserError: If no config file is defined or it
            doesn't exist.
    """

    def cfgkey(key):
        "Sanitize rtorrent config keys"
        return key.replace('.', '_')

    if namespace is None:
        namespace = config

    # Only load when needed (also prevents multiple loading)
    if not all(
            getattr(namespace, key, False)
            for key in self.RTORRENT_RC_KEYS):
        # Get and check config file name
        if not rtorrent_rc:
            rtorrent_rc = getattr(config, "rtorrent_rc", None)
        if not rtorrent_rc:
            raise error.UserError(
                "No 'rtorrent_rc' path defined in configuration!")
        if not os.path.isfile(rtorrent_rc):
            raise error.UserError("Config file %r doesn't exist!" %
                                  (rtorrent_rc, ))

        # Parse the file
        self.LOG.debug("Loading rtorrent config from %r" %
                       (rtorrent_rc, ))
        with closing(open(rtorrent_rc)) as handle:
            for line in handle.readlines():
                # Skip comments and empty lines
                line = line.strip()
                if not line or line.startswith("#"):
                    continue

                # Be lenient about errors, after all it's not our own config file
                try:
                    key, val = line.split("=", 1)
                except ValueError:
                    self.LOG.warning("Ignored invalid line %r in %r!"
                                     % (line, rtorrent_rc))
                    continue
                key, val = key.strip(), val.strip()
                key = self.RTORRENT_RC_ALIASES.get(key, key)

                # Copy values we're interested in
                if key in self.RTORRENT_RC_THROTTLE_KEYS:
                    # The first comma-separated token is the throttle name
                    val = val.split(',')[0].strip()
                    self.LOG.debug("rtorrent.rc: added throttle %r" %
                                   (val, ))
                    namespace.throttle_names.add(val)
                elif key in self.RTORRENT_RC_KEYS and not getattr(
                        namespace, cfgkey(key), None):
                    # First occurrence wins; explicit settings take precedence
                    self.LOG.debug("rtorrent.rc: %s = %s" % (key, val))
                    setattr(namespace, cfgkey(key), val)

    # Validate fields
    for key in self.RTORRENT_RC_KEYS:
        key = cfgkey(key)
        setattr(namespace, key,
                load_config.validate(key, getattr(namespace, key, None)))
    # Absolute socket paths get the scgi:// scheme prepended
    if config.scgi_local and config.scgi_local.startswith("/"):
        config.scgi_local = "scgi://" + config.scgi_local
    if config.scgi_port and not config.scgi_port.startswith("scgi://"):
        config.scgi_port = "scgi://" + config.scgi_port

    # Prefer UNIX domain sockets over TCP sockets
    config.scgi_url = config.scgi_local or config.scgi_port
break values.append(default) else: values.append(str(val)) else: print '\t'.join(values) elif self.options.create_import: conf_dirs = {} # Scan given directories for pattern in self.options.create_import: folder = os.path.expanduser(os.path.dirname(pattern)) if not os.path.isdir(folder): raise error.UserError( "Parent of --create-import is not a directory: {}". format(os.path.dirname(pattern))) # Read names of files to ignore ignore_file = os.path.join(folder, '.rcignore') rc_ignore = set(['.*', '*~']) if os.path.exists(ignore_file): with open(ignore_file) as handle: for line in handle: line = line.strip() if line and not line.startswith('#'): rc_ignore.add(line) folder = os.path.abspath(folder) files = glob.glob( os.path.join(folder, os.path.basename(pattern)))
def mainloop(self):
    """ The main loop.

        Validates the command-line options, optionally resolves a tracker
        alias to a full announce URL, then walks over all metafiles given
        on the command line and applies the requested transformations
        (privacy flag, key cleaning, resume data, announce URL, comment /
        creation date, fast-resume, field assignments). Changed metafiles
        are rewritten in place (via a temp file) or into the output
        directory; a summary of changed / skipped files is logged at the end.
    """
    if not self.args:
        self.parser.error("No metafiles given, nothing to do!")

    # --no-ssl, --reannounce and --reannounce-all are mutually exclusive
    if 1 < sum(bool(i) for i in (self.options.no_ssl, self.options.reannounce, self.options.reannounce_all)):
        self.parser.error("Conflicting options --no-ssl, --reannounce and --reannounce-all!")

    # Set filter criteria for metafiles
    filter_url_prefix = None
    if self.options.reannounce:
        # <scheme>://<netloc>/<path>?<query>
        # Only keep scheme + host: metafiles are matched on that URL prefix below
        filter_url_prefix = urlparse.urlsplit(self.options.reannounce, allow_fragments=False)
        filter_url_prefix = urlparse.urlunsplit((
            filter_url_prefix.scheme, filter_url_prefix.netloc, '/', '', ''  # bogus pylint: disable=E1103
        ))
        self.LOG.info("Filtering for metafiles with announce URL prefix %r..." % filter_url_prefix)

    if self.options.reannounce_all:
        self.options.reannounce = self.options.reannounce_all
    else:
        # When changing the announce URL w/o changing the domain, don't change the info hash!
        self.options.no_cross_seed = True

    # Resolve tracker alias, if URL doesn't look like an URL
    if self.options.reannounce and not urlparse.urlparse(self.options.reannounce).scheme:
        # Alias may carry an index selecting one of several URLs ("ALIAS.2")
        tracker_alias, idx = self.options.reannounce, "0"
        if '.' in tracker_alias:
            tracker_alias, idx = tracker_alias.split('.', 1)
        try:
            idx = int(idx, 10)
            _, tracker_url = config.lookup_announce_alias(tracker_alias)
            self.options.reannounce = tracker_url[idx]
        except (KeyError, IndexError, TypeError, ValueError) as exc:
            raise error.UserError("Unknown tracker alias or bogus URL %r (%s)!"
                                  % (self.options.reannounce, exc))

    # go through given files
    bad = 0       # number of unreadable / unparsable metafiles
    changed = 0   # number of metafiles actually (or would-be) rewritten
    for filename in self.args:
        try:
            # Read and remember current content (serialized form is compared
            # later to detect whether anything actually changed)
            metainfo = bencode.bread(filename)
            old_metainfo = bencode.bencode(metainfo)
        except (EnvironmentError, KeyError, bencode.BencodeError) as exc:
            self.LOG.warning("Skipping bad metafile %r (%s: %s)" % (filename, type(exc).__name__, exc))
            bad += 1
        else:
            # Check metafile integrity
            try:
                metafile.check_meta(metainfo)
            except ValueError as exc:
                self.LOG.warn("Metafile %r failed integrity check: %s" % (filename, exc,))
                # --no-skip forces processing of files failing the check
                if not self.options.no_skip:
                    continue

            # Skip any metafiles that don't meet the pre-conditions
            # NOTE(review): message typo "no tracked by" (should be "not
            # tracked by") — left as-is in this documentation-only change
            if filter_url_prefix and not metainfo['announce'].startswith(filter_url_prefix):
                self.LOG.warn("Skipping metafile %r no tracked by %r!" % (filename, filter_url_prefix,))
                continue

            # Keep resume info safe (so --clean-xseed can restore it below)
            libtorrent_resume = {}
            if "libtorrent_resume" in metainfo:
                try:
                    libtorrent_resume["bitfield"] = metainfo["libtorrent_resume"]["bitfield"]
                except KeyError:
                    pass  # nothing to remember
                libtorrent_resume["files"] = copy.deepcopy(metainfo["libtorrent_resume"]["files"])

            # Change private flag?
            if self.options.make_private and not metainfo["info"].get("private", 0):
                self.LOG.info("Setting private flag...")
                metainfo["info"]["private"] = 1
            if self.options.make_public and metainfo["info"].get("private", 0):
                self.LOG.info("Clearing private flag...")
                del metainfo["info"]["private"]

            # Remove non-standard keys?
            if self.options.clean or self.options.clean_all or self.options.clean_xseed:
                metafile.clean_meta(metainfo, including_info=not self.options.clean, logger=self.LOG.info)

            # Restore resume info?
            if self.options.clean_xseed:
                if libtorrent_resume:
                    self.LOG.info("Restoring key 'libtorrent_resume'...")
                    metainfo.setdefault("libtorrent_resume", {})
                    metainfo["libtorrent_resume"].update(libtorrent_resume)
                else:
                    self.LOG.warn("No resume information found!")

            # Clean rTorrent data?
            # NOTE(review): attribute name "RT_RESUMT_KEYS" looks misspelled
            # ("RESUMT" vs "RESUME") — defined outside this view, so kept as-is
            if self.options.clean_rtorrent:
                for key in self.RT_RESUMT_KEYS:
                    if key in metainfo:
                        self.LOG.info("Removing key %r..." % (key,))
                        del metainfo[key]

            # Change announce URL?
            if self.options.reannounce:
                metainfo['announce'] = self.options.reannounce
                if "announce-list" in metainfo:
                    del metainfo["announce-list"]

                if not self.options.no_cross_seed:
                    # Enforce unique hash per tracker
                    metainfo["info"]["x_cross_seed"] = hashlib.md5(self.options.reannounce).hexdigest()
            if self.options.no_ssl:
                # We're assuming here the same (default) port is used
                metainfo['announce'] = (metainfo['announce']
                    .replace("https://", "http://").replace(":443/", ":80/"))

            # Change comment or creation date?
            if self.options.comment is not None:
                # Empty string means "delete the comment"
                if self.options.comment:
                    metainfo["comment"] = self.options.comment
                elif "comment" in metainfo:
                    del metainfo["comment"]
            if self.options.bump_date:
                metainfo["creation date"] = int(time.time())
            if self.options.no_date and "creation date" in metainfo:
                del metainfo["creation date"]

            # Add fast-resume data?
            if self.options.hashed:
                # "{}" in the --hashed path is replaced by the torrent's name
                try:
                    metafile.add_fast_resume(metainfo, self.options.hashed.replace("{}", metainfo["info"]["name"]))
                except EnvironmentError as exc:
                    self.fatal("Error making fast-resume data (%s)" % (exc,))
                    raise

            # Set specific keys?
            metafile.assign_fields(metainfo, self.options.set, self.options.debug)
            replace_fields(metainfo, self.options.regex)

            # Write new metafile, if changed
            new_metainfo = bencode.bencode(metainfo)
            if new_metainfo != old_metainfo:
                if self.options.output_directory:
                    # Write a copy to the output directory (original untouched)
                    filename = os.path.join(self.options.output_directory, os.path.basename(filename))
                    self.LOG.info("Writing %r..." % filename)

                    if not self.options.dry_run:
                        bencode.bwrite(filename, metainfo)
                        if "libtorrent_resume" in metainfo:
                            # Also write clean version
                            filename = filename.replace(".torrent", "-no-resume.torrent")
                            del metainfo["libtorrent_resume"]
                            self.LOG.info("Writing %r..." % filename)
                            bencode.bwrite(filename, metainfo)
                else:
                    self.LOG.info("Changing %r..."
                                  % filename)

                    if not self.options.dry_run:
                        # Write to temporary file (dot-prefixed, same directory,
                        # so the final rename stays on one filesystem)
                        tempname = os.path.join(
                            os.path.dirname(filename),
                            '.' + os.path.basename(filename),
                        )
                        self.LOG.debug("Writing %r..." % tempname)
                        bencode.bwrite(tempname, metainfo)

                        # Replace existing file
                        if os.name != "posix":
                            # cannot rename to existing target on WIN32
                            os.remove(filename)
                        try:
                            os.rename(tempname, filename)
                        except EnvironmentError as exc:
                            # TODO: Try to write directly, keeping a backup!
                            raise error.LoggableError("Can't rename tempfile %r to %r (%s)" % (
                                tempname, filename, exc
                            ))

                changed += 1

    # Print summary
    if changed:
        self.LOG.info("%s %d metafile(s)." % (
            "Would've changed" if self.options.dry_run else "Changed", changed
        ))
    if bad:
        self.LOG.warn("Skipped %d bad metafile(s)!" % (bad))