def _set_mappings(self):
    """ Set command mappings according to rTorrent version.

        Queries the client for its version, derives a comparable version
        tuple from it, and merges in every version-specific XMLRPC mapping
        table from the configuration that applies to this client.

        @return: Tuple of (version strings, parsed version tuple).
        @raise LoggableError: When the client cannot be reached.
    """
    try:
        client_ver = self.system.client_version()
        library_ver = self.system.library_version()
        self._versions = (client_ver, library_ver)
        self._version_info = tuple(int(part) for part in client_ver.split('.'))
        # Versions before 0.8.7 use the old (deprecated) command names
        self._use_deprecated = self._version_info < (0, 8, 7)

        # Merge mappings for this version (copy the class-level table first,
        # so the instance gets its own mutable mapping)
        self._mapping = self._mapping.copy()
        tables = sorted(
            item for item in vars(config).items()
            if item[0].startswith("xmlrpc_"))
        for name, table in tables:
            # "xmlrpc_0_8_7" -> (0, 8, 7); apply only tables at or below our version
            table_version = tuple(int(part) for part in name.split('_')[1:])
            if table_version <= self._version_info:
                if config.debug:
                    self.LOG.debug("MAPPING for %r added: %r" % (table_version, table))
                self._mapping.update(table)
        self._fix_mappings()
    except ERRORS as exc:
        raise error.LoggableError("Can't connect to %s (%s)" % (self._url, exc))

    return self._versions, self._version_info
def __getitem__(self, key): """ Return object attribute named C{key}. Additional formatting is provided by adding modifiers like ".sz" (byte size formatting) to the normal field name. If the wrapped object is None, the upper-case C{key} (without any modifiers) is returned instead, to allow the formatting of a header line. """ # Check for formatter specifications formatter = None have_raw = False if '.' in key: key, formats = key.split('.', 1) formats = formats.split('.') have_raw = formats[0] == "raw" if have_raw: formats = formats[1:] for fmtname in formats: try: fmtfunc = globals()["fmt_" + fmtname] except KeyError: raise error.UserError("Unknown formatting spec %r for %r" % (fmtname, key)) else: formatter = (lambda val, f=fmtfunc, k=formatter: f(k(val)) ) if formatter else fmtfunc # Check for a field formatter try: field = engine.FieldDefinition.FIELDS[key] except KeyError: if key not in self.defaults and not engine.TorrentProxy.add_manifold_attribute( key): raise error.UserError("Unknown field %r" % (key, )) else: if field._formatter and not have_raw: formatter = (lambda val, f=formatter: f(field._formatter(val)) ) if formatter else field._formatter if self.obj is None: # Return column name return '%' if key == "pc" else key.upper() else: # Return formatted value val = super(OutputMapping, self).__getitem__(key) try: return formatter(val) if formatter else val except (TypeError, ValueError, KeyError, IndexError, AttributeError) as exc: raise error.LoggableError("While formatting %s=%r: %s" % (key, val, exc))
def preparse(output_format):
    """ Do any special processing of a template, and return the result.

        @param output_format: Template text (or preparsed template).
        @return: The preparsed template object.
        @raise UserError: When the Tempita package is not installed.
        @raise LoggableError: When the template file cannot be read.
    """
    def template_path(path):
        # Resolve template names relative to the configured templates directory
        return os.path.join(config.config_dir, "templates", path)

    try:
        return templating.preparse(output_format, template_path)
    except ImportError as exc:
        # Give an actionable hint when Tempita itself is missing
        if "tempita" in str(exc):
            raise error.UserError(
                "To be able to use Tempita templates, install the 'tempita' package (%s)\n"
                " Possibly USING THE FOLLOWING COMMAND:\n"
                " %s/easy_install tempita" % (exc, os.path.dirname(sys.executable)))
        raise
    except IOError as exc:
        raise error.LoggableError("Cannot read template: {}".format(exc))
def expand_template(template, namespace):
    """ Expand the given (preparsed) template.
        Currently, only Tempita templates are supported.

        @param template: The template, in preparsed form, or as a string (which then will be preparsed).
        @param namespace: Custom namespace that is added to the predefined defaults
            and takes precedence over those.
        @return: The expanded template.
        @raise LoggableError: In case of typical errors during template execution.
    """
    # Collect all "fmt_*" / "filter_*" globals as helpers, keyed without their prefix
    formatters = {}
    for name, method in globals().items():
        if name.startswith(("fmt_", "filter_")):
            formatters[name.split('_', 1)[1]] = method
    helpers = Bunch()
    helpers.update(formatters)

    # Default namespace first, then caller-provided names override everything
    variables = dict(h=helpers, c=config.custom_template_helpers)
    variables.update(formatters)  # redundant, for backwards compatibility
    variables.update(namespace)

    # Expand template
    try:
        template = preparse(template)
        return template.substitute(**variables)
    except (AttributeError, ValueError, NameError, TypeError) as exc:
        hint = ''
        if "column" in str(exc):
            # Try to extract the column number to point at the offending spot
            try:
                col = int(str(exc).split("column")[1].split()[0])
            except (TypeError, ValueError):
                pass
            else:
                hint = "%svVv\n" % (' ' * (col + 4))

        # Show the template source with line numbers for easier debugging
        content = getattr(template, "content", template)
        listing = "\n".join(
            "%3d: %s" % (i + 1, line)
            for i, line in enumerate(content.splitlines()))
        raise error.LoggableError(
            "%s: %s in template:\n%s%s" % (type(exc).__name__, exc, hint, listing))
def expand_template(template, namespace):
    """ Expand the given (preparsed) template.
        Currently, only Tempita templates are supported.

        @param template: The template, in preparsed form, or as a string (which then will be preparsed).
        @param namespace: Custom namespace that is added to the predefined defaults
            and takes precedence over those.
        @return: The expanded template.
        @raise LoggableError: In case of typical errors during template execution.
    """
    # Combine provided namespace with defaults
    variables = {}

    # Add format specifiers (for headers, disable them)
    variables.update((name[4:], method)
                     for name, method in globals().items()
                     if name.startswith("fmt_"))

    # Provided namespace takes precedence
    variables.update(namespace)

    # Expand template
    try:
        template = preparse(template)
        return template.substitute(**variables)
    # FIX: was Python 2-only "except (...), exc:" syntax; "as exc" is valid on
    # Python 2.6+ and 3.x and matches the rest of this file.
    except (AttributeError, ValueError, NameError, TypeError) as exc:
        hint = ''
        if "column" in str(exc):
            # Try to extract the column number to point at the offending spot
            try:
                col = int(str(exc).split("column")[1].split()[0])
            except (TypeError, ValueError):
                pass
            else:
                hint = "%svVv\n" % (' ' * (col + 4))

        # Dump the template with line numbers to make the error locatable
        content = getattr(template, "content", template)
        raise error.LoggableError(
            "%s: %s in template:\n%s%s" % (type(exc).__name__, exc, hint, "\n".join(
                "%3d: %s" % (i + 1, line)
                for i, line in enumerate(content.splitlines()))))
'.' + os.path.basename(filename), ) self.LOG.debug("Writing %r..." % tempname) bencode.bwrite(tempname, metainfo) # Replace existing file if os.name != "posix": # cannot rename to existing target on WIN32 os.remove(filename) try: os.rename(tempname, filename) except EnvironmentError, exc: # TODO: Try to write directly, keeping a backup! raise error.LoggableError("Can't rename tempfile %r to %r (%s)" % ( tempname, filename, exc )) changed += 1 # Print summary if changed: self.LOG.info("%s %d metafile(s)." % ( "Would've changed" if self.options.dry_run else "Changed", changed )) if bad: self.LOG.warn("Skipped %d bad metafile(s)!" % (bad)) def run(): #pragma: no cover """ The entry point.
def mainloop(self):
    """ The main loop: validate options, then read, transform, and rewrite
        each given metafile.

        @raise UserError: For bad tracker aliases / URLs.
        @raise LoggableError: When a changed metafile cannot be renamed into place.
    """
    if not self.args:
        self.parser.error("No metafiles given, nothing to do!")

    if 1 < sum(bool(i) for i in (self.options.no_ssl, self.options.reannounce, self.options.reannounce_all)):
        self.parser.error("Conflicting options --no-ssl, --reannounce and --reannounce-all!")

    # Set filter criteria for metafiles
    filter_url_prefix = None
    if self.options.reannounce:
        # <scheme>://<netloc>/<path>?<query>
        filter_url_prefix = urlparse.urlsplit(self.options.reannounce, allow_fragments=False)
        filter_url_prefix = urlparse.urlunsplit((
            filter_url_prefix.scheme, filter_url_prefix.netloc, '/', '', ''  # bogus pylint: disable=E1103
        ))
        self.LOG.info("Filtering for metafiles with announce URL prefix %r..." % filter_url_prefix)

    if self.options.reannounce_all:
        self.options.reannounce = self.options.reannounce_all
    else:
        # When changing the announce URL w/o changing the domain, don't change the info hash!
        self.options.no_cross_seed = True

    # Resolve tracker alias, if URL doesn't look like an URL
    if self.options.reannounce and not urlparse.urlparse(self.options.reannounce).scheme:
        tracker_alias, idx = self.options.reannounce, "0"
        if '.' in tracker_alias:
            # "alias.N" selects the N-th URL of that alias
            tracker_alias, idx = tracker_alias.split('.', 1)
        try:
            idx = int(idx, 10)
            _, tracker_url = config.lookup_announce_alias(tracker_alias)
            self.options.reannounce = tracker_url[idx]
        except (KeyError, IndexError, TypeError, ValueError) as exc:
            raise error.UserError("Unknown tracker alias or bogus URL %r (%s)!"
                                  % (self.options.reannounce, exc))

    # go through given files
    bad = 0
    changed = 0
    for filename in self.args:
        try:
            # Read and remember current content
            metainfo = bencode.bread(filename)
            old_metainfo = bencode.bencode(metainfo)
        except (EnvironmentError, KeyError, bencode.BencodeError) as exc:
            self.LOG.warning("Skipping bad metafile %r (%s: %s)" % (filename, type(exc).__name__, exc))
            bad += 1
        else:
            # Check metafile integrity
            try:
                metafile.check_meta(metainfo)
            except ValueError as exc:
                self.LOG.warn("Metafile %r failed integrity check: %s" % (filename, exc,))
                if not self.options.no_skip:
                    continue

            # Skip any metafiles that don't meet the pre-conditions
            # FIX: message read "no tracked by" — corrected to "not tracked by"
            if filter_url_prefix and not metainfo['announce'].startswith(filter_url_prefix):
                self.LOG.warn("Skipping metafile %r not tracked by %r!" % (filename, filter_url_prefix,))
                continue

            # Keep resume info safe
            libtorrent_resume = {}
            if "libtorrent_resume" in metainfo:
                try:
                    libtorrent_resume["bitfield"] = metainfo["libtorrent_resume"]["bitfield"]
                except KeyError:
                    pass  # nothing to remember
                libtorrent_resume["files"] = copy.deepcopy(metainfo["libtorrent_resume"]["files"])

            # Change private flag?
            if self.options.make_private and not metainfo["info"].get("private", 0):
                self.LOG.info("Setting private flag...")
                metainfo["info"]["private"] = 1
            if self.options.make_public and metainfo["info"].get("private", 0):
                self.LOG.info("Clearing private flag...")
                del metainfo["info"]["private"]

            # Remove non-standard keys?
            if self.options.clean or self.options.clean_all or self.options.clean_xseed:
                metafile.clean_meta(metainfo, including_info=not self.options.clean, logger=self.LOG.info)

            # Restore resume info?
            if self.options.clean_xseed:
                if libtorrent_resume:
                    self.LOG.info("Restoring key 'libtorrent_resume'...")
                    metainfo.setdefault("libtorrent_resume", {})
                    metainfo["libtorrent_resume"].update(libtorrent_resume)
                else:
                    self.LOG.warn("No resume information found!")

            # Clean rTorrent data?
            # NOTE(review): attribute name "RT_RESUMT_KEYS" looks like a typo for
            # "RT_RESUME_KEYS", but it is declared elsewhere — left unchanged here.
            if self.options.clean_rtorrent:
                for key in self.RT_RESUMT_KEYS:
                    if key in metainfo:
                        self.LOG.info("Removing key %r..." % (key,))
                        del metainfo[key]

            # Change announce URL?
            if self.options.reannounce:
                metainfo['announce'] = self.options.reannounce
                if "announce-list" in metainfo:
                    del metainfo["announce-list"]
                if not self.options.no_cross_seed:
                    # Enforce unique hash per tracker
                    metainfo["info"]["x_cross_seed"] = hashlib.md5(self.options.reannounce).hexdigest()
            if self.options.no_ssl:
                # We're assuming here the same (default) port is used
                metainfo['announce'] = (metainfo['announce']
                                        .replace("https://", "http://").replace(":443/", ":80/"))

            # Change comment or creation date?
            if self.options.comment is not None:
                if self.options.comment:
                    metainfo["comment"] = self.options.comment
                elif "comment" in metainfo:
                    del metainfo["comment"]
            if self.options.bump_date:
                metainfo["creation date"] = int(time.time())
            if self.options.no_date and "creation date" in metainfo:
                del metainfo["creation date"]

            # Add fast-resume data?
            if self.options.hashed:
                try:
                    metafile.add_fast_resume(metainfo, self.options.hashed.replace("{}", metainfo["info"]["name"]))
                except EnvironmentError as exc:
                    self.fatal("Error making fast-resume data (%s)" % (exc,))
                    raise

            # Set specific keys?
            metafile.assign_fields(metainfo, self.options.set, self.options.debug)
            replace_fields(metainfo, self.options.regex)

            # Write new metafile, if changed
            new_metainfo = bencode.bencode(metainfo)
            if new_metainfo != old_metainfo:
                if self.options.output_directory:
                    filename = os.path.join(self.options.output_directory, os.path.basename(filename))
                    self.LOG.info("Writing %r..." % filename)
                    if not self.options.dry_run:
                        bencode.bwrite(filename, metainfo)
                        if "libtorrent_resume" in metainfo:
                            # Also write clean version
                            filename = filename.replace(".torrent", "-no-resume.torrent")
                            del metainfo["libtorrent_resume"]
                            self.LOG.info("Writing %r..." % filename)
                            bencode.bwrite(filename, metainfo)
                else:
                    self.LOG.info("Changing %r..." % filename)
                    if not self.options.dry_run:
                        # Write to temporary file
                        tempname = os.path.join(
                            os.path.dirname(filename),
                            '.' + os.path.basename(filename),
                        )
                        self.LOG.debug("Writing %r..." % tempname)
                        bencode.bwrite(tempname, metainfo)

                        # Replace existing file
                        if os.name != "posix":
                            # cannot rename to existing target on WIN32
                            os.remove(filename)
                        try:
                            os.rename(tempname, filename)
                        except EnvironmentError as exc:
                            # TODO: Try to write directly, keeping a backup!
                            raise error.LoggableError("Can't rename tempfile %r to %r (%s)" % (
                                tempname, filename, exc
                            ))

                changed += 1

    # Print summary
    if changed:
        self.LOG.info("%s %d metafile(s)." % (
            "Would've changed" if self.options.dry_run else "Changed", changed
        ))
    if bad:
        self.LOG.warn("Skipped %d bad metafile(s)!" % (bad))
def __call__(self, *args, **kwargs):
    """ Execute the method call.

        `raw_xml=True` returns the unparsed XML-RPC response.
        `flatten=True` removes one nesting level in a result list (useful for multicalls).
        `fail_silently=True` suppresses the bad-packet dump on deserialization errors.

        @raise HashNotFound: When rTorrent reports an unknown info-hash.
    """
    self._proxy._requests += 1
    start = time.time()
    raw_xml = kwargs.get("raw_xml", False)
    flatten = kwargs.get("flatten", False)
    fail_silently = kwargs.get("fail_silently", False)

    try:
        # Map multicall arguments
        if not self._proxy._use_deprecated:
            if self._method_name.endswith((".multicall", ".multicall.filtered")):
                if self._method_name in ("d.multicall", "d.multicall.filtered"):
                    # New API requires an explicit (fake) target as first argument
                    args = (0, ) + args
                if config.debug:
                    self._proxy.LOG.debug("BEFORE MAPPING: %r" % (args, ))
                if self._method_name == "system.multicall":
                    for call in args[0]:
                        call["methodName"] = self._proxy._map_call(call["methodName"])
                else:
                    # Map the command names listed after (target, pattern)
                    args = args[0:2] + tuple(self._proxy._map_call(i) for i in args[2:])
                if config.debug:
                    self._proxy.LOG.debug("AFTER MAPPING: %r" % (args, ))
            elif self._method_name in self.NEEDS_FAKE_TARGET:
                args = (0, ) + args

        # Prepare request
        xmlreq = xmlrpclib.dumps(args, self._proxy._map_call(self._method_name))
        ##xmlreq = xmlreq.replace('\n', '')
        self._outbound = len(xmlreq)
        self._proxy._outbound += self._outbound
        self._proxy._outbound_max = max(self._proxy._outbound_max, self._outbound)
        if config.debug:
            self._proxy.LOG.debug("XMLRPC raw request: %r" % xmlreq)

        # Send it
        scgi_req = xmlrpc2scgi.SCGIRequest(self._proxy._transport)
        xmlresp = scgi_req.send(xmlreq)
        self._inbound = len(xmlresp)
        self._proxy._inbound += self._inbound
        self._proxy._inbound_max = max(self._proxy._inbound_max, self._inbound)
        self._net_latency = scgi_req.latency
        self._proxy._net_latency += self._net_latency

        # Return raw XML response?
        if raw_xml:
            return xmlresp

        # This fixes a bug with the Python xmlrpclib module
        # (has no handler for <i8> in some versions)
        xmlresp = xmlresp.replace("<i8>", "<i4>").replace("</i8>", "</i4>")

        try:
            # Deserialize data
            result = xmlrpclib.loads(xmlresp)[0][0]
        except (KeyboardInterrupt, SystemExit):
            # Don't catch these
            raise
        except:
            exc_type, exc = sys.exc_info()[:2]
            if exc_type is xmlrpclib.Fault and exc.faultCode == -501 and exc.faultString == 'Could not find info-hash.':
                # NOTE(review): message template and args are passed separately —
                # presumably HashNotFound's base class does the .format() itself;
                # verify against the exception definition before changing.
                raise HashNotFound("Unknown hash for {}({}) @ {}",
                                   self._method_name,
                                   args[0] if args else '',
                                   self._proxy._url)
            if not fail_silently:
                # Dump the bad packet, then re-raise
                # FIX: use a context manager instead of open/try/finally, and
                # sys.stderr.write instead of the Python 2-only "print >>" statement.
                filename = "/tmp/xmlrpc2scgi-%s.xml" % os.getuid()
                with open(filename, "w") as handle:
                    handle.write("REQUEST\n")
                    handle.write(xmlreq)
                    handle.write("\nRESPONSE\n")
                    handle.write(xmlresp)
                sys.stderr.write("INFO: Bad data packets written to %r\n" % filename)
            raise
        else:
            try:
                return sum(result, []) if flatten else result
            except TypeError:
                # A dict with 'faultCode' in a multicall result is an embedded fault
                if result and isinstance(result, list) and isinstance(
                        result[0], dict) and 'faultCode' in result[0]:
                    raise error.LoggableError(
                        "XMLRPC error in multicall: " + repr(result[0]))
                else:
                    raise
    finally:
        # Calculate latency
        self._latency = time.time() - start
        self._proxy._latency += self._latency
        if config.debug:
            self._proxy.LOG.debug(
                "%s(%s) took %.3f secs" % (self._method_name, ", ".join(
                    repr(i) for i in args), self._latency))