Example #1
    def fetch(self, name, engine_name=None):
        """ Get a field on demand.
        """
        # TODO: Get each on-demand field in a multicall for all other items, since
        # we likely need it anyway; another (easier) way would be to pre-fetch dynamically
        # with the list of fields from filters and output formats
        try:
            return self._fields[name]
        except KeyError:
            if isinstance(name, (int, long)):
                name = "custom_%d" % name

            if name == "done":
                val = float(
                    self.fetch("completed_chunks")) / self.fetch("size_chunks")
            elif name == "files":
                val = self._get_files()
            elif name.startswith("kind_") and name[5:].isdigit():
                val = self._get_kind(int(name[5:], 10))
            elif name.startswith("d_") or name.startswith("d."):
                val = self.fetch('=' + (engine_name or name)[2:])
            elif name.startswith("custom_"):
                key = name[7:]
                try:
                    if len(key) == 1 and key in "12345":
                        val = getattr(self._engine._rpc.d,
                                      "custom" + key)(self._fields["hash"])
                    else:
                        val = self._engine._rpc.d.custom(
                            self._fields["hash"], key)
                except xmlrpc.ERRORS as exc:
                    raise error.EngineError("While accessing field %r: %s" %
                                            (name, exc))
            else:
                getter_name = engine_name if engine_name else RtorrentEngine.PYRO2RT_MAPPING.get(
                    name, name)
                if getter_name[0] == '=':
                    getter_name = getter_name[1:]
                else:
                    getter_name = "get_" + getter_name
                getter = getattr(self._engine._rpc.d, getter_name)

                try:
                    val = getter(self._fields["hash"])
                except xmlrpc.ERRORS as exc:
                    raise error.EngineError("While accessing field %r: %s" %
                                            (name, exc))

            # TODO: Currently, NOT caching makes no sense; in a daemon, it does!
            #if isinstance(FieldDefinition.FIELDS.get(name), engine.ConstantField):
            self._fields[name] = val

            return val
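
A minimal standalone sketch of the lazy-cache pattern behind fetch(): a value is computed on first access, stored in the field dict, and served from there afterwards. The LazyFields class and its resolvers mapping are hypothetical and not part of pyrocore.

class LazyFields(object):
    """Compute field values on first access, then serve them from a cache."""

    def __init__(self, resolvers):
        self._fields = {}            # cached values, like RtorrentItem._fields
        self._resolvers = resolvers  # field name -> callable computing the value

    def fetch(self, name):
        try:
            return self._fields[name]
        except KeyError:
            val = self._resolvers[name]()  # compute on demand
            self._fields[name] = val       # cache for later lookups
            return val

fields = LazyFields({"done": lambda: 512.0 / 1024})
print(fields.fetch("done"))  # computed once ...
print(fields.fetch("done"))  # ... then answered from the cache
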
Example #2
    def show(self, items, view=None, append=False, disjoin=False):
        """ Visualize a set of items (search result), and return the view name.
        """
        proxy = self.open()
        view = self._resolve_viewname(view or "rtcontrol")

        if append and disjoin:
            raise error.EngineError(
                "Cannot BOTH append to / disjoin from view '{}'".format(view))

        # Add view if needed
        if view not in proxy.view.list():
            proxy.view.add(xmlrpc.NOHASH, view)

        # Clear view and show it
        if not append and not disjoin:
            proxy.view.filter(xmlrpc.NOHASH, view, "false=")
            proxy.d.multicall2(xmlrpc.NOHASH, 'default',
                               'd.views.remove=' + view)
        proxy.ui.current_view.set(view)

        # Add items
        # TODO: should be a "system.multicall"
        for item in items:
            if disjoin:
                proxy.d.views.remove(item.hash, view)
                proxy.view.set_not_visible(item.hash, view)
            else:
                proxy.d.views.push_back_unique(item.hash, view)
                proxy.view.set_visible(item.hash, view)

        return view
Example #3
    def _get_files(self, attrs=None):
        """ Get a list of all files in this download; each entry has the
            attributes C{path} (relative to root), C{size} (in bytes),
            C{mtime}, C{prio} (0=off, 1=normal, 2=high), C{created},
            and C{opened}.

            This is UNCACHED, use C{fetch("files")} instead.

            @param attrs: Optional list of additional attributes to fetch.
        """
        try:
            # Get info for all files
            f_multicall = self._engine._rpc.f.multicall
            f_params = [
                self._fields["hash"],
                0,
                "f.get_path=",
                "f.get_size_bytes=",
                "f.get_last_touched=",
                "f.get_priority=",
                "f.is_created=",
                "f.is_open=",
            ]
            for attr in (attrs or []):
                f_params.append("f.%s=" % attr)
            rpc_result = f_multicall(*tuple(f_params))
        except xmlrpc.ERRORS as exc:
            raise error.EngineError(
                "While %s torrent #%s: %s" %
                ("getting files for", self._fields["hash"], exc))
Example #4
 def _resolve_viewname(self, viewname):
     """
     """
     if viewname == "-":
         try:
             # Only works with rTorrent-PS at this time!
             viewname = self.open().ui.current_view()
         except xmlrpc.ERRORS as exc:
             raise error.EngineError("Can't get name of current view: %s" %
                                     (exc))
Example #5
 def announce_urls(self, default=[]):
     """ Get a list of all announce URLs. 
         Returns `default` if no trackers are found at all.
     """
     try:
         response = self._engine._rpc.t.multicall(self._fields["hash"], 0,
                                                  "t.get_url=")
     except xmlrpc.ERRORS as exc:
         raise error.EngineError("While getting announce URLs for #%s: %s" %
                                 (self._fields["hash"], exc))
Example #6
    def fetch(self, name, engine_name=None):
        """ Get a field on demand.
        """
        # TODO: Get each on-demand field in a multicall for all other items, since
        # we likely need it anyway; another (easier) way would be to pre-fetch dynamically
        # with the list of fields from filters and output formats
        try:
            return self._fields[name]
        except KeyError:
            if name == "done":
                val = float(
                    self.fetch("completed_chunks")) / self.fetch("size_chunks")
            elif name == "files":
                val = self._get_files()
            elif name.startswith("kind_") and name[5:].isdigit():
                val = self._get_kind(int(name[5:], 10))
            elif name.startswith("custom_"):
                key = name[7:]
                try:
                    if len(key) == 1 and key in "12345":
                        val = getattr(self._engine._rpc.d,
                                      "get_custom" + key)(self._fields["hash"])
                    else:
                        val = self._engine._rpc.d.get_custom(
                            self._fields["hash"], key)
                except xmlrpc.ERRORS as exc:
                    raise error.EngineError("While accessing field %r: %s" %
                                            (name, exc))
            else:
                getter_name = engine_name if engine_name else RtorrentEngine.PYRO2RT_MAPPING.get(
                    name, name)
                if getter_name[0] == '=':
                    getter_name = getter_name[1:]
                else:
                    getter_name = "get_" + getter_name
                getter = getattr(self._engine._rpc.d, getter_name)

                try:
                    val = getter(self._fields["hash"])
                except xmlrpc.ERRORS as exc:
                    raise error.EngineError("While accessing field %r: %s" %
                                            (name, exc))
Example #7
    def _get_files(self, attrs=None):
        """ Get a list of all files in this download; each entry has the
            attributes C{path} (relative to root), C{size} (in bytes),
            C{mtime}, C{prio} (0=off, 1=normal, 2=high), C{created},
            and C{opened}.

            This is UNCACHED, use C{fetch("files")} instead.

            @param attrs: Optional list of additional attributes to fetch.
        """
        try:
            # Get info for all files
            f_multicall = self._engine._rpc.f.multicall
            f_params = [
                self._fields["hash"],
                0,
                "f.path=",
                "f.size_bytes=",
                "f.last_touched=",
                "f.priority=",
                "f.is_created=",
                "f.is_open=",
            ]
            for attr in (attrs or []):
                f_params.append("f.%s=" % attr)
            rpc_result = f_multicall(*tuple(f_params))
        except xmlrpc.ERRORS as exc:
            raise error.EngineError(
                "While %s torrent #%s: %s" %
                ("getting files for", self._fields["hash"], exc))
        else:
            #self._engine.LOG.debug("files result: %r" % rpc_result)

            # Return results
            result = [
                Bunch(
                    path=i[0],
                    size=i[1],
                    mtime=i[2] / 1000000.0,
                    prio=i[3],
                    created=i[4],
                    opened=i[5],
                ) for i in rpc_result
            ]

            if attrs:
                for idx, attr in enumerate(attrs):
                    if attr.startswith("get_"):
                        attr = attr[4:]
                    for item, rpc_item in zip(result, rpc_result):
                        item[attr] = rpc_item[6 + idx]

            return result
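
A standalone sketch of how one row of the f.multicall result maps onto the named attributes built above; the row values are made up, and a plain dict stands in for Bunch. rTorrent reports f.last_touched in microseconds, hence the division by 1000000.0.

# Hypothetical f.multicall row: one positional value per requested field.
row = ["sub/dir/file.iso", 734003200, 1500000000000000, 1, 1, 0]

record = dict(
    path=row[0],                # f.path
    size=row[1],                # f.size_bytes
    mtime=row[2] / 1000000.0,   # f.last_touched (microseconds -> seconds)
    prio=row[3],                # f.priority (0=off, 1=normal, 2=high)
    created=row[4],             # f.is_created
    opened=row[5],              # f.is_open
)
print(record["mtime"])  # 1500000000.0
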
Example #8
    def _resolve_viewname(self, viewname):
        """ Check for special view names and return existing rTorrent one.
        """
        if viewname == "-":
            try:
                # Only works with rTorrent-PS at this time!
                viewname = self.open().ui.current_view()
            except xmlrpc.ERRORS as exc:
                raise error.EngineError("Can't get name of current view: %s" %
                                        (exc))

        return viewname
Example #9
 def _make_it_so(self, command, calls, *args):
     """ Perform some error-checked XMLRPC calls.
     """
     args = (self._fields["hash"], ) + args
     try:
         for call in calls:
             self._engine.LOG.debug("%s%s torrent #%s (%s)" %
                                    (command[0].upper(), command[1:],
                                     self._fields["hash"], call))
             getattr(self._engine._rpc.d, call)(*args)
     except xmlrpc.ERRORS as exc:
         raise error.EngineError("While %s torrent #%s: %s" %
                                 (command, self._fields["hash"], exc))
Example #10
    def announce_urls(self, default=[]):  # pylint: disable=dangerous-default-value
        """ Get a list of all announce URLs.
            Returns `default` if no trackers are found at all.
        """
        try:
            response = self._engine._rpc.t.multicall(self._fields["hash"], 0,
                                                     "t.url=", "t.is_enabled=")
        except xmlrpc.ERRORS as exc:
            raise error.EngineError("While getting announce URLs for #%s: %s" %
                                    (self._fields["hash"], exc))

        if response:
            return [i[0] for i in response if i[1]]
        else:
            return default
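
A small sketch of the filtering step above, with a made-up t.multicall response: each row is [url, is_enabled], and only enabled trackers are returned.

# Hypothetical t.multicall response: one [url, is_enabled] row per tracker.
response = [
    ["http://tracker.example.com/announce", 1],
    ["udp://retired.example.org:6969/announce", 0],
]
urls = [row[0] for row in response if row[1]]  # keep enabled trackers only
print(urls)  # ['http://tracker.example.com/announce']
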
Example #11
 def _make_it_so(self, command, calls, *args, **kwargs):
     """ Perform some error-checked XMLRPC calls.
     """
     observer = kwargs.pop('observer', False)
     args = (self._fields["hash"], ) + args
     try:
         for call in calls:
             self._engine.LOG.debug("%s%s torrent #%s (%s)" %
                                    (command[0].upper(), command[1:],
                                     self._fields["hash"], call))
             if call.startswith(':') or call[:2].endswith('.'):
                 namespace = self._engine._rpc
             else:
                 namespace = self._engine._rpc.d
             result = getattr(namespace, call.lstrip(':'))(*args)
             if observer:
                 observer(result)
     except xmlrpc.ERRORS as exc:
         raise error.EngineError("While %s torrent #%s: %s" %
                                 (command, self._fields["hash"], exc))
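
A standalone sketch of the namespace dispatch in _make_it_so(): a call starting with ':' or carrying a one-letter namespace prefix like 'f.' or 't.' goes to the root RPC proxy, everything else is treated as a 'd.' (download) command. The pick_namespace helper is hypothetical.

def pick_namespace(call):
    """Mirror _make_it_so()'s choice between the root proxy and the 'd.' namespace."""
    if call.startswith(':') or call[:2].endswith('.'):
        return 'root', call.lstrip(':')
    return 'd', call

for call in (':event.download.closed', 'f.multicall', 'stop', 'custom.set'):
    print("%-24s -> %r" % (call, pick_namespace(call)))
# :event.download.closed   -> ('root', 'event.download.closed')
# f.multicall              -> ('root', 'f.multicall')
# stop                     -> ('d', 'stop')
# custom.set               -> ('d', 'custom.set')
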
Example #12
    def items(self, view=None, prefetch=None, cache=True):
        """ Get list of download items.

            @param view: Name of the view.
            @param prefetch: Optional list of field names to fetch initially.
            @param cache: Cache items for the given view?
        """
        # TODO: Cache should be by hash.
        # Then get the initial data when cache is empty,
        # else get a list of hashes from the view, make a diff
        # to what's in the cache, fetch the rest. Getting the
        # fields for one hash might be done by a special view
        # (filter: $d.hash == hashvalue)

        if view is None:
            view = engine.TorrentView(self, "default")
        elif isinstance(view, basestring):
            view = engine.TorrentView(self, self._resolve_viewname(view))
        else:
            view.viewname = self._resolve_viewname(view.viewname)

        if not cache or view.viewname not in self._item_cache:
            # Map pyroscope names to rTorrent ones
            if prefetch:
                prefetch = self.CORE_FIELDS | set(
                    (self.PYRO2RT_MAPPING.get(i, i) for i in prefetch))
            else:
                prefetch = self.PREFETCH_FIELDS

            # Fetch items
            items = []
            try:
                # Prepare multi-call arguments
                args = [
                    "d.%s%s" %
                    ("" if field.startswith("is_") else "get_", field)
                    for field in prefetch
                ]

                infohash = view._check_hash_view()
                if infohash:
                    multi_call = self.open().system.multicall
                    args = [
                        dict(methodName=field.rsplit('=', 1)[0],
                             params=[infohash] +
                             (field.rsplit('=', 1)[1].split(',')
                              if '=' in field else [])) for field in args
                    ]
                    raw_items = [[i[0] for i in multi_call(args)]]
                else:
                    multi_call = self.open().d.multicall
                    args = [view.viewname] + [
                        field if '=' in field else field + '='
                        for field in args
                    ]
                    if view.matcher and int(config.fast_query):
                        pre_filter = matching.unquote_pre_filter(
                            view.matcher.pre_filter())
                        self.LOG.info("!!! pre-filter: {}".format(pre_filter
                                                                  or 'N/A'))
                        if pre_filter:
                            multi_call = self.open().d.multicall.filtered
                            args.insert(1, pre_filter)
                    raw_items = multi_call(*tuple(args))

                ##self.LOG.debug("multicall %r" % (args,))
                ##import pprint; self.LOG.debug(pprint.pformat(raw_items))
                self.LOG.debug("Got %d items with %d attributes from %r [%s]" %
                               (len(raw_items), len(prefetch), self.engine_id,
                                multi_call))

                for item in raw_items:
                    items.append(
                        RtorrentItem(
                            self,
                            zip([
                                self.RT2PYRO_MAPPING.get(i, i)
                                for i in prefetch
                            ], item)))
                    yield items[-1]
            except xmlrpc.ERRORS as exc:
                raise error.EngineError(
                    "While getting download items from %r: %s" % (self, exc))

            # Everything yielded, store for next iteration
            if cache:
                self._item_cache[view.viewname] = items
        else:
            # Yield prefetched results
            for item in self._item_cache[view.viewname]:
                yield item
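
A standalone sketch of the field-name mapping pattern used above: dict.get(name, name) falls back to the unchanged name when no mapping exists, so only fields that differ between pyroscope and rTorrent need an entry. The mapping contents below are illustrative, not the real PYRO2RT_MAPPING.

PYRO2RT = {"size": "size_bytes", "prio": "priority"}   # illustrative only
RT2PYRO = dict((rt, py) for py, rt in PYRO2RT.items())

prefetch = ["hash", "name", "size"]
rt_fields = [PYRO2RT.get(i, i) for i in prefetch]   # ['hash', 'name', 'size_bytes']
args = ["d.%s=" % i for i in rt_fields]             # multicall arguments
back = [RT2PYRO.get(i, i) for i in rt_fields]       # ['hash', 'name', 'size']
print(args)
print(back)
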
Example #13
    def cull(self, file_filter=None, attrs=None):
        """ Delete ALL data files and remove torrent from client.

            @param file_filter: Optional callable for selecting a subset of all files.
                The callable gets a file item as described for RtorrentItem._get_files
                and must return True for items eligible for deletion.
            @param attrs: Optional list of additional attributes to fetch for a filter.
        """
        dry_run = 0  # set to 1 for testing

        def remove_with_links(path):
            "Remove a path including any symlink chains leading to it."
            rm_paths = []
            while os.path.islink(path):
                target = os.readlink(path)
                rm_paths.append(path)
                path = target

            if os.path.exists(path):
                rm_paths.append(path)
            else:
                self._engine.LOG.debug(
                    "Real path '%s' doesn't exist,"
                    " but %d symlink(s) leading to it will be deleted..." %
                    (path, len(rm_paths)))

            # Remove the link chain, starting at the real path
            # (this prevents losing the chain when there's permission problems)
            for rm_path in reversed(rm_paths):
                is_dir = os.path.isdir(rm_path) and not os.path.islink(rm_path)
                self._engine.LOG.debug("Deleting '%s%s'" %
                                       (rm_path, '/' if is_dir else ''))
                if not dry_run:
                    try:
                        (os.rmdir if is_dir else os.remove)(rm_path)
                    except OSError as exc:
                        if exc.errno == errno.ENOENT:
                            # Seems this disappeared somehow in the meantime (race condition)
                            self._engine.LOG.info(
                                "Path '%s%s' disappeared before it could be deleted"
                                % (rm_path, '/' if is_dir else ''))
                        else:
                            raise

            return rm_paths

        # Assemble doomed files and directories
        files, dirs = set(), set()
        base_path = os.path.expanduser(self.directory)
        item_files = list(self._get_files(attrs=attrs))

        if not self.directory:
            raise error.EngineError(
                "Directory for item #%s is empty,"
                " you might want to add a filter 'directory=!'" %
                (self._fields["hash"], ))
        if not os.path.isabs(base_path):
            raise error.EngineError(
                "Directory '%s' for item #%s is not absolute, which is a bad idea;"
                " fix your .rtorrent.rc, and use 'directory.default.set = /...'"
                % (
                    self.directory,
                    self._fields["hash"],
                ))
        if self.fetch("=is_multi_file") and os.path.isdir(self.directory):
            dirs.add(self.directory)

        for item_file in item_files:
            if file_filter and not file_filter(item_file):
                continue
            #print repr(item_file)
            path = os.path.join(base_path, item_file.path)
            files.add(path)
            if '/' in item_file.path:
                dirs.add(os.path.dirname(path))

        # Delete selected files
        if not dry_run:
            self.stop()
        for path in sorted(files):
            ##self._engine.LOG.debug("Deleting file '%s'" % (path,))
            remove_with_links(path)

        # Prune empty directories (longer paths first)
        doomed = files | dirs
        for path in sorted(dirs, reverse=True):
            residue = set(os.listdir(path) if os.path.exists(path) else [])
            ignorable = set(i for i in residue if any(
                fnmatch.fnmatch(i, pat) for pat in config.waif_pattern_list)
                            #or os.path.join(path, i) in doomed
                            )
            ##print "---", residue - ignorable
            if residue and residue != ignorable:
                self._engine.LOG.info(
                    "Keeping non-empty directory '%s' with %d %s%s!" % (
                        path,
                        len(residue),
                        "entry" if len(residue) == 1 else "entries",
                        (" (%d ignorable)" %
                         len(ignorable)) if ignorable else "",
                    ))
            else:
                ##print "---", ignorable
                for waif in ignorable:  # - doomed:
                    waif = os.path.join(path, waif)
                    self._engine.LOG.debug("Deleting waif '%s'" % (waif, ))
                    if not dry_run:
                        try:
                            os.remove(waif)
                        except EnvironmentError as exc:
                            self._engine.LOG.warn(
                                "Problem deleting waif '%s' (%s)" %
                                (waif, exc))

                ##self._engine.LOG.debug("Deleting empty directory '%s'" % (path,))
                doomed.update(remove_with_links(path))

        # Delete item from engine
        if not dry_run:
            self.delete()
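
A standalone sketch of the symlink-chain walk inside remove_with_links(): collect every link along the chain plus the final real path, then hand them back with the real path first so deletion starts there (matching the ordering comment above). Like the original helper, relative readlink() targets are taken as-is.

import os

def chain_paths(path):
    """Return a symlink chain plus its real target, real path first."""
    paths = []
    while os.path.islink(path):
        paths.append(path)          # remember the link itself
        path = os.readlink(path)    # follow to the next hop
    if os.path.exists(path):
        paths.append(path)          # the real file or directory, if any
    return list(reversed(paths))    # delete the real path before its links
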
Example #14
                            self._engine.LOG.info(
                                "Path '%s%s' disappeared before it could be deleted"
                                % (path, '/' if is_dir else ''))
                        else:
                            raise

            return rm_paths

        # Assemble doomed files and directories
        files, dirs = set(), set()
        base_path = os.path.expanduser(self.directory)
        item_files = list(self._get_files(attrs=attrs))

        if not self.directory:
            raise error.EngineError(
                "Directory for item #%s is empty,"
                " you might want to add a filter 'directory=!'" %
                (self._fields["hash"], ))
        if not os.path.isabs(base_path):
            raise error.EngineError(
                "Directory '%s' for item #%s is not absolute, which is a bad idea;"
                " fix your .rtorrent.rc, and use 'directory.default.set = /...' with rTorrent 0.8.7+"
                % (
                    self.directory,
                    self._fields["hash"],
                ))
        if self.fetch("=is_multi_file") and os.path.isdir(self.directory):
            dirs.add(self.directory)

        for item_file in item_files:
            if file_filter and not file_filter(item_file):
                continue
Example #15
    def items(self, view=None, prefetch=None, cache=True):
        """ Get list of download items.
        
            @param view: Name of the view.
            @param prefetch: Optional list of field names to fetch initially.
            @param cache: Cache items for the given view?
        """
        # TODO: Cache should be by hash.
        # Then get the initial data when cache is empty,
        # else get a list of hashes from the view, make a diff
        # to what's in the cache, fetch the rest. Getting the
        # fields for one hash might be done by a special view
        # (filter: $d.get_hash == hashvalue)

        if view is None:
            view = engine.TorrentView(self, "main")
        elif isinstance(view, basestring):
            view = engine.TorrentView(self, self._resolve_viewname(view))
        else:
            view.viewname = self._resolve_viewname(view.viewname)

        if not cache or view.viewname not in self._item_cache:
            # Map pyroscope names to rTorrent ones
            if prefetch:
                prefetch = self.CORE_FIELDS | set(
                    (self.PYRO2RT_MAPPING.get(i, i) for i in prefetch))
            else:
                prefetch = self.PREFETCH_FIELDS

            # Fetch items
            items = []
            try:
                # Prepare multi-call arguments
                args = [
                    "d.%s%s" %
                    ("" if field.startswith("is_") else "get_", field)
                    for field in prefetch
                ]

                infohash = None
                if view.viewname.startswith('#'):
                    infohash = view.viewname[1:]
                elif len(view.viewname) == 40:
                    try:
                        int(view.viewname, 16)
                    except (TypeError, ValueError):
                        pass
                    else:
                        infohash = view.viewname

                if infohash:
                    multi_call = self.open().system.multicall
                    args = [
                        dict(methodName=field, params=[infohash])
                        for field in args
                    ]
                    raw_items = [[i[0] for i in multi_call(args)]]
                else:
                    multi_call = self.open().d.multicall
                    args = [view.viewname] + [field + '=' for field in args]
                    raw_items = multi_call(*tuple(args))

                ##self.LOG.debug("multicall %r" % (args,))
                ##import pprint; self.LOG.debug(pprint.pformat(raw_items))
                self.LOG.debug("Got %d items with %d attributes from %r [%s]" %
                               (len(raw_items), len(prefetch), self.engine_id,
                                multi_call))

                for item in raw_items:
                    items.append(
                        RtorrentItem(
                            self,
                            zip([
                                self.RT2PYRO_MAPPING.get(i, i)
                                for i in prefetch
                            ], item)))
                    yield items[-1]
            except xmlrpc.ERRORS as exc:
                raise error.EngineError(
                    "While getting download items from %r: %s" % (self, exc))

            # Everything yielded, store for next iteration
            if cache:
                self._item_cache[view.viewname] = items
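
A standalone sketch of the info-hash detection used in this older items() variant: a view name counts as a single-item lookup when it starts with '#' or consists of exactly 40 hexadecimal characters. The as_infohash helper is hypothetical.

def as_infohash(viewname):
    """Return the info hash encoded in a view name, or None for a regular view."""
    if viewname.startswith('#'):
        return viewname[1:]
    if len(viewname) == 40:
        try:
            int(viewname, 16)
        except (TypeError, ValueError):
            return None
        return viewname
    return None

print(as_infohash('#C0FFEE'))   # 'C0FFEE'
print(as_infohash('main'))      # None
print(as_infohash('a' * 40))    # 40 hex characters -> treated as a hash
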