Example #1
def format(self, obj, context, maxlevels, level):  # pylint: disable=arguments-differ
    """ Mask obj if it looks like a URL, then pass it to the superclass.
    """
    if isinstance(obj, basestring) and "://" in fmt.to_unicode(obj):
        obj = mask_keys(obj)
    return pprint.PrettyPrinter.format(self, obj, context, maxlevels, level)
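A minimal usage sketch, assuming the method above lives in a pprint.PrettyPrinter subclass; the class name MaskedPrinter and the exact masked output are illustrative, not confirmed by the snippet:

# Hypothetical usage; MaskedPrinter is an assumed name for the subclass above.
printer = MaskedPrinter()
printer.pprint({"announce": "http://tracker.example.com/announce?passkey=SECRET"})
# Any string value containing "://" is passed through mask_keys() before printing.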
Example #2
def check_info(info):
    """ Validate info dict.

        Raise ValueError if validation fails.
    """
    if not isinstance(info, dict):
        raise ValueError("bad metainfo - not a dictionary")

    pieces = info.get("pieces")
    if not isinstance(pieces, basestring) or len(pieces) % 20 != 0:
        raise ValueError("bad metainfo - bad pieces key")

    piece_size = info.get("piece length")
    if not isinstance(piece_size, (int, long)) or piece_size <= 0:
        raise ValueError("bad metainfo - illegal piece length")

    name = info.get("name")
    if not isinstance(name, basestring):
        raise ValueError("bad metainfo - bad name (type is %r)" % type(name).__name__)
    if not ALLOWED_ROOT_NAME.match(name):
        raise ValueError("name %s disallowed for security reasons" % name)

    if ("files" in info) == ("length" in info):
        raise ValueError("single/multiple file mix")

    if "length" in info:
        length = info.get("length")
        if not isinstance(length, (int, long)) or length < 0:
            raise ValueError("bad metainfo - bad length")
    else:
        files = info.get("files")
        if not isinstance(files, (list, tuple)):
            raise ValueError("bad metainfo - bad file list")

        for item in files:
            if not isinstance(item, dict):
                raise ValueError("bad metainfo - bad file value")

            length = item.get("length")
            if not isinstance(length, (int, long)) or length < 0:
                raise ValueError("bad metainfo - bad length")

            path = item.get("path")
            if not isinstance(path, (list, tuple)) or not path:
                raise ValueError("bad metainfo - bad path")

            for part in path:
                if not isinstance(part, basestring):
                    raise ValueError("bad metainfo - bad path dir")
                part = fmt.to_unicode(part)
                if part == '..':
                    raise ValueError("relative path in %s disallowed for security reasons" % '/'.join(path))
                if part and not ALLOWED_PATH_NAME.match(part):
                    raise ValueError("path %s disallowed for security reasons" % part)

        file_paths = [os.sep.join(item["path"]) for item in files]
        if len(set(file_paths)) != len(file_paths):
            raise ValueError("bad metainfo - duplicate path")

    return info
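For illustration, a minimal single-file info dict that would pass all of these checks; the values are made up, and "example.iso" is assumed to satisfy ALLOWED_ROOT_NAME:

# Hypothetical minimal info dict; every value here is illustrative.
info = {
    "pieces": "\x00" * 20,   # exactly one 20-byte SHA1 digest
    "piece length": 262144,  # positive integer (256 KiB)
    "name": "example.iso",   # assumed to match ALLOWED_ROOT_NAME
    "length": 123456,        # single-file mode: "length", never with "files"
}
check_info(info)             # returns info unchanged on success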
Example #3
File: rtorrent.py Project: r4b3rt/pyrocore
def datapath(self):
    """ Get an item's data path.
    """
    path = self._fields['path']
    if not path:  # stopped item with no base_dir?
        path = self.fetch('directory')
        if path and not self._fields['is_multi_file']:
            path = os.path.join(path, self._fields['name'])
    return os.path.expanduser(fmt.to_unicode(path))
Example #4
def assign_fields(meta, assignments, options_debug=False):
    """ Takes a list of C{key=value} strings and assigns them to the
        given metafile. If you want to set nested keys (e.g. "info.source"),
        you have to use a dot as a separator. For exotic keys *containing*
        a dot, double that dot ("dotted..key").

        Numeric values starting with "+" or "-" are converted to integers.

        If just a key name is given (no '='), the field is removed.
    """
    for assignment in assignments:
        assignment = fmt.to_unicode(assignment)
        try:
            if '=' in assignment:
                field, val = assignment.split('=', 1)
            else:
                field, val = assignment, None

            if val and val[0] in "+-" and val[1:].isdigit():
                val = int(val, 10)

            # TODO: Allow numerical indices, and "+" for append
            namespace = meta
            keypath = [
                i.replace('\0', '.')
                for i in field.replace('..', '\0').split('.')
            ]
            for key in keypath[:-1]:
                # Create missing dicts as we go...
                namespace = namespace.setdefault(fmt.to_utf8(key), {})
        except (KeyError, IndexError, TypeError, ValueError) as exc:
            if options_debug:
                raise
            raise error.UserError("Bad assignment %r (%s)!" %
                                  (assignment, exc))
        else:
            if val is None:
                del namespace[fmt.to_utf8(keypath[-1])]
            else:
                namespace[fmt.to_utf8(keypath[-1])] = fmt.to_utf8(val)

    return meta
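A short usage sketch of the assignment syntax described in the docstring; the metafile dict and field names are illustrative:

# Hypothetical usage of assign_fields().
meta = {"info": {"name": "example"}}
assign_fields(meta, [
    "comment=hello",          # plain top-level field
    "info.source=MyTracker",  # nested key, dot-separated
    "dotted..key=x",          # escaped dot: sets the literal key "dotted.key"
    "info.priority=+2",       # leading +/- digits are converted to int 2
    "info.name",              # bare key (no '='): removes the field
])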
Example #5
    def listing(self, masked=True):
        """ List torrent info & contents. Returns a list of formatted lines.
        """
        # Assemble data
        metainfo, bad_encodings, bad_fields = sanitize(
            bencode.bread(self.filename), diagnostics=True)
        announce = metainfo['announce']
        info = metainfo['info']
        infohash = hashlib.sha1(bencode.bencode(info))

        total_size = data_size(metainfo)
        piece_length = info['piece length']
        piece_number, last_piece_length = divmod(total_size, piece_length)

        # Build result
        result = [
            "NAME %s" % (os.path.basename(fmt.to_unicode(self.filename))),
            "SIZE %s (%i * %s + %s)" % (
                fmt.human_size(total_size).strip(),
                piece_number,
                fmt.human_size(piece_length).strip(),
                fmt.human_size(last_piece_length).strip(),
            ),
            "META %s (pieces %s %.1f%%)" % (
                fmt.human_size(os.path.getsize(self.filename)).strip(),
                fmt.human_size(len(info["pieces"])).strip(),
                100.0 * len(info["pieces"]) / os.path.getsize(self.filename),
            ),
            "HASH %s" % (infohash.hexdigest().upper()),
            "URL  %s" % (mask_keys if masked else str)(announce),
            "PRV  %s" % ("YES (DHT/PEX disabled)"
                         if info.get("private") else "NO (DHT/PEX enabled)"),
            "TIME %s" %
            ("N/A" if "creation date" not in metainfo else time.strftime(
                "%Y-%m-%d %H:%M:%S", time.localtime(
                    metainfo["creation date"]))),
        ]

        for label, key in (("BY  ", "created by"), ("REM ", "comment")):
            if key in metainfo:
                result.append("%s %s" % (label, metainfo.get(key, "N/A")))

        result.extend([
            "",
            "FILE LISTING%s" % ("" if 'length' in info else " [%d file(s)]" %
                                len(info['files']), ),
        ])
        if 'length' in info:
            # Single file
            result.append("%-69s%9s" % (
                fmt.to_unicode(info['name']),
                fmt.human_size(total_size),
            ))
        else:
            # Directory structure
            result.append("%s/" % fmt.to_unicode(info['name']))
            oldpaths = [None] * 99
            for entry in info['files']:
                # Remove crap that certain PHP software puts in paths
                entry_path = [fmt.to_unicode(i) for i in entry["path"] if i]

                for idx, item in enumerate(entry_path[:-1]):
                    if item != oldpaths[idx]:
                        result.append("%s%s/" % (' ' * (4 * (idx + 1)), item))
                        oldpaths[idx] = item
                result.append("%-69s%9s" % (
                    ' ' * (4 * len(entry_path)) + entry_path[-1],
                    fmt.human_size(entry['length']),
                ))

        if bad_encodings:
            result.extend([
                "",
                "WARNING: Bad encoding(s) {} in these fields: {}".format(
                    ', '.join(sorted(bad_encodings)),
                    ', '.join(sorted(bad_fields))),
                "Use the --raw option to inspect these encoding issues.",
            ])

        return result
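A hedged call-site sketch, assuming this method belongs to pyrocore's Metafile wrapper class (the constructor signature is an assumption):

# Hypothetical usage; Metafile("...") wrapping a .torrent path is assumed.
torrent = Metafile("example.torrent")
for line in torrent.listing(masked=True):
    print(line)  # NAME / SIZE / META / HASH / URL / PRV / TIME / file listing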
Example #6
    def _make_info(self, piece_size, progress, walker, piece_callback=None):
        """ Create info dict.
        """
        # These collect the file descriptions and piece hashes
        file_list = []
        pieces = []

        # Initialize progress state
        hashing_secs = time.time()
        totalsize = -1 if self._fifo else self._calc_size()
        totalhashed = 0

        # Start a new piece
        sha1sum = hashlib.sha1()
        done = 0
        filename = None

        # Hash all files
        for filename in walker:
            # Assemble file info
            filesize = os.path.getsize(filename)
            filepath = filename[len(
                os.path.dirname(self.datapath) if self._fifo else self.datapath
            ):].lstrip(os.sep)
            file_list.append({
                "length":
                filesize,
                "path": [
                    fmt.to_utf8(x) for x in fmt.to_unicode(filepath).replace(
                        os.sep, '/').split('/')
                ],
            })
            self.LOG.debug("Hashing %r, size %d..." % (filename, filesize))

            # Open file and hash it
            fileoffset = 0
            handle = open(filename, "rb")
            try:
                while fileoffset < filesize:
                    # Read rest of piece or file, whatever is smaller
                    chunk = handle.read(
                        min(filesize - fileoffset, piece_size - done))
                    sha1sum.update(chunk)  # bogus pylint: disable=E1101
                    done += len(chunk)
                    fileoffset += len(chunk)
                    totalhashed += len(chunk)

                    # Piece is done
                    if done == piece_size:
                        pieces.append(sha1sum.digest())  # bogus pylint: disable=E1101
                        if piece_callback:
                            piece_callback(filename, pieces[-1])

                        # Start a new piece
                        sha1sum = hashlib.sha1()
                        done = 0

                    # Report progress
                    if progress:
                        progress(totalhashed, totalsize)
            finally:
                handle.close()

        # Add hash of partial last piece
        if done > 0:
            pieces.append(sha1sum.digest())  # bogus pylint: disable=E1103
            if piece_callback:
                piece_callback(filename, pieces[-1])

        # Build the meta dict
        metainfo = {
            "pieces": b"".join(pieces),
            "piece length": piece_size,
            "name": os.path.basename(self.datapath),
        }

        # Handle directory/FIFO vs. single file
        if self._fifo or os.path.isdir(self.datapath):
            metainfo["files"] = file_list
        else:
            metainfo["length"] = totalhashed

        hashing_secs = time.time() - hashing_secs
        self.LOG.info("Hashing of %s took %.1f secs (%s/s)" % (
            fmt.human_size(totalhashed).strip(),
            hashing_secs,
            fmt.human_size(totalhashed / hashing_secs).strip(),
        ))

        # Return validated info dict
        return check_info(metainfo), totalhashed
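The piece bookkeeping above guarantees that len(metainfo["pieces"]) is a multiple of 20 (one SHA1 digest per piece, including the partial last one), which is exactly what check_info() from example #2 verifies. A quick arithmetic sketch with made-up numbers:

# Illustrative piece math (numbers are made up).
total_size = 1000000
piece_size = 262144                          # 256 KiB
full, rest = divmod(total_size, piece_size)  # 3 full pieces, 213568 bytes left over
num_pieces = full + (1 if rest else 0)       # 4 pieces in total
# len(metainfo["pieces"]) would then be num_pieces * 20 == 80 bytes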
Example #7
File: matching.py Project: zapras/pyrocore
def __init__(self, name, value):
    """ Store field name and filter value for later evaluations.
    """
    self._name = name
    self._condition = self._value = fmt.to_unicode(value)
    self.validate()
Example #8
class TorrentProxy(object):
    """ A single download item.
    """
    @classmethod
    def add_manifold_attribute(cls, name):
        """ Register a manifold engine attribute.

            @return: field definition object, or None if "name" isn't a manifold attribute.
        """
        if name.startswith("custom_"):
            try:
                return FieldDefinition.FIELDS[name]
            except KeyError:
                field = OnDemandField(str,
                                      name,
                                      "custom attribute %r" %
                                      name.split('_', 1)[1],
                                      matcher=matching.PatternFilter)
                setattr(cls, name, field)  # add field to all proxy objects

                return field
        elif name.startswith("kind_") and name[5:].isdigit():
            try:
                return FieldDefinition.FIELDS[name]
            except KeyError:
                limit = int(name[5:].lstrip('0') or '0', 10)
                if limit > 100:
                    raise error.UserError("kind_N: N > 100 in %r" % name)
                field = OnDemandField(
                    set,
                    name,
                    "kinds of files that make up more than %d%% of this item's size"
                    % limit,
                    matcher=matching.TaggedAsFilter,
                    formatter=_fmt_tags,
                    engine_name="kind_%d" % limit)
                setattr(cls, name, field)

                return field

    @classmethod
    def add_custom_fields(cls, *args, **kw):
        """ Add any custom fields defined in the configuration.
        """
        for factory in config.custom_field_factories:
            for field in factory():
                setattr(cls, field.name, field)

    def __init__(self):
        """ Initialize object.
        """
        self._fields = {}

    def __repr__(self):
        """ Return a representation of internal state.
        """
        attrs = set((field.name for field in FieldDefinition.FIELDS.values()
                     if field._accessor or field.name in self._fields))
        return "<%s(%s)>" % (self.__class__.__name__, ", ".join(
            sorted(["%s=%r" % (i, getattr(self, i)) for i in attrs] + [
                "%s=%r" % (i, self._fields[i])
                for i in (set(self._fields) - attrs)
            ])))

    def fetch(self, name, engine_name=None):
        """ Get a field on demand.

            "engine_name" is the internal name of the client engine.
        """
        raise NotImplementedError()

    def announce_urls(self):
        """ Get a list of all announce URLs.
        """
        raise NotImplementedError()

    def start(self):
        """ (Re-)start downloading or seeding.
        """
        raise NotImplementedError()

    def stop(self):
        """ Stop and close download.
        """
        raise NotImplementedError()

    def ignore(self, flag):
        """ Set ignore status.
        """
        raise NotImplementedError()

    def tag(self, tags):
        """ Add or remove tags.
        """
        raise NotImplementedError()

    def set_throttle(self, name):
        """ Assign to throttle group.
        """
        # TODO: A better way would be to have a MutableField class, i.e. item.throttle = "name"
        raise NotImplementedError()

    def set_custom(self, key, value=None):
        """ Set a custom value. C{key} might have the form "key=value" when value is C{None}.
        """
        raise NotImplementedError()

    def hash_check(self):
        """ Hash check a download.
        """
        raise NotImplementedError()

    def delete(self):
        """ Remove torrent from client.
        """
        raise NotImplementedError()

    def flush(self):
        """ Write volatile data to disk.
        """
        # This can be empty in derived classes

    # Basic fields
    hash = ConstantField(str,
                         "hash",
                         "info hash",
                         matcher=matching.PatternFilter)
    name = ConstantField(fmt.to_unicode,
                         "name",
                         "name (file or root directory)",
                         matcher=matching.PatternFilter)
    size = ConstantField(int,
                         "size",
                         "data size",
                         matcher=matching.ByteSizeFilter)
    prio = OnDemandField(int,
                         "prio",
                         "priority (0=off, 1=low, 2=normal, 3=high)",
                         matcher=matching.FloatFilter,
                         formatter=lambda val: "X- +"[val])
    tracker = ConstantField(str,
                            "tracker",
                            "first in the list of announce URLs",
                            matcher=matching.PatternFilter,
                            accessor=lambda o:
                            (o.announce_urls(default=[None]) or [None])[0])
    alias = ConstantField(
        config.map_announce2alias,
        "alias",
        "tracker alias or domain",
        matcher=matching.PatternFilter,
        accessor=lambda o: o._memoize("alias", getattr, o, "tracker"))
    #matcher=matching.PatternFilter, accessor=operator.attrgetter("tracker"))
    message = OnDemandField(fmt.to_unicode,
                            "message",
                            "current tracker message",
                            matcher=matching.PatternFilter)

    # State
    is_private = ConstantField(bool,
                               "is_private",
                               "private flag set (no DHT/PEX)?",
                               matcher=matching.BoolFilter,
                               formatter=lambda val: "PRV" if val else "PUB")
    is_open = DynamicField(bool,
                           "is_open",
                           "download open?",
                           matcher=matching.BoolFilter,
                           formatter=lambda val: "OPN" if val else "CLS")
    is_active = DynamicField(bool,
                             "is_active",
                             "download active?",
                             matcher=matching.BoolFilter,
                             formatter=lambda val: "ACT" if val else "STP")
    is_complete = DynamicField(bool,
                               "is_complete",
                               "download complete?",
                               matcher=matching.BoolFilter,
                               formatter=lambda val: "DONE" if val else "PART")
    is_multi_file = OnDemandField(bool,
                                  "is_multi_file",
                                  "single- or multi-file download?",
                                  matcher=matching.BoolFilter,
                                  formatter=lambda val: "DIR "
                                  if val else "FILE")
    is_ignored = OnDemandField(bool,
                               "is_ignored",
                               "ignore commands?",
                               matcher=matching.BoolFilter,
                               formatter=lambda val: "IGN!"
                               if int(val) else "HEED")
    is_ghost = DynamicField(bool,
                            "is_ghost",
                            "has no data file or directory?",
                            matcher=matching.BoolFilter,
                            accessor=lambda o: o._fields["path"] and not
                            os.path.exists(fmt.to_unicode(o._fields["path"])),
                            formatter=lambda val: "GHST" if val else "DATA")

    # Paths
    """ Shining a light on the naming and paths mess:

        hash=xxx
        for i in d.name d.base_filename d.base_path d.directory d.directory_base d.is_multi_file; do \
            echo -n "$(printf '%20.20s ' $i)"; rtxmlrpc $i $hash
        done

        Basics:
            * d.base_filename is always the basename of d.base_path
            * d.directory_base and d.directory are always the same
            * d.base_filename and d.base_path are empty on closed items, after a restart, i.e. not too useful (since 0.9.1 or so)

        Behaviour of d.directory.set + d.directory_base.set (tested with 0.9.4):
            * d.base_path always remains unchanged, and item gets closed
            * d.start sets d.base_path if resume data ok
            * single:
                * d.directory[_base].set → d.name NEVER appended (only in d.base_path)
                * after start, d.base_path := d.directory/d.name
            * multi:
                * d.directory.set → d.name is appended
                * d.directory_base.set → d.name is NOT appended (i.e. item renamed to last path part)
                * after start, d.base_path := d.directory

        Making sense of it (trying to at least):
            * d.directory is *always* a directory (thus, single items auto-append d.name in d.base_path and cannot be renamed)
            * d.directory_base.set means set path PLUS basename together for a multi item (thus allowing a rename)
            * only d.directory.set behaves consistently for single+multi, regarding the end result in d.base_path
    """
    directory = OnDemandField(fmt.to_unicode,
                              "directory",
                              "directory containing download data",
                              matcher=matching.PatternFilter)
    path = DynamicField(fmt.to_unicode,
                        "path",
                        "path to download data",
                        matcher=matching.PatternFilter,
                        accessor=lambda o: os.path.expanduser(
                            fmt.to_unicode(o._fields["path"]))
                        if o._fields["path"] else "")
    realpath = DynamicField(
        fmt.to_unicode,
        "realpath",
        "real path to download data",
        matcher=matching.PatternFilter,
        accessor=lambda o: os.path.realpath(o.path.encode("UTF-8"))
        if o._fields["path"] else "")
    metafile = ConstantField(fmt.to_unicode,
                             "metafile",
                             "path to torrent file",
                             matcher=matching.PatternFilter,
                             accessor=lambda o: os.path.expanduser(
                                 fmt.to_unicode(o._fields["metafile"])))
    files = OnDemandField(list,
                          "files",
                          "list of files in this item",
                          matcher=matching.FilesFilter,
                          formatter=_fmt_files)
    fno = OnDemandField(int,
                        "fno",
                        "number of files in this item",
                        matcher=matching.FloatFilter,
                        engine_name="size_files")

    # Bandwidth & Data Transfer
    done = OnDemandField(percent,
                         "done",
                         "completion in percent",
                         matcher=matching.FloatFilter)
    ratio = DynamicField(ratio_float,
                         "ratio",
                         "normalized ratio (1:1 = 1.0)",
                         matcher=matching.FloatFilter)
    uploaded = OnDemandField(int,
                             "uploaded",
                             "amount of uploaded data",
                             matcher=matching.ByteSizeFilter,
                             engine_name="up_total")
    xfer = DynamicField(int,
                        "xfer",
                        "transfer rate",
                        matcher=matching.ByteSizeFilter,
                        accessor=lambda o: o.fetch("up") + o.fetch("down"))
    down = DynamicField(int,
                        "down",
                        "download rate",
                        matcher=matching.ByteSizeFilter)
    up = DynamicField(int,
                      "up",
                      "upload rate",
                      matcher=matching.ByteSizeFilter)
    throttle = OnDemandField(
        str,
        "throttle",
        "throttle group name (NULL=unlimited, NONE=global)",
        matcher=matching.PatternFilter,
        accessor=lambda o: o._fields["throttle"] or "NONE")

    # Lifecycle
    loaded = DynamicField(
        long,
        "loaded",
        "time metafile was loaded",
        matcher=matching.TimeFilterNotNull,
        accessor=lambda o: long(o.fetch("custom_tm_loaded") or "0", 10),
        formatter=fmt.iso_datetime_optional)
    started = DynamicField(
        long,
        "started",
        "time download was FIRST started",
        matcher=matching.TimeFilterNotNull,
        accessor=lambda o: long(o.fetch("custom_tm_started") or "0", 10),
        formatter=fmt.iso_datetime_optional)
    leechtime = DynamicField(
        untyped,
        "leechtime",
        "time taken from start to completion",
        matcher=matching.DurationFilter,
        accessor=lambda o: _interval_sum(o, end=o.completed, context=o.name) or
        _duration(o.started, o.completed),
        formatter=_fmt_duration)
    completed = DynamicField(
        long,
        "completed",
        "time download was finished",
        matcher=matching.TimeFilterNotNull,
        accessor=lambda o: long(o.fetch("custom_tm_completed") or "0", 10),
        formatter=fmt.iso_datetime_optional)
    seedtime = DynamicField(
        untyped,
        "seedtime",
        "total seeding time after completion",
        matcher=matching.DurationFilter,
        accessor=lambda o: _interval_sum(o, start=o.completed, context=o.name)
        if o.is_complete else None,
        formatter=_fmt_duration)
    active = DynamicField(long,
                          "active",
                          "last time a peer was connected",
                          matcher=matching.TimeFilter,
                          accessor=lambda o: long(o.fetch("last_active") or 0),
                          formatter=fmt.iso_datetime_optional)
    stopped = DynamicField(
        long,
        "stopped",
        "time download was last stopped or paused",
        matcher=matching.TimeFilterNotNull,
        accessor=lambda o:
        (_interval_split(o, only='P', context=o.name) + [(0, 0)])[0][1],
        formatter=fmt.iso_datetime_optional)

    # Classification
    tagged = DynamicField(
        set,
        "tagged",
        "has certain tags?",
        matcher=matching.TaggedAsFilter,
        accessor=lambda o: set(o.fetch("custom_tags").lower().split()),
        formatter=_fmt_tags)
    views = OnDemandField(set,
                          "views",
                          "views this item is attached to",
                          matcher=matching.TaggedAsFilter,
                          formatter=_fmt_tags,
                          engine_name="=views")
    kind = DynamicField(set,
                        "kind",
                        "ALL kinds of files in this item (the same as kind_0)",
                        matcher=matching.TaggedAsFilter,
                        formatter=_fmt_tags,
                        accessor=lambda o: o.fetch("kind_0"))
    traits = DynamicField(
        list,
        "traits",
        "automatic classification of this item (audio, video, tv, movie, etc.)",
        matcher=matching.TaggedAsFilter,
        formatter=lambda v: '/'.join(v or ["misc", "other"]),
        accessor=lambda o: detect_traits(o))
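A hedged sketch of how the manifold attribute factory at the top of this class resolves names; the call sites are illustrative:

# Illustrative calls to TorrentProxy.add_manifold_attribute().
field = TorrentProxy.add_manifold_attribute("kind_25")      # files making up >25% of size
field = TorrentProxy.add_manifold_attribute("custom_tags")  # custom "tags" attribute
assert TorrentProxy.add_manifold_attribute("name") is None  # not a manifold name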
Example #9
    def mainloop(self):
        """ The main loop.
        """
        # Print usage if not enough args
        if len(self.args) < 2:
            self.parser.print_help()
            self.parser.exit()

        # TODO: Add mode to move tied metafiles, without losing the tie

        # Target handling
        target = self.args[-1]
        if "//" in target.rstrip('/'):
            # Create parts of target path
            existing, _ = target.split("//", 1)
            if not os.path.isdir(existing):
                self.fatal("Path before '//' MUST exists in %s" %
                           (pretty_path(target), ))

            # Possibly create the rest
            target = target.replace("//", "/")
            if not os.path.exists(target):
                self.guarded(os.makedirs, target)

        # Preparation
        # TODO: Handle cases where target is the original download path correctly!
        #       i.e.   rtmv foo/ foo   AND   rtmv foo/ .   (in the download dir)
        proxy = config.engine.open()
        download_path = os.path.realpath(
            os.path.expanduser(
                proxy.directory.default(xmlrpc.NOHASH).rstrip(os.sep)))
        target = self.resolve_slashed(target)
        source_paths = [self.resolve_slashed(i) for i in self.args[:-1]]
        source_realpaths = [os.path.realpath(i) for i in source_paths]
        source_items = defaultdict(list)  # map of source path to item
        items = list(config.engine.items(prefetch=self.PREFETCH_FIELDS))

        # Validate source paths and find matching items
        for item in items:
            if not item.path:
                continue

            realpath = None
            try:
                realpath = os.path.realpath(item.path)
            except (EnvironmentError, UnicodeError) as exc:
                self.LOG.warning("Cannot realpath %r (%s)" % (item.path, exc))

            # Look if item matches a source path
            # TODO: Handle download items nested into each other!
            try:
                path_idx = source_realpaths.index(realpath
                                                  or fmt.to_utf8(item.path))
            except ValueError:
                continue

            if realpath:
                self.LOG.debug('Item path %s resolved to %s' %
                               (pretty_path(item.path), pretty_path(realpath)))
            self.LOG.debug(
                'Found "%s" for %s' %
                (fmt.to_utf8(item.name), pretty_path(source_paths[path_idx])))
            source_items[source_paths[path_idx]].append(item)

        ##for path in source_paths: print path, "==>"; print "  " + "\n  ".join(i.path for i in source_items[path])

        if not os.path.isdir(target) and len(source_paths) > 1:
            self.fatal(
                "Can't move multiple files to %s which is no directory!" %
                (pretty_path(target), ))

        # Actually move the data
        moved_count = 0
        for path in source_paths:
            item = None  # Make sure there's no accidental stale reference

            if not source_items[path]:
                self.LOG.warn("No download item found for %s, skipping!" %
                              (pretty_path(path), ))
                continue

            if len(source_items[path]) > 1:
                self.LOG.warn(
                    "Can't handle multi-item moving yet, skipping %s!" %
                    (pretty_path(path), ))
                continue

            if os.path.islink(path):
                self.LOG.warn("Won't move symlinks, skipping %s!" %
                              (pretty_path(path), ))
                continue

            for item in source_items[path]:
                if os.path.islink(item.path) and os.path.realpath(
                        item.path) != os.readlink(item.path):
                    self.LOG.warn(
                        "Can't handle multi-hop symlinks yet, skipping %s!" %
                        (pretty_path(path), ))
                    continue

                if not item.is_complete:
                    if self.options.force_incomplete:
                        self.LOG.warn("Moving incomplete item '%s'!" %
                                      (item.name, ))
                    else:
                        self.LOG.warn("Won't move incomplete item '%s'!" %
                                      (item.name, ))
                        continue

                moved_count += 1
                dst = target
                if os.path.isdir(dst):
                    dst = os.path.join(dst, os.path.basename(path))
                self.LOG.info("Moving to %s..." % (pretty_path(dst), ))

                # Pause torrent?
                # was_active = item.is_active and not self.options.dry_run
                # if was_active: item.pause()

                # TODO: move across devices
                # TODO: move using "d.directory.set" instead of symlinks
                if os.path.islink(item.path):
                    if os.path.abspath(dst) == os.path.abspath(
                            item.path.rstrip(os.sep)):
                        # Moving back to original place
                        self.LOG.debug("Unlinking %s" %
                                       (pretty_path(item.path), ))
                        self.guarded(os.remove, item.path)
                        self.guarded(os.rename, path, dst)
                    else:
                        # Moving to another place
                        self.LOG.debug("Re-linking %s" %
                                       (pretty_path(item.path), ))
                        self.guarded(os.rename, path, dst)
                        self.guarded(os.remove, item.path)
                        self.guarded(os.symlink, os.path.abspath(dst),
                                     item.path)
                else:
                    # Moving download initially
                    self.LOG.debug("Symlinking %s" %
                                   (pretty_path(item.path), ))
                    src1 = os.path.join(download_path,
                                        os.path.basename(item.path))
                    src2 = fmt.to_unicode(os.path.realpath(path))
                    assert src1 == src2, 'Item path %r should match %r!' % (
                        src1, src2)
                    self.guarded(os.rename, item.path, dst)
                    self.guarded(os.symlink, os.path.abspath(dst), item.path)

                # Resume torrent?
                # if was_active: sitem.resume()

        # Print stats
        self.LOG.debug("XMLRPC stats: %s" % proxy)
        self.LOG.log(
            logging.DEBUG if self.options.cron else logging.INFO,
            "Moved %d path%s (skipped %d)" %
            (moved_count, "" if moved_count == 1 else "s",
             len(source_paths) - moved_count))
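A minimal sketch of the "//" target convention handled at the top of mainloop; the path is illustrative:

# Everything before "//" must already exist; the rest is created on demand.
import os

target = "/mnt/storage//archive/2015"
existing, _ = target.split("//", 1)  # "/mnt/storage" is checked with isdir()
target = target.replace("//", "/")   # "/mnt/storage/archive/2015"
if not os.path.exists(target):
    pass  # the real code calls self.guarded(os.makedirs, target) here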