Esempio n. 1
0
def migrate_to_transmission(srcdir, dstdir, name, utresume):
    """ Convert one uTorrent resume entry into Transmission session files.

        Writes a bencoded '<base>.resume' file into '<dstdir>/resume/' and
        copies the metafile from srcdir into '<dstdir>/torrents/'.

        srcdir   -- directory containing the original .torrent file
        dstdir   -- Transmission config dir (must have resume/ and torrents/)
        name     -- file name of the .torrent inside srcdir (ends in ".torrent")
        utresume -- decoded uTorrent resume.dat entry for this torrent
    """
    def dnd_map(val):
        # uTorrent per-file priority 0x00 means "do not download"
        return val == 0x00

    def priority_map(val):
        # low
        if val == 0x04:
            return -1
        # high
        elif val == 0x0C:
            return 1
        # normal & do not download
        return 0

    dl_name = ntpath.basename(utresume['path'])
    paused = 0
    # name[:-8] strips the ".torrent" suffix; a renamed download target
    # cannot be expressed in Transmission, so pause it for manual fix-up.
    if dl_name != name[:-8]:
        print('Warning: Transmission does not support changing target name, pausing {}'.format(name))
        paused = 1
    tmresume = {
            'activity-date': utresume['last_active'],
            'added-date': utresume['added_on'],
            'bandwidth-priority': 0,
            'corrupt': utresume['waste'],
            'destination': ntpath.dirname(utresume['path']),
            # list(...) so the value is a concrete sequence on Python 3 too
            # (a bare map object would not be bencodable)
            'dnd': list(map(dnd_map, utresume['prio'])),
            'done-date': utresume['completed_on'],
            'downloaded': utresume['downloaded'],
            'downloading-time-seconds': 0,
            'idle-limit': {'idle-limit': 30, 'idle-mode': 0},
            'max-peers': 75,
            'paused': paused or not utresume['started'],
            'peers2': 0,
            'peers2-6': 0,
            'priority': list(map(priority_map, utresume['prio'])),
            'progress': {'blocks': 'all', 'have': 'all', 'time-checked': len(utresume['prio']) * [0]},
            # NOTE(review): assumes uTorrent stores the wanted ratio scaled by 10 — confirm
            'ratio-limit': {'ratio-limit': utresume['wanted_ratio']//10, 'ratio-mode': 0},
            'seeding-time-seconds': utresume['seedtime'],
            'speed-limit-down': {'speed-Bps': 0, 'use-global-speed-limit': 1, 'use-speed-limit': 0},
            'speed-limit-up': {'speed-Bps': 0, 'use-global-speed-limit': 1, 'use-speed-limit': 0},
            'uploaded': utresume['uploaded']
            }
    # BUG FIX: hex(x)[2:] drops leading zeros (hex(5) -> '5', not '05'),
    # yielding a wrong/short hash string whenever a digest byte is < 0x10;
    # '{:02x}' zero-pads every byte to two hex digits.
    info_hash_str = ''.join('{:02x}'.format(x) for x in struct.unpack('20B', utresume['info']))

    output_base = '{}.{}'.format(name.replace('/', '_')[:-8], info_hash_str[:16])

    with open(os.path.join(dstdir, 'resume', '{}.resume'.format(output_base)), 'w') as resume:
        resume.write(bencode(tmresume))

    copy2(os.path.join(srcdir, name), os.path.join(dstdir, 'torrents', '{}.torrent'.format(output_base)))
Esempio n. 2
0
def checked_open(filename, log=None, quiet=False):
    """ Open and validate the given metafile.

        Optionally provide diagnostics on the passed logger, for
        invalid metafiles, which then just cause a warning but no exception.
        "quiet" can supress that warning.

        NOTE(review): nothing is returned; callers apparently use this for
        its validation side-effect only — confirm against call sites.
    """
    # File objects are context managers themselves; contextlib.closing()
    # adds nothing here.
    with open(filename, "rb") as handle:
        raw_data = handle.read()
    data = bencode.bdecode(raw_data)

    try:
        check_meta(data)
        # Re-encoding must round-trip byte-for-byte; a mismatch usually
        # means the original dict keys were not in sorted (canonical) order.
        if raw_data != bencode.bencode(data):
            raise ValueError("Bad bencoded data - dict keys out of order?")
    except ValueError as exc:  # 'as' syntax works on Python 2.6+ and 3
        if log:
            # Warn about it, unless it's a quiet value query
            if not quiet:
                log.warn("%s: %s" % (filename, exc))
        else:
            raise
Esempio n. 3
0
def checked_open(filename, log=None, quiet=False):
    """ Open and validate the given metafile.

        Optionally provide diagnostics on the passed logger, for
        invalid metafiles, which then just cause a warning but no exception.
        "quiet" can supress that warning.
    """
    # No need for contextlib.closing() — files are context managers.
    with open(filename, "rb") as handle:
        raw_data = handle.read()
    data = bencode.bdecode(raw_data)

    try:
        check_meta(data)
        # Canonical bencoding must reproduce the raw bytes exactly.
        if raw_data != bencode.bencode(data):
            raise ValueError("Bad bencoded data - dict keys out of order?")
    except ValueError as exc:  # Python 3 compatible exception syntax
        if log:
            # Warn about it, unless it's a quiet value query
            if not quiet:
                log.warn("%s: %s" % (filename, exc))
        else:
            raise
Esempio n. 4
0
    def listing(self, masked=True):
        """ List torrent info & contents. Returns a list of formatted lines.

            masked -- when True, secret keys in the announce URL are masked.
        """
        # Assemble data (with per-field encoding diagnostics)
        metainfo, bad_encodings, bad_fields = sanitize(
            bencode.bread(self.filename), diagnostics=True)
        announce = metainfo['announce']
        info = metainfo['info']
        infohash = hashlib.sha1(bencode.bencode(info))

        total_size = data_size(metainfo)
        piece_length = info['piece length']
        piece_number, last_piece_length = divmod(total_size, piece_length)

        # Build result
        result = [
            "NAME %s" % (os.path.basename(fmt.to_unicode(self.filename))),
            "SIZE %s (%i * %s + %s)" % (
                fmt.human_size(total_size).strip(),
                piece_number,
                fmt.human_size(piece_length).strip(),
                fmt.human_size(last_piece_length).strip(),
            ),
            "META %s (pieces %s %.1f%%)" % (
                fmt.human_size(os.path.getsize(self.filename)).strip(),
                fmt.human_size(len(info["pieces"])).strip(),
                100.0 * len(info["pieces"]) / os.path.getsize(self.filename),
            ),
            "HASH %s" % (infohash.hexdigest().upper()),
            "URL  %s" % (mask_keys if masked else str)(announce),
            "PRV  %s" % ("YES (DHT/PEX disabled)"
                         if info.get("private") else "NO (DHT/PEX enabled)"),
            "TIME %s" %
            ("N/A" if "creation date" not in metainfo else time.strftime(
                "%Y-%m-%d %H:%M:%S", time.localtime(
                    metainfo["creation date"]))),
        ]

        for label, key in (("BY  ", "created by"), ("REM ", "comment")):
            if key in metainfo:
                result.append("%s %s" % (label, metainfo.get(key, "N/A")))

        result.extend([
            "",
            "FILE LISTING%s" % ("" if 'length' in info else " [%d file(s)]" %
                                len(info['files']), ),
        ])
        if 'length' in info:
            # Single file
            result.append("%-69s%9s" % (
                fmt.to_unicode(info['name']),
                fmt.human_size(total_size),
            ))
        else:
            # Directory structure
            result.append("%s/" % fmt.to_unicode(info['name']))
            oldpaths = [None] * 99
            for entry in info['files']:
                # Remove crap that certain PHP software puts in paths
                entry_path = [fmt.to_unicode(i) for i in entry["path"] if i]

                for idx, item in enumerate(entry_path[:-1]):
                    if item != oldpaths[idx]:
                        result.append("%s%s/" % (' ' * (4 * (idx + 1)), item))
                        oldpaths[idx] = item
                        # BUG FIX: a change at this level invalidates every
                        # deeper cached component; without this reset an
                        # identically named subdirectory under a *different*
                        # parent would not be printed.
                        oldpaths[idx + 1:] = [None] * (len(oldpaths) - idx - 1)
                result.append("%-69s%9s" % (
                    ' ' * (4 * len(entry_path)) + entry_path[-1],
                    fmt.human_size(entry['length']),
                ))

        if bad_encodings:
            result.extend([
                "",
                "WARNING: Bad encoding(s) {} in these fields: {}".format(
                    ', '.join(sorted(bad_encodings)),
                    ', '.join(sorted(bad_fields))),
                "Use the --raw option to inspect these encoding issues.",
            ])

        return result
Esempio n. 5
0
def info_hash(metadata):
    """ Return info hash as a string.
    """
    encoded_info = bencode.bencode(metadata['info'])
    digest = hashlib.sha1(encoded_info)
    return digest.hexdigest().upper()
Esempio n. 6
0
class MetafileChanger(ScriptBaseWithConfig):
    """ Change attributes of a bittorrent metafile.
    """

    # argument description for the usage information
    ARGS_HELP = "<metafile>..."

    # Keys of rTorrent session data
    # NOTE(review): "RESUMT" looks like a typo for "RESUME", but the name is
    # referenced in mainloop(), so renaming would break callers — kept as-is.
    RT_RESUMT_KEYS = ('libtorrent_resume', 'log_callback', 'err_callback', 'rtorrent')


    def add_options(self):
        """ Add program options.

            Registers every command line switch of this tool on the option
            parser provided by the script base class.
        """
        super(MetafileChanger, self).add_options()

        # Output control and safety switches
        self.add_bool_option("-n", "--dry-run",
            help="don't write changes to disk, just tell what would happen")
        self.add_bool_option("-V", "--no-skip",
            help="do not skip broken metafiles that fail the integrity check")
        self.add_value_option("-o", "--output-directory", "PATH",
            help="optional output directory for the modified metafile(s)")
        # Privacy (DHT/PEX) flag
        self.add_bool_option("-p", "--make-private",
            help="make torrent private (DHT/PEX disabled)")
        self.add_bool_option("-P", "--make-public",
            help="make torrent public (DHT/PEX enabled)")
        # Generic field editing (repeatable)
        self.add_value_option("-s", "--set", "KEY=VAL [-s ...]",
            action="append", default=[],
            help="set a specific key to the given value")
        self.add_value_option("-r", "--regex", "KEYcREGEXcSUBSTc [-r ...]",
            action="append", default=[],
            help="replace pattern in a specific key by the given substitution")
        # Cleanup of non-standard / session data
        self.add_bool_option("-C", "--clean",
            help="remove all non-standard data from metafile outside the info dict")
        self.add_bool_option("-A", "--clean-all",
            help="remove all non-standard data from metafile including inside the info dict")
        self.add_bool_option("-X", "--clean-xseed",
            help="like --clean-all, but keep libtorrent resume information")
        self.add_bool_option("-R", "--clean-rtorrent",
            help="remove all rTorrent session data from metafile")
        # Fast-resume support
        self.add_value_option("-H", "--hashed", "--fast-resume", "DATAPATH",
            help="add libtorrent fast-resume information (use {} in place of the torrent's name in DATAPATH)")
        # TODO: chtor --tracker
        ##self.add_value_option("-T", "--tracker", "DOMAIN",
        ##    help="filter given torrents for a tracker domain")
        # Announce URL handling
        self.add_value_option("-a", "--reannounce", "URL",
            help="set a new announce URL, but only if the old announce URL matches the new one")
        self.add_value_option("--reannounce-all", "URL",
            help="set a new announce URL on ALL given metafiles")
        self.add_bool_option("--no-ssl",
            help="force announce URL to 'http'")
        self.add_bool_option("--no-cross-seed",
            help="when using --reannounce-all, do not add a non-standard field to the info dict ensuring unique info hashes")
        # Comment / creation date fields
        self.add_value_option("--comment", "TEXT",
            help="set a new comment (an empty value deletes it)")
        self.add_bool_option("--bump-date",
            help="set the creation date to right now")
        self.add_bool_option("--no-date",
            help="remove the 'creation date' field")


    def mainloop(self):
        """ The main loop.

            Validates options, optionally resolves a tracker alias for
            --reannounce, then applies the requested changes to each
            given metafile.
        """
        if not self.args:
            self.parser.error("No metafiles given, nothing to do!")

        if 1 < sum(bool(i) for i in (self.options.no_ssl, self.options.reannounce, self.options.reannounce_all)):
            self.parser.error("Conflicting options --no-ssl, --reannounce and --reannounce-all!")

        # Set filter criteria for metafiles
        filter_url_prefix = None
        if self.options.reannounce:
            # <scheme>://<netloc>/<path>?<query>
            filter_url_prefix = urlparse.urlsplit(self.options.reannounce, allow_fragments=False)
            filter_url_prefix = urlparse.urlunsplit((
                filter_url_prefix.scheme, filter_url_prefix.netloc, '/', '', '' # bogus pylint: disable=E1103
            ))
            self.LOG.info("Filtering for metafiles with announce URL prefix %r..." % filter_url_prefix)

        if self.options.reannounce_all:
            self.options.reannounce = self.options.reannounce_all
        else:
            # When changing the announce URL w/o changing the domain, don't change the info hash!
            self.options.no_cross_seed = True

        # Resolve tracker alias, if URL doesn't look like an URL
        if self.options.reannounce and not urlparse.urlparse(self.options.reannounce).scheme:
            tracker_alias, idx = self.options.reannounce, "0"
            if '.' in tracker_alias:
                tracker_alias, idx = tracker_alias.split('.', 1)
            try:
                idx = int(idx, 10)
                _, tracker_url = config.lookup_announce_alias(tracker_alias)
                self.options.reannounce = tracker_url[idx]
            # BUG FIX: Python-2-only "except E, exc" replaced by "as" syntax
            except (KeyError, IndexError, TypeError, ValueError) as exc:
                raise error.UserError("Unknown tracker alias or bogus URL %r (%s)!" % (
                    self.options.reannounce, exc))

        # go through given files
        bad = 0
        changed = 0
        for filename in self.args:
            try:
                # Read and remember current content
                metainfo = bencode.bread(filename)
                old_metainfo = bencode.bencode(metainfo)
            except (EnvironmentError, KeyError, bencode.BencodeError) as exc:
                self.LOG.warning("Skipping bad metafile %r (%s: %s)" % (filename, type(exc).__name__, exc))
                bad += 1
            else:
                # Check metafile integrity
                try:
                    metafile.check_meta(metainfo)
                except ValueError as exc:
                    self.LOG.warn("Metafile %r failed integrity check: %s" % (filename, exc,))
                    if not self.options.no_skip:
                        continue

                # Skip any metafiles that don't meet the pre-conditions
                if filter_url_prefix and not metainfo['announce'].startswith(filter_url_prefix):
                    # BUG FIX: message read "no tracked by"
                    self.LOG.warn("Skipping metafile %r not tracked by %r!" % (filename, filter_url_prefix,))
                    continue

                # Keep resume info safe
                libtorrent_resume = {}
                if "libtorrent_resume" in metainfo:
                    try:
                        libtorrent_resume["bitfield"] = metainfo["libtorrent_resume"]["bitfield"]
                    except KeyError:
                        pass # nothing to remember

                    libtorrent_resume["files"] = copy.deepcopy(metainfo["libtorrent_resume"]["files"])

                # Change private flag?
                if self.options.make_private and not metainfo["info"].get("private", 0):
                    self.LOG.info("Setting private flag...")
                    metainfo["info"]["private"] = 1
                if self.options.make_public and metainfo["info"].get("private", 0):
                    self.LOG.info("Clearing private flag...")
                    del metainfo["info"]["private"]

                # Remove non-standard keys?
                if self.options.clean or self.options.clean_all or self.options.clean_xseed:
                    metafile.clean_meta(metainfo, including_info=not self.options.clean, logger=self.LOG.info)

                # Restore resume info?
                if self.options.clean_xseed:
                    if libtorrent_resume:
                        self.LOG.info("Restoring key 'libtorrent_resume'...")
                        metainfo.setdefault("libtorrent_resume", {})
                        metainfo["libtorrent_resume"].update(libtorrent_resume)
                    else:
                        self.LOG.warn("No resume information found!")

                # Clean rTorrent data?
                if self.options.clean_rtorrent:
                    for key in self.RT_RESUMT_KEYS:
                        if key in metainfo:
                            self.LOG.info("Removing key %r..." % (key,))
                            del metainfo[key]

                # Change announce URL?
                if self.options.reannounce:
                    metainfo['announce'] = self.options.reannounce
                    if "announce-list" in metainfo:
                        del metainfo["announce-list"]

                    if not self.options.no_cross_seed:
                        # Enforce unique hash per tracker
                        # NOTE(review): hashlib.md5() needs bytes on Python 3;
                        # this assumes a byte-string URL — confirm encoding.
                        metainfo["info"]["x_cross_seed"] = hashlib.md5(self.options.reannounce).hexdigest()
                if self.options.no_ssl:
                    # We're assuming here the same (default) port is used
                    metainfo['announce'] = (metainfo['announce']
                        .replace("https://", "http://").replace(":443/", ":80/"))

                # Change comment or creation date?
                if self.options.comment is not None:
                    if self.options.comment:
                        metainfo["comment"] = self.options.comment
                    elif "comment" in metainfo:
                        del metainfo["comment"]
                if self.options.bump_date:
                    # BUG FIX: long() does not exist on Python 3; int() is fine on both
                    metainfo["creation date"] = int(time.time())
                if self.options.no_date and "creation date" in metainfo:
                    del metainfo["creation date"]

                # Add fast-resume data?
                if self.options.hashed:
                    try:
                        metafile.add_fast_resume(metainfo, self.options.hashed.replace("{}", metainfo["info"]["name"]))
                    except EnvironmentError as exc:
                        self.fatal("Error making fast-resume data (%s)" % (exc,))
                        raise
Esempio n. 7
0
                    del metainfo["creation date"]
                    
                # Add fast-resume data?
                if self.options.hashed:
                    try:
                        metafile.add_fast_resume(metainfo, self.options.hashed.replace("{}", metainfo["info"]["name"]))
                    except EnvironmentError, exc:
                        self.fatal("Error making fast-resume data (%s)" % (exc,))
                        raise

                # Set specific keys?
                metafile.assign_fields(metainfo, self.options.set)
                replace_fields(metainfo, self.options.regex)

                # Write new metafile, if changed
                new_metainfo = bencode.bencode(metainfo)
                if new_metainfo != old_metainfo:
                    if self.options.output_directory:
                        filename = os.path.join(self.options.output_directory, os.path.basename(filename))
                        self.LOG.info("Writing %r..." % filename)

                        if not self.options.dry_run:
                            bencode.bwrite(filename, metainfo)
                            if "libtorrent_resume" in metainfo:
                                # Also write clean version
                                filename = filename.replace(".torrent", "-no-resume.torrent")
                                del metainfo["libtorrent_resume"]
                                self.LOG.info("Writing %r..." % filename)
                                bencode.bwrite(filename, metainfo)
                    else:
                        self.LOG.info("Changing %r..." % filename)
Esempio n. 8
0
    def mainloop(self):
        """ The main loop.

            Validates options, optionally resolves a tracker alias for
            --reannounce, rewrites every given metafile according to the
            selected options, and prints a change summary.
        """
        if not self.args:
            self.parser.error("No metafiles given, nothing to do!")

        if 1 < sum(bool(i) for i in (self.options.no_ssl, self.options.reannounce, self.options.reannounce_all)):
            self.parser.error("Conflicting options --no-ssl, --reannounce and --reannounce-all!")

        # Set filter criteria for metafiles
        filter_url_prefix = None
        if self.options.reannounce:
            # <scheme>://<netloc>/<path>?<query>
            filter_url_prefix = urlparse.urlsplit(self.options.reannounce, allow_fragments=False)
            filter_url_prefix = urlparse.urlunsplit((
                filter_url_prefix.scheme, filter_url_prefix.netloc, '/', '', '' # bogus pylint: disable=E1103
            ))
            self.LOG.info("Filtering for metafiles with announce URL prefix %r..." % filter_url_prefix)

        if self.options.reannounce_all:
            self.options.reannounce = self.options.reannounce_all
        else:
            # When changing the announce URL w/o changing the domain, don't change the info hash!
            self.options.no_cross_seed = True

        # Resolve tracker alias, if URL doesn't look like an URL
        if self.options.reannounce and not urlparse.urlparse(self.options.reannounce).scheme:
            tracker_alias, idx = self.options.reannounce, "0"
            if '.' in tracker_alias:
                tracker_alias, idx = tracker_alias.split('.', 1)
            try:
                idx = int(idx, 10)
                _, tracker_url = config.lookup_announce_alias(tracker_alias)
                self.options.reannounce = tracker_url[idx]
            except (KeyError, IndexError, TypeError, ValueError) as exc:
                raise error.UserError("Unknown tracker alias or bogus URL %r (%s)!" % (
                    self.options.reannounce, exc))

        # go through given files
        bad = 0
        changed = 0
        for filename in self.args:
            try:
                # Read and remember current content
                metainfo = bencode.bread(filename)
                old_metainfo = bencode.bencode(metainfo)
            except (EnvironmentError, KeyError, bencode.BencodeError) as exc:
                self.LOG.warning("Skipping bad metafile %r (%s: %s)" % (filename, type(exc).__name__, exc))
                bad += 1
            else:
                # Check metafile integrity
                try:
                    metafile.check_meta(metainfo)
                except ValueError as exc:
                    self.LOG.warn("Metafile %r failed integrity check: %s" % (filename, exc,))
                    if not self.options.no_skip:
                        continue

                # Skip any metafiles that don't meet the pre-conditions
                if filter_url_prefix and not metainfo['announce'].startswith(filter_url_prefix):
                    # BUG FIX: message read "no tracked by"
                    self.LOG.warn("Skipping metafile %r not tracked by %r!" % (filename, filter_url_prefix,))
                    continue

                # Keep resume info safe
                libtorrent_resume = {}
                if "libtorrent_resume" in metainfo:
                    try:
                        libtorrent_resume["bitfield"] = metainfo["libtorrent_resume"]["bitfield"]
                    except KeyError:
                        pass # nothing to remember

                    libtorrent_resume["files"] = copy.deepcopy(metainfo["libtorrent_resume"]["files"])

                # Change private flag?
                if self.options.make_private and not metainfo["info"].get("private", 0):
                    self.LOG.info("Setting private flag...")
                    metainfo["info"]["private"] = 1
                if self.options.make_public and metainfo["info"].get("private", 0):
                    self.LOG.info("Clearing private flag...")
                    del metainfo["info"]["private"]

                # Remove non-standard keys?
                if self.options.clean or self.options.clean_all or self.options.clean_xseed:
                    metafile.clean_meta(metainfo, including_info=not self.options.clean, logger=self.LOG.info)

                # Restore resume info?
                if self.options.clean_xseed:
                    if libtorrent_resume:
                        self.LOG.info("Restoring key 'libtorrent_resume'...")
                        metainfo.setdefault("libtorrent_resume", {})
                        metainfo["libtorrent_resume"].update(libtorrent_resume)
                    else:
                        self.LOG.warn("No resume information found!")

                # Clean rTorrent data?
                if self.options.clean_rtorrent:
                    for key in self.RT_RESUMT_KEYS:
                        if key in metainfo:
                            self.LOG.info("Removing key %r..." % (key,))
                            del metainfo[key]

                # Change announce URL?
                if self.options.reannounce:
                    metainfo['announce'] = self.options.reannounce
                    if "announce-list" in metainfo:
                        del metainfo["announce-list"]

                    if not self.options.no_cross_seed:
                        # Enforce unique hash per tracker
                        # NOTE(review): hashlib.md5() requires bytes on
                        # Python 3; this assumes a byte-string URL — confirm.
                        metainfo["info"]["x_cross_seed"] = hashlib.md5(self.options.reannounce).hexdigest()
                if self.options.no_ssl:
                    # We're assuming here the same (default) port is used
                    metainfo['announce'] = (metainfo['announce']
                        .replace("https://", "http://").replace(":443/", ":80/"))

                # Change comment or creation date?
                if self.options.comment is not None:
                    if self.options.comment:
                        metainfo["comment"] = self.options.comment
                    elif "comment" in metainfo:
                        del metainfo["comment"]
                if self.options.bump_date:
                    metainfo["creation date"] = int(time.time())
                if self.options.no_date and "creation date" in metainfo:
                    del metainfo["creation date"]

                # Add fast-resume data?
                if self.options.hashed:
                    try:
                        metafile.add_fast_resume(metainfo, self.options.hashed.replace("{}", metainfo["info"]["name"]))
                    except EnvironmentError as exc:
                        self.fatal("Error making fast-resume data (%s)" % (exc,))
                        raise

                # Set specific keys?
                metafile.assign_fields(metainfo, self.options.set, self.options.debug)
                replace_fields(metainfo, self.options.regex)

                # Write new metafile, if changed
                new_metainfo = bencode.bencode(metainfo)
                if new_metainfo != old_metainfo:
                    if self.options.output_directory:
                        filename = os.path.join(self.options.output_directory, os.path.basename(filename))
                        self.LOG.info("Writing %r..." % filename)

                        if not self.options.dry_run:
                            bencode.bwrite(filename, metainfo)
                            if "libtorrent_resume" in metainfo:
                                # Also write clean version
                                filename = filename.replace(".torrent", "-no-resume.torrent")
                                del metainfo["libtorrent_resume"]
                                self.LOG.info("Writing %r..." % filename)
                                bencode.bwrite(filename, metainfo)
                    else:
                        self.LOG.info("Changing %r..." % filename)

                        if not self.options.dry_run:
                            # Write to temporary file
                            tempname = os.path.join(
                                os.path.dirname(filename),
                                '.' + os.path.basename(filename),
                            )
                            self.LOG.debug("Writing %r..." % tempname)
                            bencode.bwrite(tempname, metainfo)

                            # Replace existing file
                            if os.name != "posix":
                                # cannot rename to existing target on WIN32
                                os.remove(filename)

                            try:
                                os.rename(tempname, filename)
                            except EnvironmentError as exc:
                                # TODO: Try to write directly, keeping a backup!
                                raise error.LoggableError("Can't rename tempfile %r to %r (%s)" % (
                                    tempname, filename, exc
                                ))

                    changed += 1

        # Print summary
        if changed:
            self.LOG.info("%s %d metafile(s)." % (
                "Would've changed" if self.options.dry_run else "Changed", changed
            ))
        if bad:
            self.LOG.warn("Skipped %d bad metafile(s)!" % (bad))
Esempio n. 9
0
    def listing(self, masked=True):
        """ List torrent info & contents. Returns a list of formatted lines.

            masked -- when True, secret keys in the announce URL are masked.
        """
        # Assemble data
        metainfo = sanitize(bencode.bread(self.filename))
        announce = metainfo['announce']
        info = metainfo['info']
        info_hash = hashlib.sha1(bencode.bencode(info))

        total_size = data_size(metainfo)
        piece_length = info['piece length']
        piece_number, last_piece_length = divmod(total_size, piece_length)

        # Build result
        result = [
            "NAME %s" % (os.path.basename(self.filename)),
            "SIZE %s (%i * %s + %s)" % (
                fmt.human_size(total_size).strip(),
                piece_number, fmt.human_size(piece_length).strip(),
                fmt.human_size(last_piece_length).strip(),
            ),
            "META %s (pieces %s %.1f%%)" % (
                fmt.human_size(os.path.getsize(self.filename)).strip(),
                fmt.human_size(len(info["pieces"])).strip(),
                100.0 * len(info["pieces"]) / os.path.getsize(self.filename),
            ),
            "HASH %s" % (info_hash.hexdigest().upper()),
            "URL  %s" % (mask_keys if masked else str)(announce),
            "PRV  %s" % ("YES (DHT/PEX disabled)" if info.get("private") else "NO (DHT/PEX enabled)"),
            "TIME %s" % ("N/A" if "creation date" not in metainfo else
                time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(metainfo["creation date"]))
            ),
        ]

        for label, key in (("BY  ", "created by"), ("REM ", "comment")):
            if key in metainfo:
                result.append("%s %s" % (label, metainfo.get(key, "N/A")))

        # BUG FIX: dict.has_key() was removed in Python 3; the equivalent
        # "key in dict" works on both versions (and matches sibling code).
        result.extend([
            "",
            "FILE LISTING%s" % ("" if 'length' in info else " [%d file(s)]" % len(info['files']),),
        ])
        if 'length' in info:
            # Single file
            result.append("%-69s%9s" % (
                    info['name'],
                    fmt.human_size(total_size),
            ))
        else:
            # Directory structure
            result.append("%s/" % info['name'])
            oldpaths = [None] * 99
            for entry in info['files']:
                # Remove crap that certain PHP software puts in paths
                entry_path = [i for i in entry["path"] if i]

                for idx, item in enumerate(entry_path[:-1]):
                    if item != oldpaths[idx]:
                        result.append("%s%s/" % (' ' * (4*(idx+1)), item))
                        oldpaths[idx] = item
                        # BUG FIX: invalidate deeper cached components, else an
                        # identically named subdirectory under a different
                        # parent is not printed.
                        oldpaths[idx + 1:] = [None] * (len(oldpaths) - idx - 1)
                result.append("%-69s%9s" % (
                    ' ' * (4*len(entry_path)) + entry_path[-1],
                    fmt.human_size(entry['length']),
                ))

        return result
Esempio n. 10
0
def info_hash(metadata):
    """Return the torrent's info hash as an upper-case hex string."""
    info_digest = hashlib.sha1(bencode.bencode(metadata['info']))
    return info_digest.hexdigest().upper()