def AddTorrentSkipHashCheck(self, logger, torrentPath, downloadPath):
    logger.info("Adding torrent '%s' without hash checking to rTorrent to '%s'." % (torrentPath, downloadPath))

    sourceDirectory, sourceFilename = os.path.split(torrentPath)
    sourceFilename = "fast resume " + sourceFilename
    destinationTorrentPath = os.path.join(sourceDirectory, sourceFilename)

    if os.path.exists(destinationTorrentPath):
        raise PtpUploaderException("Can't create fast resume torrent because path '%s' already exists." % destinationTorrentPath)

    shutil.copyfile(torrentPath, destinationTorrentPath)

    metainfo = bencode.bread(destinationTorrentPath)
    metafile.add_fast_resume(metainfo, downloadPath.encode('utf-8'))
    bencode.bwrite(destinationTorrentPath, metainfo)

    infoHash = ""
    try:
        infoHash = self.AddTorrent(logger, destinationTorrentPath, downloadPath)
    finally:
        # We always remove the fast resume torrent, regardless of the result of adding it to rTorrent.
        # This ensures that resuming the job will work even if adding the torrent to rTorrent fails.
        os.remove(destinationTorrentPath)

    return infoHash
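A minimal standalone sketch of the fast-resume step used above, assuming pyrocore's bencode and metafile helpers and placeholder paths: read the metainfo, let add_fast_resume fill in libtorrent resume data from the files already on disk, and write the result to a separate file so the original torrent stays untouched.

from pyrocore.util import bencode, metafile

def make_fast_resume_copy(torrent_path, data_path, output_path):
    # Hypothetical helper; all three paths are placeholders.
    meta = bencode.bread(torrent_path)         # decode the .torrent into a dict
    metafile.add_fast_resume(meta, data_path)  # record resume data for the already-downloaded payload
    bencode.bwrite(output_path, meta)          # write a new metafile, leaving the original intact

make_fast_resume_copy("example.torrent", "/data/downloads/example", "fast resume example.torrent")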
def AddTorrent(self, logger, torrentPath, downloadPath):
    logger.info("Initiating the download of torrent '%s' with rTorrent to '%s'." % (torrentPath, downloadPath))

    with open(torrentPath, "rb") as file:
        contents = xmlrpc.client.Binary(file.read())

    torrentData = bencode.bread(torrentPath)
    metafile.check_meta(torrentData)
    infoHash = metafile.info_hash(torrentData)

    self.proxy.load.raw('', contents)

    # If load.raw is slow, then d.directory_base.set throws an exception (Fault: <Fault -501: 'Could not find info-hash.'>),
    # so we retry adding the torrent after a short delay.
    maximumTries = 15
    while True:
        try:
            self.proxy.d.directory_base.set(infoHash, downloadPath)
            self.proxy.d.start(infoHash)
            break
        except Exception:
            if maximumTries > 1:
                maximumTries -= 1
                time.sleep(2)  # Two seconds.
            else:
                raise

    return infoHash
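The retry loop above works around a race in rTorrent's XML-RPC interface: load.raw returns before the torrent is registered, so the follow-up calls can fail with "Could not find info-hash". A small generic sketch of the same retry pattern, assuming nothing beyond the standard library (the helper name and defaults are made up for illustration):

import time

def retry(operation, tries=15, delay_seconds=2):
    # Run `operation` until it succeeds, retrying up to `tries` times.
    for attempt in range(tries):
        try:
            return operation()
        except Exception:
            if attempt == tries - 1:
                raise  # out of retries; propagate the last error
            time.sleep(delay_seconds)

# Hypothetical usage mirroring the loop above:
# retry(lambda: proxy.d.directory_base.set(infoHash, downloadPath))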
def Make(logger, path, torrentPath): logger.info( "Making torrent from '%s' to '%s'." % ( path, torrentPath ) ) if os.path.exists( torrentPath ): raise PtpUploaderException( "Can't create torrent because path '%s' already exists." % torrentPath ) sourceSize = GetPathSize( path ) # Optimal piece size should be automatically calculated by mktorrent... pieceSize = "-l 19" # 512 KB if sourceSize > ( 16 * 1024 * 1024 * 1024 ): pieceSize = "-l 24" # 16 MB elif sourceSize > ( 8 * 1024 * 1024 * 1024 ): pieceSize = "-l 23" # 8 MB elif sourceSize > ( 4 * 1024 * 1024 * 1024 ): pieceSize = "-l 22" # 4 MB elif sourceSize > ( 2 * 1024 * 1024 * 1024 ): pieceSize = "-l 21" # 2 MB elif sourceSize > ( 1 * 1024 * 1024 * 1024 ): pieceSize = "-l 20" # 1 MB args = [ Settings.MktorrentPath, '-a', Settings.PtpAnnounceUrl, '-p', pieceSize, '-o', torrentPath, path ] errorCode = subprocess.call( args ) if errorCode != 0: args[ 2 ] = "OMITTED" # Do not log the announce URL, so it less likely gets posted in the forums. raise PtpUploaderException( "Process execution '%s' returned with error code '%s'." % ( args, errorCode ) ) # Torrents with exactly the same content and piece size get the same info hash regardless of the announcement URL. # To make sure that our new torrent will have unique info hash we add a unused key to the info section of the metadata. # Another way would be to use a different piece size, but this solution is much more elegant. # See: http://wiki.theory.org/BitTorrentSpecification#Metainfo_File_Structure metainfo = bencode.bread( torrentPath ) metafile.assign_fields( metainfo, [ 'info.source=PTP' ] ) bencode.bwrite( torrentPath, metainfo )
def mainloop(self):
    """ The main loop.
    """
    if not self.args:
        self.parser.print_help()
        self.parser.exit()
    elif len(self.args) < 1:
        self.parser.error("Expecting at least a metafile name")

    # Read metafile
    metapath = self.args[0]
    try:
        metainfo = bencode.bread(metapath)
    except (KeyError, bencode.BencodeError) as exc:
        self.LOG.error("Bad metafile %r (%s: %s)" % (metapath, type(exc).__name__, exc))
def check_torrent(torrent_file, seed_dir):
    metainfo = bencode.bread(torrent_file)
    name = metainfo['info']['name']
    print(name)
    torrent = metafile.Metafile(torrent_file)
    path = os.path.join(seed_dir, name)
    try:
        success = torrent.check(metainfo, path, metafile.console_progress())
    except OSError:
        success = False
    dest_dir = os.path.join(os.path.dirname(torrent_file), 'success' if success else 'failure')
    if not os.path.isdir(dest_dir):
        os.mkdir(dest_dir)
    shutil.move(torrent_file, dest_dir)
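A hedged usage sketch for the function above, with placeholder directories: run check_torrent over every .torrent file in a folder, letting it sort each one into a success/ or failure/ subdirectory.

import glob
import os

torrent_dir = "/srv/torrents"  # placeholder: folder of .torrent files to verify
seed_dir = "/srv/seeding"      # placeholder: folder containing the downloaded payloads

for torrent_file in sorted(glob.glob(os.path.join(torrent_dir, "*.torrent"))):
    check_torrent(torrent_file, seed_dir)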
def Make(logger, path, torrentPath): logger.info("Making torrent from '%s' to '%s'." % (path, torrentPath)) if os.path.exists(torrentPath): raise PtpUploaderException( "Can't create torrent because path '%s' already exists." % torrentPath) sourceSize = GetPathSize(path) # Optimal piece size should be automatically calculated by mktorrent... pieceSize = "-l 19" # 512 KB if sourceSize > (16 * 1024 * 1024 * 1024): pieceSize = "-l 24" # 16 MB elif sourceSize > (8 * 1024 * 1024 * 1024): pieceSize = "-l 23" # 8 MB elif sourceSize > (4 * 1024 * 1024 * 1024): pieceSize = "-l 22" # 4 MB elif sourceSize > (2 * 1024 * 1024 * 1024): pieceSize = "-l 21" # 2 MB elif sourceSize > (1 * 1024 * 1024 * 1024): pieceSize = "-l 20" # 1 MB args = [ Settings.MktorrentPath, '-a', Settings.PtpAnnounceUrl, '-p', pieceSize, '-o', torrentPath, path ] errorCode = subprocess.call(args) if errorCode != 0: args[ 2] = "OMITTED" # Do not log the announce URL, so it less likely gets posted in the forums. raise PtpUploaderException( "Process execution '%s' returned with error code '%s'." % (args, errorCode)) # Torrents with exactly the same content and piece size get the same info hash regardless of the announcement URL. # To make sure that our new torrent will have unique info hash we add a unused key to the info section of the metadata. # Another way would be to use a different piece size, but this solution is much more elegant. # See: http://wiki.theory.org/BitTorrentSpecification#Metainfo_File_Structure metainfo = bencode.bread(torrentPath) metafile.assign_fields(metainfo, ['info.source=PTP']) bencode.bwrite(torrentPath, metainfo)
def mainloop(self):
    """ The main loop.
    """
    if not self.args:
        self.parser.print_help()
        self.parser.exit()
    elif len(self.args) < 1:
        self.parser.error("Expecting at least a metafile name")

    # Read metafile
    metapath = self.args[0]
    try:
        metainfo = bencode.bread(metapath)
    except (KeyError, bencode.BencodeError) as exc:
        self.fatal("Bad metafile %r (%s)" % (metapath, type(exc).__name__), exc)
        raise
    else:
        # Check metafile integrity
        try:
            metafile.check_meta(metainfo)
        except ValueError as exc:
            self.fatal("Metafile %r failed integrity check" % (metapath,), exc)
            raise
        else:
            if len(self.args) > 1:
                datapath = self.args[1].rstrip(os.sep)
            else:
                datapath = metainfo["info"]["name"]

            # Check the hashes
            torrent = metafile.Metafile(metapath)
            try:
                ok = torrent.check(metainfo, datapath,
                                   progress=None if self.options.quiet else metafile.console_progress())
                if not ok:
                    self.fatal("Metafile %r has checksum errors" % (metapath,))
                    sys.exit(1)
            except OSError as exc:
                self.fatal("Torrent data file missing", exc)
                raise
def listing(self, masked=True):
    """ List torrent info & contents. Returns a list of formatted lines.
    """
    # Assemble data
    metainfo, bad_encodings, bad_fields = sanitize(bencode.bread(self.filename), diagnostics=True)
    announce = metainfo['announce']
    info = metainfo['info']
    infohash = hashlib.sha1(bencode.bencode(info))
    total_size = data_size(metainfo)
    piece_length = info['piece length']
    piece_number, last_piece_length = divmod(total_size, piece_length)

    # Build result
    result = [
        "NAME %s" % (os.path.basename(fmt.to_unicode(self.filename))),
        "SIZE %s (%i * %s + %s)" % (
            fmt.human_size(total_size).strip(),
            piece_number, fmt.human_size(piece_length).strip(),
            fmt.human_size(last_piece_length).strip(),
        ),
        "META %s (pieces %s %.1f%%)" % (
            fmt.human_size(os.path.getsize(self.filename)).strip(),
            fmt.human_size(len(info["pieces"])).strip(),
            100.0 * len(info["pieces"]) / os.path.getsize(self.filename),
        ),
        "HASH %s" % (infohash.hexdigest().upper()),
        "URL %s" % (mask_keys if masked else str)(announce),
        "PRV %s" % ("YES (DHT/PEX disabled)" if info.get("private") else "NO (DHT/PEX enabled)"),
        "TIME %s" % ("N/A" if "creation date" not in metainfo else
                     time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(metainfo["creation date"]))),
    ]

    for label, key in (("BY ", "created by"), ("REM ", "comment")):
        if key in metainfo:
            result.append("%s %s" % (label, metainfo.get(key, "N/A")))

    result.extend([
        "",
        "FILE LISTING%s" % ("" if 'length' in info else " [%d file(s)]" % len(info['files']),),
    ])
    if 'length' in info:
        # Single file
        result.append("%-69s%9s" % (
            fmt.to_unicode(info['name']),
            fmt.human_size(total_size),
        ))
    else:
        # Directory structure
        result.append("%s/" % fmt.to_unicode(info['name']))
        oldpaths = [None] * 99
        for entry in info['files']:
            # Remove crap that certain PHP software puts in paths
            entry_path = [fmt.to_unicode(i) for i in entry["path"] if i]
            for idx, item in enumerate(entry_path[:-1]):
                if item != oldpaths[idx]:
                    result.append("%s%s/" % (' ' * (4 * (idx + 1)), item))
                    oldpaths[idx] = item
            result.append("%-69s%9s" % (
                ' ' * (4 * len(entry_path)) + entry_path[-1],
                fmt.human_size(entry['length']),
            ))

    if bad_encodings:
        result.extend([
            "",
            "WARNING: Bad encoding(s) {} in these fields: {}".format(
                ', '.join(sorted(bad_encodings)),
                ', '.join(sorted(bad_fields))),
            "Use the --raw option to inspect these encoding issues.",
        ])

    return result
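A short usage sketch, assuming this listing method lives on pyrocore's Metafile class (as self.filename suggests) and that the path is a placeholder:

from pyrocore.util import metafile

torrent = metafile.Metafile("example.torrent")  # placeholder path
for line in torrent.listing(masked=True):       # masked=True hides secret keys in the announce URL
    print(line)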
class MetafileChanger(ScriptBaseWithConfig):
    """ Change attributes of a bittorrent metafile.
    """

    # argument description for the usage information
    ARGS_HELP = "<metafile>..."

    # Keys of rTorrent session data
    RT_RESUME_KEYS = ('libtorrent_resume', 'log_callback', 'err_callback', 'rtorrent')

    def add_options(self):
        """ Add program options.
        """
        super(MetafileChanger, self).add_options()

        self.add_bool_option("-n", "--dry-run",
            help="don't write changes to disk, just tell what would happen")
        self.add_bool_option("-V", "--no-skip",
            help="do not skip broken metafiles that fail the integrity check")
        self.add_value_option("-o", "--output-directory", "PATH",
            help="optional output directory for the modified metafile(s)")
        self.add_bool_option("-p", "--make-private",
            help="make torrent private (DHT/PEX disabled)")
        self.add_bool_option("-P", "--make-public",
            help="make torrent public (DHT/PEX enabled)")
        self.add_value_option("-s", "--set", "KEY=VAL [-s ...]",
            action="append", default=[],
            help="set a specific key to the given value")
        self.add_value_option("-r", "--regex", "KEYcREGEXcSUBSTc [-r ...]",
            action="append", default=[],
            help="replace pattern in a specific key by the given substitution")
        self.add_bool_option("-C", "--clean",
            help="remove all non-standard data from metafile outside the info dict")
        self.add_bool_option("-A", "--clean-all",
            help="remove all non-standard data from metafile including inside the info dict")
        self.add_bool_option("-X", "--clean-xseed",
            help="like --clean-all, but keep libtorrent resume information")
        self.add_bool_option("-R", "--clean-rtorrent",
            help="remove all rTorrent session data from metafile")
        self.add_value_option("-H", "--hashed", "--fast-resume", "DATAPATH",
            help="add libtorrent fast-resume information (use {} in place of the torrent's name in DATAPATH)")
        # TODO: chtor --tracker
        ##self.add_value_option("-T", "--tracker", "DOMAIN",
        ##    help="filter given torrents for a tracker domain")
        self.add_value_option("-a", "--reannounce", "URL",
            help="set a new announce URL, but only if the old announce URL matches the new one")
        self.add_value_option("--reannounce-all", "URL",
            help="set a new announce URL on ALL given metafiles")
        self.add_bool_option("--no-ssl",
            help="force announce URL to 'http'")
        self.add_bool_option("--no-cross-seed",
            help="when using --reannounce-all, do not add a non-standard field to the info dict ensuring unique info hashes")
        self.add_value_option("--comment", "TEXT",
            help="set a new comment (an empty value deletes it)")
        self.add_bool_option("--bump-date",
            help="set the creation date to right now")
        self.add_bool_option("--no-date",
            help="remove the 'creation date' field")

    def mainloop(self):
        """ The main loop.
        """
        if not self.args:
            self.parser.error("No metafiles given, nothing to do!")

        if 1 < sum(bool(i) for i in (self.options.no_ssl, self.options.reannounce, self.options.reannounce_all)):
            self.parser.error("Conflicting options --no-ssl, --reannounce and --reannounce-all!")

        # Set filter criteria for metafiles
        filter_url_prefix = None
        if self.options.reannounce:
            # <scheme>://<netloc>/<path>?<query>
            filter_url_prefix = urlparse.urlsplit(self.options.reannounce, allow_fragments=False)
            filter_url_prefix = urlparse.urlunsplit((
                filter_url_prefix.scheme, filter_url_prefix.netloc, '/', '', ''  # bogus pylint: disable=E1103
            ))
            self.LOG.info("Filtering for metafiles with announce URL prefix %r..." % filter_url_prefix)

        if self.options.reannounce_all:
            self.options.reannounce = self.options.reannounce_all
        else:
            # When changing the announce URL w/o changing the domain, don't change the info hash!
            self.options.no_cross_seed = True

        # Resolve tracker alias, if URL doesn't look like an URL
        if self.options.reannounce and not urlparse.urlparse(self.options.reannounce).scheme:
            tracker_alias, idx = self.options.reannounce, "0"
            if '.' in tracker_alias:
                tracker_alias, idx = tracker_alias.split('.', 1)
            try:
                idx = int(idx, 10)
                _, tracker_url = config.lookup_announce_alias(tracker_alias)
                self.options.reannounce = tracker_url[idx]
            except (KeyError, IndexError, TypeError, ValueError) as exc:
                raise error.UserError("Unknown tracker alias or bogus URL %r (%s)!" % (
                    self.options.reannounce, exc))

        # go through given files
        bad = 0
        changed = 0
        for filename in self.args:
            try:
                # Read and remember current content
                metainfo = bencode.bread(filename)
                old_metainfo = bencode.bencode(metainfo)
            except (EnvironmentError, KeyError, bencode.BencodeError) as exc:
                self.LOG.warning("Skipping bad metafile %r (%s: %s)" % (filename, type(exc).__name__, exc))
                bad += 1
            else:
                # Check metafile integrity
                try:
                    metafile.check_meta(metainfo)
                except ValueError as exc:
                    self.LOG.warn("Metafile %r failed integrity check: %s" % (filename, exc,))
                    if not self.options.no_skip:
                        continue

                # Skip any metafiles that don't meet the pre-conditions
                if filter_url_prefix and not metainfo['announce'].startswith(filter_url_prefix):
                    self.LOG.warn("Skipping metafile %r not tracked by %r!" % (filename, filter_url_prefix,))
                    continue

                # Keep resume info safe
                libtorrent_resume = {}
                if "libtorrent_resume" in metainfo:
                    try:
                        libtorrent_resume["bitfield"] = metainfo["libtorrent_resume"]["bitfield"]
                    except KeyError:
                        pass  # nothing to remember
                    libtorrent_resume["files"] = copy.deepcopy(metainfo["libtorrent_resume"]["files"])

                # Change private flag?
                if self.options.make_private and not metainfo["info"].get("private", 0):
                    self.LOG.info("Setting private flag...")
                    metainfo["info"]["private"] = 1
                if self.options.make_public and metainfo["info"].get("private", 0):
                    self.LOG.info("Clearing private flag...")
                    del metainfo["info"]["private"]

                # Remove non-standard keys?
                if self.options.clean or self.options.clean_all or self.options.clean_xseed:
                    metafile.clean_meta(metainfo, including_info=not self.options.clean, logger=self.LOG.info)

                # Restore resume info?
                if self.options.clean_xseed:
                    if libtorrent_resume:
                        self.LOG.info("Restoring key 'libtorrent_resume'...")
                        metainfo.setdefault("libtorrent_resume", {})
                        metainfo["libtorrent_resume"].update(libtorrent_resume)
                    else:
                        self.LOG.warn("No resume information found!")

                # Clean rTorrent data?
                if self.options.clean_rtorrent:
                    for key in self.RT_RESUME_KEYS:
                        if key in metainfo:
                            self.LOG.info("Removing key %r..." % (key,))
                            del metainfo[key]

                # Change announce URL?
                if self.options.reannounce:
                    metainfo['announce'] = self.options.reannounce
                    if "announce-list" in metainfo:
                        del metainfo["announce-list"]

                    if not self.options.no_cross_seed:
                        # Enforce unique hash per tracker
                        metainfo["info"]["x_cross_seed"] = hashlib.md5(self.options.reannounce.encode("utf-8")).hexdigest()
                if self.options.no_ssl:
                    # We're assuming here the same (default) port is used
                    metainfo['announce'] = (metainfo['announce']
                        .replace("https://", "http://").replace(":443/", ":80/"))

                # Change comment or creation date?
                if self.options.comment is not None:
                    if self.options.comment:
                        metainfo["comment"] = self.options.comment
                    elif "comment" in metainfo:
                        del metainfo["comment"]
                if self.options.bump_date:
                    metainfo["creation date"] = int(time.time())
                if self.options.no_date and "creation date" in metainfo:
                    del metainfo["creation date"]

                # Add fast-resume data?
                if self.options.hashed:
                    try:
                        metafile.add_fast_resume(metainfo, self.options.hashed.replace("{}", metainfo["info"]["name"]))
                    except EnvironmentError as exc:
                        self.fatal("Error making fast-resume data (%s)" % (exc,))
                        raise
for torrent, details in torrentList.items():
    ratio = float(details["ratio"])
    seeding_time = int(details["seeding_time"])

    # Match our criteria?
    if ratio >= args.ratio or seeding_time >= max_seconds:
        D2Rlog.info("[ " + str(torrent_index + 1) + " ] " + " Selected <" + details['name']
                    + "> Ratio: " + str(ratio) + " Seed Time: " + str(seeding_time))
        delugeTorrent = os.path.join(delugeBasePath, torrent + ".torrent")

        # Read Torrent
        metainfo = bencode.bread(delugeTorrent)
        payloadPath = os.path.join(details["save_path"], metainfo["info"]["name"])

        # Add RTorrent Fast Resume
        rTorrent = metafile.add_fast_resume(metainfo, payloadPath)

        # Determine where to save the new torrent
        destination = watchFolder
        if args.label:
            labeled = os.path.join(watchFolder, details["label"])
            # Label Folder must exist, otherwise add torrent to base watch folder (no label)
            if os.path.exists(labeled):
def mainloop(self):
    """ The main loop.
    """
    if not self.args:
        self.parser.error("No metafiles given, nothing to do!")

    if 1 < sum(bool(i) for i in (self.options.no_ssl, self.options.reannounce, self.options.reannounce_all)):
        self.parser.error("Conflicting options --no-ssl, --reannounce and --reannounce-all!")

    # Set filter criteria for metafiles
    filter_url_prefix = None
    if self.options.reannounce:
        # <scheme>://<netloc>/<path>?<query>
        filter_url_prefix = urlparse.urlsplit(self.options.reannounce, allow_fragments=False)
        filter_url_prefix = urlparse.urlunsplit((
            filter_url_prefix.scheme, filter_url_prefix.netloc, '/', '', ''  # bogus pylint: disable=E1103
        ))
        self.LOG.info("Filtering for metafiles with announce URL prefix %r..." % filter_url_prefix)

    if self.options.reannounce_all:
        self.options.reannounce = self.options.reannounce_all
    else:
        # When changing the announce URL w/o changing the domain, don't change the info hash!
        self.options.no_cross_seed = True

    # Resolve tracker alias, if URL doesn't look like an URL
    if self.options.reannounce and not urlparse.urlparse(self.options.reannounce).scheme:
        tracker_alias, idx = self.options.reannounce, "0"
        if '.' in tracker_alias:
            tracker_alias, idx = tracker_alias.split('.', 1)
        try:
            idx = int(idx, 10)
            _, tracker_url = config.lookup_announce_alias(tracker_alias)
            self.options.reannounce = tracker_url[idx]
        except (KeyError, IndexError, TypeError, ValueError) as exc:
            raise error.UserError("Unknown tracker alias or bogus URL %r (%s)!" % (
                self.options.reannounce, exc))

    # go through given files
    bad = 0
    changed = 0
    for filename in self.args:
        try:
            # Read and remember current content
            metainfo = bencode.bread(filename)
            old_metainfo = bencode.bencode(metainfo)
        except (EnvironmentError, KeyError, bencode.BencodeError) as exc:
            self.LOG.warning("Skipping bad metafile %r (%s: %s)" % (filename, type(exc).__name__, exc))
            bad += 1
        else:
            # Check metafile integrity
            try:
                metafile.check_meta(metainfo)
            except ValueError as exc:
                self.LOG.warn("Metafile %r failed integrity check: %s" % (filename, exc,))
                if not self.options.no_skip:
                    continue

            # Skip any metafiles that don't meet the pre-conditions
            if filter_url_prefix and not metainfo['announce'].startswith(filter_url_prefix):
                self.LOG.warn("Skipping metafile %r not tracked by %r!" % (filename, filter_url_prefix,))
                continue

            # Keep resume info safe
            libtorrent_resume = {}
            if "libtorrent_resume" in metainfo:
                try:
                    libtorrent_resume["bitfield"] = metainfo["libtorrent_resume"]["bitfield"]
                except KeyError:
                    pass  # nothing to remember
                libtorrent_resume["files"] = copy.deepcopy(metainfo["libtorrent_resume"]["files"])

            # Change private flag?
            if self.options.make_private and not metainfo["info"].get("private", 0):
                self.LOG.info("Setting private flag...")
                metainfo["info"]["private"] = 1
            if self.options.make_public and metainfo["info"].get("private", 0):
                self.LOG.info("Clearing private flag...")
                del metainfo["info"]["private"]

            # Remove non-standard keys?
            if self.options.clean or self.options.clean_all or self.options.clean_xseed:
                metafile.clean_meta(metainfo, including_info=not self.options.clean, logger=self.LOG.info)

            # Restore resume info?
            if self.options.clean_xseed:
                if libtorrent_resume:
                    self.LOG.info("Restoring key 'libtorrent_resume'...")
                    metainfo.setdefault("libtorrent_resume", {})
                    metainfo["libtorrent_resume"].update(libtorrent_resume)
                else:
                    self.LOG.warn("No resume information found!")

            # Clean rTorrent data?
            if self.options.clean_rtorrent:
                for key in self.RT_RESUME_KEYS:
                    if key in metainfo:
                        self.LOG.info("Removing key %r..." % (key,))
                        del metainfo[key]

            # Change announce URL?
            if self.options.reannounce:
                metainfo['announce'] = self.options.reannounce
                if "announce-list" in metainfo:
                    del metainfo["announce-list"]

                if not self.options.no_cross_seed:
                    # Enforce unique hash per tracker
                    metainfo["info"]["x_cross_seed"] = hashlib.md5(self.options.reannounce.encode("utf-8")).hexdigest()
            if self.options.no_ssl:
                # We're assuming here the same (default) port is used
                metainfo['announce'] = (metainfo['announce']
                    .replace("https://", "http://").replace(":443/", ":80/"))

            # Change comment or creation date?
            if self.options.comment is not None:
                if self.options.comment:
                    metainfo["comment"] = self.options.comment
                elif "comment" in metainfo:
                    del metainfo["comment"]
            if self.options.bump_date:
                metainfo["creation date"] = int(time.time())
            if self.options.no_date and "creation date" in metainfo:
                del metainfo["creation date"]

            # Add fast-resume data?
            if self.options.hashed:
                try:
                    metafile.add_fast_resume(metainfo, self.options.hashed.replace("{}", metainfo["info"]["name"]))
                except EnvironmentError as exc:
                    self.fatal("Error making fast-resume data (%s)" % (exc,))
                    raise

            # Set specific keys?
            metafile.assign_fields(metainfo, self.options.set, self.options.debug)
            replace_fields(metainfo, self.options.regex)

            # Write new metafile, if changed
            new_metainfo = bencode.bencode(metainfo)
            if new_metainfo != old_metainfo:
                if self.options.output_directory:
                    filename = os.path.join(self.options.output_directory, os.path.basename(filename))
                    self.LOG.info("Writing %r..." % filename)
                    if not self.options.dry_run:
                        bencode.bwrite(filename, metainfo)
                        if "libtorrent_resume" in metainfo:
                            # Also write clean version
                            filename = filename.replace(".torrent", "-no-resume.torrent")
                            del metainfo["libtorrent_resume"]
                            self.LOG.info("Writing %r..." % filename)
                            bencode.bwrite(filename, metainfo)
                else:
                    self.LOG.info("Changing %r..." % filename)
                    if not self.options.dry_run:
                        # Write to temporary file
                        tempname = os.path.join(
                            os.path.dirname(filename),
                            '.' + os.path.basename(filename),
                        )
                        self.LOG.debug("Writing %r..." % tempname)
                        bencode.bwrite(tempname, metainfo)

                        # Replace existing file
                        if os.name != "posix":
                            # cannot rename to existing target on WIN32
                            os.remove(filename)
                        try:
                            os.rename(tempname, filename)
                        except EnvironmentError as exc:
                            # TODO: Try to write directly, keeping a backup!
                            raise error.LoggableError("Can't rename tempfile %r to %r (%s)" % (
                                tempname, filename, exc))

                changed += 1

    # Print summary
    if changed:
        self.LOG.info("%s %d metafile(s)." % (
            "Would've changed" if self.options.dry_run else "Changed", changed))
    if bad:
        self.LOG.warn("Skipped %d bad metafile(s)!" % (bad))
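The write path above uses the usual atomic-replace idiom: write the updated metafile to a hidden temporary name next to the target, then rename it over the original so readers never see a half-written file. A minimal standalone sketch of the same idiom, assuming pyrocore's bencode module (the helper name is made up):

import os
from pyrocore.util import bencode

def bwrite_atomic(filename, metainfo):
    # Write to ".<name>" in the same directory, then rename over the target.
    tempname = os.path.join(os.path.dirname(filename), '.' + os.path.basename(filename))
    bencode.bwrite(tempname, metainfo)
    if os.name != "posix":
        os.remove(filename)  # Windows cannot rename onto an existing file
    os.rename(tempname, filename)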
def listing(self, masked=True):
    """ List torrent info & contents. Returns a list of formatted lines.
    """
    # Assemble data
    metainfo = sanitize(bencode.bread(self.filename))
    announce = metainfo['announce']
    info = metainfo['info']
    info_hash = hashlib.sha1(bencode.bencode(info))
    total_size = data_size(metainfo)
    piece_length = info['piece length']
    piece_number, last_piece_length = divmod(total_size, piece_length)

    # Build result
    result = [
        "NAME %s" % (os.path.basename(self.filename)),
        "SIZE %s (%i * %s + %s)" % (
            fmt.human_size(total_size).strip(),
            piece_number, fmt.human_size(piece_length).strip(),
            fmt.human_size(last_piece_length).strip(),
        ),
        "META %s (pieces %s %.1f%%)" % (
            fmt.human_size(os.path.getsize(self.filename)).strip(),
            fmt.human_size(len(info["pieces"])).strip(),
            100.0 * len(info["pieces"]) / os.path.getsize(self.filename),
        ),
        "HASH %s" % (info_hash.hexdigest().upper()),
        "URL %s" % (mask_keys if masked else str)(announce),
        "PRV %s" % ("YES (DHT/PEX disabled)" if info.get("private") else "NO (DHT/PEX enabled)"),
        "TIME %s" % ("N/A" if "creation date" not in metainfo else
                     time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(metainfo["creation date"]))),
    ]

    for label, key in (("BY ", "created by"), ("REM ", "comment")):
        if key in metainfo:
            result.append("%s %s" % (label, metainfo.get(key, "N/A")))

    result.extend([
        "",
        "FILE LISTING%s" % ("" if 'length' in info else " [%d file(s)]" % len(info['files']),),
    ])
    if 'length' in info:
        # Single file
        result.append("%-69s%9s" % (
            info['name'],
            fmt.human_size(total_size),
        ))
    else:
        # Directory structure
        result.append("%s/" % info['name'])
        oldpaths = [None] * 99
        for entry in info['files']:
            # Remove crap that certain PHP software puts in paths
            entry_path = [i for i in entry["path"] if i]
            for idx, item in enumerate(entry_path[:-1]):
                if item != oldpaths[idx]:
                    result.append("%s%s/" % (' ' * (4 * (idx + 1)), item))
                    oldpaths[idx] = item
            result.append("%-69s%9s" % (
                ' ' * (4 * len(entry_path)) + entry_path[-1],
                fmt.human_size(entry['length']),
            ))

    return result
def CleanTorrentFile(self, logger, torrentPath):
    logger.info("Cleaning torrent file '%s'." % torrentPath)
    metainfo = bencode.bread(torrentPath)
    metafile.clean_meta(metainfo, including_info=False, logger=logger.info)
    bencode.bwrite(torrentPath, metainfo)
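Because clean_meta is called with including_info=False here, only keys outside the info dict are removed, so the torrent's info hash (and thus its identity on the tracker) should be unchanged. A hedged round-trip check of that property, assuming pyrocore's info_hash helper and a placeholder path:

from pyrocore.util import bencode, metafile

meta = bencode.bread("example.torrent")          # placeholder path
before = metafile.info_hash(meta)
metafile.clean_meta(meta, including_info=False)  # strips non-standard keys outside 'info' only
assert metafile.info_hash(meta) == before        # identity on the tracker is preserved
bencode.bwrite("example.torrent", meta)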