Example #1
File: mktor.py  Project: 01100001/pyroscope
    def make_magnet_meta(self, magnet_uri):
        """ Create a magnet-uri torrent.
        """
        import cgi, re, hashlib

        if magnet_uri.startswith("magnet:"):
            magnet_uri = magnet_uri[7:]
        meta = {"magnet-uri": "magnet:" + magnet_uri}
        magnet_params = cgi.parse_qs(magnet_uri.lstrip('?'))

        meta_name = magnet_params.get("xt", [hashlib.sha1(magnet_uri).hexdigest()])[0]
        if "dn" in magnet_params:
            meta_name = "%s-%s" % (magnet_params["dn"][0], meta_name)
        meta_name = re.sub(r"[^-_,a-zA-Z0-9]+", '.', meta_name).strip('.').replace("urn.btih.", "")

        if not config.magnet_watch:
            self.fatal("You MUST set the 'magnet_watch' config option!")
        meta_path = os.path.join(config.magnet_watch, "magnet-%s.torrent" % meta_name)
        self.LOG.debug("Writing magnet-uri metafile %r..." % (meta_path,))

        try:
            bencode.bwrite(meta_path, meta)
        except EnvironmentError as exc:
            self.fatal("Error writing magnet-uri metafile %r (%s)" % (meta_path, exc,))
            raise
Example #2
    def make_magnet_meta(self, magnet_uri):
        """ Create a magnet-uri torrent.
        """
        import cgi
        import hashlib
        import re

        if magnet_uri.startswith("magnet:"):
            magnet_uri = magnet_uri[7:]
        meta = {"magnet-uri": "magnet:" + magnet_uri}
        magnet_params = cgi.parse_qs(magnet_uri.lstrip('?'))

        meta_name = magnet_params.get(
            "xt", [hashlib.sha1(magnet_uri).hexdigest()])[0]
        if "dn" in magnet_params:
            meta_name = "%s-%s" % (magnet_params["dn"][0], meta_name)
        meta_name = re.sub(r"[^-_,a-zA-Z0-9]+", '.',
                           meta_name).strip('.').replace("urn.btih.", "")

        if not config.magnet_watch:
            self.fatal("You MUST set the 'magnet_watch' config option!")
        meta_path = os.path.join(config.magnet_watch,
                                 "magnet-%s.torrent" % meta_name)
        self.LOG.debug("Writing magnet-uri metafile %r..." % (meta_path, ))

        try:
            bencode.bwrite(meta_path, meta)
        except EnvironmentError as exc:
            self.fatal("Error writing magnet-uri metafile %r (%s)" % (
                meta_path,
                exc,
            ))
            raise
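
The two variants above are Python 2 code (string hashing, cgi.parse_qs). A minimal sketch of the same name-derivation step on Python 3, where urllib.parse.parse_qs replaces the removed cgi.parse_qs; the sample magnet URI is made up for illustration:

import hashlib
import re
from urllib.parse import parse_qs

def magnet_meta_name(magnet_uri):
    """Derive a filesystem-safe metafile name from a magnet URI, as above."""
    if magnet_uri.startswith("magnet:"):
        magnet_uri = magnet_uri[7:]
    params = parse_qs(magnet_uri.lstrip('?'))
    # Fall back to a hash of the whole URI if there is no "xt" parameter
    name = params.get("xt", [hashlib.sha1(magnet_uri.encode()).hexdigest()])[0]
    if "dn" in params:
        name = "%s-%s" % (params["dn"][0], name)
    return re.sub(r"[^-_,a-zA-Z0-9]+", '.', name).strip('.').replace("urn.btih.", "")

print(magnet_meta_name("magnet:?xt=urn:btih:0123456789abcdef0123&dn=example"))
# -> example-0123456789abcdef0123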
Example #3
    def AddTorrentSkipHashCheck(self, logger, torrentPath, downloadPath):
        logger.info(
            "Adding torrent '%s' without hash checking to rTorrent to '%s'." %
            (torrentPath, downloadPath))

        sourceDirectory, sourceFilename = os.path.split(torrentPath)
        sourceFilename = "fast resume " + sourceFilename
        destinationTorrentPath = os.path.join(sourceDirectory, sourceFilename)

        if os.path.exists(destinationTorrentPath):
            raise PtpUploaderException(
                "Can't create fast resume torrent because path '%s' already exists."
                % destinationTorrentPath)

        shutil.copyfile(torrentPath, destinationTorrentPath)

        metainfo = bencode.bread(destinationTorrentPath)
        metafile.add_fast_resume(metainfo, downloadPath.encode('utf-8'))
        bencode.bwrite(destinationTorrentPath, metainfo)

        infoHash = ""
        try:
            infoHash = self.AddTorrent(logger, destinationTorrentPath,
                                       downloadPath)
        finally:
            # We always remove the fast resume torrent regardless of result of adding the torrent to rTorrent.
            # This ensures that even if adding to rTorrent fails, resuming the job will still work.
            os.remove(destinationTorrentPath)

        return infoHash
Example #4
	def Make(logger, path, torrentPath):
		logger.info( "Making torrent from '%s' to '%s'." % ( path, torrentPath ) )
		
		if os.path.exists( torrentPath ):
			raise PtpUploaderException( "Can't create torrent because path '%s' already exists." % torrentPath )
		
		sourceSize = GetPathSize( path )

		# Optimal piece size should be automatically calculated by mktorrent...
		pieceSize = "-l 19" # 512 KB
		if sourceSize > ( 16 * 1024 * 1024 * 1024 ):
			pieceSize = "-l 24" # 16 MB
		elif sourceSize > ( 8 * 1024 * 1024 * 1024 ):
			pieceSize = "-l 23" # 8 MB
		elif sourceSize > ( 4 * 1024 * 1024 * 1024 ):
			pieceSize = "-l 22" # 4 MB
		elif sourceSize > ( 2 * 1024 * 1024 * 1024 ):
			pieceSize = "-l 21" # 2 MB
		elif sourceSize > ( 1 * 1024 * 1024 * 1024 ):
			pieceSize = "-l 20" # 1 MB

		args = [ Settings.MktorrentPath, '-a', Settings.PtpAnnounceUrl, '-p', pieceSize, '-o', torrentPath, path ]
		errorCode = subprocess.call( args )
		if errorCode != 0:
			args[ 2 ] = "OMITTED" # Do not log the announce URL, so it less likely gets posted in the forums.
			raise PtpUploaderException( "Process execution '%s' returned with error code '%s'." % ( args, errorCode ) )

		# Torrents with exactly the same content and piece size get the same info hash regardless of the announcement URL.
		# To make sure that our new torrent will have unique info hash we add a unused key to the info section of the metadata.
		# Another way would be to use a different piece size, but this solution is much more elegant.
		# See: http://wiki.theory.org/BitTorrentSpecification#Metainfo_File_Structure 
		metainfo = bencode.bread( torrentPath )
		metafile.assign_fields( metainfo, [ 'info.source=PTP' ] )
		bencode.bwrite( torrentPath, metainfo )
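
The if/elif chain above maps the payload size to mktorrent's -l option, which takes the piece length as a power-of-two exponent (2**19 = 512 KiB, 2**24 = 16 MiB). A table-driven sketch of the same mapping, for illustration only:

def piece_size_exponent(source_size_bytes):
    """Return the mktorrent -l exponent for a given payload size."""
    GiB = 1024 ** 3
    # Thresholds mirror the if/elif chain in the example above
    for threshold, exponent in ((16 * GiB, 24), (8 * GiB, 23), (4 * GiB, 22),
                                (2 * GiB, 21), (1 * GiB, 20)):
        if source_size_bytes > threshold:
            return exponent
    return 19  # 512 KiB pieces for payloads up to 1 GiB

assert piece_size_exponent(6 * 1024 ** 3) == 22  # a 6 GiB source gets 4 MiB pieces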
Example #5
File: mktor.py  Project: 01100001/pyroscope
    def mainloop(self):
        """ The main loop.
        """
        if len(self.args) == 1 and "=urn:btih:" in self.args[0]:
            # Handle magnet link
            self.make_magnet_meta(self.args[0])
            return

        if not self.args:
            self.parser.print_help()
            self.parser.exit()
        elif len(self.args) < 2:
            self.parser.error("Expected a path and at least one announce URL, got: %s" % (' '.join(self.args),))

        # Create and configure metafile factory
        datapath = self.args[0].rstrip(os.sep)
        metapath = datapath
        if self.options.output_filename:
            metapath = self.options.output_filename
            if os.path.isdir(metapath):
                metapath = os.path.join(metapath, os.path.basename(datapath))
        if not metapath.endswith(".torrent"):
            metapath += ".torrent"
        torrent = metafile.Metafile(metapath)
        torrent.ignore.extend(self.options.exclude)

        def callback(meta):
            "Callback to set label and resume data."
            if self.options.cross_seed:
                meta["info"]["x_cross_seed_label"] = self.options.cross_seed
            if self.options.no_cross_seed:
                del meta["info"]["x_cross_seed"]

            # Set specific keys?
            metafile.assign_fields(meta, self.options.set)

        # Create and write the metafile(s)
        # TODO: make it work better with multiple trackers (hash only once), also create fast-resume file for each tracker
        meta = torrent.create(datapath, self.args[1:],
            progress=None if self.options.quiet else metafile.console_progress(),
            root_name=self.options.root_name, private=self.options.private, no_date=self.options.no_date,
            comment=self.options.comment, created_by="PyroScope %s" % self.version, callback=callback
        )

        # Create second metafile with fast-resume?
        if self.options.hashed:
            try:
                metafile.add_fast_resume(meta, datapath)
            except EnvironmentError as exc:
                self.fatal("Error making fast-resume data (%s)" % (exc,))
                raise

            hashed_path = re.sub(r"\.torrent$", "", metapath) + "-resume.torrent"
            self.LOG.info("Writing fast-resume metafile %r..." % (hashed_path,))
            try:
                bencode.bwrite(hashed_path, meta)
            except EnvironmentError as exc:
                self.fatal("Error writing fast-resume metafile %r (%s)" % (hashed_path, exc,))
                raise
Example #6
    def Make(logger, path, torrentPath):
        logger.info("Making torrent from '%s' to '%s'." % (path, torrentPath))

        if os.path.exists(torrentPath):
            raise PtpUploaderException(
                "Can't create torrent because path '%s' already exists." %
                torrentPath)

        sourceSize = GetPathSize(path)

        # Optimal piece size should be automatically calculated by mktorrent...
        pieceSize = "-l 19"  # 512 KB
        if sourceSize > (16 * 1024 * 1024 * 1024):
            pieceSize = "-l 24"  # 16 MB
        elif sourceSize > (8 * 1024 * 1024 * 1024):
            pieceSize = "-l 23"  # 8 MB
        elif sourceSize > (4 * 1024 * 1024 * 1024):
            pieceSize = "-l 22"  # 4 MB
        elif sourceSize > (2 * 1024 * 1024 * 1024):
            pieceSize = "-l 21"  # 2 MB
        elif sourceSize > (1 * 1024 * 1024 * 1024):
            pieceSize = "-l 20"  # 1 MB

        args = [
            Settings.MktorrentPath, '-a', Settings.PtpAnnounceUrl, '-p',
            pieceSize, '-o', torrentPath, path
        ]
        errorCode = subprocess.call(args)
        if errorCode != 0:
            args[2] = "OMITTED"  # Do not log the announce URL, so it is less likely to get posted in the forums.
            raise PtpUploaderException(
                "Process execution '%s' returned with error code '%s'." %
                (args, errorCode))

        # Torrents with exactly the same content and piece size get the same info hash regardless of the announcement URL.
        # To make sure that our new torrent will have a unique info hash, we add an unused key to the info section of the metadata.
        # Another way would be to use a different piece size, but this solution is much more elegant.
        # See: http://wiki.theory.org/BitTorrentSpecification#Metainfo_File_Structure
        metainfo = bencode.bread(torrentPath)
        metafile.assign_fields(metainfo, ['info.source=PTP'])
        bencode.bwrite(torrentPath, metainfo)
Example #7
    def create(self,
               datapath,
               tracker_urls,
               comment=None,
               root_name=None,
               created_by=None,
               private=False,
               no_date=False,
               progress=None,
               callback=None,
               chunk_min=0,
               chunk_max=0):
        """ Create a metafile with the path given on object creation.
            Returns the last metafile dict that was written (as an object, not bencoded).
        """
        if datapath:
            self.datapath = datapath

        try:
            tracker_urls = ['' + tracker_urls]
        except TypeError:
            tracker_urls = list(tracker_urls)
        multi_mode = len(tracker_urls) > 1

        # TODO add optimization so the hashing happens only once for multiple URLs!
        for tracker_url in tracker_urls:
            # Lookup announce URLs from config file
            try:
                if urlparse.urlparse(tracker_url).scheme:
                    tracker_alias = urlparse.urlparse(tracker_url).netloc.split(':')[0].split('.')
                    tracker_alias = tracker_alias[-2 if len(tracker_alias) > 1 else 0]
                else:
                    tracker_alias, tracker_url = config.lookup_announce_alias(
                        tracker_url)
                    tracker_url = tracker_url[0]
            except (KeyError, IndexError):
                raise error.UserError("Bad tracker URL %r, or unknown alias!" %
                                      (tracker_url, ))

            # Determine metafile name
            output_name = self.filename
            if multi_mode:
                # Add 2nd level of announce URL domain to metafile name
                output_name = list(os.path.splitext(output_name))
                try:
                    output_name[1:1] = '-' + tracker_alias
                except (IndexError, ):
                    self.LOG.error("Malformed announce URL %r, skipping!" %
                                   (tracker_url, ))
                    continue
                output_name = ''.join(output_name)

            # Hash the data
            self.LOG.info("Creating %r for %s %r..." % (
                output_name,
                "filenames read from" if self._fifo else "data in",
                self.datapath,
            ))
            meta, _ = self._make_meta(tracker_url, root_name, private,
                                      progress, chunk_min, chunk_max)

            # Add optional fields
            if comment:
                meta["comment"] = comment
            if created_by:
                meta["created by"] = created_by
            if not no_date:
                meta["creation date"] = int(time.time())
            if callback:
                callback(meta)

            # Write metafile to disk
            self.LOG.debug("Writing %r..." % (output_name, ))
            bencode.bwrite(output_name, meta)

        return meta
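
In multi-tracker mode the example builds "name-alias.torrent" by slice-assigning into the splitext result; assigning a string to a list slice splices in its individual characters, which ''.join then reassembles. A small illustration with placeholder names:

import os

output_name = list(os.path.splitext("example.torrent"))  # ["example", ".torrent"]
output_name[1:1] = '-' + "opentracker"                   # splice "-opentracker" in before the extension
print(''.join(output_name))                              # example-opentracker.torrent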
Example #8
                        self.fatal("Error making fast-resume data (%s)" % (exc,))
                        raise

                # Set specific keys?
                metafile.assign_fields(metainfo, self.options.set)
                replace_fields(metainfo, self.options.regex)

                # Write new metafile, if changed
                new_metainfo = bencode.bencode(metainfo)
                if new_metainfo != old_metainfo:
                    if self.options.output_directory:
                        filename = os.path.join(self.options.output_directory, os.path.basename(filename))
                        self.LOG.info("Writing %r..." % filename)

                        if not self.options.dry_run:
                            bencode.bwrite(filename, metainfo)
                            if "libtorrent_resume" in metainfo:
                                # Also write clean version
                                filename = filename.replace(".torrent", "-no-resume.torrent")
                                del metainfo["libtorrent_resume"]
                                self.LOG.info("Writing %r..." % filename)
                                bencode.bwrite(filename, metainfo)
                    else:
                        self.LOG.info("Changing %r..." % filename)

                        if not self.options.dry_run:
                            # Write to temporary file
                            tempname = os.path.join(
                                os.path.dirname(filename),
                                '.' + os.path.basename(filename),
                            )
Example #9
            #           Determine where to save the new torrent
            destination = watchFolder

            if args.label:
                labeled = os.path.join(watchFolder, details["label"])

                #               Label Folder must exist, otherwise add torrent to base watch folder (no label)
                if os.path.exists(labeled):
                    destination = labeled

            torrentFile = os.path.join(destination, torrent + ".torrent")

            #           Write out rtorrent torrent to watch folder destination
            try:
                bencode.bwrite(torrentFile, rTorrent)

            except Exception as writeFailed:
                D2Rlog.warn("Write of New Torrent Failed: %s" + writeFailed[1])
                pass
            else:
                D2Rlog.info("Wrote Fast Resume Torrent to: " + torrentFile)

#           Remove torrent from Deluge
            try:
                client.core.remove_torrent(torrent, False)
            except Exception as removeFailed:
                D2Rlog.warn("Remove of Deluge Torrent Failed: %s" +
                            removeFailed[1])
            else:
                D2Rlog.info("Removed torrent from Deluge.")
Example #10
    def mainloop(self):
        """ The main loop.
        """
        if not self.args:
            self.parser.error("No metafiles given, nothing to do!")

        if 1 < sum(bool(i) for i in (self.options.no_ssl, self.options.reannounce, self.options.reannounce_all)):
            self.parser.error("Conflicting options --no-ssl, --reannounce and --reannounce-all!")

        # Set filter criteria for metafiles
        filter_url_prefix = None
        if self.options.reannounce:
            # <scheme>://<netloc>/<path>?<query>
            filter_url_prefix = urlparse.urlsplit(self.options.reannounce, allow_fragments=False)
            filter_url_prefix = urlparse.urlunsplit((
                filter_url_prefix.scheme, filter_url_prefix.netloc, '/', '', '' # bogus pylint: disable=E1103
            ))
            self.LOG.info("Filtering for metafiles with announce URL prefix %r..." % filter_url_prefix)

        if self.options.reannounce_all:
            self.options.reannounce = self.options.reannounce_all
        else:
            # When changing the announce URL w/o changing the domain, don't change the info hash!
            self.options.no_cross_seed = True

        # Resolve tracker alias, if URL doesn't look like an URL
        if self.options.reannounce and not urlparse.urlparse(self.options.reannounce).scheme:
            tracker_alias, idx = self.options.reannounce, "0"
            if '.' in tracker_alias:
                tracker_alias, idx = tracker_alias.split('.', 1)
            try:
                idx = int(idx, 10)
                _, tracker_url = config.lookup_announce_alias(tracker_alias)
                self.options.reannounce = tracker_url[idx]
            except (KeyError, IndexError, TypeError, ValueError) as exc:
                raise error.UserError("Unknown tracker alias or bogus URL %r (%s)!" % (
                    self.options.reannounce, exc))

        # go through given files
        bad = 0
        changed = 0
        for filename in self.args:
            try:
                # Read and remember current content
                metainfo = bencode.bread(filename)
                old_metainfo = bencode.bencode(metainfo)
            except (EnvironmentError, KeyError, bencode.BencodeError) as exc:
                self.LOG.warning("Skipping bad metafile %r (%s: %s)" % (filename, type(exc).__name__, exc))
                bad += 1
            else:
                # Check metafile integrity
                try:
                    metafile.check_meta(metainfo)
                except ValueError as exc:
                    self.LOG.warn("Metafile %r failed integrity check: %s" % (filename, exc,))
                    if not self.options.no_skip:
                        continue

                # Skip any metafiles that don't meet the pre-conditions
                if filter_url_prefix and not metainfo['announce'].startswith(filter_url_prefix):
                    self.LOG.warn("Skipping metafile %r no tracked by %r!" % (filename, filter_url_prefix,))
                    continue

                # Keep resume info safe
                libtorrent_resume = {}
                if "libtorrent_resume" in metainfo:
                    try:
                        libtorrent_resume["bitfield"] = metainfo["libtorrent_resume"]["bitfield"]
                    except KeyError:
                        pass # nothing to remember

                    libtorrent_resume["files"] = copy.deepcopy(metainfo["libtorrent_resume"]["files"])

                # Change private flag?
                if self.options.make_private and not metainfo["info"].get("private", 0):
                    self.LOG.info("Setting private flag...")
                    metainfo["info"]["private"] = 1
                if self.options.make_public and metainfo["info"].get("private", 0):
                    self.LOG.info("Clearing private flag...")
                    del metainfo["info"]["private"]

                # Remove non-standard keys?
                if self.options.clean or self.options.clean_all or self.options.clean_xseed:
                    metafile.clean_meta(metainfo, including_info=not self.options.clean, logger=self.LOG.info)

                # Restore resume info?
                if self.options.clean_xseed:
                    if libtorrent_resume:
                        self.LOG.info("Restoring key 'libtorrent_resume'...")
                        metainfo.setdefault("libtorrent_resume", {})
                        metainfo["libtorrent_resume"].update(libtorrent_resume)
                    else:
                        self.LOG.warn("No resume information found!")

                # Clean rTorrent data?
                if self.options.clean_rtorrent:
                    for key in self.RT_RESUMT_KEYS:
                        if key in metainfo:
                            self.LOG.info("Removing key %r..." % (key,))
                            del metainfo[key]

                # Change announce URL?
                if self.options.reannounce:
                    metainfo['announce'] = self.options.reannounce
                    if "announce-list" in metainfo:
                        del metainfo["announce-list"]

                    if not self.options.no_cross_seed:
                        # Enforce unique hash per tracker
                        metainfo["info"]["x_cross_seed"] = hashlib.md5(self.options.reannounce).hexdigest()
                if self.options.no_ssl:
                    # We're assuming here the same (default) port is used
                    metainfo['announce'] = (metainfo['announce']
                        .replace("https://", "http://").replace(":443/", ":80/"))

                # Change comment or creation date?
                if self.options.comment is not None:
                    if self.options.comment:
                        metainfo["comment"] = self.options.comment
                    elif "comment" in metainfo:
                        del metainfo["comment"]
                if self.options.bump_date:
                    metainfo["creation date"] = int(time.time())
                if self.options.no_date and "creation date" in metainfo:
                    del metainfo["creation date"]

                # Add fast-resume data?
                if self.options.hashed:
                    try:
                        metafile.add_fast_resume(metainfo, self.options.hashed.replace("{}", metainfo["info"]["name"]))
                    except EnvironmentError as exc:
                        self.fatal("Error making fast-resume data (%s)" % (exc,))
                        raise

                # Set specific keys?
                metafile.assign_fields(metainfo, self.options.set, self.options.debug)
                replace_fields(metainfo, self.options.regex)

                # Write new metafile, if changed
                new_metainfo = bencode.bencode(metainfo)
                if new_metainfo != old_metainfo:
                    if self.options.output_directory:
                        filename = os.path.join(self.options.output_directory, os.path.basename(filename))
                        self.LOG.info("Writing %r..." % filename)

                        if not self.options.dry_run:
                            bencode.bwrite(filename, metainfo)
                            if "libtorrent_resume" in metainfo:
                                # Also write clean version
                                filename = filename.replace(".torrent", "-no-resume.torrent")
                                del metainfo["libtorrent_resume"]
                                self.LOG.info("Writing %r..." % filename)
                                bencode.bwrite(filename, metainfo)
                    else:
                        self.LOG.info("Changing %r..." % filename)

                        if not self.options.dry_run:
                            # Write to temporary file
                            tempname = os.path.join(
                                os.path.dirname(filename),
                                '.' + os.path.basename(filename),
                            )
                            self.LOG.debug("Writing %r..." % tempname)
                            bencode.bwrite(tempname, metainfo)

                            # Replace existing file
                            if os.name != "posix":
                                # cannot rename to existing target on WIN32
                                os.remove(filename)

                            try:
                                os.rename(tempname, filename)
                            except EnvironmentError as exc:
                                # TODO: Try to write directly, keeping a backup!
                                raise error.LoggableError("Can't rename tempfile %r to %r (%s)" % (
                                    tempname, filename, exc
                                ))

                    changed += 1

        # Print summary
        if changed:
            self.LOG.info("%s %d metafile(s)." % (
                "Would've changed" if self.options.dry_run else "Changed", changed
            ))
        if bad:
            self.LOG.warn("Skipped %d bad metafile(s)!" % (bad))
Example #11
    def mainloop(self):
        """ The main loop.
        """
        if len(self.args) == 1 and "=urn:btih:" in self.args[0]:
            # Handle magnet link
            self.make_magnet_meta(self.args[0])
            return

        if not self.args:
            self.parser.print_help()
            self.parser.exit()
        elif len(self.args) < 2:
            self.parser.error(
                "Expected a path and at least one announce URL, got: %s" %
                (' '.join(self.args), ))

        # Create and configure metafile factory
        datapath = self.args[0].rstrip(os.sep)
        metapath = datapath
        if self.options.output_filename:
            metapath = self.options.output_filename
            if os.path.isdir(metapath):
                metapath = os.path.join(metapath, os.path.basename(datapath))
        if not metapath.endswith(".torrent"):
            metapath += ".torrent"
        torrent = metafile.Metafile(metapath)
        torrent.ignore.extend(self.options.exclude)

        def callback(meta):
            "Callback to set label and resume data."
            if self.options.cross_seed:
                if self.options.cross_seed == "@entropy":
                    meta["info"]["entropy"] = format(
                        random.getrandbits(self.ENTROPY_BITS),
                        'x').zfill(self.ENTROPY_BITS // 4)
                else:
                    meta["info"][
                        "x_cross_seed_label"] = self.options.cross_seed
            if self.options.no_cross_seed:
                del meta["info"]["x_cross_seed"]

            # Set specific keys?
            metafile.assign_fields(meta, self.options.set, self.options.debug)

        # Create and write the metafile(s)
        # TODO: make it work better with multiple trackers (hash only once), also create fast-resume file for each tracker
        meta = torrent.create(datapath,
                              self.args[1:],
                              progress=None if self.options.quiet else
                              metafile.console_progress(),
                              root_name=self.options.root_name,
                              private=self.options.private,
                              no_date=self.options.no_date,
                              comment=self.options.comment,
                              created_by="PyroScope %s" % self.version,
                              callback=callback)
        tied_file = metapath

        # Create second metafile with fast-resume?
        if self.options.hashed:
            try:
                metafile.add_fast_resume(meta, datapath)
            except EnvironmentError as exc:
                self.fatal("Error making fast-resume data (%s)" % (exc, ))
                raise

            hashed_path = re.sub(r"\.torrent$", "",
                                 metapath) + "-resume.torrent"
            self.LOG.info("Writing fast-resume metafile %r..." %
                          (hashed_path, ))
            try:
                bencode.bwrite(hashed_path, meta)
                tied_file = hashed_path
            except EnvironmentError as exc:
                self.fatal("Error writing fast-resume metafile %r (%s)" % (
                    hashed_path,
                    exc,
                ))
                raise

        # Load into client on demand
        if self.options.load or self.options.start:
            proxy = config.engine.open()
            info_hash = metafile.info_hash(meta)
            try:
                item_name = proxy.d.name(info_hash, fail_silently=True)
            except xmlrpc.HashNotFound:
                load_item = proxy.load.start_verbose if self.options.start else proxy.load.verbose
                load_item(xmlrpc.NOHASH, os.path.abspath(tied_file))
                time.sleep(.05)  # let things settle
                try:
                    item_name = proxy.d.name(info_hash, fail_silently=True)
                    self.LOG.info(
                        "OK: Item #%s %s client.", info_hash,
                        'started in' if self.options.start else 'loaded into')
                except xmlrpc.HashNotFound as exc:
                    self.fatal("Error while loading item #%s into client: %s" %
                               (
                                   info_hash,
                                   exc,
                               ))
            else:
                self.LOG.warning(
                    "Item #%s already exists in client, --load/--start is ignored!",
                    info_hash)
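
The "@entropy" branch in the callback above salts the info dict with random data so the resulting torrent gets a fresh info hash: N random bits are rendered as a zero-padded hex string of N // 4 digits. The value of self.ENTROPY_BITS is not shown in the excerpt; 512 below is an assumption for illustration:

import random

ENTROPY_BITS = 512  # assumed value, not taken from the excerpt
token = format(random.getrandbits(ENTROPY_BITS), 'x').zfill(ENTROPY_BITS // 4)
assert len(token) == ENTROPY_BITS // 4  # 128 hex digits for 512 random bits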
Example #12
File: mktor.py  Project: zapras/pyrocore
    def mainloop(self):
        """ The main loop.
        """
        if len(self.args) == 1 and "=urn:btih:" in self.args[0]:
            # Handle magnet link
            self.make_magnet_meta(self.args[0])
            return

        if not self.args:
            self.parser.print_help()
            self.parser.exit()
        elif len(self.args) < 2:
            self.parser.error("Expected a path and at least one announce URL, got: %s" % (' '.join(self.args),))

        # Create and configure metafile factory
        datapath = self.args[0].rstrip(os.sep)
        metapath = datapath
        if self.options.output_filename:
            metapath = self.options.output_filename
            if os.path.isdir(metapath):
                metapath = os.path.join(metapath, os.path.basename(datapath))
        if not metapath.endswith(".torrent"):
            metapath += ".torrent"
        torrent = metafile.Metafile(metapath)
        torrent.ignore.extend(self.options.exclude)

        def callback(meta):
            "Callback to set label and resume data."
            if self.options.cross_seed:
                if self.options.cross_seed == "@entropy":
                    meta["info"]["entropy"] = format(random.getrandbits(self.ENTROPY_BITS),
                                                     'x').zfill(self.ENTROPY_BITS//4)
                else:
                    meta["info"]["x_cross_seed_label"] = self.options.cross_seed
            if self.options.no_cross_seed:
                del meta["info"]["x_cross_seed"]

            # Set specific keys?
            metafile.assign_fields(meta, self.options.set)

        # Create and write the metafile(s)
        # TODO: make it work better with multiple trackers (hash only once), also create fast-resume file for each tracker
        meta = torrent.create(datapath, self.args[1:],
            progress=None if self.options.quiet else metafile.console_progress(),
            root_name=self.options.root_name, private=self.options.private, no_date=self.options.no_date,
            comment=self.options.comment, created_by="PyroScope %s" % self.version, callback=callback
        )

        # Create second metafile with fast-resume?
        if self.options.hashed:
            try:
                metafile.add_fast_resume(meta, datapath)
            except EnvironmentError as exc:
                self.fatal("Error making fast-resume data (%s)" % (exc,))
                raise

            hashed_path = re.sub(r"\.torrent$", "", metapath) + "-resume.torrent"
            self.LOG.info("Writing fast-resume metafile %r..." % (hashed_path,))
            try:
                bencode.bwrite(hashed_path, meta)
            except EnvironmentError as exc:
                self.fatal("Error writing fast-resume metafile %r (%s)" % (hashed_path, exc,))
                raise
Example #13
    def create(self, datapath, tracker_urls, comment=None, root_name=None,
                     created_by=None, private=False, no_date=False, progress=None,
                     callback=None):
        """ Create a metafile with the path given on object creation.
            Returns the last metafile dict that was written (as an object, not bencoded).
        """
        if datapath:
            self.datapath = datapath

        try:
            tracker_urls = ['' + tracker_urls]
        except TypeError:
            tracker_urls = list(tracker_urls)
        multi_mode = len(tracker_urls) > 1

        # TODO add optimization so the hashing happens only once for multiple URLs!
        for tracker_url in tracker_urls:
            # Lookup announce URLs from config file
            try:
                if urlparse.urlparse(tracker_url).scheme:
                    tracker_alias = urlparse.urlparse(tracker_url).netloc.split(':')[0].split('.')
                    tracker_alias = tracker_alias[-2 if len(tracker_alias) > 1 else 0]
                else:
                    tracker_alias, tracker_url = config.lookup_announce_alias(tracker_url)
                    tracker_url = tracker_url[0]
            except (KeyError, IndexError):
                raise error.UserError("Bad tracker URL %r, or unknown alias!" % (tracker_url,))

            # Determine metafile name
            output_name = self.filename
            if multi_mode:
                # Add 2nd level of announce URL domain to metafile name
                output_name = list(os.path.splitext(output_name))
                try:
                    output_name[1:1] = '-' + tracker_alias
                except (IndexError,):
                    self.LOG.error("Malformed announce URL %r, skipping!" % (tracker_url,))
                    continue
                output_name = ''.join(output_name)

            # Hash the data
            self.LOG.info("Creating %r for %s %r..." % (
                output_name, "filenames read from" if self._fifo else "data in", self.datapath,
            ))
            meta = self._make_meta(tracker_url, root_name, private, progress)

            # Add optional fields
            if comment:
                meta["comment"] = comment
            if created_by:
                meta["created by"] = created_by
            if not no_date:
                meta["creation date"] = long(time.time())
            if callback:
                callback(meta)

            # Write metafile to disk
            self.LOG.debug("Writing %r..." % (output_name,))
            bencode.bwrite(output_name, meta)

        return meta
Example #14
    def CleanTorrentFile(self, logger, torrentPath):
        logger.info("Cleaning torrent file '%s'." % torrentPath)

        metainfo = bencode.bread(torrentPath)
        metafile.clean_meta(metainfo, including_info=False, logger=logger.info)
        bencode.bwrite(torrentPath, metainfo)
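
All of the examples share the same read-modify-write cycle around bencode.bwrite. A minimal round-trip sketch of that pattern; the import path is assumed from the pyrocore project layout, and "example.torrent" plus the comment text are placeholders:

from pyrocore.util import bencode, metafile  # import path assumed

path = "example.torrent"
metainfo = bencode.bread(path)                 # decode the .torrent file into a dict
metainfo["comment"] = "edited with pyrocore"   # top-level field, leaves the info hash unchanged
metafile.check_meta(metainfo)                  # sanity-check the required keys
bencode.bwrite(path, metainfo)                 # re-encode and overwrite the file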