Example #1
def load_torrent(proxy, ID, path):
    """Send a torrent to rtorrent and kick off the hash recheck"""
    logger = logging.getLogger(__name__)
    torrent = ptpapi.Torrent(ID=ID)
    torrent_data = torrent.download()
    data = bencode.bdecode(torrent_data)
    thash = metafile.info_hash(data)
    try:
        logger.debug(u"Testing for hash {0}".format(
            proxy.d.hash(thash, fail_silently=True)))
        logger.error(
            u"Hash {0} already exists in rtorrent as {1}, cannot load.".format(
                thash, proxy.d.name(thash)))
        return
    except (xmlrpc_client.Fault, xmlrpc.HashNotFound):
        pass
    proxy.load.raw('', xmlrpc_client.Binary(torrent_data))
    # Wait until the torrent is loaded and available
    while True:
        sleep(1)
        try:
            proxy.d.hash(thash, fail_silently=True)
            break
        except (xmlrpc_client.Fault, xmlrpc.HashNotFound):
            pass
    logger.info(u"Torrent loaded at {0}".format(path))
    proxy.d.custom.set(thash, 'tm_completed', str(int(time())))
    proxy.d.directory.set(thash, path)
    proxy.d.check_hash(thash)
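All of these examples revolve around metafile.info_hash(), which is the SHA-1 digest of the bencoded "info" dictionary from the torrent metadata. Below is a minimal, self-contained sketch of that idea for reference; it is only an illustration with made-up helper names, not the pyrocore implementation.

import hashlib

def _bencode(obj):
    """Tiny illustrative bencoder covering the types found in torrent metadata."""
    if isinstance(obj, int):
        return b"i%de" % obj
    if isinstance(obj, bytes):
        return b"%d:%s" % (len(obj), obj)
    if isinstance(obj, str):
        return _bencode(obj.encode("utf-8"))
    if isinstance(obj, list):
        return b"l" + b"".join(_bencode(item) for item in obj) + b"e"
    if isinstance(obj, dict):
        keys = sorted(obj, key=lambda k: k if isinstance(k, bytes) else k.encode("utf-8"))
        return b"d" + b"".join(_bencode(k) + _bencode(obj[k]) for k in keys) + b"e"
    raise TypeError("unsupported type: %r" % type(obj))

def naive_info_hash(metadata):
    """Hex info hash of decoded torrent metadata (a dict with an 'info' key)."""
    return hashlib.sha1(_bencode(metadata["info"])).hexdigest()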
Example #2
    def AddTorrent(self, logger, torrentPath, downloadPath):
        logger.info(
            "Initiating the download of torrent '%s' with rTorrent to '%s'." %
            (torrentPath, downloadPath))

        file = open(torrentPath, "rb")
        contents = xmlrpclib.Binary(file.read())
        file.close()

        torrentData = bencode.bread(torrentPath)
        metafile.check_meta(torrentData)
        infoHash = metafile.info_hash(torrentData)

        self.proxy.load_raw(contents)

        # If load_raw is slow, set_directory_base throws an exception (Fault: <Fault -501: 'Could not find info-hash.'>),
        # so we retry adding the torrent after a short delay.
        maximumTries = 15
        while True:
            try:
                self.proxy.d.set_directory_base(infoHash, downloadPath)
                self.proxy.d.start(infoHash)
                break
            except Exception:
                if maximumTries > 1:
                    maximumTries -= 1
                    time.sleep(2)  # Two seconds.
                else:
                    raise

        return infoHash
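The retry loop above works around the fact that rtorrent may not know the info hash yet right after load_raw. The same pattern, factored into a small generic helper, is sketched below with made-up names; it is not part of the quoted project.

import time

def retry_call(action, tries=15, delay=2.0, exceptions=(Exception,)):
    """Call action() until it succeeds or the attempts are exhausted."""
    while True:
        try:
            return action()
        except exceptions:
            tries -= 1
            if tries <= 0:
                raise
            time.sleep(delay)

# Usage (hypothetical):
# retry_call(lambda: proxy.d.set_directory_base(infoHash, downloadPath))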
Example #3
	def AddTorrent(self, logger, torrentPath, downloadPath):
		logger.info( "Initiating the download of torrent '%s' with rTorrent to '%s'." % ( torrentPath, downloadPath ) );
		
		file = open( torrentPath, "rb" );
		contents = xmlrpclib.Binary( file.read() );
		file.close();

		torrentData = bencode.bread( torrentPath ); 
		metafile.check_meta( torrentData );
		infoHash = metafile.info_hash( torrentData );

		self.proxy.load_raw( contents );

		# If load_raw is slow, set_directory_base throws an exception (Fault: <Fault -501: 'Could not find info-hash.'>),
		# so we retry adding the torrent after a short delay.
		maximumTries = 15
		while True:
			try:
				self.proxy.d.set_directory_base( infoHash, downloadPath );
				self.proxy.d.start( infoHash );
				break
			except Exception:
				if maximumTries > 1:
					maximumTries -= 1
					time.sleep( 2 ) # Two seconds.
				else:
					raise

		return infoHash;
Example #4
File: watch.py  Project: zapras/pyrocore
    def parse(self):
        """ Parse metafile and check pre-conditions.
        """
        try:
            if not os.path.getsize(self.ns.pathname):
                # Ignore 0-byte dummy files (Firefox creates these while downloading)
                self.job.LOG.warn("Ignoring 0-byte metafile '%s'" %
                                  (self.ns.pathname, ))
                return
            self.metadata = metafile.checked_open(self.ns.pathname)
        except EnvironmentError as exc:
            self.job.LOG.error("Can't read metafile '%s' (%s)" % (
                self.ns.pathname,
                str(exc).replace(": '%s'" % self.ns.pathname, ""),
            ))
            return
        except ValueError as exc:
            self.job.LOG.error("Invalid metafile '%s': %s" %
                               (self.ns.pathname, exc))
            return

        self.ns.info_hash = metafile.info_hash(self.metadata)
        self.ns.info_name = self.metadata["info"]["name"]
        self.job.LOG.info("Loaded '%s' from metafile '%s'" %
                          (self.ns.info_name, self.ns.pathname))

        # Check whether item is already loaded
        try:
            name = self.job.proxy.d.name(self.ns.info_hash, fail_silently=True)
        except xmlrpc.HashNotFound:
            pass
        except xmlrpc.ERRORS as exc:
            if exc.faultString != "Could not find info-hash.":
                self.job.LOG.error("While checking for #%s: %s" %
                                   (self.ns.info_hash, exc))
                return
        else:
            self.job.LOG.warn("Item #%s '%s' already added to client" %
                              (self.ns.info_hash, name))
            return

        return True
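The "already loaded?" probe used here (and in Example #1) amounts to asking rtorrent for the item's name and treating the "Could not find info-hash." fault as "not loaded". A rough sketch of that check using only the standard library XML-RPC client follows; the endpoint URL is a placeholder and assumes rtorrent's XML-RPC interface is reachable over HTTP.

import xmlrpc.client

def is_loaded(proxy, info_hash):
    """Return True if rtorrent already has an item with this info hash."""
    try:
        proxy.d.name(info_hash)  # raises a Fault for unknown hashes
        return True
    except xmlrpc.client.Fault as exc:
        if "Could not find info-hash" in exc.faultString:
            return False
        raise

# proxy = xmlrpc.client.ServerProxy("http://localhost:8000/RPC2")  # placeholder URL
# if is_loaded(proxy, info_hash): ...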
Example #5
    def mainloop(self):
        """ The main loop.
        """
        if not self.args:
            self.parser.print_help()
            self.parser.exit()

        for idx, filename in enumerate(self.args):
            torrent = metafile.Metafile(filename)
            if idx and not self.options.output:
                print('')
                print("~" * 79)

            try:
                # Read and check metafile
                try:
                    data = metafile.checked_open(
                        filename,
                        log=self.LOG if self.options.skip_validation else None,
                        quiet=(self.options.quiet
                               and (self.options.output or self.options.raw)))
                except EnvironmentError as exc:
                    self.fatal("Can't read '%s' (%s)" % (
                        filename,
                        str(exc).replace(": '%s'" % filename, ""),
                    ))
                    raise

                listing = None

                if self.options.raw or self.options.json:
                    if not self.options.reveal and "info" in data:
                        # Shorten useless binary piece hashes
                        data["info"]["pieces"] = "<%d piece hashes>" % (
                            len(data["info"]["pieces"]) //
                            len(hashlib.sha1().digest())  # bogus pylint: disable=E1101
                        )

                    if self.options.json:
                        listing = json.dumps(data,
                                             default=repr,
                                             indent=4,
                                             sort_keys=True)
                    else:
                        pprinter = (pprint.PrettyPrinter if self.options.reveal
                                    else metafile.MaskingPrettyPrinter)()
                        listing = pprinter.pformat(data)
                elif self.options.output:

                    def splitter(fields):
                        "Yield single names for a list of comma-separated strings."
                        for flist in fields:
                            for field in flist.split(','):
                                yield field.strip()

                    data["__file__"] = filename
                    if 'info' in data:
                        data["__hash__"] = metafile.info_hash(data)
                        data["__size__"] = metafile.data_size(data)
                    values = []
                    for field in splitter(self.options.output):
                        try:
                            val = data
                            for key in field.split('.'):
                                val = val[key]
                        except KeyError as exc:
                            self.LOG.error("%s: Field %r not found (%s)" %
                                           (filename, field, exc))
                            break
                        else:
                            values.append(str(val))
                    else:
                        listing = '\t'.join(values)
                else:
                    listing = '\n'.join(
                        torrent.listing(masked=not self.options.reveal))
            except (ValueError, KeyError, bencode.BencodeError) as exc:
                if self.options.debug:
                    raise
                self.LOG.warning("Bad metafile %r (%s: %s)" %
                                 (filename, type(exc).__name__, exc))
            else:
                if listing is not None:
                    print(fmt.to_utf8(listing))
Example #6
    def mainloop(self):
        """ The main loop.
        """
        if len(self.args) == 1 and "=urn:btih:" in self.args[0]:
            # Handle magnet link
            self.make_magnet_meta(self.args[0])
            return

        if not self.args:
            self.parser.print_help()
            self.parser.exit()
        elif len(self.args) < 2:
            self.parser.error(
                "Expected a path and at least one announce URL, got: %s" %
                (' '.join(self.args), ))

        # Create and configure metafile factory
        datapath = self.args[0].rstrip(os.sep)
        metapath = datapath
        if self.options.output_filename:
            metapath = self.options.output_filename
            if os.path.isdir(metapath):
                metapath = os.path.join(metapath, os.path.basename(datapath))
        if not metapath.endswith(".torrent"):
            metapath += ".torrent"
        torrent = metafile.Metafile(metapath)
        torrent.ignore.extend(self.options.exclude)

        def callback(meta):
            "Callback to set label and resume data."
            if self.options.cross_seed:
                if self.options.cross_seed == "@entropy":
                    meta["info"]["entropy"] = format(
                        random.getrandbits(self.ENTROPY_BITS),
                        'x').zfill(self.ENTROPY_BITS // 4)
                else:
                    meta["info"][
                        "x_cross_seed_label"] = self.options.cross_seed
            if self.options.no_cross_seed:
                del meta["info"]["x_cross_seed"]

            # Set specific keys?
            metafile.assign_fields(meta, self.options.set, self.options.debug)

        # Create and write the metafile(s)
        # TODO: make it work better with multiple trackers (hash only once), also create fast-resume file for each tracker
        meta = torrent.create(datapath,
                              self.args[1:],
                              progress=None if self.options.quiet else
                              metafile.console_progress(),
                              root_name=self.options.root_name,
                              private=self.options.private,
                              no_date=self.options.no_date,
                              comment=self.options.comment,
                              created_by="PyroScope %s" % self.version,
                              callback=callback)
        tied_file = metapath

        # Create second metafile with fast-resume?
        if self.options.hashed:
            try:
                metafile.add_fast_resume(meta, datapath)
            except EnvironmentError as exc:
                self.fatal("Error making fast-resume data (%s)" % (exc, ))
                raise

            hashed_path = re.sub(r"\.torrent$", "",
                                 metapath) + "-resume.torrent"
            self.LOG.info("Writing fast-resume metafile %r..." %
                          (hashed_path, ))
            try:
                bencode.bwrite(hashed_path, meta)
                tied_file = hashed_path
            except EnvironmentError as exc:
                self.fatal("Error writing fast-resume metafile %r (%s)" % (
                    hashed_path,
                    exc,
                ))
                raise

        # Load into client on demand
        if self.options.load or self.options.start:
            proxy = config.engine.open()
            info_hash = metafile.info_hash(meta)
            try:
                item_name = proxy.d.name(info_hash, fail_silently=True)
            except xmlrpc.HashNotFound:
                load_item = proxy.load.start_verbose if self.options.start else proxy.load.verbose
                load_item(xmlrpc.NOHASH, os.path.abspath(tied_file))
                time.sleep(.05)  # let things settle
                try:
                    item_name = proxy.d.name(info_hash, fail_silently=True)
                    self.LOG.info(
                        "OK: Item #%s %s client.", info_hash,
                        'started in' if self.options.start else 'loaded into')
                except xmlrpc.HashNotFound as exc:
                    self.fatal("Error while loading item #%s into client: %s" %
                               (
                                   info_hash,
                                   exc,
                               ))
            else:
                self.LOG.warning(
                    "Item #%s already exists in client, --load/--start is ignored!",
                    info_hash)
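The @entropy branch in the callback above makes otherwise identical data produce a different info hash for cross-seeding by injecting a fixed-width random hex string into the info dictionary. A quick self-contained illustration of that formatting follows; the 512-bit size is an assumption for this sketch and may differ from the project's actual ENTROPY_BITS constant.

import random

ENTROPY_BITS = 512  # assumed value, for illustration only
token = format(random.getrandbits(ENTROPY_BITS), 'x').zfill(ENTROPY_BITS // 4)
print(len(token))  # always ENTROPY_BITS // 4 hex digits, i.e. 128 here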
Example #7
        try:
            if not os.path.getsize(self.ns.pathname):
                # Ignore 0-byte dummy files (Firefox creates these while downloading)
                self.job.LOG.warn("Ignoring 0-byte metafile '%s'" % (self.ns.pathname,))
                return
            self.metadata = metafile.checked_open(self.ns.pathname)
        except EnvironmentError as exc:
            self.job.LOG.error("Can't read metafile '%s' (%s)" % (
                self.ns.pathname, str(exc).replace(": '%s'" % self.ns.pathname, ""),
            ))
            return
        except ValueError as exc:
            self.job.LOG.error("Invalid metafile '%s': %s" % (self.ns.pathname, exc))
            return

        self.ns.info_hash = metafile.info_hash(self.metadata)
        self.ns.info_name = self.metadata["info"]["name"]
        self.job.LOG.info("Loaded '%s' from metafile '%s'" % (self.ns.info_name, self.ns.pathname))

        # Check whether item is already loaded
        try:
            name = self.job.proxy.d.get_name(self.ns.info_hash, fail_silently=True)
        except xmlrpc.ERRORS as exc:
            if exc.faultString != "Could not find info-hash.":
                self.job.LOG.error("While checking for #%s: %s" % (self.ns.info_hash, exc))
                return
        else:
            self.job.LOG.warn("Item #%s '%s' already added to client" % (self.ns.info_hash, name))
            return

        return True
Example #8
        try:
            if not os.path.getsize(self.ns.pathname):
                # Ignore 0-byte dummy files (Firefox creates these while downloading)
                self.job.LOG.warn("Ignoring 0-byte metafile '%s'" % (self.ns.pathname,))
                return
            self.metadata = metafile.checked_open(self.ns.pathname)
        except EnvironmentError as exc:
            self.job.LOG.error("Can't read metafile '%s' (%s)" % (
                self.ns.pathname, str(exc).replace(": '%s'" % self.ns.pathname, ""),
            ))
            return
        except ValueError as exc:
            self.job.LOG.error("Invalid metafile '%s': %s" % (self.ns.pathname, exc))
            return

        self.ns.info_hash = metafile.info_hash(self.metadata)
        self.ns.info_name = self.metadata["info"]["name"]
        self.job.LOG.info("Loaded '%s' from metafile '%s'" % (self.ns.info_name, self.ns.pathname))

        # Check whether item is already loaded
        try:
            name = self.job.proxy.d.get_name(self.ns.info_hash, fail_silently=True)
        except xmlrpc.HashNotFound:
            pass
        except xmlrpc.ERRORS as exc:
            if exc.faultString != "Could not find info-hash.":
                self.job.LOG.error("While checking for #%s: %s" % (self.ns.info_hash, exc))
                return
        else:
            self.job.LOG.warn("Item #%s '%s' already added to client" % (self.ns.info_hash, name))
            return
Example #9
    def mainloop(self):
        """ The main loop.
        """
        if not self.args:
            self.parser.print_help()
            self.parser.exit()

        for idx, filename in enumerate(self.args):
            torrent = metafile.Metafile(filename)
            if idx and not self.options.output:
                print()
                print("~" * 79)

            try:
                # Read and check metafile
                try:
                    data = metafile.checked_open(filename, log=self.LOG if self.options.skip_validation else None,
                        quiet=(self.options.quiet and (self.options.output or self.options.raw)))
                except EnvironmentError as exc:
                    self.fatal("Can't read '%s' (%s)" % (
                        filename, str(exc).replace(": '%s'" % filename, ""),
                    ))
                    raise

                listing = None

                if self.options.raw:
                    if not self.options.reveal and "info" in data:
                        # Shorten useless binary piece hashes
                        data["info"]["pieces"] = "<%d piece hashes>" % (
                            len(data["info"]["pieces"]) // len(hashlib.sha1().digest()) # bogus pylint: disable=E1101
                        )

                    pprinter = (pprint.PrettyPrinter if self.options.reveal else metafile.MaskingPrettyPrinter)() 
                    listing = pprinter.pformat(data)
                elif self.options.output:
                    def splitter(fields):
                        "Yield single names for a list of comma-separated strings."
                        for flist in fields:
                            for field in flist.split(','):
                                yield field.strip()

                    data["__file__"] = filename
                    if 'info' in data:
                        data["__hash__"] = metafile.info_hash(data)
                        data["__size__"] = metafile.data_size(data)
                    values = []
                    for field in splitter(self.options.output):
                        try:
                            val = data
                            for key in field.split('.'):
                                val = val[key]
                        except KeyError as exc:
                            self.LOG.error("%s: Field %r not found (%s)" % (filename, field, exc))
                            break
                        else:
                            values.append(str(val))
                    else:
                        listing = '\t'.join(values)
                else:
                    listing = '\n'.join(torrent.listing(masked=not self.options.reveal))