Example #1
    def renameFolder(self, torid, newname):
        """rename torrent directory name"""
        # first, get info on this torrent
        torinfo = self.xcon.call('core.get_torrent_status', torid, [])

        # Check if the dir is the same as the torrent name; otherwise derive it
        # from the first file's path
        namepath = torinfo['save_path'] + '/' + torinfo['name']
        filedir = os.path.dirname(torinfo['save_path'] + '/' +
                                  torinfo['files'][0]['path'])
        if os.path.isdir(namepath):
            fdir = namepath
        elif os.path.isdir(filedir):
            fdir = filedir
        else:
            logthis("Unable to determine existing directory name for",
                    suffix=torid,
                    loglevel=LL.ERROR)
            return False

        # get base dir from full path
        sdir = os.path.basename(fdir)

        try:
            self.xcon.call('core.rename_folder', torid, sdir, newname)
            return True
        except Exception as e:
            logthis("Failed to rename torrent dir:",
                    prefix=torid,
                    suffix=e,
                    loglevel=LL.ERROR)
            return False
Example #2
    def moveTorrent(self, torids, destdir):
        """move torrent data (directory or file) to new location"""
        if isinstance(torids, list): torids = torids[0]
        mcx = multicall(self.xcon)

        rpdir = os.path.realpath(destdir)
        if not os.path.isdir(rpdir):
            logthis(
                "Failed to move torrent storage. Directory does not exist:",
                suffix=rpdir,
                loglevel=LL.ERROR)
            return False

        # get current dir of the torrent and torrent name
        tdir, tname = mcx.q('d.get_directory', [torids]).q('d.name',
                                                           [torids]).run()
        if os.path.isdir(tdir):
            mbase = tdir
            basedir = os.path.dirname(tdir)
            tdest = destdir + '/' + os.path.basename(tdir)
        else:
            mbase = tdir + '/' + tname
            basedir = tdir
            tdest = destdir + '/' + tname

        try:
            # move file/dir (rtorrent does not do this itself like deluge does)
            os.rename(mbase, tdest)
            self.xcon.d.set_directory(torids, tdest)
            return True
        except Exception as e:
            logthis("Failed to move torrent storage:",
                    suffix=e,
                    loglevel=LL.ERROR)
            return False
Example #3
def optexpand(iop):
    # expand CLI options like "xcode.scale" from 1D to 2D array/dict (like [xcode][scale])
    outrc = {}
    for i in iop:
        dsec, dkey = i.split(".")
        if dsec not in outrc:
            outrc[dsec] = {}
        outrc[dsec][dkey] = iop[i]
    logthis("Expanded cli optdex:", suffix=outrc, loglevel=LL.DEBUG2)
    return outrc
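For reference, a minimal standalone sketch of the same dotted-key expansion (the sample option names are illustrative, and the project's logthis/LL logging is omitted):

def expand_dotted_keys(iop):
    # "section.key" -> nested dict: {"xcode.scale": v} becomes {"xcode": {"scale": v}}
    outrc = {}
    for key, value in iop.items():
        dsec, dkey = key.split(".", 1)
        outrc.setdefault(dsec, {})[dkey] = value
    return outrc

print(expand_dotted_keys({"xcode.scale": "1280x720", "core.loglevel": "6"}))
# -> {'xcode': {'scale': '1280x720'}, 'core': {'loglevel': '6'}}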
Example #4
def pidfile_set():
    pfname = config.srv['pidfile']
    try:
        with open(pfname, "w") as fo:
            fo.write("%d\n" % os.getpid())
    except OSError:
        logthis("Failed to write data to PID file:",
                suffix=pfname,
                loglevel=LL.ERROR)
        failwith(PROCFAIL, "Ensure write permission at the PID file location.")
Example #5
def _main():
    """entry point"""
    global config, dlx

    # Show banner
    if len(sys.argv) < 2 or (sys.argv[1] != '--version' and not
            (len(sys.argv[1]) > 1 and sys.argv[1][1] == 'q')):
        show_banner()

    # Set default loglevel
    loglevel(defaults['core']['loglevel'])

    # parse CLI options and load running config
    xopt = parse_cli()
    config = rcfile.loadConfig(cliopts=xopt)
    loglevel(config.core['loglevel'])
    configure_logger(config)

    # Set quiet exception handler for quiet operation
    # Otherwise use the verbose handler, which logs the exception to logging targets
    if config.core['loglevel'] < LL.INFO:
        sys.excepthook = exceptionHandlerQuiet
    else:
        sys.excepthook = exceptionHandler

    # parse rules file
    ruleparser.parse(config)

    # connect to deluge (for non-daemon stuffs)
    if not config.run['srv']:
        dlx = tclient.TorrentClient(config)

    ## process commands

    if config.run['list']:
        # get list of torrents
        rval = mode_list()
    elif config.run['move'] is not None:
        # process 'move' command
        rval = mode_move(config.run['torid'], config.run['move'])
    elif config.run['torid']:
        # process 'complete' hook
        rval = mode_chook(config.run['torid'])
    elif config.run['srv']:
        # start daemon
        rval = daemon.start(config)
    else:
        logthis("Nothing to do.", loglevel=LL.WARNING)
        rval = 1

    #closelog()
    sys.exit(rval)
Example #6
def tor_isfile(tdata):
    """check if a torrent is a single file (instead of a directory)"""
    tflist = tdata.get('files', None)
    if not tflist:
        logthis("incomplete torrent data provided", loglevel=LL.WARNING)
        return False

    if tflist[0]['path'] == tdata['name']:
        return True
    elif os.path.isfile(tdata['path']):
        return True
    else:
        return False
Example #7
    def __init__(self, uri, abortfail=True):
        """connect to the rTorrent XML-RPC interface"""
        logthis("Connecting to rTorrent RPC via",
                suffix=uri,
                loglevel=LL.VERBOSE)
        self.xcon = xmlrpc.client.ServerProxy(uri)
        mcx = multicall(self.xcon)
        try:
            self.client_version, self.api_version, self.libtor_version = mcx.q(
                'system.client_version').q('system.api_version').q(
                    'system.library_version').run()
        except Exception as e:
            logthis("Failed to connect to rTorrent:",
                    suffix=e,
                    loglevel=LL.ERROR)
            if abortfail:
                failwith(ER.CONF_BAD,
                         "Connection to rTorrent failed. Aborting.")
            else:
                return
        logthis("Connected to rTorrent OK", ccode=C.GRN, loglevel=LL.VERBOSE)
        logthis("rTorrent %s (libtorrent %s)" %
                (self.client_version, self.libtor_version),
                loglevel=LL.VERBOSE)
        self.connected = True
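A minimal standalone sketch of the same connection probe using only the standard library; the URI is illustrative, and system.client_version / system.library_version are the rTorrent XML-RPC commands already queried above:

import xmlrpc.client

proxy = xmlrpc.client.ServerProxy("http://localhost:8000/RPC2")  # illustrative URI
try:
    print("rTorrent", proxy.system.client_version(),
          "/ libtorrent", proxy.system.library_version())
except Exception as exc:
    print("Connection to rTorrent failed:", exc)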
Example #8
def rcList(xtraConf=None):
    global rcfiles
    rcc = []

    if xtraConf:
        xcf = os.path.expanduser(xtraConf)
        if os.path.exists(xcf):
            rcc.append(xcf)
            logthis("Added rcfile candidate (from command line):",
                    suffix=xcf,
                    loglevel=LL.DEBUG)
        else:
            logthis("Specified rcfile does not exist:",
                    suffix=xcf,
                    loglevel=LL.ERROR)

    for tf in rcfiles:
        ttf = os.path.expanduser(tf)
        logthis("Checking for rcfile candidate",
                suffix=ttf,
                loglevel=LL.DEBUG2)
        if os.path.exists(ttf):
            rcc.append(ttf)
            logthis("Got rcfile candidate", suffix=ttf, loglevel=LL.DEBUG2)

    return rcc
Example #9
def rexec(optlist, supout=False):
    """
    execute command; input a list of options; if `supout` is True, then suppress stderr
    """
    logthis("Executing:", suffix=optlist, loglevel=LL.DEBUG)
    try:
        if supout:
            fout = subprocess.check_output(optlist, stderr=subprocess.STDOUT)
        else:
            fout = subprocess.check_output(optlist)
    except subprocess.CalledProcessError as e:
        logthis("exec failed:", suffix=e, loglevel=LL.ERROR)
        fout = None
    return fout
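Usage note: with supout=True the child's stderr is merged into the captured output rather than discarded. A standalone equivalent of that call path (the command and path are illustrative):

import subprocess

# same behaviour as rexec(["/bin/ls", "-l", "/tmp"], supout=True), minus the project logging
out = subprocess.check_output(["/bin/ls", "-l", "/tmp"], stderr=subprocess.STDOUT)
print(out.decode(errors="replace"))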
Example #10
def enqueue(xredis, qname, thash, opts=None, jid=None, silent=False):
    """
    enqueue a task on the specified queue
    """
    if opts is None:
        opts = {}

    # generate a job ID from the current time
    if not jid:
        jid = str(time.time()).replace(".", "")

    # JSON-encode and LPUSH on to the selected queue
    xredis.lpush("queue_"+qname, json.dumps({'id': jid, 'thash': thash, 'opts': opts }))

    if not silent:
        logthis("Enqueued job# %s in queue:" % (jid), suffix=qname, loglevel=LL.VERBOSE)

    return jid
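A minimal sketch of the Redis handshake this producer relies on, assuming a local Redis server and the redis-py package; the hash value is illustrative, and the queue_/work_ names mirror the ones used by the queue runner example further down:

import json
import time

import redis

r = redis.Redis(host="localhost", port=6379, db=0)  # illustrative connection settings

# producer side: LPUSH a JSON-encoded job onto queue_xfer (what enqueue() does)
job = {"id": str(time.time()).replace(".", ""), "thash": "abc123", "opts": {}}
r.lpush("queue_xfer", json.dumps(job))

# consumer side: atomically move the oldest job to the work queue (what qrunner() does)
raw = r.brpoplpush("queue_xfer", "work_xfer", timeout=5)
if raw:
    print(json.loads(raw))
    r.rpop("work_xfer")  # acknowledge by clearing it from the work queue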
Example #11
def route_chook():
    """
    deluge 'download complete' handler
    """
    global rdx
    logthis(">> Received chook request", loglevel=LL.VERBOSE)

    indata = request.json
    jobid = queue.enqueue(rdx, "xfer", indata.get('thash', False),
                          indata.get('opts', False))
    resp = dresponse({
        'status': "ok",
        'message': "Queued as job %s" % (jobid)
    }, "201 Queued")

    return resp
Example #12
    def moveTorrent(self, torids, destdir):
        """move torrent data (directory or file) to new location"""
        if not isinstance(torids, list): torids = [torids]

        rpdir = os.path.realpath(destdir)
        if not os.path.isdir(rpdir):
            logthis("Failed to move torrent storage. Directory does not exist:", suffix=rpdir, loglevel=LL.ERROR)
            return False

        self.checkConnection()
        try:
            self.xcon.call('core.move_storage', torids, rpdir)
            return True
        except Exception as e:
            logthis("Failed to move torrent storage:", suffix=e, loglevel=LL.ERROR)
            return False
Example #13
def precheck(rheaders=False, require_ctype=True):
    # Check for proper Content-Type
    if require_ctype:
        ctype = request.headers.get('Content-Type')
        if not ctype or not re.match(r'^(application/json|text/x-json)', ctype, re.I):
            logthis("Content-Type mismatch. Not acceptable:",
                    suffix=ctype,
                    loglevel=LL.WARNING)
            if rheaders:
                return ({
                    'status': "error",
                    'error': "json_required",
                    'message': "Content-Type must be application/json"
                }, "417 Content Mismatch")
            else:
                return False

    # Check authentication
    wauth = request.headers.get('WWW-Authenticate')
    skey = config.srv['shared_key']
    if wauth:
        if wauth == skey:
            logthis("Authentication passed", loglevel=LL.VERBOSE)
            if rheaders: return ({'status': "ok"}, "212 Login Validated")
            else: return True
        else:
            logthis("Authentication failed; invalid credentials",
                    loglevel=LL.WARNING)
            if rheaders:
                return ({
                    'status': "error",
                    'error': "auth_fail",
                    'message': "Authentication failed"
                }, "401 Unauthorized")
            else:
                return False
    else:
        logthis(
            "Authentication failed; WWW-Authenticate header missing from request",
            loglevel=LL.WARNING)
        if rheaders:
            return ({
                'status': "error",
                'error': "www_authenticate_header_missing",
                'message': "Must include WWW-Authenticate header"
            }, "400 Bad Request")
        else:
            return False
Example #14
    def __init__(self,
                 hostname,
                 username=None,
                 keyfile=None,
                 password=None,
                 port=22,
                 timeout=None,
                 autoconnect=True):
        """
        initialize, connect, and establish sftp channel
        """
        # initialize our big daddy, SSHClient
        super().__init__()

        # set hostkey policy
        self.set_missing_host_key_policy(paramiko.client.AutoAddPolicy())

        if not username: username = os.getlogin()

        # connect
        if autoconnect:
            try:
                self.connect(hostname,
                             port,
                             username,
                             password,
                             key_filename=keyfile,
                             timeout=timeout,
                             compress=True)
            except Exception as e:
                logthis(
                    "!! Failed to connect via SSH to remote host (%s@%s:%d):" %
                    (username, hostname, port),
                    suffix=e,
                    loglevel=LL.ERROR)
                return

            self.connected = True
            logthis("** Connected to remote host via SSH:",
                    suffix="%s@%s:%d" % (username, hostname, port),
                    loglevel=LL.INFO)

            # initialize sftp channel
            self.rsc = self.open_sftp()
Example #15
def match(tordata):
    """find matching rules for a single torrent"""

    fmatch = False

    # check each ruleset for a match
    for k, v in rules.items():
        if check_ruleset(tordata, k) == True:
            fmatch = k
            break

    if fmatch:
        logthis("Found match:", suffix=fmatch, loglevel=LL.VERBOSE)
        rout = (fmatch, rresolve(fmatch))
    else:
        logthis("No match found", loglevel=LL.WARNING)
        rout = (False, {})

    return rout
Example #16
def start(xconfig, qname="xfer"):
    global rdx, dlx, dadpid, handlers, conf
    conf = xconfig

    # Fork into its own process
    logthis("Forking...", loglevel=LL.DEBUG)
    dadpid = os.getpid()
    try:
        pid = os.fork()
    except OSError as e:
        logthis("os.fork() failed:", suffix=e, loglevel=LL.ERROR)
        failwith(ER.PROCFAIL, "Failed to fork worker. Aborting.")

    # Return if we are the parent process
    if pid:
        return 0

    # Otherwise, we are the child
    logthis("Forked queue runner. pid =", prefix=qname, suffix=os.getpid(), loglevel=LL.INFO)
    logthis("QRunner. ppid =", prefix=qname, suffix=dadpid, loglevel=LL.VERBOSE)
    setproctitle("rainwatch: queue runner - %s" % (qname))

    # Connect to Redis
    rdx = db.redis({ 'host': conf.redis['host'], 'port': conf.redis['port'], 'db': conf.redis['db'] },
                   prefix=conf.redis['prefix'])

    # Connect to torrent client
    dlx = tclient.TorrentClient(xconfig)

    # Set up Jabber access
    jabber.setup(xconfig)

    # Set queue callbacks
    handlers = {
                 'xfer': cb_xfer
               }

    # Start listener loop
    qrunner(qname)

    # And exit once we're done
    logthis("*** Queue runner terminating", prefix=qname, loglevel=LL.INFO)
    sys.exit(0)
Example #17
def libnotify_send(xconfig,
                   msgText,
                   hname=False,
                   huser=False,
                   msgTimeout=10000,
                   msgHead=False,
                   msgIcon=False):
    """
    send libnotify notification to another host via ssh
    """
    # use defaults
    if not hname: hname = xconfig.notify['hostname']
    if not huser: huser = xconfig.notify['user']

    # set up notification icon
    if msgIcon: xicon = "-i '%s'" % (msgIcon)
    elif msgIcon == False and xconfig.notify['icon']:
        xicon = "-i '%s'" % (xconfig.notify['icon'])
    else:
        xicon = ''

    # if no message header, then use our hostname
    if msgHead == False: msgHead = socket.gethostname()
    else: msgHead = str(msgHead)

    # escape newlines and such
    msgText = "'%s'" % (msgText.replace("\n", "\\n").replace(
        "\r", "\\r").replace("\t", "\\t"))

    # get dbus session address
    dbsout = rexec(
        ['/usr/bin/ssh', hname, '--', 'ps', 'aux', '|', 'grep', huser])
    dbus_match = re.search('dbus-daemon --fork --session --address=(.+)',
                           dbsout.decode(errors='replace') if dbsout else '',
                           re.I | re.M)
    if not dbus_match:
        logthis("Failed to determine dbus session address on remote host",
                loglevel=LL.ERROR)
        return
    dbus_addr = dbus_match.group(1)
    logthis("Got dbus session:", suffix=dbus_addr, loglevel=LL.VERBOSE)

    # send notification
    rexec([
        '/usr/bin/ssh', hname, '--',
        "DBUS_SESSION_BUS_ADDRESS=\'%s\'" % (dbus_addr), 'notify-send', '-t',
        str(msgTimeout), xicon, msgHead, msgText
    ])
    logthis("Sent libnotify message to remote host", loglevel=LL.VERBOSE)
Example #18
    def checkConnection(self, auto_reconnect=True):
        """check status of current connection and automatically re-establish a broken connection"""
        try:
            if self.connected is True:
                self.xcon.call('daemon.info')
        except (BrokenPipeError, ConnectionLostException, CallTimeoutException):
            self.connected = False

        if auto_reconnect is True:
            if self.connected is False:
                rcreds = (self.xcon.host, self.xcon.port, self.xcon.username, self.xcon.password)
                self.xcon = DelugeRPCUnicode(*rcreds)
                try:
                    self.xcon.connect()
                    self.client_version = self.xcon.call('daemon.info')
                    self.libtor_version = self.xcon.call('core.get_libtorrent_version')
                    self.connected = True
                    logthis("Re-established connection to Deluge", loglevel=LL.WARNING)
                except Exception as e:
                    logexc(e, "Deluge re-connection attempt failed")
        return self.connected
Example #19
    def __init__(self, duser, dpass, dhost='localhost', dport=58846, abortfail=True):
        """connect to deluged and authenticate"""
        logthis("Connecting to deluged on", suffix="%s:%d" % (dhost, dport), loglevel=LL.VERBOSE)
        self.xcon = DelugeRPCUnicode(dhost, dport, duser, dpass)
        try:
            self.xcon.connect()
            self.client_version = self.xcon.call('daemon.info')
            self.libtor_version = self.xcon.call('core.get_libtorrent_version')
        except Exception as e:
            logthis("Failed to connect to Deluge:", suffix=e, loglevel=LL.ERROR)
            if abortfail: failwith(ER.CONF_BAD, "Connection to Deluge failed. Aborting.")
        logthis("Connected to Deluge OK", ccode=C.GRN, loglevel=LL.VERBOSE)
        logthis("Deluge %s (libtorrent %s)" % (self.client_version, self.libtor_version), loglevel=LL.VERBOSE)
        self.connected = True
Example #20
def parse(xtraConf=None):
    """
    Parse rcfile (rainwatch.conf)
    Output: (rcfile, rcdata)
    """
    global rcpar
    # get rcfile list
    rcl = rcList(xtraConf)
    logthis("Parsing any local, user, or system RC files...",
            loglevel=LL.DEBUG)

    # use ConfigParser to parse the rcfiles
    rcpar = configparser.RawConfigParser()
    rcfile = None
    if len(rcl):
        rcfile = os.path.realpath(rcl[0])
        logthis("Parsing config file:", suffix=rcfile, loglevel=LL.VERBOSE)
        try:
            # open with codecs so UTF-8 content is parsed correctly
            with codecs.open(rcfile, 'r', encoding='utf-8') as f:
                rcpar.read_file(f)
        except configparser.ParsingError as e:
            logthis("Error parsing config file: %s" % e, loglevel=LL.ERROR)
            return False

    # build a dict
    rcdict = {}
    rsecs = rcpar.sections()
    logthis("Config sections:", suffix=rsecs, loglevel=LL.DEBUG2)
    for ss in rsecs:
        isecs = rcpar.items(ss)
        rcdict[ss] = {}
        for ii in isecs:
            logthis(">> %s" % ii[0], suffix=ii[1], loglevel=LL.DEBUG2)
            rcdict[ss][ii[0]] = ii[1]

    # return loaded filename and rcdata
    return (rcfile, rcdict)
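A minimal standalone sketch of the same section/key flattening with configparser, using an inline config string (the section names and values are illustrative):

import configparser

sample = """
[core]
loglevel = 6

[xfer]
hostname = nas.example.org
"""

rcpar = configparser.RawConfigParser()
rcpar.read_string(sample)

rcdict = {sec: dict(rcpar.items(sec)) for sec in rcpar.sections()}
print(rcdict)
# -> {'core': {'loglevel': '6'}, 'xfer': {'hostname': 'nas.example.org'}}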
Example #21
def mode_move(tid, destdir):
    global dlx, jbx

    # get torrent data
    logthis(">> Retrieving torrent data for", suffix=tid, loglevel=LL.INFO)
    tordata = dlx.getTorrent(tid)
    fnewpath = os.path.realpath(destdir) + '/' + tordata['name']
    logthis(">> Moving: %s ->" % (tordata['path']),
            suffix=fnewpath,
            loglevel=LL.INFO)

    # move to destination dir
    if dlx.moveTorrent(tid, destdir):
        logthis("** Moved to", suffix=destdir, loglevel=LL.INFO)
        rval = 0
    else:
        logthis("!! Failed to move to", suffix=destdir, loglevel=LL.ERROR)
        rval = 102
    return rval
Example #22
def dfork():
    """Fork into the background"""
    logthis("Forking...", loglevel=LL.DEBUG)
    try:
        # first fork
        pid = os.fork()
    except OSError as e:
        logthis("os.fork() failed:", suffix=e, loglevel=LL.ERROR)
        failwith(ER.PROCFAIL, "Failed to fork into background. Aborting.")
    if (pid == 0):
        # become parent of session & process group
        os.setsid()
        try:
            # second fork
            pid = os.fork()
        except OSError as e:
            logthis("os.fork() [2] failed:", suffix=e, loglevel=LL.ERROR)
            failwith(ER.PROCFAIL, "Failed to fork into background. Aborting.")
        if pid:
            # ... and kill the other parent
            os._exit(0)

        logthis("** Forked into background. PID:",
                suffix=os.getpid(),
                loglevel=LL.INFO)
        # Redirect stdout & stderr to /dev/null
        sys.stdout.flush()
        sys.stdout = open(os.devnull, 'w')
        sys.stderr.flush()
        sys.stderr = open(os.devnull, 'w')
    else:
        # otherwise, kill the parent; _exit() so we don't mess with any
        # open file handles or streams; sleep for 0.5s to let the
        # "forked into background" message appear before the bash
        # prompt is given back to the user
        time.sleep(0.5)
        os._exit(0)
Example #23
def qrunner(qname="xfer"):
    global rdx, mdx, handlers

    qq = "queue_"+qname
    wq = "work_"+qname

    # Crash recovery
    # Check work queue (work_*) and re-queue any unhandled items
    logthis("-- QRunner crash recovery: checking for abandoned jobs...", loglevel=LL.VERBOSE)
    requeued = 0
    while(rdx.llen(wq) != 0):
        crraw = rdx.lpop(wq)
        try:
            critem = json.loads(crraw)
        except Exception as e:
            logthis("!! QRunner crash recovery: Bad JSON data from queue item. Job discarded. raw data:",
                    prefix=qname, suffix=crraw, loglevel=LL.ERROR)
            continue
        cr_jid = critem.get("id", "??")
        logthis("** Requeued abandoned job:", prefix=qname, suffix=cr_jid, loglevel=LL.WARNING)
        rdx.rpush(qq, crraw)
        requeued += 1

    if requeued:
        logthis("-- QRunner crash recovery OK! Jobs requeued:", prefix=qname, suffix=requeued, loglevel=LL.VERBOSE)

    logthis("pre-run queue sizes: %s = %d / %s = %d" % (qq, rdx.llen(qq), wq, rdx.llen(wq)),
            prefix=qname, loglevel=LL.DEBUG)
    logthis("-- QRunner waiting; queue:", prefix=qname, suffix=qname, loglevel=LL.VERBOSE)
    while(True):
        # RPOP from main queue and LPUSH on to the work queue
        # block for 5 seconds, check that the master hasn't term'd, then
        # check again until we get something
        qitem = None
        qiraw = rdx.brpoplpush(qq, wq, 5)
        if qiraw:
            logthis(">> QRunner: discovered a new job in queue", prefix=qname, suffix=qname, loglevel=LL.VERBOSE)

            try:
                qitem = json.loads(qiraw)
            except Exception as e:
                logthis("!! QRunner: Bad JSON data from queue item. Job discarded. raw data:",
                        prefix=qname, suffix=qiraw, loglevel=LL.ERROR)

            # If we've got a valid job item, let's run it!
            if qitem:
                logthis(">> QRunner: job data:\n", prefix=qname, suffix=json.dumps(qitem), loglevel=LL.DEBUG)

                # Execute callback
                rval = handlers[qname](qitem)
                if (rval == 0):
                    logthis("QRunner: Completed job successfully.", prefix=qname, loglevel=LL.VERBOSE)
                elif (rval == 1):
                    logthis("QRunner: Job complete, but with warnings.", prefix=qname, loglevel=LL.WARNING)
                else:
                    logthis("QRunner: Job failed. rval =", prefix=qname, suffix=rval, loglevel=LL.ERROR)

                # Remove from work queue
                rdx.rpop(wq)

            # Show wait message again
            logthis("-- QRunner: waiting; queue:", prefix=qname, suffix=qname, loglevel=LL.VERBOSE)

        # Check if daddy is still alive; prevents this process from becoming a bastard child
        if not master_alive():
            logthis("QRunner: Master has terminated.", prefix=qname, loglevel=LL.WARNING)
            return
Example #24
def merge(inrc, cops):
    """
    Merge options from loaded rcfile with defaults; strip quotes and perform type-conversion.
    Any defined value set in the config will override the default value.
    """
    outrc = {}
    # set defaults first
    for dsec in defaults:
        # create sub dict for this section, if not exist
        if dsec not in outrc:
            outrc[dsec] = {}
        # loop through the keys
        for dkey in defaults[dsec]:
            logthis("** Option:",
                    prefix="defaults",
                    suffix="%s => %s => '%s'" %
                    (dsec, dkey, defaults[dsec][dkey]),
                    loglevel=LL.DEBUG2)
            outrc[dsec][dkey] = defaults[dsec][dkey]

    # set options defined in rcfile, overriding defaults
    for dsec in inrc:
        # create sub dict for this section, if not exist
        if dsec not in outrc:
            outrc[dsec] = {}
        # loop through the keys
        for dkey in inrc[dsec]:
            # check if key exists in defaults
            keyok = dkey in outrc[dsec]

            # Strip quotes and perform type-conversion for ints and floats
            # only perform conversion if key exists in defaults
            if keyok:
                if isinstance(outrc[dsec][dkey], int):
                    try:
                        tkval = int(qstrip(inrc[dsec][dkey]))
                    except ValueError as e:
                        logthis(
                            "Unable to convert value to integer. Check config option value. Value:",
                            prefix="%s:%s" % (dsec, dkey),
                            suffix=qstrip(inrc[dsec][dkey]),
                            loglevel=LL.ERROR)
                        continue
                elif isinstance(outrc[dsec][dkey], float):
                    try:
                        tkval = float(qstrip(inrc[dsec][dkey]))
                    except ValueError as e:
                        logthis(
                            "Unable to convert value to float. Check config option value. Value:",
                            prefix="%s:%s" % (dsec, dkey),
                            suffix=qstrip(inrc[dsec][dkey]),
                            loglevel=LL.ERROR)
                        continue
                else:
                    tkval = qstrip(inrc[dsec][dkey])
            else:
                tkval = qstrip(inrc[dsec][dkey])

            logthis("** Option set:",
                    prefix="rcfile",
                    suffix="%s => %s => '%s'" % (dsec, dkey, tkval),
                    loglevel=LL.DEBUG2)
            outrc[dsec][dkey] = tkval

    # add in cli options
    for dsec in cops:
        # create sub dict for this section, if not exist
        if dsec not in outrc:
            outrc[dsec] = {}
        # loop through the keys
        for dkey in cops[dsec]:
            # only if the value has actually been set (eg. non-false)
            if cops[dsec][dkey]:
                logthis("** Option:",
                        prefix="cliopts",
                        suffix="%s => %s => '%s'" %
                        (dsec, dkey, cops[dsec][dkey]),
                        loglevel=LL.DEBUG2)
                outrc[dsec][dkey] = cops[dsec][dkey]

    return outrc
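A condensed sketch of the defaults-driven type conversion used above; the sections, keys, and the inline quote-stripping are illustrative stand-ins for the project's defaults table and qstrip() helper:

defaults = {"core": {"loglevel": 4, "ratio": 1.5, "name": "rainwatch"}}
from_rcfile = {"core": {"loglevel": "6", "ratio": "2.0", "name": "'custom'"}}

merged = {}
for sec, keys in defaults.items():
    merged[sec] = dict(keys)
    for key, raw in from_rcfile.get(sec, {}).items():
        stripped = raw.strip("'\"")              # stand-in for qstrip()
        target = type(keys.get(key, stripped))   # convert to the default's type when the key is known
        merged[sec][key] = target(stripped)

print(merged)
# -> {'core': {'loglevel': 6, 'ratio': 2.0, 'name': 'custom'}}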
Example #25
def mode_chook(tid):
    global dlx, jbx

    # get torrent data
    logthis(">> Processing 'complete' exec hook for",
            suffix=tid,
            loglevel=LL.INFO)
    tordata = dlx.getTorrent(tid)

    # find matching rules
    rname, rset = ruleparser.match(tordata)
    if rname:
        logthis("++ Matched ruleset:\n",
                suffix=print_r(rset),
                loglevel=LL.VERBOSE)
    else:
        logthis("!! No ruleset matched", loglevel=LL.WARNING)

    # move to destination dir
    if rset.get('moveto', None):
        if dlx.moveTorrent(tid, rset['moveto']):
            logthis("** Moved to", suffix=rset['moveto'], loglevel=LL.INFO)
            time.sleep(2)
        else:
            logthis("!! Failed to move to",
                    suffix=rset['moveto'],
                    loglevel=LL.ERROR)

    # enqueue
    qurl = config.srv['url'] + '/api/chook'
    headset = {
        'Content-Type': "application/json",
        'WWW-Authenticate': config.srv['shared_key'],
        'User-Agent': "rainwatch/" + __version__
    }
    rq = requests.post(qurl,
                       headers=headset,
                       data=json.dumps({
                           'thash': tid,
                           'opts': False
                       }))

    if rq.status_code == 201:
        logthis(">> Queued torrent for transfer", loglevel=LL.INFO)
        rval = 0
    else:
        logthis("!! Failed to queue for transfer:",
                suffix=str(rq.status_code) + ' ' + rq.reason,
                loglevel=LL.ERROR)
        rval = 101

    logthis("*** Finished with complete exec hook for",
            suffix=tordata['name'],
            loglevel=LL.INFO)
    return rval
Example #26
def start(xconfig):
    """
    Start Rainwatch daemon
    """
    global config, rdx, dlx
    # first, fork
    if not xconfig.srv['nofork']: dfork()
    config = xconfig

    # set process title
    setproctitle("rainwatch: master process (%s:%d)" %
                 (xconfig.srv['iface'], xconfig.srv['port']))
    pidfile_set()

    # spawn queue runners
    queue.start(xconfig, 'xfer')

    # spawn jabber handler
    if xconfig.xmpp['user'] and xconfig.xmpp['pass']:
        jabber.spawn(xconfig)
        jabber.setup(xconfig)
    else:
        logthis("!! Not spawning Jabber client, no JID defined in rc file",
                loglevel=LL.WARNING)

    # connect to Redis
    rdx = db.redis(
        {
            'host': xconfig.redis['host'],
            'port': xconfig.redis['port'],
            'db': xconfig.redis['db']
        },
        prefix=xconfig.redis['prefix'])

    # connect to torrent daemon
    dlx = tclient.TorrentClient(xconfig)

    # create flask object, and map API routes
    xsrv = Flask('rainwatch')
    xsrv.add_url_rule('/', 'root', view_func=route_root,
                      methods=['GET'])  # same as /api/info
    xsrv.add_url_rule('/api/info',
                      'root',
                      view_func=route_root,
                      methods=['GET', 'POST'])
    xsrv.add_url_rule('/api/auth',
                      'auth',
                      view_func=route_auth,
                      methods=['GET', 'POST'])
    xsrv.add_url_rule('/api/chook',
                      'chook',
                      view_func=route_chook,
                      methods=['GET', 'POST', 'PUT'])
    xsrv.add_url_rule('/api/torrent/list',
                      'torrent_list',
                      view_func=torrent_list,
                      methods=['GET', 'POST'])
    xsrv.add_url_rule('/api/torrent/getinfo',
                      'torrent_getinfo',
                      view_func=torrent_getinfo,
                      methods=['GET', 'POST'])
    xsrv.add_url_rule('/api/rules/list',
                      'rules_list',
                      view_func=rules_list,
                      methods=['GET', 'POST'])
    xsrv.add_url_rule('/api/queue/list',
                      'queue_list',
                      view_func=queue_list,
                      methods=['GET', 'POST'])

    # start flask listener
    logthis("Starting Flask...", loglevel=LL.VERBOSE)
    xsrv.run(xconfig.srv['iface'],
             xconfig.srv['port'],
             xconfig.run['fdebug'],
             use_evalex=False)
Example #27
    def getTorrentList(self, filter=None, full=False):
        """get list of torrents; setting full=True will return all fields for all torrents"""
        if full:
            fields = ("d.hash", "d.name", "d.base_path", "d.directory_base",
                      "d.creation_date", "d.message", "d.size_bytes",
                      "d.completed_bytes", "d.ratio", "d.up.total",
                      "d.down.total", "d.up.rate", "d.down.rate",
                      "d.peers_connected", "d.peers_complete", "d.is_private",
                      "d.complete", "d.is_active", "d.is_hash_checking",
                      "d.state", "d.size_files", "d.size_chunks")
        else:
            fields = ("d.name", "d.hash", "d.completed_bytes", "d.size_bytes",
                      "d.creation_date", "d.complete", "d.is_active",
                      "d.is_hash_checking", "d.state", "d.down.rate")

        try:
            traw = self.__d_multicall("main", fields)
        except Exception as e:
            logthis("Error calling d.multicall:", suffix=e, loglevel=LL.ERROR)
            return False

        tlist = {}
        for ttor in traw:
            # remap values
            otor = {map_tinfo[tk]: tv for tk, tv in ttor.items()}
            if None in otor: del (otor[None])
            logthis("remapped:", suffix=otor, loglevel=LL.DEBUG2)

            if full:
                torid = ttor['d.hash']

                # get files
                flist = []
                fraw = self.__f_multicall(
                    torid,
                    ("f.path", "f.offset", "f.size_bytes",
                     "f.completed_chunks", "f.size_chunks", "f.priority"))
                for tk, td in enumerate(fraw):
                    # remap
                    tfile = {map_tinfo_files[ik]: iv for ik, iv in td.items()}
                    if None in tfile: del (tfile[None])
                    # extrapolate
                    tfile['index'] = tk
                    tfile['progress'] = (float(td['f.completed_chunks']) /
                                         float(td['f.size_chunks'])) * 100.0
                    flist.append(tfile)

                otor['files'] = tuple(flist)

                # get trackers
                rlist = []
                rraw = self.__t_multicall(
                    torid, ("t.failed_counter", "t.success_counter", "t.url",
                            "t.type", "t.is_enabled"))
                for tk, td in enumerate(rraw):
                    # remap
                    track = {
                        map_tinfo_trackers[ik]: iv
                        for ik, iv in td.items()
                    }
                    if None in track: del (track[None])
                    # extrapolate
                    track['type'] = rtorrent_tracker_types[int(td['t.type'])]
                    rlist.append(track)

                otor['trackers'] = tuple(rlist)

            # set extrapolated values
            otor['progress'] = float(ttor['d.completed_bytes']) / float(
                ttor['d.size_bytes']) * 100.0
            otor['state'] = self.__statusLookup(ttor['d.complete'],
                                                ttor['d.is_active'],
                                                ttor['d.is_hash_checking'],
                                                ttor['d.state'])
            otor['hash'] = otor['hash'].lower()

            if full:
                otor['ratio'] = float(otor['ratio']) / 1000.0
                otor['private'] = bool(otor['private'])

            # terrible ETA calculation
            if not ttor['d.complete'] and ttor['d.down.rate'] > 0:
                otor['eta'] = int(
                    float(ttor['d.size_bytes'] - ttor['d.completed_bytes']) /
                    float(ttor['d.down.rate']))
            else:
                otor['eta'] = 0

            try:
                if full:
                    tracker_url = rlist[0]['url']
                else:
                    tracker_url = self.xcon.t.get_url(ttor['d.hash'], 0)
                otor['tracker_host'] = re.sub(':[0-9]+$', '',
                                              urlparse(tracker_url).netloc)
            except:
                otor['tracker_host'] = None

            tlist[ttor['d.hash'].lower()] = otor

        return tlist
Example #28
    def getTorrent(self, torid):
        """get info on a particular torrent"""
        try:
            traw = self.__d_multicall_single(
                torid,
                ("d.hash", "d.name", "d.base_path", "d.directory_base",
                 "d.creation_date", "d.message", "d.size_bytes",
                 "d.completed_bytes", "d.ratio", "d.up.total", "d.down.total",
                 "d.up.rate", "d.down.rate", "d.peers_connected",
                 "d.peers_complete", "d.is_private", "d.complete",
                 "d.is_active", "d.is_hash_checking", "d.state",
                 "d.size_files", "d.size_chunks"))
            logthis("traw:", suffix=traw, loglevel=LL.DEBUG2)
        except Exception as e:
            logthis("Error calling d.multicall:", suffix=e, loglevel=LL.ERROR)
            return False

        # remap values
        otor = {map_tinfo[tk]: tv for tk, tv in traw.items()}
        if None in otor: del (otor[None])
        logthis("remap:", suffix=otor, loglevel=LL.DEBUG2)

        # get files
        flist = []
        fraw = self.__f_multicall(
            torid, ("f.path", "f.offset", "f.size_bytes", "f.completed_chunks",
                    "f.size_chunks", "f.priority"))
        for tk, td in enumerate(fraw):
            # remap
            tfile = {map_tinfo_files[ik]: iv for ik, iv in td.items()}
            if None in tfile: del (tfile[None])
            # extrapolate
            tfile['index'] = tk
            tfile['progress'] = (float(td['f.completed_chunks']) /
                                 float(td['f.size_chunks'])) * 100.0
            flist.append(tfile)

        otor['files'] = tuple(flist)

        # get trackers
        tlist = []
        rraw = self.__t_multicall(torid,
                                  ("t.failed_counter", "t.success_counter",
                                   "t.url", "t.type", "t.is_enabled"))
        for tk, td in enumerate(rraw):
            # remap
            track = {map_tinfo_trackers[ik]: iv for ik, iv in td.items()}
            if None in track: del (track[None])
            # extrapolate
            track['type'] = rtorrent_tracker_types[int(td['t.type'])]
            tlist.append(track)

        otor['trackers'] = tuple(tlist)

        # set extrapolated values
        otor['progress'] = float(traw['d.completed_bytes']) / float(
            traw['d.size_bytes']) * 100.0
        otor['state'] = self.__statusLookup(traw['d.complete'],
                                            traw['d.is_active'],
                                            traw['d.is_hash_checking'],
                                            traw['d.state'])
        otor['ratio'] = float(otor['ratio']) / 1000.0
        otor['hash'] = otor['hash'].lower()
        otor['private'] = bool(otor['private'])

        # terrible ETA calculation
        if not traw['d.complete'] and traw['d.down.rate'] > 0:
            otor['eta'] = int(
                float(traw['d.size_bytes'] - traw['d.completed_bytes']) /
                float(traw['d.down.rate']))
        else:
            otor['eta'] = 0

        try:
            otor['tracker_host'] = re.sub(':[0-9]+$', '',
                                          urlparse(tlist[0]['url']).netloc)
        except:
            otor['tracker_host'] = None

        return otor
Example #29
def cb_xfer(jdata):
    """
    xfer queue handler
    """
    global dlx, conf

    # get options from job request
    jid = jdata['id']
    thash = jdata['thash']
    opts = jdata['opts']

    # Do some loggy stuff
    logthis("xfer: JobID %s / TorHash %s / Opts %s" % (jid, thash, json.dumps(opts)), loglevel=LL.VERBOSE)

    # get updated data from torrent client
    tordata = dlx.getTorrent(thash)

    if not tordata:
        logthis("!! Failed to retrieve torrent data corresponding to supplied hash. Job discarded.", loglevel=LL.ERROR)
        return 101

    # establish SSH connection
    rsh = rainshell(conf.xfer['hostname'], username=conf.xfer['user'],
                    keyfile=conf.xfer['keyfile'], port=int(conf.xfer['port']))

    # download
    if conf.xfer['hostname']:
        # send xfer start notification
        if conf.notify['user'] and conf.notify['hostname']:
            try:
                libnotify_send(conf, "%s\n\nStarted transfer to incoming." % (tordata['name']))
            except Exception as e:
                logthis("Failed to send libnotify message:", suffix=e, loglevel=LL.ERROR)

        # xfer via scp
        try:
            tgpath = ("%s/%s" % (tordata['base_path'], tordata['name']))
            logthis("tgpath:", suffix=tgpath, loglevel=LL.DEBUG)
        except Exception as e:
            logexc(e, "Failed to perform string interpolation for tgpath")
            failwith(ER.PROCFAIL, "Unable to continue.")

        # Until a way can be determine to fix os.stat/open functions when running detached from
        # the terminal, this check is going to be skipped
        #try:
        #    if not path_exists(tgpath):
        #        logthis("!! Path does not exist:", suffix=tgpath, loglevel=LL.ERROR)
        #        return False
        #    else:
        #        logthis(">> Target path:", suffix=tgpath, loglevel=LL.INFO)
        #except Exception as e:
        #    logexc(e, "Unable to determine existence of tgpath")
        #    failwith(ER.PROCFAIL, "Unable to continue.")
        logthis(">> Target path:", suffix=tgpath, loglevel=LL.INFO)

        logthis(">> Starting transfer to remote host:",
                suffix="%s:%s" % (conf.xfer['hostname'], conf.xfer['basepath']), loglevel=LL.INFO)
        xstart = datetime.now()
        rsh.xfer(tgpath, conf.xfer['basepath'])
        xstop = datetime.now()
        logthis("** Transfer complete.", loglevel=LL.INFO)

        # send xfer complete notification
        xdelta = xstop - xstart
        xdelta_str = re.sub(r'\.[0-9]+$', '', str(xdelta))
        tsize = tordata['total_size']
        trate = float(tsize) / float(xdelta.seconds)
        tsize_str = fmtsize(tsize)
        trate_str = fmtsize(trate, rate=True)
        trate_bstr = fmtsize(trate, rate=True, bits=True)
        jabber.send('send_message', {'mto': conf.xmpp['sendto'],
                    'mbody': "%s -- Transfer Complete (%s) -- Time Elapsed ( %s ) -- Rate [ %s | %s ]" %
                    (tordata['name'], tsize_str, xdelta_str, trate_str, trate_bstr)})
        jabber.send('set_status', { 'show': None, 'status': "Ready" })

    # done
    rsh.close()
    return 0
Example #30
    def _undef(**kwargs):
        kwlist = ', '.join(["%s=%s" % (x, kwargs[x]) for x in kwargs])
        logthis("Unhandled method call:",
                suffix="%s(%s)" % (aname, kwlist),
                loglevel=LL.WARNING)