Example #1
    def cleanup(self):
        sendresponse = self.params['nzo_id']
        queue = {
            'mode': 'history',
            'name': 'delete',
            'del_files': 1,
            'value': self.params['nzo_id'],
            'output': 'json',
            'apikey': self.sab_apikey,
            'limit': 500
        }
        try:
            logger.info('sending now to %s' % self.sab_url)
            logger.debug('parameters set to %s' % queue)
            h = requests.get(self.sab_url, params=queue, verify=False)
        except Exception as e:
            logger.error('uh-oh: %s' % e)
            return {'status': False}
        else:
            queueresponse = h.json()
            if queueresponse['status']:
                logger.info(
                    '[SABNZBD] Successfully deleted the item from SABnzbd.')
            else:
                logger.warn('[SABNZBD] Unable to delete item from SABnzbd.')
        return queueresponse
Example #2
    def post_process(self):
        url = self.lidarr_url + '/api/v1/command'
        if self.applylabel is True:
            if self.snstat['label'] == 'None':
                newpath = os.path.join(self.defaultdir, self.snstat['name'])
            else:
                newpath = os.path.join(self.defaultdir, self.snstat['label'],
                                       self.snstat['name'])
        else:
            newpath = os.path.join(self.defaultdir, self.snstat['name'])

        payload = {
            'name': 'DownloadedAlbumsScan',
            'path': newpath,
            'downloadClientID': self.snstat['hash'],
            'importMode': 'Move'
        }

        logger.info('[LIDARR] Posting url: %s' % url)
        logger.info('[LIDARR] Posting to completed download handling now: %s' %
                    payload)

        r = requests.post(url, json=payload, headers=self.lidarr_headers)
        data = r.json()
        logger.info('content: %s' % data)

        check = True
        while check:
            url = self.lidarr_url + '/api/v1/command/' + str(data['id'])
            logger.info('[LIDARR] command check url : %s' % url)
            try:
                r = requests.get(url, params=None, headers=self.lidarr_headers)
                dt = r.json()
                logger.info('[LIDARR] Response: %s' % dt)
            except Exception as e:
                logger.warn('[%s] error returned from lidarr call. Aborting.' %
                            e)
                return False
            else:
                if dt['state'] == 'completed':
                    logger.info('[LIDARR] Successfully post-processed : ' +
                                self.snstat['name'])
                    check = False
                else:
                    time.sleep(10)

        if check is False:
            # we need to get the root path here in order to make sure we call the correct plex update ...
            # hash is known @ self.snstat['hash'], file will exist in snatch queue dir as hashvalue.hash
            # file contains complete snatch record - retrieve the 'path' value to get the series directory.
            return True
        else:
            return False
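
The polling loop above keeps hitting /api/v1/command/<id> until the returned record reports completion. A minimal sketch of the response shape it relies on follows; the field names (id, name, state) mirror the ones accessed in the code, and the values are illustrative assumptions. The Sonarr and Radarr variants further down poll the same way.

# Illustrative command-status record as consumed by the polling loop above.
sample_command_status = {
    'id': 1234,                      # used to build the follow-up check url
    'name': 'DownloadedAlbumsScan',
    'state': 'completed',            # the loop exits once this equals 'completed'
}
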
Example #3
    def add_queue(self, data):
        try:
            item = data['file']
            mode = 'file'
        except KeyError:
            item = data['hash']
            mode = 'hash'
        try:
            if mode == 'file':
                logger.info(
                    '[API-AWARE] Adding file to queue via FILE %s [label:%s]' %
                    (data['file'], data['label']))
                HQUEUE.put({
                    'mode': 'file-add',
                    'item': data['file'],
                    'label': data['label']
                })

            elif mode == 'hash':
                logger.info(
                    '[API-AWARE] Adding file to queue via HASH %s [label:%s]' %
                    (data['hash'], data['label']))
                HQUEUE.put({
                    'mode': 'hash-add',
                    'item': data['hash'],
                    'label': data['label']
                })
            else:
                logger.info(
                    '[API-AWARE] Unsupported mode or error in parsing. Ignoring request [%s]'
                    % data)
                return False
        except Exception:
            logger.info(
                '[API-AWARE] Unsupported mode or error in parsing. Ignoring request [%s]'
                % data)
            return False
        else:
            logger.info(
                '[API-AWARE] Successfully added to queue - Prepare for GLORIOUS retrieval'
            )
            return True
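
A minimal usage sketch of the two request shapes add_queue() accepts; the dict keys are the ones the method reads, while the instance name api, the paths and the hash value are purely illustrative assumptions.

# Hypothetical caller - 'api' stands in for whatever object exposes add_queue().
file_request = {
    'file': '/watchdir/Some.Show.S01E01.torrent',
    'label': 'tv',
}
hash_request = {
    'hash': '3f786850e387550fdab836ed7e6dc881de23001b',
    'label': 'tv',
}
# api.add_queue(file_request)   # queued as {'mode': 'file-add', ...}
# api.add_queue(hash_request)   # queued as {'mode': 'hash-add', ...}
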
Example #4
    def unrar_it(self, rar_set):
        logger.info('[RAR MANAGER] Extracting ' + str(len(rar_set['info'])) +
                    ' rar\'s for set : ' + rar_set['directory'])
        #arbitrarily pick the first entry and change directories.
        unrar_folder = rar_set['directory']
        #os.makedirs( unrar_folder )
        os.chdir(unrar_folder)
        logger.info('[RAR MANAGER] Changing to : ' + str(unrar_folder))
        unrar_cmd = '/usr/bin/unrar'
        baserar = rar_set['start_rar']
        # Extract.
        try:
            output = subprocess.check_output([unrar_cmd, 'x', baserar])
        except subprocess.CalledProcessError as e:
            if e.returncode == 3:
                logger.warn('[RAR MANAGER] [Unrar Error 3] - Broken Archive.')
            elif e.returncode == 1:
                logger.warn(
                    '[RAR MANAGER] [Unrar Error 1] - No files to extract.')
            return "unrar error"
        except Exception as e:
            logger.warn('[RAR MANAGER] Error: %s' % e)
            return "unrar error"

        return "success"
Example #5
    def main(self):
        status = 'None'
        dirlist = self.traverse_directories(self.path)
        rar_found = []

        for fname in dirlist:
            filename = fname['filename']

            rar_ex = r'\.(?:rar|r\d\d|\d\d\d)$'
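            # matches .rar archives, split volumes such as .r00-.r99, and
            # purely numeric part extensions such as .001 at the end of the name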
            rar_chk = re.findall(rar_ex, filename, flags=re.IGNORECASE)
            if rar_chk:
                #append the rars found to the rar_found list
                rar_found.append({
                    "directory": self.path,
                    "filename": filename
                })

        #if it needs to get unrar'd - we should do it here.
        if len(rar_found) > 0:
            rar_info = self.rar_check(rar_found)
            if rar_info is None:
                logger.warn(
                    '[RAR-DETECTION-FAILURE] Incomplete rar set detected - ignoring.'
                )
            else:
                logger.info('[RAR-DETECTION] Detected rar\'s within ' +
                            rar_info[0]['directory'] +
                            '. Initiating rar extraction.')
                if len(rar_info) > 0:
                    for rk in rar_info:
                        if rk['start_rar'] is None:
                            continue
                        logger.info('[RAR MANAGER] [ ' + str(len(rk['info'])) +
                                    ' ] ')  # : ' + str(rar_info))
                        logger.info(
                            '[RAR MANAGER] First Rar detection initiated for : '
                            + str(rk['start_rar']))
                        # extract the rar's biatch.
                        try:
                            rar_status = self.unrar_it(rk)
                        except Exception as e:
                            logger.warn(
                                '[RAR MANAGER] Error extracting rar: %s' % e)
                            continue
                        else:
                            if rar_status == "success":
                                logger.info(
                                    '[RAR MANAGER] Successfully extracted rar\'s.'
                                )
                                for rs in rk['info']:
                                    os.remove(
                                        os.path.join(self.path,
                                                     rs['filename']))
                                    logger.info('[RAR MANAGER] Removal of : ' +
                                                os.path.join(
                                                    self.path, rs['filename']))
                                #remove the crap in the directory that got logged earlier (I think it's done later though)
                                logger.info(
                                    '[RAR MANAGER] Removal of start rar: ' +
                                    rk['start_rar'])
                                os.remove(rk['start_rar'])
                                status = 'success'

        if status == 'success':
            logger.info('Success!')
            dirlist = self.traverse_directories(self.path)
        else:
            if len(rar_found) > 0:
                logger.warn('Unable to unrar items')
            else:
                logger.debug('No items to unrar.')
        return dirlist
Example #6
    def post_process(self):
        logger.info('snstat: %s' % self.snstat)
        issueid = None
        comicid = None
        nzb_name = None
        nzb = False
        try:
            logger.debug('Attempting to open: %s' %
                         os.path.join(self.torrentfile_dir, self.mylar_label,
                                      self.snstat['hash'] + '.mylar.hash'))
            with open(
                    os.path.join(self.torrentfile_dir, self.mylar_label,
                                 self.snstat['hash'] +
                                 '.mylar.hash')) as dfile:
                data = json.load(dfile)
        except Exception as e:
            logger.error('[%s] not able to load .mylar.hash file.' % e)
            #for those that were done outside of Mylar or using the -s switch on the cli directly by hash
            nzb_name = 'Manual Run'
        else:
            logger.debug('loaded .mylar.hash successfully - extracting info.')
            try:
                nzb_name = data['mylar_release_name']
            except KeyError:
                if 'mylar_release_nzbname' in data.keys():
                    # nzb_name HAS TO BE the filename itself so it can pp directly
                    nzb_name = os.path.basename(self.snstat['folder'])
                    nzb = True
                else:
                    #if mylar_release_name doesn't exist, fall back to the torrent_filename.
                    #mylar retry issue will not have a release_name
                    nzb_name = data['mylar_torrent_filename']

            if self.issueid is None:
                if data['mylar_issuearcid'] != 'None':
                    issueid = data['mylar_issuearcid']
                else:
                    if data['mylar_release_pack'] == 'False':
                        issueid = data['mylar_issueid']
                    else:
                        issueid = None
                comicid = data['mylar_comicid']
                if comicid == 'None':
                    comicid = None
            else:
                issueid = self.issueid
                comicid = None

        if self.issueid is not None and nzb_name == 'Manual Run':
            issueid = self.issueid
            comicid = None
            nzb_name = self.snstat['name']

        url = self.mylar_url + '/api'
        if all([self.applylabel is True, self.snstat['label'] != 'None']):
            if nzb is True:
                newpath = os.path.join(self.defaultdir, self.snstat['label'],
                                       self.snstat['extendedname'])
            else:
                if os.path.isdir(
                        os.path.join(self.defaultdir, self.snstat['label'],
                                     self.snstat['name'])):
                    newpath = os.path.join(self.defaultdir,
                                           self.snstat['label'],
                                           self.snstat['name'])
                else:
                    if os.path.isdir(
                            os.path.join(self.defaultdir,
                                         self.snstat['label'])):
                        newpath = os.path.join(self.defaultdir,
                                               self.snstat['label'])
        else:
            if nzb is True:
                newpath = os.path.join(self.defaultdir,
                                       self.snstat['extendedname'])
            else:
                newpath = os.path.join(self.defaultdir, self.snstat['name'])

        payload = {
            'cmd': 'forceProcess',
            'apikey': self.mylar_apikey,
            'nzb_name': nzb_name,
            'issueid': issueid,
            'comicid': comicid,
            'nzb_folder': newpath
        }

        logger.info('[MYLAR] Posting url: %s' % url)
        logger.info('[MYLAR] Posting to completed download handling now: %s' %
                    payload)

        try:
            r = requests.post(url,
                              params=payload,
                              headers=self.mylar_headers,
                              timeout=30)  # 0.001 would always time out before Mylar could respond
        except Exception as e:
            if any(['Connection refused' in str(e), 'Timeout' in str(e)]):
                logger.warn(
                    'Unable to connect to Mylar server. Please check that it is online [%s].'
                    % e)
            else:
                logger.warn('%s' % e)
            return False

        #response = r.json()
        logger.debug('content: %s' % r.content)

        logger.debug('[MYLAR] status_code: %s' % r.status_code)
        logger.info('[MYLAR] Successfully post-processed : ' +
                    self.snstat['name'])

        return True
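
A minimal sketch of the .mylar.hash sidecar file this method tries to load. The key names come from the parsing above; the file name and every value shown are illustrative assumptions only.

import json

sample_record = {
    'mylar_release_name': 'Some.Comic.001.2018',     # preferred name for post-processing
    'mylar_torrent_filename': 'Some.Comic.001.2018', # fallback when release_name is absent
    'mylar_release_pack': 'False',                   # stored as a string, not a bool
    'mylar_issueid': '123456',
    'mylar_issuearcid': 'None',                      # the string 'None' when not part of an arc
    'mylar_comicid': '78910',
    # an optional 'mylar_release_nzbname' key switches post_process() into its nzb branch
}

with open('3f786850e387550fdab836ed7e6dc881de23001b.mylar.hash', 'w') as dfile:
    json.dump(sample_record, dfile)
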
Example #7
    def __init__(self,
                 hash=None,
                 file=None,
                 add=False,
                 label=None,
                 partial=False,
                 conf=None):

        self.torrent_hash = hash
        self.filepath = file
        self.basedir = None
        self.label = label
        self.add = add is True
        self.partial = partial is True

        if conf is None:
            logger.warn(
                'Unable to load config file properly for rtorrent usage. Make sure harpoon.conf is located in the /conf directory'
            )
            return None
        else:
            self.conf_location = conf

        config = ConfigParser.RawConfigParser()
        config.read(self.conf_location)

        self.applylabel = config.getboolean('general', 'applylabel')
        self.multiple_seedboxes = config.getboolean('general',
                                                    'multiple_seedboxes')
        logger.info('multiple_seedboxes: %s' % self.multiple_seedboxes)
        if self.multiple_seedboxes is True:
            sectionsconfig1 = config.get('general', 'multiple1')
            sectionsconfig2 = config.get('general', 'multiple2')
            sectionlist1 = sectionsconfig1.split(',')
            sections1 = [x for x in sectionlist1 if x.lower() == label.lower()]
            sectionlist2 = sectionsconfig2.split(',')
            sections2 = [x for x in sectionlist2 if x.lower() == label.lower()]
            logger.info('sections1: %s' % sections1)
            logger.info('sections2: %s' % sections2)
            if sections1:
                logger.info('SEEDBOX-1 ENABLED!')
                self.start = config.getboolean('rtorrent', 'startonload')
                self.rtorrent_host = config.get(
                    'rtorrent', 'rtorr_host') + ':' + config.get(
                        'rtorrent', 'rtorr_port')
                self.rtorrent_user = config.get('rtorrent', 'rtorr_user')
                self.rtorrent_pass = config.get('rtorrent', 'rtorr_passwd')
                self.rtorrent_auth = config.get('rtorrent', 'authentication')
                self.rtorrent_rpc = config.get('rtorrent', 'rpc_url')
                self.rtorrent_ssl = config.getboolean('rtorrent', 'ssl')
                self.rtorrent_verify = config.getboolean(
                    'rtorrent', 'verify_ssl')
                self.basedir = config.get('post-processing', 'pp_basedir')
                self.multiple = '1'

            elif sections2:
                logger.info('SEEDBOX-2 ENABLED!')
                self.start = config.getboolean('rtorrent2', 'startonload')
                self.rtorrent_host = config.get(
                    'rtorrent2', 'rtorr_host') + ':' + config.get(
                        'rtorrent2', 'rtorr_port')
                self.rtorrent_user = config.get('rtorrent2', 'rtorr_user')
                self.rtorrent_pass = config.get('rtorrent2', 'rtorr_passwd')
                self.rtorrent_auth = config.get('rtorrent2', 'authentication')
                self.rtorrent_rpc = config.get('rtorrent2', 'rpc_url')
                self.rtorrent_ssl = config.getboolean('rtorrent2', 'ssl')
                self.rtorrent_verify = config.getboolean(
                    'rtorrent2', 'verify_ssl')
                self.basedir = config.get('post-processing2', 'pp_basedir2')
                self.multiple = '2'
            else:
                logger.info(
                    'No label directory assignment provided (i.e. the torrent file is not located in a directory named after the label).'
                )
                return None
        else:
            logger.info('SEEDBOX-1 IS LONE OPTION - ENABLED!')
            self.start = config.getboolean('rtorrent', 'startonload')
            self.rtorrent_host = config.get('rtorrent',
                                            'rtorr_host') + ':' + config.get(
                                                'rtorrent', 'rtorr_port')
            self.rtorrent_user = config.get('rtorrent', 'rtorr_user')
            self.rtorrent_pass = config.get('rtorrent', 'rtorr_passwd')
            self.rtorrent_auth = config.get('rtorrent', 'authentication')
            self.rtorrent_rpc = config.get('rtorrent', 'rpc_url')
            self.rtorrent_ssl = config.getboolean('rtorrent', 'ssl')
            self.rtorrent_verify = config.getboolean('rtorrent', 'verify_ssl')
            self.basedir = config.get('post-processing', 'pp_basedir')
            self.multiple = None

        self.client = TorClient.TorrentClient()
        if not self.client.connect(self.rtorrent_host, self.rtorrent_user,
                                   self.rtorrent_pass, self.rtorrent_auth,
                                   self.rtorrent_rpc, self.rtorrent_ssl,
                                   self.rtorrent_verify):
            logger.info('could not connect to host, exiting')
            return None
Example #8
    def post_process(self):
        url = self.sonarr_url + '/api/command'
        name = self.snstat['name']
        if 'extendedname' in self.snstat.keys():
            name = self.snstat['extendedname']
        if self.applylabel is True:
            if self.snstat['label'] == 'None':
                newpath = os.path.join(self.defaultdir, name)
            else:
                newpath = os.path.join(self.defaultdir, self.snstat['label'],
                                       name)
        else:
            newpath = os.path.join(self.defaultdir, name)

        if os.path.isfile(newpath):
            logger.warn(
                '[SONARR] This is an individual file, but Sonarr will only import from a directory. Creating a temporary directory and moving this so it can proceed.'
            )
            newdir = os.path.join(
                os.path.abspath(os.path.join(newpath, os.pardir)),
                os.path.splitext(self.snstat['name'])[0])
            logger.info('[SONARR] Creating directory: %s' % newdir)
            os.makedirs(newdir)
            logger.info('[SONARR] Moving %s -TO- %s' % (newpath, newdir))
            shutil.move(newpath, newdir)
            newpath = newdir
            logger.info('[SONARR] New path location now set to: %s' % newpath)

        #make sure it's in a Completed status otherwise it won't import (behaviour observed as of Sonarr v2.0.0.5301)
        cntit = 0
        while True:
            check_that_shit = self.checkyourself()
            if check_that_shit is True:
                break
            if cntit == 10:
                logger.error(
                    '[SONARR-ERROR] Unable to verify completion status of item - maybe this was already post-processed using a different method?'
                )
                return False
            cntit += 1
            time.sleep(15)

        payload = {
            "name": "DownloadedEpisodesScan",
            "path": newpath,
            "downloadClientID": self.snstat['hash'],
            "importMode": "Move"
        }

        logger.info(
            '[SONARR] Waiting 10s prior to sending to download handler to make sure item is completed within Sonarr'
        )
        logger.info(
            '[SONARR] Posting to completed download handling after a short 10s delay: %s'
            % payload)
        time.sleep(10)

        r = requests.post(url, json=payload, headers=self.sonarr_headers)
        data = r.json()

        check = True
        while check:
            url = self.sonarr_url + '/api/command/' + str(data['id'])
            logger.info('[SONARR] command check url : %s' % url)
            try:
                r = requests.get(url, params=None, headers=self.sonarr_headers)
                dt = r.json()
                logger.info('[SONARR] Response: %s' % dt)
            except Exception as e:
                logger.warn('[%s] error returned from sonarr call. Aborting.' %
                            e)
                return False
            else:
                if dt['state'] == 'completed':
                    #duration = time.strptime(dt['duration'][:-1], '%H:%M:%S.%f').tm_sec
                    #if tm_sec < 20:
                    #    #if less than 20s duration, the pp didn't succeed.
                    #else:
                    logger.info('[SONARR] Successfully post-processed : ' +
                                self.snstat['name'])
                    check = False
                else:
                    time.sleep(10)

        if check is False:
            #we need to get the root path here in order to make sure we call the correct plex update ...
            #hash is known @ self.snstat['hash'], file will exist in snatch queue dir as hashvalue.hash
            #file contains complete snatch record - retrieve the 'path' value to get the series directory.
            return True
        else:
            return False
Example #9
    def query(self):
        sendresponse = self.params['nzo_id']
        queue = {
            'mode': 'queue',
            'search': self.params['nzo_id'],
            'output': 'json',
            'apikey': self.sab_apikey
        }
        try:
            logger.info('sending now to %s' % self.sab_url)
            logger.debug('parameters set to %s' % queue)
            h = requests.get(self.sab_url, params=queue, verify=False)
        except Exception as e:
            logger.error('uh-oh: %s' % e)
            return {'completed': False}
        else:
            queueresponse = h.json()
            logger.info('successfully queried the queue for status')
            try:
                queueinfo = queueresponse['queue']
                logger.info('queue: %s' % queueresponse)
                logger.info('Queue status : %s' % queueinfo['status'])
                logger.info('Queue mbleft : %s' % queueinfo['mbleft'])
                if str(queueinfo['status']) == 'Downloading':
                    logger.info('[SABNZBD] Download is not yet finished')
                    return {'completed': False}
            except Exception as e:
                logger.error('error: %s' % e)
                return {'completed': False}

            logger.info('[SABNZBD] Download completed.  Querying history.')
            hist_params = {
                'mode': 'history',
                'failed': 0,
                'output': 'json',
                'limit': 500,
                'apikey': self.sab_apikey
            }
            hist = requests.get(self.sab_url, params=hist_params, verify=False)
            historyresponse = hist.json()
            histqueue = historyresponse['history']
            found = {'completed': True, 'failed': True}
            try:
                for hq in histqueue['slots']:
                    # logger.info('nzo_id: %s --- %s [%s]' % (hq['nzo_id'], sendresponse, hq['status']))
                    if hq['nzo_id'] == sendresponse and hq[
                            'status'] == 'Completed':
                        logger.info(
                            '[SABNZBD] Found matching completed item in history. Job has a status of %s'
                            % hq['status'])
                        logger.info('[SABNZBD] Location found @ %s' %
                                    hq['storage'])
                        path_folder = hq['storage']
                        nzbname = os.path.basename(hq['storage'])
                        found = {
                            'completed': True,
                            'name': re.sub(r'\.nzb', '', hq['nzb_name']).strip(),
                            'extendedname': nzbname,
                            'folder': path_folder,
                            'mirror': True,  # Change this
                            'multiple': None,
                            'label': hq['category'],
                            'hash': hq['nzo_id'],
                            'failed': False,
                            'files': []
                        }
                        break
                    elif hq['nzo_id'] == sendresponse and hq[
                            'status'] == 'Failed':
                        # get the stage / error message and see what we can do
                        stage = hq['stage_log']
                        for x in stage:
                            if 'Failed' in x['actions'] and any(
                                [x['name'] == 'Unpack', x['name'] == 'Repair'
                                 ]):
                                if 'moving' in x['actions']:
                                    logger.warn(
                                        '[SABNZBD] There was a failure in SABnzbd during the unpack/repair phase that caused a failure: %s'
                                        % x['actions'])
                                else:
                                    logger.warn(
                                        '[SABNZBD] Failure occurred during the Unpack/Repair phase of SABnzbd. This is probably a bad file: %s'
                                        % x['actions'])
                                    found = {'completed': True, 'failed': True}
                            if any(
                                [x['name'] == 'Download',
                                 x['name'] == 'Fail']):
                                logger.warn(
                                    '[SABNZBD] SABnzbd failed to download.  Articles were probably missing.'
                                )
                                found = {'completed': True, 'failed': True}
                    elif hq['nzo_id'] == sendresponse:
                        logger.warn('[SABNZBD] Unexpected response: %s' % hq)
                        found = {'completed': False}
            except Exception as e:
                logger.warn('error %s' % e)

            return found
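
A minimal sketch of the SABnzbd history payload the loop above walks. Only the fields the code actually touches are shown; the values are illustrative assumptions.

# Field names mirror the history parsing above; values are made up.
sample_history = {
    'history': {
        'slots': [{
            'nzo_id': 'SABnzbd_nzo_abc123',
            'status': 'Completed',
            'storage': '/mnt/downloads/tv/Some.Show.S01E01',
            'nzb_name': 'Some.Show.S01E01.nzb',
            'category': 'tv',
            'stage_log': [{
                'name': 'Unpack',
                'actions': ['Unpacked 12 files/folders']
            }]
        }]
    }
}
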
Example #10
def initialize(options=None, basepath=None, parent=None):

    if options is None:
        options = {}
    https_enabled = options['https_enabled']
    https_cert = options['https_cert']
    https_key = options['https_key']
    logger.debug("Web Initializing: %s" % options)
    if https_enabled:
        if not (os.path.exists(https_cert) and os.path.exists(https_key)):
            logger.warn(
                "Disabled HTTPS because of missing certificate or key.")
            https_enabled = False
    options_dict = {
        'log.screen': False,
        'server.thread_pool': 10,
        'server.socket_port': options['http_port'],
        'server.socket_host': options['http_host'],
        'engine.autoreload.on': False,
        'tools.encode.on': True,
        'tools.encode.encoding': 'utf-8',
        'tools.decode.on': True,
        'tools.sessions.on': True,
        'tools.sessions.storage_type': "File",
        'tools.sessions.storage_path': os.path.join(basepath, "sessions"),
        'tools.sessions.timeout': 120,
    }
    if https_enabled:
        options_dict['server.ssl_certificate'] = https_cert
        options_dict['server.ssl_private_key'] = https_key
        protocol = "https"
    else:
        protocol = "http"
    logger.debug("Options: %s" % options_dict)
    logger.info("Starting harpoon web server on %s://%s:%d/" %
                (protocol, options['http_host'], options['http_port']))
    cherrypy.config.update(options_dict)
    cherrypy.log.access_log.propagate = False
    logger.debug('DataDir: %s' % basepath)
    conf = {
        '/': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': os.path.join(basepath, 'data'),
            'tools.proxy.on': options['http_proxy'],
            'tools.auth.on': False,
            'tools.sessions.on': True
        },
        '/interfaces': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': os.path.join(basepath, 'data',
                                                'interfaces'),
            'tools.auth.on': False,
            'tools.sessions.on': False
        },
        '/images': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': os.path.join(basepath, 'data', 'images'),
            'tools.auth.on': False,
            'tools.sessions.on': False
        },
        '/css': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': os.path.join(basepath, 'data', 'css'),
            'tools.auth.on': False,
            'tools.sessions.on': False
        },
        '/js': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': os.path.join(basepath, 'data', 'js'),
            'tools.auth.on': False,
            'tools.sessions.on': False
        },
        '/favicon.ico': {
            'tools.staticfile.on': True,
            'tools.staticfile.filename': os.path.join(basepath, 'data',
                                                      'images', 'favicon.ico'),
            'tools.auth.on': False,
            'tools.sessions.on': False
        }
    }

    if options['http_pass'] != "":
        logger.info("Web server authentication is enabled, username is '%s'" %
                    options['http_user'])
        conf['/'].update({
            'tools.auth.on': True,
            'tools.sessions.on': True,
            'tools.auth_basic.on': True,
            'tools.auth_basic.realm': 'harpoon',
            'tools.auth_basic.checkpassword':
                cherrypy.lib.auth_basic.checkpassword_dict(
                    {options['http_user']: options['http_pass']})
        })
        conf['/api'] = {'tools.auth_basic.on': False}
    logger.debug('config: %s' % conf)
    # Prevent time-outs
    try:
        cherrypy.engine.timeout_monitor.unsubscribe()
        cherrypy.tree.mount(WebInterface(parent=parent),
                            str(options['http_root']),
                            config=conf)
        cherrypy.engine.autoreload.subscribe()
        cherrypy.process.servers.check_port(str(options['http_host']),
                                            options['http_port'])
        cherrypy.server.start()
    except IOError:
        print 'Failed to start on port: %i. Is something else running?' % (
            options['http_port'])
        sys.exit(1)
    except Exception as e:
        print 'Error: %s' % e
        sys.exit(1)
    cherrypy.server.wait()
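
A minimal sketch of the options dict initialize() expects. The key names are exactly the ones read above; every value (ports, paths, credentials) is an illustrative assumption.

# Key names taken from initialize(); values are placeholders only.
web_options = {
    'http_host': '0.0.0.0',
    'http_port': 8084,
    'http_root': '/',
    'http_proxy': False,
    'http_user': 'admin',
    'http_pass': '',        # an empty password leaves basic auth disabled
    'https_enabled': False,
    'https_cert': 'server.crt',
    'https_key': 'server.key',
}

# initialize(options=web_options, basepath='/opt/harpoon', parent=None)
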
Example #11
    def post_process(self):
        check = False  # stays False when the .hash sidecar loads; the history fallback below sets it
        try:
            with open(
                    os.path.join(self.torrentfile_dir, self.radarr_label,
                                 self.snstat['hash'] + '.hash')) as dfile:
                data = json.load(dfile)
        except Exception:
            path = self.torrentfile_dir

            url = self.radarr_url + '/api/history'
            payload = {
                'pageSize': 1000,
                'page': 1,
                'filterKey': 'eventType',
                'filterValue': 1,
                'sortKey': 'date',
                'sortDir': 'desc'
            }

            check = True
            logger.info('[RADARR] Querying against history now: %s' % payload)
            r = requests.get(url, params=payload, headers=self.radarr_headers)
            logger.info(r.status_code)
            result = r.json()
            hash = None
            #eventType = 'grabbed'
            #downloadClient = 'RTorrent'

            for x in result['records']:
                try:
                    if x['downloadId']:
                        if self.snstat['hash'] == x['downloadId']:
                            hash = x['downloadId']
                            data = x
                            logger.info('[RADARR] file located: %s' % hash)
                            check = False
                            break
                except:
                    continue

        if check is True:
            logger.warn(
                '[RADARR] Unable to locate movie within most recently snatched items. For this to work, the download MUST be initiated via Radarr.'
            )
            return {
                'status': False,
                'radarr_id': self.radarr_id,
                'radarr_movie': self.radarr_movie
            }

        logger.info(data)
        radarr_id = data['movieId']
        radarr_movie = data['movie']['title']
        radarr_root_path = data['movie']['path']
        #we can't run the downloadmoviescan (ie. manual post-processing) since for some reason radarr will push the new download
        #to the original location regardless of any setting previously (it must be storing it in the download table or something)
        name = self.snstat['name']
        if 'extendedname' in self.snstat.keys():
            name = self.snstat['extendedname']
        if self.applylabel is True:
            if self.snstat['label'] == 'None':
                newpath = os.path.join(self.defaultdir, name)
            else:
                newpath = os.path.join(self.defaultdir, self.snstat['label'],
                                       name)
        else:
            newpath = os.path.join(self.defaultdir, name)

        if os.path.isfile(newpath):
            logger.warn(
                '[RADARR] This is an individual movie, but Radarr will only import from a directory. Creating a temporary directory and moving this so it can proceed.'
            )
            newdir = os.path.join(
                os.path.abspath(os.path.join(newpath, os.pardir)),
                os.path.splitext(self.snstat['name'])[0])
            if os.path.exists(newdir):
                logger.info(
                    '[RADARR] Directory already exists.  Clearing it out, and reusing it.'
                )
                for filename in os.listdir(newdir):
                    os.remove(os.path.join(newdir, filename))
            else:
                logger.info('[RADARR] Creating directory: %s' % newdir)
                os.makedirs(newdir)
            logger.info('[RADARR] Moving ' + newpath + ' -TO- ' + newdir)
            shutil.move(newpath, newdir)
            newpath = newdir
            logger.info('[RADARR] New path location now set to : ' + newpath)

        url = self.radarr_url + '/api/command'
        payload = {
            'name': 'downloadedmoviesscan',
            'path': newpath,
            'downloadClientID': self.snstat['hash'],
            'importMode': 'Move'
        }

        logger.info(
            '[RADARR] Posting to completed download handling now so the file gets moved as per normal: '
            + str(payload))
        r = requests.post(url, json=payload, headers=self.radarr_headers)
        data = r.json()

        check = True
        while check:
            try:
                url = self.radarr_url + '/api/command/' + str(data['id'])
                r = requests.get(url, params=None, headers=self.radarr_headers)
                dt = r.json()
            except Exception as e:
                logger.warn('[%s] error returned from radarr call. Aborting.' %
                            e)
                return False
            else:
                if dt['state'] == 'completed':
                    logger.info('[RADARR] Successfully post-processed : ' +
                                self.snstat['name'])
                    check = False
                else:
                    logger.info(
                        '[RADARR] Post-Process of file currently running - will recheck in 60s to see if completed'
                    )
                    time.sleep(60)

        if check is False:
            return {
                'status': True,
                'radarr_id': radarr_id,
                'radarr_movie': radarr_movie,
                'radarr_root': radarr_root_path
            }
        else:
            return {
                'status': False,
                'radarr_id': radarr_id,
                'radarr_movie': radarr_movie,
                'radarr_root': radarr_root_path
            }
Example #12
    def og_folders(self):
        if self.keep_original_foldernames is True:
            url = self.radarr_url + '/api/movie/' + str(self.radarr_id)

            logger.info(
                '[RADARR] Retrieving existing movie information for %s' %
                self.radarr_movie)

            r = requests.get(url, headers=self.radarr_headers)
            existingdata = r.json()

            #try updating the path
            logger.info("[RADARR] OLD_PATH: %s" % existingdata['path'])
            existingfilename = None
            try:
                existingfilename = existingdata['movieFile']['relativePath']
                logger.info("[RADARR] OLD_FILENAME: %s" % existingfilename)
            except:
                pass

            #now we check the movieinfo to see what directory we sling it to...
            if all([
                    self.dir_hd_movies is None, self.dir_sd_movies is None,
                    self.dir_web_movies is None
            ]):
                destdir = self.radarr_rootdir
            else:
                logger.info(
                    '[RADARR] Now checking movie file for further information as to where to sling the final file.'
                )
                destdir = self.moviecheck(existingdata)

            logger.info('[RADARR] Current/Existing Directory: %s' % destdir)

            newpath = os.path.join(destdir, self.snstat['name'])
            logger.info('[RADARR] New Directory: %s' % newpath)

            #makes sure we have enough free space on new location for the move
            st = os.statvfs(destdir)

            dst_freesize = st.f_bavail * st.f_frsize
            src_filesize = 0
            for dirpath, dirnames, filenames in os.walk(existingdata['path']):
                for f in filenames:
                    fp = os.path.join(dirpath, f)
                    src_filesize += os.path.getsize(fp)

            logger.info('[FREESPACE-CHECK] ' + destdir + ' has ' +
                        str(self.sizeof_fmt(dst_freesize)) + ' free.')
            logger.info('[FREESPACE-CHECK] ' + self.snstat['name'] +
                        ' will consume ' + str(self.sizeof_fmt(src_filesize)) +
                        '.')
            if dst_freesize > src_filesize:
                logger.info(
                    '[FREESPACE-CHECK] PASS. Free space available after move: '
                    + str(self.sizeof_fmt(dst_freesize - src_filesize)) + '.')
            else:
                logger.warn(
                    '[FREESPACE-CHECK] FAIL. There is not enough free space on the destination to move file.'
                )
                sys.exit('Not enough free space on destination: ' + destdir)

            #move the dir to the new location (if in same dir will do a rename, otherwise will do a copy, then delete)
            shutil.move(existingdata['path'], newpath)
            logger.info("[RADARR] MOVE/RENAME successful to : %s " % newpath)

            url = self.radarr_url + '/api/command'
            refreshpayload = {
                'name': 'refreshmovie',
                'movieId': int(self.radarr_id)
            }

            logger.info(
                "[RADARR] Refreshing movie to make sure old location could not be located anymore: %s"
                % refreshpayload)
            r = requests.post(url,
                              json=refreshpayload,
                              headers=self.radarr_headers)
            datachk = r.json()
            check = True
            while check:
                url = self.radarr_url + '/api/command/' + str(datachk['id'])
                logger.info("[RADARR] API Submitting: %s" % url)
                r = requests.get(url, params=None, headers=self.radarr_headers)
                dchk = r.json()
                if dchk['state'] == 'completed':
                    check = False
                else:
                    logger.info(
                        '[RADARR] Refreshing of movie currently running - will recheck in 10s to see if completed'
                    )
                    time.sleep(10)

            url = self.radarr_url + '/api/movie/' + str(self.radarr_id)

            logger.info(
                '[RADARR] Retrieving existing movie information for %s' %
                self.radarr_movie)

            r = requests.get(url, headers=self.radarr_headers)
            data = r.json()

            data['path'] = u"" + newpath.decode('utf-8')
            data['folderName'] = u"" + self.snstat['name'].decode('utf-8')
            url = self.radarr_url + '/api/movie'
            #set the new path in the json - assume that the torrent name is ALSO the folder name
            #could set folder name to file name via an option..possible to-do.

            logger.info('[RADARR] Updating data for movie: ' + str(data))
            r = requests.put(url, json=data, headers=self.radarr_headers)

            url = self.radarr_url + '/api/command'
            refreshpayload = {
                'name': 'refreshmovie',
                'movieId': int(self.radarr_id)
            }

            logger.info(
                "[RADARR] Refreshing movie to make sure new location is now recognized: %s"
                % refreshpayload)
            r = requests.post(url,
                              json=refreshpayload,
                              headers=self.radarr_headers)
            datachk = r.json()
            check = True
            while check:
                url = self.radarr_url + '/api/command/' + str(datachk['id'])
                logger.info("[RADARR] API Submitting: %s" % url)
                r = requests.get(url, params=None, headers=self.radarr_headers)
                dchk = r.json()
                if dchk['state'] == 'completed':
                    check = False
                else:
                    logger.info(
                        '[RADARR] Refreshing of movie currently running - will recheck in 10s to see if completed'
                    )
                    time.sleep(10)

            url = self.radarr_url + '/api/movie/' + str(self.radarr_id)

            logger.info(
                '[RADARR] Retrieving existing movie information for %s' %
                self.radarr_movie)

            r = requests.get(url, headers=self.radarr_headers)
            data = r.json()

            data['path'] = u"" + newpath.decode('utf-8')
            data['folderName'] = u"" + self.snstat['name'].decode('utf-8')
            url = self.radarr_url + '/api/movie'
            #set the new path in the json - assume that the torrent name is ALSO the folder name
            #could set folder name to file name via an option..possible to-do.

            logger.info('[RADARR] Updating data for movie: ' + str(data))
            r = requests.put(url, json=data, headers=self.radarr_headers)

            url = self.radarr_url + '/api/command'
            refreshpayload = {
                'name': 'refreshmovie',
                'movieId': int(self.radarr_id)
            }

            logger.info(
                "[RADARR] Refreshing movie to make sure new location is now recognized: %s"
                % refreshpayload)
            r = requests.post(url,
                              json=refreshpayload,
                              headers=self.radarr_headers)
            datachk = r.json()
            check = True
            while check:
                url = self.radarr_url + '/api/command/' + str(datachk['id'])
                logger.info("[RADARR] API Submitting: %s" % url)
                r = requests.get(url, params=None, headers=self.radarr_headers)
                dchk = r.json()
                if dchk['state'] == 'completed':
                    logger.info(
                        '[RADARR] Successfully updated paths to original foldername for '
                        + self.radarr_movie)
                    check = False
                else:
                    logger.info(
                        '[RADARR] Refreshing of movie currently running - will recheck in 10s to see if completed'
                    )
                    time.sleep(10)