Example 1
 def cleanup(self):
     queue = {
         'mode': 'history',
         'name': 'delete',
         'del_files': 1,
         'value': self.params['nzo_id'],
         'output': 'json',
         'apikey': self.sab_apikey,
         'limit': 500
     }
     try:
         logger.info('sending now to %s' % self.sab_url)
         logger.debug('parameters set to %s' % queue)
         h = requests.get(self.sab_url, params=queue, verify=False)
     except Exception as e:
         logger.error('uh-oh: %s' % e)
         return {'status': False}
     else:
         queueresponse = h.json()
         if queueresponse['status']:
             logger.info(
                 '[SABNZBD] Successfully deleted the item from SABnzbd.')
         else:
             logger.warn('[SABNZBD] Unable to delete item from SABnzbd.')
     return queueresponse
Example 2
    def unrar_it(self, rar_set):
        logger.info('[RAR MANAGER] Extracting ' + str(len(rar_set['info'])) +
                    ' rars for set: ' + rar_set['directory'])
        #arbitrarily pick the first entry and change directories.
        unrar_folder = rar_set['directory']
        #os.makedirs( unrar_folder )
        os.chdir(unrar_folder)
        logger.info('[RAR MANAGER] Changing to : ' + str(unrar_folder))
        unrar_cmd = '/usr/bin/unrar'
        baserar = rar_set['start_rar']
        # Extract.
        try:
            output = subprocess.check_output([unrar_cmd, 'x', baserar])
        except subprocess.CalledProcessError as e:
            if e.returncode == 3:
                logger.warn('[RAR MANAGER] [Unrar Error 3] - Broken Archive.')
            elif e.returncode == 1:
                logger.warn(
                    '[RAR MANAGER] [Unrar Error 1] - No files to extract.')
            return "unrar error"
        except Exception as e:
            logger.warn('[RAR MANAGER] Error: %s' % e)
            return "unrar error"

        return "success"
Example 3
    def sender(self, files=None):
        try:
            from requests.packages.urllib3 import disable_warnings
            disable_warnings()
        except Exception:
            logger.info('Unable to disable https warnings. Expect some spam if using https nzb providers.')

        try:
            logger.info('parameters set to %s' % self.params)
            logger.info('sending now to %s' % self.sab_url)
            if files:
                sendit = requests.post(self.sab_url, data=self.params, files=files, verify=False)
            else:
                sendit = requests.post(self.sab_url, data=self.params, verify=False)
        except Exception as e:
            logger.info('Failed to send to client: %s' % e)
            return {'status': False}
        else:
            sendresponse = sendit.json()
            logger.info(sendresponse)
            if sendresponse['status'] is True:
                queue_params = {'status': True,
                                'nzo_id': ''.join(sendresponse['nzo_ids']),
                                'queue':  {'mode':   'queue',
                                           'search':  ''.join(sendresponse['nzo_ids']),
                                           'output':  'json',
                                           'apikey':  self.sab_apikey}}

            else:
                queue_params = {'status': False}

            return queue_params
Example 4
 def __init__(self, sonarr_info):
     logger.info(sonarr_info)
     self.sonarr_url = sonarr_info['sonarr_url']
     self.sonarr_headers = sonarr_info['sonarr_headers']
     self.applylabel = sonarr_info['applylabel']
     self.defaultdir = sonarr_info['defaultdir']
     self.snstat = sonarr_info['snstat']
Example 5
 def rar_check(self, rarlist):
     #used to determine the first rar of the set so that unraring won't fail
     #returns a list containing a dict with the 'start_rar', its 'directory' and the 'info' for the remainder of the rars
     rar_keep = {}
     rar_temp = []
     rar_keepsake = []
     startrar = None
     rar_ex1 = r'(\.001|\.part0*1\.rar|^((?!part\d*\.rar$).)*\.rar)$'
     #this might have to get sorted so that rar comes before r01, r02, etc
     for f in rarlist:
         first_rarchk = re.findall(rar_ex1,
                                   f['filename'],
                                   flags=re.IGNORECASE)
         if first_rarchk:
             startrar = f['filename']
             unrardir = f['directory']
             logger.info('[RAR DETECTION] First RAR detected as :' +
                         f['filename'])
         else:
             rar_temp.append(f)
     if startrar is not None:
         rar_keep['start_rar'] = startrar
         rar_keep['directory'] = unrardir
         rar_keep['info'] = rar_temp
         rar_keepsake.append(rar_keep)
         #logger.info(rar_keepsake)
         return rar_keepsake
     else:
         return None
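The first-rar detection above hinges entirely on the rar_ex1 pattern. A minimal standalone sketch of what it matches (the sample filenames are invented for illustration):

import re

# Same pattern as rar_check() above; the filenames below are made-up examples.
rar_ex1 = r'(\.001|\.part0*1\.rar|^((?!part\d*\.rar$).)*\.rar)$'
samples = ['show.part01.rar', 'show.part02.rar', 'show.rar', 'show.r01', 'show.001']
for name in samples:
    hit = re.findall(rar_ex1, name, flags=re.IGNORECASE)
    print('%s -> %s' % (name, 'start_rar' if hit else 'remainder'))
# Expected: show.part01.rar, show.rar and show.001 are treated as the starting
# volume; show.part02.rar and show.r01 fall into the 'info' remainder list.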
Example 6
 def __init__(self, sonarr_info):
     logger.info(sonarr_info)
     self.sonarr_url = config.SONARR['sonarr_url']
     self.sonarr_headers = config.SONARR['sonarr_headers']
     self.applylabel = config.GENERAL['applylabel']
     self.defaultdir = config.GENERAL['defaultdir']
     self.snstat = sonarr_info['snstat']
Example 7
 def IndexableQueue(self, item):
     d = HQUEUE.listqueue
     queue_position = [i for i, t in enumerate(d) if t['item'] == item]
     queue_pos = '%s/%s' % (''.join(
         str(e) for e in queue_position), HQUEUE.qsize())
     logger.info('queue position of %s' % queue_pos)
     return queue_pos
Example 8
 def get_the_hash(self):
     # Open the torrent file and make sure it gets closed after reading
     with open(self.filepath, "rb") as torrent_file:
         metainfo = bencode.decode(torrent_file.read())
     info = metainfo['info']
     thehash = hashlib.sha1(bencode.encode(info)).hexdigest().upper()
     logger.info('Hash: %s' % thehash)
     return thehash
Example 9
 def handle(self):
     data = self.recv()
     logger.info('[API-AWARE] Incoming api request: %s' % data)
     if data['apikey'] == '8ukjkjdhkjh9817891lHJDJHAKllsdljal':
         self.send({'Status': True, 'Message': 'Successful authentication'})
         self.add_queue(data)
     else:
         self.send({'Status': False, 'Message': 'Invalid APIKEY'})
Example 10
 def __init__(self, ll_info):
     logger.info(ll_info)
     self.lazylibrarian_url = ll_info['lazylibrarian_url']
     self.lazylibrarian_label = ll_info['lazylibrarian_label']
     self.lazylibrarian_headers = ll_info['lazylibrarian_headers']
     self.lazylibrarian_apikey = ll_info['lazylibrarian_apikey']
     self.lazylibrarian_filedata = ll_info['lazylibrarian_filedata']
     self.applylabel = ll_info['applylabel']
     self.defaultdir = ll_info['defaultdir']
     self.snstat = ll_info['snstat']
Example 11
    def __init__(self):

        #HOST, PORT = "localhost", 50007
        #server = ThreadedTCPServer((HOST, PORT), ThreadedTCPRequestHandler)
        #ip, port = server.server_address
        #server_thread = threading.Thread(target=server.serve_forever, args=queue)
        #server_thread.daemon = True
        #server_thread.start()

        logger.info('[API-AWARE] Successfully sent API-AWARE into the background to monitor for connections...')
        logger.info('[API-AWARE] Now preparing to initialize queue across modules...')
Example 12
 def __init__(self, ll_info):
     logger.info(ll_info)
     self.lazylibrarian_url = config.LAZYLIBRARIAN['lazylibrarian_url']
     self.lazylibrarian_label = config.LAZYLIBRARIAN['lazylibrarian_label']
     self.lazylibrarian_headers = config.LAZYLIBRARIAN[
         'lazylibrarian_headers']
     self.lazylibrarian_apikey = config.LAZYLIBRARIAN[
         'lazylibrarian_apikey']
     self.lazylibrarian_filedata = ll_info['filedata']
     self.applylabel = config.GENERAL['applylabel']
     self.defaultdir = config.GENERAL['defaultdir']
     self.snstat = ll_info['snstat']
     logger.debug("---")
Example 13
    def auth(self):
        if any([self.plex_token == '', self.plex_token is None]):
            #step 1 - post request to plex.tv/users/sign_in.json to get x-plex-token since every application needs a unique token.
            url = 'https://plex.tv/users/sign_in.json'
            base64string = base64.encodestring(
                '%s:%s' % (self.plex_login, self.plex_password)).replace(
                    '\n', '')

            headers = {
                'X-Plex-Client-Identifier': 'harpoon2017',
                'X-Plex-Product': 'Harpoon-PLEX-UPDATER',
                'X-Plex-Version': '0.5j',
                'Authorization': 'Basic %s' % base64string
            }

            logger.info(
                '[HARPOON-PLEX] Requesting token from Plex.TV for application usage'
            )

            r = requests.post(url, headers=headers, verify=True)
            logger.info('[HARPOON-PLEX] Status Code: %s' % r.status_code)
            if any([r.status_code == 201, r.status_code == 200]):
                data = r.json()
                self.plex_token = data['user']['authToken']
                self.plex_uuid = data['user']['uuid']
                logger.info(
                    '[HARPOON-PLEX] Successfully retrieved authorization token for PMS integration'
                )
                return {'status': True}
            else:
                logger.info(
                    '[HARPOON-PLEX] Unable to successfully authenticate - check your settings; it will try again next time.'
                )
                return {'status': False}
Example 14
    def post_process(self):
        url = self.sickrage_url + '/api/' + self.sickrage_apikey
        if self.applylabel is True:
            if self.snstat['label'] == 'None':
                newpath = os.path.join(self.defaultdir, self.snstat['name'])
            else:
                newpath = os.path.join(self.defaultdir, self.snstat['label'],
                                       self.snstat['name'])
        else:
            newpath = os.path.join(self.defaultdir, self.snstat['name'])

        payload = {
            'cmd': 'postprocess',
            'path': newpath,
            'delete': bool(self.sickrage_delete),
            'force_next': 0,
            'force_replace': bool(self.sickrage_force_replace),
            'is_priority': bool(self.sickrage_is_priority),
            'process_method': self.sickrage_process_method,
            'return_data': 1,
            'failed': bool(self.sickrage_failed),
            'type': self.sickrage_type
        }

        logger.info('[SICKRAGE] Posting url: %s' % url)
        logger.info(
            '[SICKRAGE] Posting to completed download handling now: %s' %
            payload)

        r = requests.post(url, json=payload, headers=self.sickrage_headers)
        data = r.json()
        logger.info('content: %s' % data)
        logger.info('[SICKRAGE] Successfully post-processed : ' +
                    self.snstat['name'])
        return True
Example 15
 def __init__(self, sickrage_info):
     logger.info(sickrage_info)
     self.sickrage_url = config.SICKRAGE['sickrage_url']
     self.sickrage_apikey = config.SICKRAGE['sickrage_apikey']
     self.sickrage_forcereplace = config.SICKRAGE['sickrage_forcereplace']
     self.sickrage_forcenext = config.SICKRAGE['sickrage_forcenext']
     self.sickrage_process_method = config.SICKRAGE[
         'sickrage_process_method']
     self.sickrage_is_priority = config.SICKRAGE['sickrage_is_priority']
     self.sickrage_failed = config.SICKRAGE['sickrage_failed']
     self.sickrage_delete = config.SICKRAGE['sickrage_delete']
     self.sickrage_type = config.SICKRAGE['sickrage_type']
     self.sickrage_headers = config.SICKRAGE['sickrage_headers']
     self.applylabel = config.GENERAL['applylabel']
     self.defaultdir = config.GENERAL['defaultdir']
     self.snstat = sickrage_info['snstat']
Example 16
    def traverse_directories(self, dir, filesfirst=False):
        filelist = []
        for (dirname, subs, files) in os.walk(dir, followlinks=True):

            for fname in files:

                filelist.append({"directory": dirname, "filename": fname})

        if len(filelist) > 0:
            logger.info('there are ' + str(len(filelist)) + ' files.')

        if filesfirst:
            return sorted(filelist,
                          key=lambda x: (x['filename'], x['directory']),
                          reverse=False)
        else:
            return sorted(filelist,
                          key=lambda x:
                          (x['directory'], os.path.splitext(x['filename'])[1]),
                          reverse=True)
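The two sort orders above are easy to confuse. A tiny sketch with invented entries showing the difference between filesfirst=True (plain filename order) and the default (grouped by directory, then extension, reversed):

import os

# Invented entries to illustrate the two sort keys in traverse_directories().
filelist = [{'directory': '/data/b', 'filename': 'two.rar'},
            {'directory': '/data/a', 'filename': 'one.nfo'},
            {'directory': '/data/a', 'filename': 'three.mkv'}]

files_first = sorted(filelist, key=lambda x: (x['filename'], x['directory']))
by_directory = sorted(filelist,
                      key=lambda x: (x['directory'], os.path.splitext(x['filename'])[1]),
                      reverse=True)
print(files_first)   # one.nfo, three.mkv, two.rar (alphabetical by filename)
print(by_directory)  # /data/b/two.rar first, then the /data/a entries with .nfo before .mkv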
Example 17
    def add_queue(self, data):
        try:
            item = data['file']
            mode = 'file'
        except KeyError:
            item = data['hash']
            mode = 'hash'

        if mode == 'file':
            logger.info('[API-AWARE] Adding file to queue via FILE %s [label:%s]' % (data['file'], data['label']))
            #harpoon.SNQUEUE.put({'mode':  'file-add',
            #                     'item':  data['file'],
            #                     'label': data['label']})

        elif mode == 'hash':
            logger.info('[API-AWARE] Adding file to queue via HASH %s [label:%s]' % (data['hash'], data['label']))
            #harpoon.SNQUEUE.put({'mode':  'hash-add',
            #                     'item':  data['hash'],
            #                     'label': data['label']})

        return
Example 18
    def sections(self):
        #step 2 - get the sections
        url = self.plex_host + '/library/sections'
        headers = {'X-Plex-Token': str(self.plex_token)}

        logger.info('[HARPOON-PLEX] Querying plex for library / sections required for application usage.')
        r = requests.get(url, headers=headers)
        logger.info('[HARPOON-PLEX] Status Code: %s' % r.status_code)
        if any([r.status_code == 200, r.status_code == 201]):
            root = ET.fromstring(r.content)
            sections = []
            libraries = {}
            locations = []
            for child in root.iter('Directory'):
                for ch in child.iter('Location'):
                    locations.append({'id':   ch.get('id'),
                                      'path': ch.get('path')})

                sections.append({'title':     child.get('title'),
                                 'key':       child.get('key'),
                                 'locations': locations})
                locations = []

            libraries['sections'] = sections
            libraries['status'] = True
            return libraries
        else:
            logger.info('[HARPOON-PLEX] Unable to retrieve sections from server - cannot update library due to this.')
            return {'status': False}
Example 19
    def checkyourself(self):

        url = self.sonarr_url + '/api/queue'
        checkit = False

        logger.info(
            '[SONARR] Querying against active queue now for completion')
        r = requests.get(url, headers=self.sonarr_headers)
        logger.info(r.status_code)
        results = r.json()

        for x in results:
            try:
                if x['downloadId']:
                    if self.snstat['hash'] == x['downloadId']:
                        if x['status'] == 'Completed':
                            logger.info(
                                '[SONARR] file has been marked as completed within Sonarr. It\'s a Go!'
                            )
                            checkit = True
                            break
            except:
                continue

        return checkit
Example 20
 def remove(self, hash, removefile=False):
     qsize = self.qsize()
     logger.debug('[QUEUE] Removal started (Queue Size: %s)' % qsize)
     msg = ''
     if qsize:
         for x in range(0, qsize):
             item = self.SNQUEUE.get(block=True)
             if not item['item'] == hash:
                 logger.debug('[QUEUE] Nope')
                 self.SNQUEUE.put(item)
             else:
                 logger.debug('[QUEUE] Found it')
                 if hash in self.CKQUEUE.keys():
                     msg += "Item '%s' removed from queue.\n" % self.CKQUEUE[
                         hash]['name']
                     logger.debug('[QUEUE] %s' % msg)
                     self.ckupdate(hash, {
                         'stage': 'failed',
                         'status': 'Removed from Queue'
                     })
                     if removefile:
                         try:
                             filename = os.path.join(
                                 str(config.GENERAL['torrentfile_dir']),
                                 str(item['label']),
                                 str(item['item']) + '.' +
                                 str(item['mode']))
                             os.remove(filename)
                             msg += "File '%s' removed." % filename
                             logger.info('[USER] File %s removed' %
                                         filename)
                         except Exception as e:
                             logger.info(
                                 '[USER] File could not be removed: %s' % e)
                             msg += "File '%s' could not be removed.  Reason: %s" % (
                                 filename, e)
     return msg
Example 21
    def add_queue(self, data):
        try:
            item = data['file']
            mode = 'file'
        except KeyError:
            item = data['hash']
            mode = 'hash'
        try:
            if mode == 'file':
                logger.info(
                    '[API-AWARE] Adding file to queue via FILE %s [label:%s]' %
                    (data['file'], data['label']))
                HQUEUE.put({
                    'mode': 'file-add',
                    'item': data['file'],
                    'label': data['label']
                })

            elif mode == 'hash':
                logger.info(
                    '[API-AWARE] Adding file to queue via HASH %s [label:%s]' %
                    (data['hash'], data['label']))
                HQUEUE.put({
                    'mode': 'hash-add',
                    'item': data['hash'],
                    'label': data['label']
                })
            else:
                logger.info(
                    '[API-AWARE] Unsupported mode or error in parsing. Ignoring request [%s]'
                    % data)
                return False
        except:
            logger.info(
                '[API-AWARE] Unsupported mode or error in parsing. Ignoring request [%s]'
                % data)
            return False
        else:
            logger.info(
                '[API-AWARE] Successfully added to queue - Prepare for GLORIOUS retrieval'
            )
            return True
Example 22
 def handle(self):
     d = self.request.recv(1024)
     dt = d.split("\n")[1]
     data = json.loads(dt)
     #logger.info(type(data))
     if data['apikey'] == SOCKET_API:
         if data['mode'] == 'add':
             logger.info(
                 '[API-AWARE] Request received via API for item [%s] to be remotely added to queue:'
                 % data['hash'])
             addq = self.add_queue(data)
             queue_position = self.IndexableQueue(data['hash'])
             if addq is True:
                 self.send({
                     'Status': True,
                     'Message': 'Successful authentication',
                     'Added': True,
                     'QueuePosition': queue_position
                 })
             else:
                 self.send({
                     'Status': True,
                     'Message': 'Successful authentication',
                     'Added': False
                 })
         elif data['mode'] == 'queue':
             logger.info(
                 '[API-AWARE] Request received via API for listing of current queue'
             )
             currentqueue = None
             if HQUEUE.qsize() != 0:
                 for x in HQUEUE.ckqueue().keys():
                     if HQUEUE.ckqueue()[x]['stage'] == 'current':
                         currentqueue = x
                         logger.info('currentqueue: %s' % currentqueue)
                         break
             self.send({
                 'Status': True,
                 'QueueSize': HQUEUE.qsize(),
                 'CurrentlyInProgress': currentqueue,
                 'QueueContent': HQUEUE.queuelist()
             })
     else:
         self.send({
             'Status': False,
             'Message': 'Invalid APIKEY',
             'Added': False
         })
         return
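Based on how handle() above parses a request (JSON on the second line of the payload, carrying apikey, mode, hash and label), a client call might look roughly like the sketch below. The host, port and key values are placeholders, not taken from the examples.

import json
import socket

# Hypothetical client for the API-AWARE socket handler above; all values are placeholders.
payload = {'apikey': 'replace-with-socket-api-key',
           'mode': 'add',
           'hash': 'ABCDEF1234567890ABCDEF1234567890ABCDEF12',
           'label': 'tv'}
s = socket.create_connection(('localhost', 50007))
# handle() reads the JSON from the second line, so prefix it with a throwaway first line.
s.sendall(('harpoon\n' + json.dumps(payload) + '\n').encode('utf-8'))
print(s.recv(1024))
s.close()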
Example 23
    def main(self, check=False):

        if self.torrent_hash:
            torrent = self.client.find_torrent(self.torrent_hash)
            if torrent:
                if check:
                    logger.info(
                        'Successfully located torrent %s by hash on client. Detailed statistics to follow'
                        % self.torrent_hash)
                else:
                    if self.add is False:
                        logger.info(
                            '[SELF-ADD FALSE] Successfully located torrent %s by hash on client. Detailed statistics to follow'
                            % self.torrent_hash)
                    else:
                        logger.info(
                            "[SELF-ADD TRUE] %s Torrent already exists. Not adding to client.",
                            self.torrent_hash)
                        return False
            else:
                if self.add is True:
                    logger.info(
                        'Torrent with hash value of %s does not exist. Adding to client...'
                        % self.torrent_hash)
                else:
                    logger.info(
                        'Unable to locate torrent with a hash value of %s' %
                        self.torrent_hash)
                    return None

        #if self.filepath exists it will be the filename that exists on the torrent client. self.add cannot be true EVER in this case.
        elif all([self.filepath, self.add is False]):
            torrent = self.client.find_torrent(filepath=self.filepath)
            if torrent is None:
                logger.info("Couldn't find torrent with filename: %s " %
                            self.filepath)
                return None  #sys.exit(-1)
            else:
                logger.info("Located file at: %s" % self.filepath)

        #if add is true, self.filepath will contain the local path the .torrent file to load.
        if self.add is True:
            logger.info("Attempting to load torrent. Filepath is : %s" %
                        self.filepath)
            logger.info("label is : %s" % self.label)
            loadit = self.client.load_torrent(self.filepath, self.label,
                                              self.start, self.applylabel,
                                              self.basedir)
            if loadit:
                logger.info('Successfully loaded torrent.')
                torrent_hash = self.get_the_hash()
            else:
                logger.info('NOT Successfully loaded.')
                return None
            logger.info('Attempting to find by hash: ' + torrent_hash)
            torrent = self.client.find_torrent(torrent_hash)

        logger.info(torrent)
        torrent_info = self.client.get_torrent(torrent)

        if any([torrent_info is False, torrent_info is None]):
            return None

        if check:
            return torrent_info

        logger.info(torrent_info)

        if torrent_info['completed'] or self.partial is True:
            logger.info('# of files: %s' % str(len(torrent_info['files'])))
            logger.info('self.basedir: %s' % self.basedir)
            logger.info('torrent_info_folder: %s' % torrent_info['folder'])
            logger.info(
                'folder+label: %s' %
                os.path.join(torrent_info['folder'], torrent_info['label']))
            logger.info(
                'base: %s' %
                os.path.dirname(os.path.normpath(torrent_info['folder'])))
            logger.info('label: %s' % torrent_info['label'])
            if all([
                    len(torrent_info['files']) >= 1, self.basedir is not None,
                    self.basedir != torrent_info['folder'],
                    torrent_info['folder'] not in self.basedir
            ]) and all([
                    os.path.join(self.basedir, torrent_info['label']) !=
                    torrent_info['folder'],
                    os.path.dirname(os.path.normpath(
                        torrent_info['folder'])) != torrent_info['label']
            ]):
                logger.info("MIRROR SHOULD BE USED: %s" %
                            str(len(torrent_info['files'])))
                torrent_info['mirror'] = True
            else:
                logger.info("FILE SHOULD BE USED: %s" % torrent_info['files'])
                torrent_info['mirror'] = False
            logger.info("Directory: %s" % torrent_info['folder'])
            logger.info("Name: %s" % torrent_info['name'])
            #logger.info("FileSize: %s", helpers.human_size(torrent_info['total_filesize']))
            logger.info("Completed: %s" % torrent_info['completed'])
            #logger.info("Downloaded: %s", helpers.human_size(torrent_info['download_total']))
            #logger.info("Uploaded: %s", helpers.human_size(torrent_info['upload_total']))
            logger.info("Ratio: %s" % torrent_info['ratio'])
            #logger.info("Time Started: %s", torrent_info['time_started'])
            #logger.info("Seeding Time: %s", helpers.humanize_time(int(time.time()) - torrent_info['time_started']))

            if torrent_info['label']:
                logger.info("Torrent Label: %s" % torrent_info['label'])

            torrent_info['multiple'] = self.multiple

        logger.info(torrent_info)
        return torrent_info
Example 24
    def post_process(self):
        url = self.lidarr_url + '/api/v1/command'
        if self.applylabel is True:
            if self.snstat['label'] == 'None':
                newpath = os.path.join(self.defaultdir, self.snstat['name'])
            else:
                newpath = os.path.join(self.defaultdir, self.snstat['label'], self.snstat['name'])
        else:
            newpath = os.path.join(self.defaultdir, self.snstat['name'])

        payload = {'name': 'DownloadedAlbumsScan',
                   'path': newpath,
                   'downloadClientID': self.snstat['hash'],
                   'importMode': 'Move'}

        logger.info('[LIDARR] Posting url: %s' % url)
        logger.info('[LIDARR] Posting to completed download handling now: %s' % payload)

        r = requests.post(url, json=payload, headers=self.lidarr_headers)
        data = r.json()
        logger.info('content: %s' % data)

        check = True
        while check:
            url = self.lidarr_url + '/api/v1/command/' + str(data['id'])
            logger.info('[LIDARR] command check url : %s' % url)
            try:
                r = requests.get(url, params=None, headers=self.lidarr_headers)
                dt = r.json()
                logger.info('[LIDARR] Response: %s' % dt)
            except Exception as e:
                logger.warn('[%s] error returned from lidarr call. Aborting.' % e)
                return False
            else:
                if dt['status'] == 'completed':
                    logger.info('[LIDARR] Successfully post-processed : ' + self.snstat['name'])
                    check = False
                elif any([dt['status'] == 'failed', dt['status'] == 'aborted', dt['status'] == 'cancelled']):
                    logger.info('[LIDARR] Failed to post-process : ' + self.snstat['name'])
                    return False
                else:
                    time.sleep(10)

        if check is False:
            # we need to get the root path here in order to make sure we call the correct plex update ...
            # hash is known @ self.snstat['hash'], file will exist in snatch queue dir as hashvalue.hash
            # file contains complete snatch record - retrieve the 'path' value to get the series directory.
            return True
        else:
            return False
Example 25
    def update(self, libraries):
        #step 3 - update the specific section for the harpoon'd item.
        secfound = False
        sect = []
        logger.info('Libraries discovered in PMS: %s' % str(len(libraries['sections'])))
        for x in libraries['sections']:
            if self.root_path is not None:
                for xl in x['locations']:
                    if self.root_path.lower().startswith(xl['path'].lower()):
                        sect.append({'key':    x['key'],
                                     'title':  x['title']})
                        break
            else:
                if self.plex_label.lower() in x['title'].lower():
                    if secfound is False:
                        sect.append({'key':    x['key'],
                                     'title':  x['title']})
                        secfound = True
                    else:
                        logger.info('multiple sections discovered with the same category - will update all')
                        sect.append({'key':    x['key'],
                                     'title':  x['title']})
        logger.info('[HARPOON-PLEX] section match to %s ' % sect)

        status = False
        for x in sect:
            url = self.plex_host + '/library/sections/' + str(x['key']) + '/refresh'
            headers = {'X-Plex-Token': self.plex_token}
            payload = {'force': 0}

            logger.info('[HARPOON-PLEX] Submitting refresh request to specific library %s [%s]' % (x['title'],x['key']))

            r = requests.get(url, params=payload, headers=headers)
            logger.info('[HARPOON-PLEX] Status Code: %s' % r.status_code)
            if r.status_code == 200:
                logger.info('[HARPOON-PLEX] Successfully submitted for background refresh of library %s' % x['title'])
                status = True
            else:
                logger.info('[HARPOON-PLEX] Unable to submit for background refresh of library %s' % x['title'])
                status = False

        return {'status': status}
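Together with auth() (example 13) and sections() (example 18), the refresh above is the last step of a three-step flow: fetch a token, map the library sections, then refresh the matching ones. A rough driver sketch, assuming an already-constructed instance is passed in (the names here are illustrative, not part of the examples):

def refresh_plex_library(plex):
    # 'plex' stands in for an instance of the class owning auth(), sections() and update().
    auth_result = plex.auth()
    # auth() above only returns a status dict when it had to fetch a fresh token.
    if auth_result is not None and not auth_result['status']:
        return False
    libraries = plex.sections()
    if not libraries['status']:
        return False
    return plex.update(libraries)['status']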
Example 26
    def post_process(self):
        logger.info('snstat: %s' % self.snstat)
        issueid = None
        comicid = None
        nzb_name = None
        nzb = False
        try:
            logger.debug('Attempting to open: %s' %
                         os.path.join(self.torrentfile_dir, self.mylar_label,
                                      self.snstat['hash'] + '.mylar.hash'))
            with open(
                    os.path.join(self.torrentfile_dir, self.mylar_label,
                                 self.snstat['hash'] +
                                 '.mylar.hash')) as dfile:
                data = json.load(dfile)
        except Exception as e:
            logger.error('[%s] not able to load .mylar.hash file.' % e)
            #for those that were done outside of Mylar or using the -s switch on the cli directly by hash
            nzb_name = 'Manual Run'
        else:
            logger.debug('loaded .mylar.hash successfully - extracting info.')
            try:
                nzb_name = data['mylar_release_name']
            except KeyError:
                if 'mylar_release_nzbname' in data.keys():
                    # nzb_name HAS TO BE the filename itself so it can pp directly
                    nzb_name = os.path.basename(self.snstat['folder'])
                    nzb = True
                else:
                    #if mylar_release_name doesn't exist, fall back to the torrent_filename.
                    #mylar retry issue will not have a release_name
                    nzb_name = data['mylar_torrent_filename']

            if self.issueid is None:
                if data['mylar_issuearcid'] != 'None':
                    issueid = data['mylar_issuearcid']
                else:
                    if data['mylar_release_pack'] == 'False':
                        issueid = data['mylar_issueid']
                    else:
                        issueid = None
                comicid = data['mylar_comicid']
                if comicid == 'None':
                    comicid = None
            else:
                issueid = self.issueid
                comicid = None

        if self.issueid is not None and nzb_name == 'Manual Run':
            issueid = self.issueid
            comicid = None
            nzb_name = self.snstat['name']

        url = self.mylar_url + '/api'
        if all([self.applylabel is True, self.snstat['label'] != 'None']):
            if nzb is True:
                newpath = os.path.join(self.defaultdir, self.snstat['label'],
                                       self.snstat['extendedname'])
            else:
                if os.path.isdir(
                        os.path.join(self.defaultdir, self.snstat['label'],
                                     self.snstat['name'])):
                    newpath = os.path.join(self.defaultdir,
                                           self.snstat['label'],
                                           self.snstat['name'])
                else:
                    if os.path.isdir(
                            os.path.join(self.defaultdir,
                                         self.snstat['label'])):
                        newpath = os.path.join(self.defaultdir,
                                               self.snstat['label'])
        else:
            if nzb is True:
                newpath = os.path.join(self.defaultdir,
                                       self.snstat['extendedname'])
            else:
                newpath = os.path.join(self.defaultdir, self.snstat['name'])

        payload = {
            'cmd': 'forceProcess',
            'apikey': self.mylar_apikey,
            'nzb_name': nzb_name,
            'issueid': issueid,
            'comicid': comicid,
            'nzb_folder': newpath
        }

        logger.info('[MYLAR] Posting url: %s' % url)
        logger.info('[MYLAR] Posting to completed download handling now: %s' %
                    payload)

        try:
            r = requests.post(url,
                              params=payload,
                              headers=self.mylar_headers,
                              timeout=0.001)
        except Exception as e:
            if any(['Connection refused' in str(e), 'Timeout' in str(e)]):
                logger.warn(
                    'Unable to connect to Mylar server. Please check that it is online [%s].'
                    % e)
            else:
                logger.warn('%s' % e)
            return False

        #response = r.json()
        logger.debug('content: %s' % r.content)

        logger.debug('[MYLAR] status_code: %s' % r.status_code)
        logger.info('[MYLAR] Successfully post-processed : ' +
                    self.snstat['name'])

        return True
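For reference, the .mylar.hash file read above is a small JSON snapshot of the snatch. An illustrative example (all values invented) holding the keys post_process() looks for:

# Illustrative .mylar.hash contents; only the keys matter, the values are made up.
example_hash_file = {
    'mylar_release_name': 'Some.Comic.001.2017',
    'mylar_release_pack': 'False',
    'mylar_issueid': '123456',
    'mylar_issuearcid': 'None',
    'mylar_comicid': '78910',
    'mylar_torrent_filename': 'Some.Comic.001.2017.cbr'
}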
Example 27
    def __init__(self,
                 hash=None,
                 file=None,
                 add=False,
                 label=None,
                 partial=False,
                 conf=None):

        if hash is None:
            self.torrent_hash = None
        else:
            self.torrent_hash = hash

        if file is None:
            self.filepath = None
        else:
            self.filepath = file

        self.basedir = None

        if label is None:
            self.label = None
        else:
            self.label = label

        if add is True:
            self.add = True
        else:
            self.add = False

        if partial is True:
            self.partial = True
        else:
            self.partial = False

        if conf is None:
            logger.warn(
                'Unable to load config file properly for rtorrent usage. Make sure harpoon.conf is located in the /conf directory'
            )
            return None
        else:
            self.conf_location = conf

        config = ConfigParser.RawConfigParser()
        config.read(self.conf_location)

        self.applylabel = config.getboolean('general', 'applylabel')
        self.multiple_seedboxes = config.getboolean('general',
                                                    'multiple_seedboxes')
        logger.info('multiple_seedboxes: %s' % self.multiple_seedboxes)
        if self.multiple_seedboxes is True:
            sectionsconfig1 = config.get('general', 'multiple1')
            sectionsconfig2 = config.get('general', 'multiple2')
            sectionlist1 = sectionsconfig1.split(',')
            sections1 = [x for x in sectionlist1 if x.lower() == label.lower()]
            sectionlist2 = sectionsconfig2.split(',')
            sections2 = [x for x in sectionlist2 if x.lower() == label.lower()]
            logger.info('sections1: %s' % sections1)
            logger.info('sections2: %s' % sections2)
            if sections1:
                logger.info('SEEDBOX-1 ENABLED!')
                self.start = config.getboolean('rtorrent', 'startonload')
                self.rtorrent_host = config.get(
                    'rtorrent', 'rtorr_host') + ':' + config.get(
                        'rtorrent', 'rtorr_port')
                self.rtorrent_user = config.get('rtorrent', 'rtorr_user')
                self.rtorrent_pass = config.get('rtorrent', 'rtorr_passwd')
                self.rtorrent_auth = config.get('rtorrent', 'authentication')
                self.rtorrent_rpc = config.get('rtorrent', 'rpc_url')
                self.rtorrent_ssl = config.getboolean('rtorrent', 'ssl')
                self.rtorrent_verify = config.getboolean(
                    'rtorrent', 'verify_ssl')
                self.basedir = config.get('post-processing', 'pp_basedir')
                self.multiple = '1'

            elif sections2:
                logger.info('SEEDBOX-2 ENABLED!')
                self.start = config.getboolean('rtorrent2', 'startonload')
                self.rtorrent_host = config.get(
                    'rtorrent2', 'rtorr_host') + ':' + config.get(
                        'rtorrent2', 'rtorr_port')
                self.rtorrent_user = config.get('rtorrent2', 'rtorr_user')
                self.rtorrent_pass = config.get('rtorrent2', 'rtorr_passwd')
                self.rtorrent_auth = config.get('rtorrent2', 'authentication')
                self.rtorrent_rpc = config.get('rtorrent2', 'rpc_url')
                self.rtorrent_ssl = config.getboolean('rtorrent2', 'ssl')
                self.rtorrent_verify = config.getboolean(
                    'rtorrent2', 'verify_ssl')
                self.basedir = config.get('post-processing2', 'pp_basedir2')
                self.multiple = '2'
            else:
                logger.info(
                    'No label directory assignment provided (ie. the torrent file is not located in a directory named after the label).'
                )
                return None
        else:
            logger.info('SEEDBOX-1 IS LONE OPTION - ENABLED!')
            self.start = config.getboolean('rtorrent', 'startonload')
            self.rtorrent_host = config.get('rtorrent',
                                            'rtorr_host') + ':' + config.get(
                                                'rtorrent', 'rtorr_port')
            self.rtorrent_user = config.get('rtorrent', 'rtorr_user')
            self.rtorrent_pass = config.get('rtorrent', 'rtorr_passwd')
            self.rtorrent_auth = config.get('rtorrent', 'authentication')
            self.rtorrent_rpc = config.get('rtorrent', 'rpc_url')
            self.rtorrent_ssl = config.getboolean('rtorrent', 'ssl')
            self.rtorrent_verify = config.getboolean('rtorrent', 'verify_ssl')
            self.basedir = config.get('post-processing', 'pp_basedir')
            self.multiple = None

        self.client = TorClient.TorrentClient()
        if not self.client.connect(self.rtorrent_host, self.rtorrent_user,
                                   self.rtorrent_pass, self.rtorrent_auth,
                                   self.rtorrent_rpc, self.rtorrent_ssl,
                                   self.rtorrent_verify):
            logger.info('could not connect to host, exiting')
            return None
Example 28
    def post_process(self):
        url = self.sonarr_url + '/api/command'
        name = self.snstat['name']
        if 'extendedname' in self.snstat.keys():
            name = self.snstat['extendedname']
        if self.applylabel is True:
            if self.snstat['label'] == 'None':
                newpath = os.path.join(self.defaultdir, name)
            else:
                newpath = os.path.join(self.defaultdir, self.snstat['label'],
                                       name)
        else:
            newpath = os.path.join(self.defaultdir, name)

        if os.path.isfile(newpath):
            logger.warn(
                '[SONARR] This is an individual file, but Sonarr will only import from a directory. Creating a temporary directory and moving this so it can proceed.'
            )
            newdir = os.path.join(
                os.path.abspath(os.path.join(newpath, os.pardir)),
                os.path.splitext(self.snstat['name'])[0])
            logger.info('[SONARR] Creating directory: %s' % newdir)
            os.makedirs(newdir)
            logger.info('[SONARR] Moving %s -TO- %s' % (newpath, newdir))
            shutil.move(newpath, newdir)
            newpath = newdir
            logger.info('[SONARR] New path location now set to: %s' % newpath)

        #make sure it's in a Completed status otherwise it won't import (why? I haven't a f*ckin' clue but it's cause of v2.0.0.5301)
        cntit = 0
        while True:
            check_that_shit = self.checkyourself()
            if check_that_shit is True:
                break
            if cntit == 10:
                logger.error(
                    '[SONARR-ERROR] Unable to verify completion status of item - maybe this was already post-processed using a different method?'
                )
                return False
            cntit += 1
            time.sleep(15)

        payload = {
            "name": "DownloadedEpisodesScan",
            "path": newpath,
            "downloadClientID": self.snstat['hash'],
            "importMode": "Move"
        }

        logger.info(
            '[SONARR] Waiting 10s prior to sending to download handler to make sure item is completed within Sonarr'
        )
        logger.info(
            '[SONARR] Posting to completed download handling after a short 10s delay: %s'
            % payload)
        time.sleep(10)

        r = requests.post(url, json=payload, headers=self.sonarr_headers)
        data = r.json()

        check = True
        while check:
            url = self.sonarr_url + '/api/command/' + str(data['id'])
            logger.info('[SONARR] command check url : %s' % url)
            try:
                r = requests.get(url, params=None, headers=self.sonarr_headers)
                dt = r.json()
                logger.info('[SONARR] Response: %s' % dt)
            except Exception as e:
                logger.warn('[%s] error returned from sonarr call. Aborting.' %
                            e)
                return False
            else:
                if dt['state'] == 'completed':
                    #duration = time.strptime(dt['duration'][:-1], '%H:%M:%S.%f').tm_sec
                    #if tm_sec < 20:
                    #    #if less than 20s duration, the pp didn't succeed.
                    #else:
                    logger.info('[SONARR] Successfully post-processed : ' +
                                self.snstat['name'])
                    check = False
                else:
                    time.sleep(10)

        if check is False:
            #we need to get the root path here in order to make sure we call the correct plex update ...
            #hash is known @ self.snstat['hash'], file will exist in snatch queue dir as hashvalue.hash
            #file contains complete snatch record - retrieve the 'path' value to get the series directory.
            return True
        else:
            return False
Example 29
    def main(self):
        status = 'None'
        dirlist = self.traverse_directories(self.path)
        rar_found = []

        for fname in dirlist:
            filename = fname['filename']

            rar_ex = r'\.(?:rar|r\d\d|\d\d\d)$'
            rar_chk = re.findall(rar_ex, filename, flags=re.IGNORECASE)
            if rar_chk:
                #append the rars found to the rar_found tuple
                rar_found.append({
                    "directory": self.path,
                    "filename": filename
                })

        #if it needs to get unrar'd - we should do it here.
        if len(rar_found) > 0:
            rar_info = self.rar_check(rar_found)
            if rar_info is None:
                logger.warn(
                    '[RAR-DETECTION-FAILURE] Incomplete rar set detected - ignoring.'
                )
            else:
                logger.info('[RAR-DETECTION] Detected rar\'s within ' +
                            rar_info[0]['directory'] +
                            '. Initiating rar extraction.')
                if len(rar_info) > 0:
                    for rk in rar_info:
                        if rk['start_rar'] is None:
                            continue
                        logger.info('[RAR MANAGER] [ ' + str(len(rk['info'])) +
                                    ' ]')
                        logger.info(
                            '[RAR MANAGER] First Rar detection initiated for : '
                            + str(rk['start_rar']))
                        # extract the rar's biatch.
                        try:
                            rar_status = self.unrar_it(rk)
                        except Exception as e:
                            logger.warn(
                                '[RAR MANAGER] Error extracting rar: %s' % e)
                            continue
                        else:
                            if rar_status == "success":
                                logger.info(
                                    '[RAR MANAGER] Successfully extracted rar\'s.'
                                )
                                for rs in rk['info']:
                                    os.remove(
                                        os.path.join(self.path,
                                                     rs['filename']))
                                    logger.info('[RAR MANAGER] Removal of : ' +
                                                os.path.join(
                                                    self.path, rs['filename']))
                                #remove the crap in the directory that got logged earlier ( think its done later though )
                                logger.info(
                                    '[RAR MANAGER] Removal of start rar: ' +
                                    rk['start_rar'])
                                os.remove(rk['start_rar'])
                                status = 'success'

        if status == 'success':
            logger.info('Success!')
            dirlist = self.traverse_directories(self.path)
        else:
            if len(rar_found) > 0:
                logger.warn('Unable to unrar items')
            else:
                logger.debug('No items to unrar.')
        return dirlist
Example 30
    def query(self):
        sendresponse = self.params['nzo_id']
        queue = {
            'mode': 'queue',
            'search': self.params['nzo_id'],
            'output': 'json',
            'apikey': self.sab_apikey
        }
        try:
            logger.info('sending now to %s' % self.sab_url)
            logger.debug('parameters set to %s' % queue)
            h = requests.get(self.sab_url, params=queue, verify=False)
        except Exception as e:
            logger.error('uh-oh: %s' % e)
            return {'completed': False}
        else:
            queueresponse = h.json()
            logger.info('successfully queried the queue for status')
            try:
                queueinfo = queueresponse['queue']
                logger.info('queue: %s' % queueresponse)
                logger.info('Queue status : %s' % queueinfo['status'])
                logger.info('Queue mbleft : %s' % queueinfo['mbleft'])
                if str(queueinfo['status']) == 'Downloading':
                    logger.info('[SABNZBD] Download is not yet finished')
                    return {'completed': False}
            except Exception as e:
                logger.error('error: %s' % e)
                return {'completed': False}

            logger.info('[SABNZBD] Download completed.  Querying history.')
            hist_params = {
                'mode': 'history',
                'failed': 0,
                'output': 'json',
                'limit': 500,
                'apikey': self.sab_apikey
            }
            hist = requests.get(self.sab_url, params=hist_params, verify=False)
            historyresponse = hist.json()
            histqueue = historyresponse['history']
            found = {'completed': True, 'failed': True}
            try:
                for hq in histqueue['slots']:
                    # logger.info('nzo_id: %s --- %s [%s]' % (hq['nzo_id'], sendresponse, hq['status']))
                    if hq['nzo_id'] == sendresponse and hq[
                            'status'] == 'Completed':
                        logger.info(
                            '[SABNZBD] Found matching completed item in history. Job has a status of %s'
                            % hq['status'])
                        logger.info('[SABNZBD] Location found @ %s' %
                                    hq['storage'])
                        path_folder = hq['storage']
                        nzbname = os.path.basename(hq['storage'])
                        found = {
                            'completed': True,
                            'name': re.sub(r'\.nzb$', '', hq['nzb_name']).strip(),
                            'extendedname': nzbname,
                            'folder': path_folder,
                            'mirror': True,  # Change this
                            'multiple': None,
                            'label': hq['category'],
                            'hash': hq['nzo_id'],
                            'failed': False,
                            'files': []
                        }
                        break
                    elif hq['nzo_id'] == sendresponse and hq[
                            'status'] == 'Failed':
                        # get the stage / error message and see what we can do
                        stage = hq['stage_log']
                        for x in stage:
                            if 'Failed' in x['actions'] and any(
                                [x['name'] == 'Unpack', x['name'] == 'Repair'
                                 ]):
                                if 'moving' in x['actions']:
                                    logger.warn(
                                        '[SABNZBD] SABnzbd reported a failure while moving files during the unpack/repair phase: %s'
                                        % x['actions'])
                                else:
                                    logger.warn(
                                        '[SABNZBD] Failure occurred during the Unpack/Repair phase of SABnzbd. This is probably a bad file: %s'
                                        % x['actions'])
                                    found = {'completed': True, 'failed': True}
                            if any(
                                [x['name'] == 'Download',
                                 x['name'] == 'Fail']):
                                logger.warn(
                                    '[SABNZBD] SABnzbd failed to download. Articles were probably missing.'
                                )
                                found = {'completed': True, 'failed': True}
                    elif hq['nzo_id'] == sendresponse:
                        logger.warn('[SABNZBD] Unexpected response: %s' % hq)
                        found = {'completed': False}
            except Exception as e:
                logger.warn('error %s' % e)

            return found
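query() above is designed to be polled until the job clears SABnzbd's queue and shows up in history, after which cleanup() (example 1) can prune it. A minimal polling sketch, assuming a single object exposes both methods and already carries the nzo_id in self.params (the real wiring may differ):

import time

def wait_for_sab(sab, poll_interval=60):
    # 'sab' stands in for an instance exposing the query() and cleanup() methods above.
    while True:
        result = sab.query()
        if result['completed']:
            break
        time.sleep(poll_interval)
    if result.get('failed') is False:
        sab.cleanup()  # drop the finished job from SABnzbd's history
    return result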