def cleanup(self):
    """Delete the completed job (and its downloaded files) from SABnzbd's history.

    Issues a ``mode=history&name=delete`` API call for the ``nzo_id`` held in
    ``self.params`` and returns SABnzbd's decoded JSON response.

    Returns:
        dict: SABnzbd's JSON response on success, or ``{'status': False}``
        when the HTTP request itself fails.
    """
    # fix: dropped the unused local `sendresponse` (the nzo_id is read
    # directly into the payload below).
    queue = {
        'mode': 'history',
        'name': 'delete',
        'del_files': 1,
        'value': self.params['nzo_id'],
        'output': 'json',
        'apikey': self.sab_apikey,
        'limit': 500
    }
    try:
        logger.info('sending now to %s' % self.sab_url)
        logger.debug('parameters set to %s' % queue)
        # NOTE(review): verify=False disables TLS certificate checking —
        # presumably for self-signed local SABnzbd installs; confirm.
        h = requests.get(self.sab_url, params=queue, verify=False)
    except Exception as e:
        logger.error('uh-oh: %s' % e)
        return {'status': False}
    else:
        queueresponse = h.json()
        if queueresponse['status']:
            logger.info(
                '[SABNZBD] Successfully deleted the item from SABnzbd.')
        else:
            logger.warn('[SABNZBD] Unable to delete item from SABnzbd.')
        return queueresponse
def post_process(self):
    """Hand a completed download off to Mylar's completed-download handling.

    Loads the ``<hash>.mylar.hash`` sidecar file (written at snatch time) to
    recover the release name / issueid / comicid, resolves the on-disk
    location of the payload, then POSTs a ``forceProcess`` API call to Mylar.

    Returns:
        bool: True when the request was posted, False when Mylar could not
        be reached (or the request errored).
    """
    logger.info('snstat: %s' % self.snstat)
    issueid = None
    comicid = None
    nzb_name = None
    nzb = False
    try:
        logger.debug('Attempting to open: %s' % os.path.join(
            self.torrentfile_dir, self.mylar_label,
            self.snstat['hash'] + '.mylar.hash'))
        with open(
                os.path.join(self.torrentfile_dir, self.mylar_label,
                             self.snstat['hash'] + '.mylar.hash')) as dfile:
            data = json.load(dfile)
    except Exception as e:
        logger.error('[%s] not able to load .mylar.hash file.' % e)
        # for those that were done outside of Mylar or using the -s switch
        # on the cli directly by hash
        nzb_name = 'Manual Run'
    else:
        logger.debug('loaded .mylar.hash successfully - extracting info.')
        try:
            nzb_name = data['mylar_release_name']
        except Exception:
            try:
                if 'mylar_release_nzbname' in data.keys():
                    # nzb_name HAS TO BE the filename itself so it can pp directly
                    nzb_name = os.path.basename(self.snstat['folder'])
                    nzb = True
            except Exception:
                # if mylar_release_name doesn't exist, fall back to the
                # torrent_filename. mylar retry issue will not have a
                # release_name
                nzb_name = data['mylar_torrent_filename']

        if self.issueid is None:
            if data['mylar_issuearcid'] != 'None':
                issueid = data['mylar_issuearcid']
            else:
                if data['mylar_release_pack'] == 'False':
                    issueid = data['mylar_issueid']
                else:
                    issueid = None
            comicid = data['mylar_comicid']
            if comicid == 'None':
                comicid = None
        else:
            issueid = self.issueid
            comicid = None

    if self.issueid is not None and nzb_name == 'Manual Run':
        issueid = self.issueid
        comicid = None
        nzb_name = self.snstat['name']

    url = self.mylar_url + '/api'
    if all([self.applylabel is True, self.snstat['label'] != 'None']):
        if nzb is True:
            newpath = os.path.join(self.defaultdir, self.snstat['label'],
                                   self.snstat['extendedname'])
        else:
            if os.path.isdir(
                    os.path.join(self.defaultdir, self.snstat['label'],
                                 self.snstat['name'])):
                newpath = os.path.join(self.defaultdir,
                                       self.snstat['label'],
                                       self.snstat['name'])
            elif os.path.isdir(
                    os.path.join(self.defaultdir, self.snstat['label'])):
                newpath = os.path.join(self.defaultdir, self.snstat['label'])
            else:
                # fix: previously no branch assigned newpath here, causing an
                # UnboundLocalError below; fall back to the expected
                # label/name location so the request can still be posted.
                newpath = os.path.join(self.defaultdir,
                                       self.snstat['label'],
                                       self.snstat['name'])
    else:
        if nzb is True:
            newpath = os.path.join(self.defaultdir,
                                   self.snstat['extendedname'])
        else:
            newpath = os.path.join(self.defaultdir, self.snstat['name'])

    payload = {
        'cmd': 'forceProcess',
        'apikey': self.mylar_apikey,
        'nzb_name': nzb_name,
        'issueid': issueid,
        'comicid': comicid,
        'nzb_folder': newpath
    }
    logger.info('[MYLAR] Posting url: %s' % url)
    logger.info('[MYLAR] Posting to completed download handling now: %s' %
                payload)
    try:
        # NOTE(review): timeout=0.001 looks like a deliberate fire-and-forget;
        # confirm, since the post will almost always time out client-side.
        r = requests.post(url, params=payload, headers=self.mylar_headers,
                          timeout=0.001)
    except Exception as e:
        # fix: membership tests must be against str(e) — `'x' in e` on an
        # exception instance raises TypeError in Python 3.
        if any(['Connection refused' in str(e), 'Timeout' in str(e)]):
            logger.warn(
                'Unable to connect to Mylar server. Please check that it is online [%s].'
                % e)
        else:
            logger.warn('%s' % e)
        return False

    #response = r.json()
    logger.debug('content: %s' % r.content)
    logger.debug('[MYLAR] status_code: %s' % r.status_code)
    logger.info('[MYLAR] Successfully post-processed : ' + self.snstat['name'])
    return True
def post_process(self):
    """Trigger Sonarr's completed-download handling for this snatch.

    Resolves the final on-disk path (wrapping a lone file in a directory,
    since Sonarr only imports from directories), waits until the item shows
    as completed within Sonarr, then issues a DownloadedEpisodesScan command
    and polls it until Sonarr reports the command finished.

    Returns True on a completed import command, False otherwise.
    """
    url = self.sonarr_url + '/api/command'
    item_name = self.snstat.get('extendedname', self.snstat['name'])

    # apply the label subdirectory only when labelling is enabled and a
    # real label is present.
    if self.applylabel is True and self.snstat['label'] != 'None':
        target_path = os.path.join(self.defaultdir, self.snstat['label'],
                                   item_name)
    else:
        target_path = os.path.join(self.defaultdir, item_name)

    if os.path.isfile(target_path):
        logger.warn(
            '[SONARR] This is an individual movie, but Sonarr will only import from a directory. Creating a temporary directory and moving this so it can proceed.'
        )
        wrap_dir = os.path.join(
            os.path.abspath(os.path.join(target_path, os.pardir)),
            os.path.splitext(self.snstat['name'])[0])
        logger.info('[SONARR] Creating directory: %s' % wrap_dir)
        os.makedirs(wrap_dir)
        logger.info('[SONARR] Moving %s -TO- %s' % (target_path, wrap_dir))
        shutil.move(target_path, wrap_dir)
        target_path = wrap_dir
        logger.info('[SONARR] New path location now set to: %s' % target_path)

    # make sure it's in a Completed status otherwise it won't import (why?
    # I haven't a f*ckin' clue but it's cause of v2.0.0.5301) — poll up to
    # 11 times, sleeping 15s between attempts.
    for attempt in range(11):
        if self.checkyourself() is True:
            break
        if attempt == 10:
            logger.error(
                '[SONARR-ERROR] Unable to verify completion status of item - maybe this was already post-processed using a different method?'
            )
            return False
        time.sleep(15)

    payload = {
        "name": "DownloadedEpisodesScan",
        "path": target_path,
        "downloadClientID": self.snstat['hash'],
        "importMode": "Move"
    }
    logger.info(
        '[SONARR] Waiting 10s prior to sending to download handler to make sure item is completed within Sonarr'
    )
    logger.info(
        '[SONARR] Posting to completed download handling after a short 10s delay: %s'
        % payload)
    time.sleep(10)
    command = requests.post(url, json=payload,
                            headers=self.sonarr_headers).json()

    # poll the command endpoint until Sonarr says the scan completed.
    while True:
        poll_url = self.sonarr_url + '/api/command/' + str(command['id'])
        logger.info('[SONARR] command check url : %s' % poll_url)
        try:
            status = requests.get(poll_url, params=None,
                                  headers=self.sonarr_headers).json()
            logger.info('[SONARR] Reponse: %s' % status)
        except Exception as e:
            logger.warn('[%s] error returned from sonarr call. Aborting.' % e)
            return False
        if status['state'] == 'completed':
            #duration = time.strptime(dt['duration'][:-1], '%H:%M:%S.%f').tm_sec
            #if tm_sec < 20:
            #    #if less than 20s duration, the pp didn't succeed.
            #else:
            logger.info('[SONARR] Successfully post-processed : ' +
                        self.snstat['name'])
            break
        time.sleep(10)

    # we need to get the root path here in order to make sure we call the
    # correct plex update ... hash is known @ self.snstat['hash'], file will
    # exist in snatch queue dir as hashvalue.hash; that file contains the
    # complete snatch record - retrieve the 'path' value to get the series
    # directory.
    return True
def query(self):
    """Check SABnzbd for the status of the snatched item.

    First polls the active queue; if the item is still downloading, returns
    ``{'completed': False}``.  Otherwise walks the history for the matching
    ``nzo_id`` and returns a dict describing the completed (or failed) job,
    including its on-disk location and category label.

    Returns:
        dict: at minimum a ``completed`` key; on a completed history match
        also ``name``/``folder``/``label``/``hash`` etc. with
        ``failed: False``.
    """
    sendresponse = self.params['nzo_id']
    queue = {
        'mode': 'queue',
        'search': self.params['nzo_id'],
        'output': 'json',
        'apikey': self.sab_apikey
    }
    try:
        logger.info('sending now to %s' % self.sab_url)
        logger.debug('parameters set to %s' % queue)
        h = requests.get(self.sab_url, params=queue, verify=False)
    except Exception as e:
        logger.error('uh-oh: %s' % e)
        return {'completed': False}
    else:
        queueresponse = h.json()
        logger.info('successfully queried the queue for status')
        try:
            queueinfo = queueresponse['queue']
            logger.info('queue: %s' % queueresponse)
            logger.info('Queue status : %s' % queueinfo['status'])
            logger.info('Queue mbleft : %s' % queueinfo['mbleft'])
            if str(queueinfo['status']) == 'Downloading':
                # fix: log typo ('Dowwnload' -> 'Download')
                logger.info('[SABNZBD] Download is not yet finished')
                return {'completed': False}
        except Exception as e:
            logger.error('error: %s' % e)
            return {'completed': False}

        logger.info('[SABNZBD] Download completed. Querying history.')
        hist_params = {
            'mode': 'history',
            'failed': 0,
            'output': 'json',
            'limit': 500,
            'apikey': self.sab_apikey
        }
        hist = requests.get(self.sab_url, params=hist_params, verify=False)
        historyresponse = hist.json()
        histqueue = historyresponse['history']
        found = {'completed': True, 'failed': True}
        try:
            for hq in histqueue['slots']:
                # logger.info('nzo_id: %s --- %s [%s]' % (hq['nzo_id'], sendresponse, hq['status']))
                if hq['nzo_id'] == sendresponse and hq[
                        'status'] == 'Completed':
                    logger.info(
                        '[SABNZBD] Found matching completed item in history. Job has a status of %s'
                        % hq['status'])
                    logger.info('[SABNZBD] Location found @ %s' %
                                hq['storage'])
                    path_folder = hq['storage']
                    nzbname = os.path.basename(hq['storage'])
                    found = {
                        'completed': True,
                        # fix: escape the dot — '.nzb' as a regex also
                        # matched e.g. 'xnzb' anywhere in the name.
                        'name': re.sub(r'\.nzb', '', hq['nzb_name']).strip(),
                        'extendedname': nzbname,
                        'folder': path_folder,
                        'mirror': True,  # Change this
                        'multiple': None,
                        'label': hq['category'],
                        'hash': hq['nzo_id'],
                        'failed': False,
                        'files': []
                    }
                    break
                elif hq['nzo_id'] == sendresponse and hq[
                        'status'] == 'Failed':
                    # get the stage / error message and see what we can do
                    stage = hq['stage_log']
                    for x in stage:
                        # NOTE(review): these are exact-membership tests on
                        # the actions list, not substring searches — confirm
                        # against SABnzbd's stage_log format.
                        if 'Failed' in x['actions'] and any(
                                [x['name'] == 'Unpack',
                                 x['name'] == 'Repair']):
                            if 'moving' in x['actions']:
                                logger.warn(
                                    '[SABNZBD] There was a failure in SABnzbd during the unpack/repair phase that caused a failure: %s'
                                    % x['actions'])
                            else:
                                # fix: log typo ('occured' -> 'occurred')
                                logger.warn(
                                    '[SABNZBD] Failure occurred during the Unpack/Repair phase of SABnzbd. This is probably a bad file: %s'
                                    % x['actions'])
                            found = {'completed': True, 'failed': True}
                        if any(
                                [x['name'] == 'Download',
                                 x['name'] == 'Fail']):
                            # fix: log typo ('failed to to download')
                            logger.warn(
                                '[SABNZBD] SABnzbd failed to download. Articles were probably missing.'
                            )
                            found = {'completed': True, 'failed': True}
                elif hq['nzo_id'] == sendresponse:
                    logger.warn('[SABNZBD] Unexpected response: %s' % hq)
                    found = {'completed': False}
        except Exception as e:
            logger.warn('error %s' % e)
        return found