def cleanup(self):
    """Delete the finished job from SABnzbd's history.

    Issues a history 'delete' API call for the nzo_id held in
    self.params, also removing the downloaded files (del_files=1).

    Returns:
        The decoded JSON response from SABnzbd, or {'status': False}
        when the HTTP request itself failed.
    """
    queue = {
        'mode': 'history',
        'name': 'delete',
        'del_files': 1,  # remove the files on disk along with the entry
        'value': self.params['nzo_id'],
        'output': 'json',
        'apikey': self.sab_apikey,
        'limit': 500
    }
    try:
        logger.info('sending now to %s' % self.sab_url)
        logger.debug('parameters set to %s' % queue)
        # verify=False: SABnzbd installs commonly run self-signed certs
        h = requests.get(self.sab_url, params=queue, verify=False)
    except Exception as e:
        logger.error('uh-oh: %s' % e)
        return {'status': False}
    else:
        queueresponse = h.json()
        if queueresponse['status']:
            logger.info(
                '[SABNZBD] Successfully deleted the item from SABnzbd.')
        else:
            logger.warn('[SABNZBD] Unable to delete item from SABnzbd.')
        return queueresponse
def hashfile(self, hash=None):
    """Render the hashfile viewer page for a queued item.

    Looks up *hash* in the harpoon check-queue; when the queue entry
    carries a label, the matching sidecar hash file is loaded so its
    contents can be displayed.  Without a hash, or when the entry has
    no label, an empty info dict is rendered instead.
    """
    hashinfo = {}
    if hash:
        queuehash = harpoon.HQUEUE.ckqueue()[hash]
        logger.debug(queuehash)
        # dict membership test: no need for .keys()
        if 'label' in queuehash:
            hashinfo = hashfile.info(hash=hash, label=queuehash['label'])
    return serve_template(templatename="hashfile.html",
                          title="Hashfile Viewer",
                          hashinfo=hashinfo)
def __init__(self, ll_info):
    """Capture LazyLibrarian connection settings and the job context."""
    logger.info(ll_info)
    ll_cfg = config.LAZYLIBRARIAN
    gen_cfg = config.GENERAL
    self.lazylibrarian_url = ll_cfg['lazylibrarian_url']
    self.lazylibrarian_label = ll_cfg['lazylibrarian_label']
    self.lazylibrarian_headers = ll_cfg['lazylibrarian_headers']
    self.lazylibrarian_apikey = ll_cfg['lazylibrarian_apikey']
    self.lazylibrarian_filedata = ll_info['filedata']
    self.applylabel = gen_cfg['applylabel']
    self.defaultdir = gen_cfg['defaultdir']
    self.snstat = ll_info['snstat']
    logger.debug("---")
def info(hash=None, label=None, mode=None, filename=None):
    """Locate an item's sidecar hash file and return its metadata.

    The sidecar lives under config.GENERAL['torrentfile_dir']/<label>
    and is found either by explicit *filename*, by '<hash>.<mode>', or
    by globbing '<hash>.*'.  Its content is parsed as JSON first, then
    as a torrent file, finally falling back to a generic NZB stub.
    A 'name' key is always present in the returned dict.

    Returns:
        dict of metadata, or a {'name': 'Hash File Not Found: ...'}
        stub when no sidecar file exists.
    """
    if all([filename, label]):
        hashfile = os.path.join(config.GENERAL['torrentfile_dir'], label,
                                filename)
    elif all([hash, label, mode]):
        hashfile = os.path.join(config.GENERAL['torrentfile_dir'], label,
                                hash + '.' + mode)
    elif all([hash, label]):
        searchfolder = os.path.join(config.GENERAL['torrentfile_dir'], label)
        logger.debug('sd: %s' % searchfolder)
        hashfile = None
        # glob for '<hash>.*'; if several files match, the last one wins
        for fn in os.listdir(searchfolder):
            if fnmatch.fnmatch(fn, hash + '.*'):
                hashfile = os.path.join(searchfolder, fn)
                logger.debug('hf: %s' % hashfile)
    else:
        hashfile = None

    if not (hashfile and os.path.exists(hashfile)):
        return {'name': 'Hash File Not Found: %s' % hashfile}

    hashtype = 'hash'
    logger.debug("HashFile: %s" % hashfile)
    try:
        # context manager so the file handle is always closed
        with open(hashfile) as hf:
            hashinfo = json.load(hf)
    except Exception:
        hashtype = 'unknown'
    if hashtype == 'unknown':
        try:
            hashinfo = torrent_parser.parse_torrent_file(hashfile)
            hashinfo['name'] = hashinfo['info']['name']
        except Exception:
            hashinfo = {'name': 'Manually Added NZB File'}

    if 'name' not in hashinfo:
        # each *arr / reader app stores its title under a different key
        if 'sourceTitle' in hashinfo:
            hashinfo['name'] = hashinfo['sourceTitle']
        elif 'BookName' in hashinfo:
            hashinfo['name'] = hashinfo['BookName']
        elif 'mylar_release_name' in hashinfo:
            hashinfo['name'] = hashinfo['mylar_release_name']
        elif 'mylar_release_nzbname' in hashinfo:
            hashinfo['name'] = hashinfo['mylar_release_nzbname']
        elif 'Title' in hashinfo and 'AuxInfo' in hashinfo:
            hashinfo['name'] = '%s %s' % (hashinfo['Title'],
                                          hashinfo['AuxInfo'])
        elif 'lidarr_release_title' in hashinfo:
            hashinfo['name'] = hashinfo['lidarr_release_title']
        elif 'radarr_release_title' in hashinfo:
            hashinfo['name'] = hashinfo['radarr_release_title']
        elif 'sonarr_release_title' in hashinfo:
            hashinfo['name'] = hashinfo['sonarr_release_title']
        else:
            hashinfo['name'] = 'Unknown'
    logger.debug("HashInfo: %s" % hashinfo)
    return hashinfo
def remove(hash=None, label=None, mode=None, filename=None):
    """Delete an item's sidecar hash file.

    Resolves the file the same way info() does: explicit filename,
    '<hash>.<mode>', or a '<hash>.*' glob under the label folder.

    Returns True when a file was found and removed, False otherwise.
    """
    basedir = config.GENERAL['torrentfile_dir']
    target = None
    if filename and label:
        target = os.path.join(basedir, label, filename)
    elif hash and label and mode:
        target = os.path.join(basedir, label, hash + '.' + mode)
    elif hash and label:
        searchfolder = os.path.join(basedir, label)
        logger.debug('sd: %s' % searchfolder)
        # keep scanning so the last matching file wins, as before
        for fn in os.listdir(searchfolder):
            if fnmatch.fnmatch(fn, hash + '.*'):
                target = os.path.join(searchfolder, fn)
                logger.debug('hf: %s' % target)
    if target and os.path.exists(target):
        os.remove(target)
        return True
    return False
def __init__(self, radarr_info):
    """Pull Radarr settings out of config and stash the job context."""
    radarr_cfg = config.RADARR
    general_cfg = config.GENERAL
    self.radarr_url = radarr_cfg['radarr_url']
    self.radarr_label = radarr_cfg['radarr_label']
    self.radarr_headers = radarr_cfg['radarr_headers']
    self.applylabel = general_cfg['applylabel']
    self.defaultdir = general_cfg['defaultdir']
    self.radarr_rootdir = radarr_cfg['radarr_rootdir']
    self.torrentfile_dir = general_cfg['torrentfile_dir']
    self.keep_original_foldernames = radarr_cfg[
        'radarr_keep_original_foldernames']
    self.snstat = radarr_info['snstat']
    self.dir_hd_movies = radarr_cfg['dir_hd_movies']
    self.dir_sd_movies = radarr_cfg['dir_sd_movies']
    self.dir_web_movies = radarr_cfg['dir_web_movies']
    logger.debug("Directives Set")
    # these two are only populated when keep_original_foldernames is
    # enabled: the movie id & name come back after the first pass of
    # post-processing
    self.radarr_id = radarr_info['radarr_id']
    self.radarr_movie = radarr_info['radarr_movie']
    self.hd_movies_defs = ('720p', '1080p', '4k', '2160p', 'bluray')
    self.sd_movies_defs = ('screener', 'r5', 'dvdrip', 'xvid', 'dvd-rip',
                           'dvdscr', 'dvdscreener', 'ac3', 'webrip', 'bdrip')
    self.web_movies_defs = ('web-dl', 'webdl', 'hdrip', 'webrip')
def remove(self, hash, removefile=False):
    """Remove the queue entry matching *hash*, optionally its hashfile.

    Performs one full rotation of the snatch queue: every entry is
    popped and, unless it matches *hash*, pushed back - which preserves
    the ordering of the remaining entries.  The matching entry is
    dropped, its check-queue record marked failed, and (when
    *removefile* is True) its on-disk sidecar file deleted.

    Returns a human-readable message describing what was removed.
    """
    qsize = self.qsize()
    logger.debug('[QUEUE] Removal started (Queue Size: %s)' % qsize)
    msg = ''
    if qsize:
        # rotate the whole queue exactly once; gets never outnumber
        # puts, so get(block=True) cannot stall
        for _ in range(qsize):
            item = self.SNQUEUE.get(block=True)
            if not item['item'] == hash:
                logger.debug('[QUEUE] Nope')
                self.SNQUEUE.put(item)
            else:
                logger.debug('[QUEUE] Found it')
                if hash in self.CKQUEUE:
                    msg += "Item '%s' removed from queue.\n" % self.CKQUEUE[
                        hash]['name']
                    logger.debug('[QUEUE] %s' % msg)
                    self.ckupdate(hash, {
                        'stage': 'failed',
                        'status': 'Removed from Queue'
                    })
                if removefile:
                    try:
                        filename = os.path.join(
                            str(config.GENERAL['torrentfile_dir']),
                            str(item['label']),
                            str(item['item']) + '.' + str(item['mode']))
                        os.remove(filename)
                        msg += "File '%s' removed." % filename
                        logger.info('[USER] File %s removed' % filename)
                    except Exception as e:
                        logger.info(
                            '[USER] File could not be removed: %s' % e)
                        msg += "File '%s' could not be removed. Reason: %s" % (
                            filename, e)
    return msg
def main(self):
    """Detect and extract rar sets inside self.path.

    Scans the directory listing for split-archive naming patterns
    (.rar / .rNN / .NNN), asks rar_check() whether each detected set
    is complete, extracts complete sets via unrar_it(), and deletes
    the rar parts after a successful extraction.  The directory is
    re-traversed afterwards so the returned listing reflects the
    extracted contents.

    Returns the directory listing from traverse_directories().
    """
    status = 'None'
    dirlist = self.traverse_directories(self.path)
    rar_found = []
    # matches .rar, .r00-.r99 and .000-.999 split-archive extensions;
    # compiled once instead of per-file inside the loop
    rar_ex = re.compile(r'\.(?:rar|r\d\d|\d\d\d)$', flags=re.IGNORECASE)
    for fname in dirlist:
        filename = fname['filename']
        if rar_ex.findall(filename):
            # append the rars found to the rar_found tuple
            rar_found.append({
                "directory": self.path,
                "filename": filename
            })

    # if it needs to get unrar'd - we should do it here.
    if rar_found:
        rar_info = self.rar_check(rar_found)
        if rar_info is None:
            logger.warn(
                '[RAR-DETECTION-FAILURE] Incomplete rar set detected - ignoring.'
            )
        else:
            logger.info('[RAR-DETECTION] Detected rar\'s within ' +
                        rar_info[0]['directory'] +
                        '. Initiating rar extraction.')
            if len(rar_info) > 0:
                for rk in rar_info:
                    if rk['start_rar'] is None:
                        continue
                    logger.info('[RAR MANAGER] [ ' +
                                str(len(rk['info'])) + ' ] ')
                    logger.info(
                        '[RAR MANAGER] First Rar detection initiated for : '
                        + str(rk['start_rar']))
                    # extract the rar's biatch.
                    try:
                        rar_status = self.unrar_it(rk)
                    except Exception as e:
                        logger.warn(
                            '[RAR MANAGER] Error extracting rar: %s' % e)
                        continue
                    else:
                        if rar_status == "success":
                            logger.info(
                                '[RAR MANAGER] Successfully extracted rar\'s.'
                            )
                            for rs in rk['info']:
                                os.remove(
                                    os.path.join(self.path, rs['filename']))
                                logger.info(
                                    '[RAR MANAGER] Removal of : ' +
                                    os.path.join(self.path, rs['filename']))
                            # remove the crap in the directory that got
                            # logged earlier (think its done later though)
                            logger.info(
                                '[RAR MANAGER] Removal of start rar: ' +
                                rk['start_rar'])
                            os.remove(rk['start_rar'])
                            status = 'success'
    if status == 'success':
        logger.info('Success!')
        # re-scan so the listing includes the freshly extracted files
        dirlist = self.traverse_directories(self.path)
    else:
        if rar_found:
            logger.warn('Unable to unrar items')
        else:
            logger.debug('No items to unrar.')
    return dirlist
def post_process(self):
    """Hand the completed download off to Mylar's forceProcess API.

    Loads the .mylar.hash sidecar written at snatch time to recover
    the release name and issue/comic ids, works out the on-disk folder
    the download landed in, then POSTs a forceProcess command to Mylar.

    Returns True when the request was posted, False when Mylar could
    not be reached.
    """
    logger.info('snstat: %s' % self.snstat)
    issueid = None
    comicid = None
    nzb_name = None
    nzb = False
    hashpath = os.path.join(self.torrentfile_dir, self.mylar_label,
                            self.snstat['hash'] + '.mylar.hash')
    try:
        logger.debug('Attempting to open: %s' % hashpath)
        with open(hashpath) as dfile:
            data = json.load(dfile)
    except Exception as e:
        logger.error('[%s] not able to load .mylar.hash file.' % e)
        # for those that were done outside of Mylar or using the -s
        # switch on the cli directly by hash
        nzb_name = 'Manual Run'
    else:
        logger.debug('loaded .mylar.hash successfully - extracting info.')
        try:
            nzb_name = data['mylar_release_name']
        except Exception:
            try:
                if 'mylar_release_nzbname' in data:
                    # nzb_name HAS TO BE the filename itself so it can
                    # pp directly
                    nzb_name = os.path.basename(self.snstat['folder'])
                    nzb = True
                # NOTE(review): if the key is absent nzb_name stays
                # None here - looks unintended, confirm against callers
            except Exception:
                # if mylar_release_name doesn't exist, fall back to the
                # torrent_filename.  mylar retry issue will not have a
                # release_name
                nzb_name = data['mylar_torrent_filename']
        if self.issueid is None:
            if data['mylar_issuearcid'] != 'None':
                issueid = data['mylar_issuearcid']
            else:
                if data['mylar_release_pack'] == 'False':
                    issueid = data['mylar_issueid']
                else:
                    issueid = None
            comicid = data['mylar_comicid']
            if comicid == 'None':
                comicid = None
        else:
            issueid = self.issueid
            comicid = None
    if self.issueid is not None and nzb_name == 'Manual Run':
        issueid = self.issueid
        comicid = None
        nzb_name = self.snstat['name']
    url = self.mylar_url + '/api'
    # resolve the folder the download actually landed in
    if all([self.applylabel is True, self.snstat['label'] != 'None']):
        if nzb is True:
            newpath = os.path.join(self.defaultdir, self.snstat['label'],
                                   self.snstat['extendedname'])
        else:
            if os.path.isdir(
                    os.path.join(self.defaultdir, self.snstat['label'],
                                 self.snstat['name'])):
                newpath = os.path.join(self.defaultdir,
                                       self.snstat['label'],
                                       self.snstat['name'])
            else:
                # NOTE(review): if neither directory exists, newpath is
                # never bound and the payload below would raise - confirm
                # whether a fallback is needed
                if os.path.isdir(
                        os.path.join(self.defaultdir,
                                     self.snstat['label'])):
                    newpath = os.path.join(self.defaultdir,
                                           self.snstat['label'])
    else:
        if nzb is True:
            newpath = os.path.join(self.defaultdir,
                                   self.snstat['extendedname'])
        else:
            newpath = os.path.join(self.defaultdir, self.snstat['name'])
    payload = {
        'cmd': 'forceProcess',
        'apikey': self.mylar_apikey,
        'nzb_name': nzb_name,
        'issueid': issueid,
        'comicid': comicid,
        'nzb_folder': newpath
    }
    logger.info('[MYLAR] Posting url: %s' % url)
    logger.info('[MYLAR] Posting to completed download handling now: %s' %
                payload)
    try:
        # NOTE(review): the 1ms timeout makes this effectively
        # fire-and-forget; forceProcess can run for a long time
        r = requests.post(url, params=payload, headers=self.mylar_headers,
                          timeout=0.001)
    except Exception as e:
        # substring tests must run against str(e): 'in' on the exception
        # object itself raises TypeError on python 3
        errmsg = str(e)
        if any(['Connection refused' in errmsg, 'Timeout' in errmsg]):
            logger.warn(
                'Unable to connect to Mylar server. Please check that it is online [%s].'
                % e)
        else:
            logger.warn('%s' % e)
        return False
    logger.debug('content: %s' % r.content)
    logger.debug('[MYLAR] status_code: %s' % r.status_code)
    logger.info('[MYLAR] Successfully post-processed : ' +
                self.snstat['name'])
    return True
def query(self):
    """Poll SABnzbd for the status of the job in self.params.

    First checks the active queue: if the job is still downloading the
    call returns early.  Otherwise the history is searched for the
    nzo_id to determine whether the job completed or failed
    (unpack/repair/download stage errors are logged).

    Returns a status dict: {'completed': bool, ...}; on success it
    also carries name/folder/label/hash details for post-processing.
    """
    sendresponse = self.params['nzo_id']
    queue = {
        'mode': 'queue',
        'search': self.params['nzo_id'],
        'output': 'json',
        'apikey': self.sab_apikey
    }
    try:
        logger.info('sending now to %s' % self.sab_url)
        logger.debug('parameters set to %s' % queue)
        h = requests.get(self.sab_url, params=queue, verify=False)
    except Exception as e:
        logger.error('uh-oh: %s' % e)
        return {'completed': False}
    else:
        queueresponse = h.json()
        logger.info('successfully queried the queue for status')
        try:
            queueinfo = queueresponse['queue']
            logger.info('queue: %s' % queueresponse)
            logger.info('Queue status : %s' % queueinfo['status'])
            logger.info('Queue mbleft : %s' % queueinfo['mbleft'])
            if str(queueinfo['status']) == 'Downloading':
                logger.info('[SABNZBD] Download is not yet finished')
                return {'completed': False}
        except Exception as e:
            logger.error('error: %s' % e)
            return {'completed': False}
    logger.info('[SABNZBD] Download completed. Querying history.')
    hist_params = {
        'mode': 'history',
        'failed': 0,
        'output': 'json',
        'limit': 500,
        'apikey': self.sab_apikey
    }
    hist = requests.get(self.sab_url, params=hist_params, verify=False)
    historyresponse = hist.json()
    histqueue = historyresponse['history']
    # assume failure until a matching history slot proves otherwise
    found = {'completed': True, 'failed': True}
    try:
        for hq in histqueue['slots']:
            if hq['nzo_id'] == sendresponse and hq['status'] == 'Completed':
                logger.info(
                    '[SABNZBD] Found matching completed item in history. Job has a status of %s'
                    % hq['status'])
                logger.info('[SABNZBD] Location found @ %s' % hq['storage'])
                path_folder = hq['storage']
                nzbname = os.path.basename(hq['storage'])
                found = {
                    'completed': True,
                    # escape the dot so only a literal '.nzb' is stripped
                    'name': re.sub(r'\.nzb', '', hq['nzb_name']).strip(),
                    'extendedname': nzbname,
                    'folder': path_folder,
                    'mirror': True,  # Change this
                    'multiple': None,
                    'label': hq['category'],
                    'hash': hq['nzo_id'],
                    'failed': False,
                    'files': []
                }
                break
            elif hq['nzo_id'] == sendresponse and hq['status'] == 'Failed':
                # get the stage / error message and see what we can do
                stage = hq['stage_log']
                for x in stage:
                    if 'Failed' in x['actions'] and any(
                            [x['name'] == 'Unpack',
                             x['name'] == 'Repair']):
                        if 'moving' in x['actions']:
                            logger.warn(
                                '[SABNZBD] There was a failure in SABnzbd during the unpack/repair phase that caused a failure: %s'
                                % x['actions'])
                        else:
                            logger.warn(
                                '[SABNZBD] Failure occurred during the Unpack/Repair phase of SABnzbd. This is probably a bad file: %s'
                                % x['actions'])
                        found = {'completed': True, 'failed': True}
                    if any([x['name'] == 'Download',
                            x['name'] == 'Fail']):
                        logger.warn(
                            '[SABNZBD] SABnzbd failed to download. Articles were probably missing.'
                        )
                        found = {'completed': True, 'failed': True}
            elif hq['nzo_id'] == sendresponse:
                logger.warn('[SABNZBD] Unexpected response: %s' % hq)
                found = {'completed': False}
    except Exception as e:
        logger.warn('error %s' % e)
    return found
def post_process(self):
    """Stage the finished download and trigger LazyLibrarian's API.

    Ensures the download sits inside a working folder whose name ends
    with the ' LL.(BookID)' / ' PROCESS' suffix that LazyLibrarian's
    forceProcess expects (renaming the folder, or moving a bare file
    into a new one, as needed), then POSTs the forceProcess command.

    Returns True once the command has been posted, False when the
    expected file/folder cannot be found on disk.
    """
    url = self.lazylibrarian_url + '/api'
    if 'extendedname' in self.snstat:
        nzbname = self.snstat['extendedname']
    else:
        nzbname = self.snstat['name']
    if self.applylabel is True:
        if self.snstat['label'] == 'None':
            filepath = os.path.join(self.defaultdir, nzbname)
        else:
            filepath = os.path.join(self.defaultdir, self.snstat['label'],
                                    nzbname)
    else:
        filepath = os.path.join(self.defaultdir, nzbname)
    filebase = os.path.basename(filepath)
    logger.debug('[LAZYLIBRARIAN] Path: %s' % filepath)
    midpath = os.path.abspath(os.path.join(filepath, os.pardir))
    midbase = os.path.basename(midpath)
    defaultbase = os.path.basename(self.defaultdir)
    movefile = False
    if self.lazylibrarian_filedata and 'BookID' in self.lazylibrarian_filedata:
        process_suffix = ' LL.(%s)' % self.lazylibrarian_filedata['BookID']
    else:
        process_suffix = ' PROCESS'
    logger.debug('[LAZYLIBRARIAN] Process Suffix: %s' % process_suffix)
    if midbase == defaultbase or midbase == self.snstat['label']:
        # name is 1 deep - if file, move it. if folder, check for LL
        if os.path.isfile(filepath):
            logger.debug('[LAZYLIBRARIAN] Prepping file to move')
            process_path = filepath + process_suffix
            movefile = True
        elif os.path.isdir(filepath):
            logger.debug('[LAZYLIBRARIAN] Path is a folder')
            if filepath.endswith(process_suffix):
                logger.debug(
                    '[LAZYLIBRARIAN] Folder is already properly named')
                movefile = False
                process_path = filepath
            else:
                logger.debug('[LAZYLIBRARIAN] Renaming folder')
                movefile = False
                process_path = filepath + process_suffix
                os.rename(filepath, process_path)
        else:
            logger.debug('[LAZYLIBRARIAN] File not found')
            return False
    elif midbase.endswith(process_suffix):
        logger.debug('[LAZYLIBRARIAN] Setting working folder to %s' %
                     midpath)
        process_path = midpath
        movefile = False
    else:
        logger.debug(
            '[LAZYLIBRARIAN] Setting working folder to %s and renaming' %
            midpath)
        process_path = midpath + process_suffix
        os.rename(midpath, process_path)
        movefile = False
    if movefile:
        logger.debug("[LAZYLIBRARIAN] Moving %s to %s" %
                     (filepath, os.path.join(process_path, filebase)))
        if not os.path.exists(process_path):
            os.mkdir(process_path)
        shutil.move(filepath, os.path.join(process_path, filebase))
    logger.info('[LAZYLIBRARIAN] Path: %s' % process_path)
    payload = {
        'cmd': 'forceProcess',
        'dir': process_path,
        'apikey': self.lazylibrarian_apikey,
        'ignoreclient': 'True',
    }
    logger.info('[LAZYLIBRARIAN] Posting url: %s' % url)
    logger.info(
        '[LAZYLIBRARIAN] Posting to completed download handling now: %s' %
        payload)
    r = requests.post(url, data=payload,
                      headers=self.lazylibrarian_headers)
    data = r.text
    logger.info('content: %s' % data)
    logger.info('[LAZYLIBRARIAN] Successfully post-processed : ' +
                self.snstat['name'])
    return True
def initialize(options=None, basepath=None, parent=None):
    """Configure and start the CherryPy web server for harpoon.

    Builds the server/session/static-file configuration from *options*,
    optionally enabling HTTPS (disabled with a warning when the cert or
    key file is missing) and HTTP basic auth, mounts the WebInterface
    app at options['http_root'], and blocks in cherrypy.server.wait().

    Exits the process (sys.exit(1)) when the port cannot be bound.
    """
    if options is None:
        options = {}
    https_enabled = options['https_enabled']
    https_cert = options['https_cert']
    https_key = options['https_key']
    logger.debug("Web Initializing: %s" % options)
    if https_enabled:
        # fall back to plain HTTP rather than failing to start
        if not (os.path.exists(https_cert) and os.path.exists(https_key)):
            logger.warn(
                "Disabled HTTPS because of missing certificate and key.")
            https_enabled = False
    options_dict = {
        'log.screen': False,
        'server.thread_pool': 10,
        'server.socket_port': options['http_port'],
        'server.socket_host': options['http_host'],
        'engine.autoreload.on': False,
        'tools.encode.on': True,
        'tools.encode.encoding': 'utf-8',
        'tools.decode.on': True,
        'tools.sessions.on': True,
        'tools.sessions.storage_type': "File",
        'tools.sessions.storage_path': os.path.join(basepath, "sessions"),
        'tools.sessions.timeout': 120,
    }
    if https_enabled:
        options_dict['server.ssl_certificate'] = https_cert
        options_dict['server.ssl_private_key'] = https_key
        protocol = "https"
    else:
        protocol = "http"
    logger.debug("Options: %s" % options_dict)
    logger.info("Starting harpoon web server on %s://%s:%d/" %
                (protocol, options['http_host'], options['http_port']))
    cherrypy.config.update(options_dict)
    cherrypy.log.access_log.propagate = False
    logger.debug('DataDir: %s' % basepath)
    conf = {
        '/': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': os.path.join(basepath, 'data'),
            'tools.proxy.on': options['http_proxy'],
            'tools.auth.on': False,
            'tools.sessions.on': True
        },
        '/interfaces': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': os.path.join(basepath, 'data',
                                                'interfaces'),
            'tools.auth.on': False,
            'tools.sessions.on': False
        },
        '/images': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': os.path.join(basepath, 'data', 'images'),
            'tools.auth.on': False,
            'tools.sessions.on': False
        },
        '/css': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': os.path.join(basepath, 'data', 'css'),
            'tools.auth.on': False,
            'tools.sessions.on': False
        },
        '/js': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': os.path.join(basepath, 'data', 'js'),
            'tools.auth.on': False,
            'tools.sessions.on': False
        },
        '/favicon.ico': {
            'tools.staticfile.on': True,
            'tools.staticfile.filename': os.path.join(basepath, 'data',
                                                      'images',
                                                      'favicon.ico'),
            'tools.auth.on': False,
            'tools.sessions.on': False
        }
    }
    if options['http_pass'] != "":
        logger.info(
            "Web server authentication is enabled, username is '%s'" %
            options['http_user'])
        conf['/'].update({
            'tools.auth.on': True,
            'tools.sessions.on': True,
            'tools.auth_basic.on': True,
            'tools.auth_basic.realm': 'harpoon',
            'tools.auth_basic.checkpassword':
            cherrypy.lib.auth_basic.checkpassword_dict(
                {options['http_user']: options['http_pass']})
        })
        # the API stays reachable without basic auth
        conf['/api'] = {'tools.auth_basic.on': False}
    logger.debug('config: %s' % conf)
    # Prevent time-outs
    try:
        cherrypy.engine.timeout_monitor.unsubscribe()
        cherrypy.tree.mount(WebInterface(parent=parent),
                            str(options['http_root']), config=conf)
        cherrypy.engine.autoreload.subscribe()
        cherrypy.process.servers.check_port(str(options['http_host']),
                                            options['http_port'])
        cherrypy.server.start()
    except IOError:
        # print() call form works on both python 2 and 3
        print('Failed to start on port: %i. is something else running?' %
              (options['http_port']))
        sys.exit(1)
    except Exception as e:
        print('Error: %s' % e)
        sys.exit(1)
    cherrypy.server.wait()
def home(self, msg=None):
    """Render the landing page showing the current queue status."""
    logger.debug("Serving home")
    return serve_template(
        templatename='index.html', title="Queue Status", msg=msg)
def index(self):
    """Default route - delegates straight to the home page."""
    logger.debug("Serving index")
    # previously redirected: raise cherrypy.HTTPRedirect("home")
    page = self.home()
    return page