def update_status_snatched(guid, imdbid):
    ''' Sets status to Snatched
    guid (str): guid for download link
    imdbid (str): imdb id

    Updates MOVIES, SEARCHRESULTS, and MARKEDRESULTS to 'Snatched'

    Returns bool
    '''
    logging.info('Updating {} to Snatched.'.format(imdbid))

    # Each step runs only if the previous one succeeded; the first failure
    # logs its message and aborts.
    steps = (
        (lambda: Manage.searchresults(guid, 'Snatched'),
         'Unable to update search result status to Snatched.'),
        (lambda: Manage.markedresults(guid, 'Snatched', imdbid=imdbid),
         'Unable to store marked search result as Snatched.'),
        (lambda: Manage.movie_status(imdbid),
         'Unable to update movie status to Snatched.'),
    )

    for step, error_message in steps:
        if not step():
            logging.error(error_message)
            return False
    return True
def search(movie):
    ''' Executes backlog search for required movies
    movie (dict): movie to run search for

    Gets new search results from newznab providers.
    Pulls existing search results and updates new data with old. This
    way the found_date doesn't change and scores can be updated if the
    quality profile was modified since last search.

    Sends ALL results to searchresults.score() to be (re-)scored and filtered.

    Checks if guid matches entries in MARKEDRESULTS and sets status if
    found. default status Available.

    Finally stores results in SEARCHRESULTS

    Returns Bool if movie is found.
    '''
    imdbid = movie['imdbid']
    title = movie['title']
    year = movie['year']
    quality = movie['quality']
    # english_title/language may be absent; empty string disables the
    # english-title torrent pass below
    english_title = movie.get('english_title', '')
    language = movie.get('download_language', '')

    logging.info('Performing backlog search for {} {}.'.format(title, year))

    proxy.create()

    results = []

    if core.CONFIG['Downloader']['Sources']['usenetenabled']:
        for i in nn.search_all(imdbid):
            results.append(i)
    if core.CONFIG['Downloader']['Sources']['torrentenabled']:
        # search under the local title (skipped when it equals the english
        # title), then a second pass under the english title when a
        # download language is configured
        if title != english_title:
            for i in torrent.search_all(imdbid, title, year):
                results.append(i)
        if english_title and language:
            for i in torrent.search_all(imdbid, english_title, year,
                                        title != english_title):
                results.append(i)

    proxy.destroy()

    old_results = core.sql.get_search_results(imdbid, quality)

    # previously imported releases are always kept
    for old in old_results:
        if old['type'] == 'import':
            results.append(old)

    active_old_results = remove_inactive(old_results)

    # update results with old info if guids match
    for idx, result in enumerate(results):
        for old in active_old_results:
            if old['guid'] == result['guid']:
                # copy fresh seeder/leecher counts into the stored row
                # first, then overwrite the new result with the stored row
                # so found_date (and other stored fields) are preserved
                if 'seeders' in result:
                    old['seeders'] = result['seeders']
                if 'leechers' in result:
                    old['leechers'] = result['leechers']
                result.update(old)
                results[idx] = result

    # parse release names for source media / resolution before scoring
    for idx, result in enumerate(results):
        logging.debug('Parse {}'.format(result['title']))
        results[idx]['ptn'] = PTN.parse(result['title'])
        results[idx]['resolution'] = get_source(results[idx]['ptn'])

    scored_results = searchresults.score(results, imdbid=imdbid)

    # sets result status based off marked results table
    marked_results = core.sql.get_marked_results(imdbid)

    if marked_results:
        for result in scored_results:
            if result['guid'] in marked_results:
                result['status'] = marked_results[result['guid']]

    if not store_results(scored_results, imdbid, backlog=True):
        logging.error('Unable to store search results for {}'.format(imdbid))
        return False

    if not Manage.movie_status(imdbid):
        logging.error('Unable to update movie status for {}'.format(imdbid))
        return False

    if not core.sql.update('MOVIES', 'backlog', '1', 'imdbid', imdbid):
        logging.error(
            'Unable to flag backlog search as complete for {}'.format(imdbid))
        return False

    return True
def rss_sync(movies):
    ''' Gets latests RSS feed from all indexers
    movies (list): dicts of movies to look for

    Gets latest rss feed from all supported indexers.

    Looks through rss for anything that matches a movie in 'movies'

    Only stores new results. If you need to update scores or old results
    force a backlog search.

    Finally stores results in SEARCHRESULTS

    Returns bool
    '''
    logging.info('Syncing indexer RSS feeds.')

    newznab_results = []
    torrent_results = []

    proxy.create()

    if core.CONFIG['Downloader']['Sources']['usenetenabled']:
        newznab_results = nn.get_rss()
    if core.CONFIG['Downloader']['Sources']['torrentenabled']:
        torrent_results = torrent.get_rss()

    proxy.destroy()

    for movie in movies:
        imdbid = movie['imdbid']
        title = movie['title']
        year = movie['year']
        english_title = movie.get('english_title')

        logging.info('Parsing RSS for {} {}'.format(title, year))

        # newznab feeds carry the imdbid directly
        nn_found = [i for i in newznab_results if i['imdbid'] == imdbid]

        # torrent feeds are matched by release name, trying the local title
        # first and the english title as a fallback
        tor_found = []
        for i in torrent_results:
            if _match_torrent_name(title, year, i['title']):
                tor_found.append(i)
            elif english_title and _match_torrent_name(english_title, year, i['title']):
                tor_found.append(i)

        # tag matched torrent results with the movie's imdbid
        # (was a redundant enumerate + self-assignment loop)
        for result in tor_found:
            result['imdbid'] = imdbid

        results = nn_found + tor_found
        if not results:
            logging.info('Nothing found in RSS for {} {}'.format(title, year))
            continue

        # Ignore results we've already stored.
        # Set lookup instead of scanning old_results per candidate; the dead
        # `else: continue` from the original loop is dropped.
        old_results = core.sql.get_search_results(imdbid, rejected=True)
        old_guids = {i['guid'] for i in old_results}
        new_results = [res for res in results if res['guid'] not in old_guids]

        logging.info('Found {} new results for {} {}.'.format(
            len(new_results), title, year))

        # Get source media and resolution
        for result in new_results:
            logging.debug('Parse {}'.format(result['title']))
            result['ptn'] = PTN.parse(result['title'])
            result['resolution'] = get_source(result['ptn'])

        scored_results = searchresults.score(new_results, imdbid=imdbid)

        if len(scored_results) == 0:
            logging.info('No acceptable results found for {}'.format(imdbid))
            continue

        if not store_results(scored_results, imdbid):
            return False

        if not Manage.movie_status(imdbid):
            return False

    return True
def complete(self, data):
    ''' Post-processes a complete, successful download
    data (dict): all gathered file information and metadata

    data must include the following keys:
        path (str): path to downloaded item. Can be file or directory
        guid (str): nzb guid or torrent hash
        downloadid (str): download id from download client

    All params can be empty strings if unknown

    In SEARCHRESULTS marks guid as Finished

    In MARKEDRESULTS:
        Creates or updates entry for guid and optional guid with status=bad

    In MOVIES updates finished_score and finished_date
    Updates MOVIES status

    Checks to see if we found a movie file. If not, ends here.

    If Renamer is enabled, renames movie file according to core.CONFIG
    If Mover is enabled, moves file to location in core.CONFIG, then...
    If Clean Up enabled, deletes path after Mover finishes.
    Clean Up will not execute without Mover success.

    Returns dict of post-processing results
    '''
    config = core.CONFIG['Postprocessing']

    # dict we will json.dump and send back to downloader
    result = {}
    result['status'] = 'incomplete'
    result['data'] = data
    result['data']['finished_date'] = str(datetime.date.today())
    result['tasks'] = {}

    # mark guid in both results tables
    logging.info('Marking guid as Finished.')
    data['guid'] = data['guid'].lower()
    guid_result = {}
    if data['guid'] and data.get('imdbid'):
        if Manage.searchresults(data['guid'], 'Finished', movie_info=data):
            guid_result['update_SEARCHRESULTS'] = True
        else:
            guid_result['update_SEARCHRESULTS'] = False

        if Manage.markedresults(data['guid'], 'Finished',
                                imdbid=data['imdbid']):
            guid_result['update_MARKEDRESULTS'] = True
        else:
            guid_result['update_MARKEDRESULTS'] = False

        # create result entry for guid
        result['tasks'][data['guid']] = guid_result

    # if we have a guid2, do it all again
    if data.get('guid2') and data.get('imdbid'):
        logging.info('Marking guid2 as Finished.')
        guid2_result = {}
        if Manage.searchresults(data['guid2'], 'Finished', movie_info=data):
            guid2_result['update_SEARCHRESULTS'] = True
        else:
            guid2_result['update_SEARCHRESULTS'] = False

        if Manage.markedresults(data['guid2'], 'Finished',
                                imdbid=data['imdbid']):
            guid2_result['update_MARKEDRESULTS'] = True
        else:
            guid2_result['update_MARKEDRESULTS'] = False

        # create result entry for guid2
        result['tasks'][data['guid2']] = guid2_result

    # set movie status and add finished date/score
    if data.get('imdbid'):
        if core.sql.row_exists('MOVIES', imdbid=data['imdbid']):
            data['category'] = core.sql.get_movie_details(
                'imdbid', data['imdbid'])['category']
        else:
            logging.info('{} not found in library, adding now.'.format(
                data.get('title')))
            data['status'] = 'Disabled'
            Manage.add_movie(data)

        logging.info('Setting MOVIE status.')
        r = Manage.movie_status(data['imdbid'])
        db_update = {
            'finished_date': result['data']['finished_date'],
            'finished_score': result['data'].get('finished_score')
        }
        core.sql.update_multiple_values('MOVIES', db_update, 'imdbid',
                                        data['imdbid'])
    else:
        logging.info(
            'Imdbid not supplied or found, unable to update Movie status.')
        r = ''
    result['tasks']['update_movie_status'] = r

    data.update(Metadata.convert_to_db(data))

    # mover. sets ['finished_file']
    if config['moverenabled']:
        result['tasks']['mover'] = {'enabled': True}
        response = self.mover(data)
        if not response:
            result['tasks']['mover']['response'] = False
        else:
            data['finished_file'] = response
            result['tasks']['mover']['response'] = True
    else:
        logging.info('Mover disabled.')
        data['finished_file'] = data.get('original_file')
        result['tasks']['mover'] = {'enabled': False}

    # renamer
    if config['renamerenabled']:
        result['tasks']['renamer'] = {'enabled': True}
        new_file_name = self.renamer(data)
        if new_file_name == '':
            result['tasks']['renamer']['response'] = False
        else:
            path = os.path.split(data['finished_file'])[0]
            data['finished_file'] = os.path.join(path, new_file_name)
            result['tasks']['renamer']['response'] = True
    else:
        logging.info('Renamer disabled.')
        result['tasks']['renamer'] = {'enabled': False}

    # BUG FIX: was `data['imdbid'] is not 'N/A'` -- identity comparison
    # against a str literal is implementation-defined (SyntaxWarning on
    # Python >= 3.8) and can wrongly evaluate True; use equality.
    if data.get('imdbid') and data['imdbid'] != 'N/A':
        core.sql.update('MOVIES', 'finished_file',
                        result['data'].get('finished_file'), 'imdbid',
                        data['imdbid'])

    # Delete leftover dir. Skip if file links are enabled or if mover disabled/failed
    if config['cleanupenabled']:
        result['tasks']['cleanup'] = {'enabled': True}

        if config['movermethod'] in ('copy', 'hardlink', 'symboliclink'):
            logging.info(
                'File copy or linking enabled -- skipping Cleanup.')
            result['tasks']['cleanup']['response'] = None
            # NOTE(review): this early return leaves result['status'] as
            # 'incomplete' -- confirm callers treat that as success here.
            return result
        elif os.path.isfile(data['path']):
            logging.info(
                'Download is file, not directory -- skipping Cleanup.')
            result['tasks']['cleanup']['response'] = None
            return result

        # fail if mover disabled or failed
        if config['moverenabled'] is False or result['tasks']['mover'][
                'response'] is False:
            logging.info(
                'Mover either disabled or failed -- skipping Cleanup.')
            result['tasks']['cleanup']['response'] = None
        else:
            if self.cleanup(data['path']):
                r = True
            else:
                r = False
            result['tasks']['cleanup']['response'] = r
    else:
        result['tasks']['cleanup'] = {'enabled': False}

    # all done!
    result['status'] = 'finished'
    return result
def failed(self, data):
    ''' Post-process a failed download
    data (dict): of gathered data from downloader and localdb/tmdb

    In SEARCHRESULTS marks guid as Bad

    In MARKEDRESULTS:
        Creates or updates entry for guid and optional guid2 with status=Bad

    Updates MOVIES status

    If Clean Up is enabled will delete path and contents.
    If Auto Grab is enabled will grab next best release.

    Returns dict of post-processing results
    '''
    config = core.CONFIG['Postprocessing']

    # dict we will json.dump and send back to downloader
    result = {}
    result['status'] = 'finished'
    result['data'] = data
    result['tasks'] = {}

    # mark guid in both results tables
    logging.info('Marking guid as Bad.')
    guid_result = {'url': data['guid']}
    if data['guid']:  # guid can be empty string
        if Manage.searchresults(data['guid'], 'Bad'):
            guid_result['update_SEARCHRESULTS'] = True
        else:
            guid_result['update_SEARCHRESULTS'] = False

        if Manage.markedresults(data['guid'], 'Bad', imdbid=data['imdbid']):
            guid_result['update_MARKEDRESULTS'] = True
        else:
            guid_result['update_MARKEDRESULTS'] = False

    # create result entry for guid
    result['tasks']['guid'] = guid_result

    # if we have a guid2, do it all again
    if 'guid2' in data.keys():
        logging.info('Marking guid2 as Bad.')
        guid2_result = {'url': data['guid2']}
        # BUG FIX: these keys were 'update SEARCHRESULTS' (with a space),
        # inconsistent with every other task result in this class; any
        # consumer reading 'update_SEARCHRESULTS' would miss them.
        if Manage.searchresults(data['guid2'], 'Bad'):
            guid2_result['update_SEARCHRESULTS'] = True
        else:
            guid2_result['update_SEARCHRESULTS'] = False

        if Manage.markedresults(
                data['guid2'],
                'Bad',
                imdbid=data['imdbid'],
        ):
            guid2_result['update_MARKEDRESULTS'] = True
        else:
            guid2_result['update_MARKEDRESULTS'] = False

        # create result entry for guid2
        result['tasks']['guid2'] = guid2_result

    # set movie status
    if data['imdbid']:
        logging.info('Setting MOVIE status.')
        r = Manage.movie_status(data['imdbid'])
    else:
        logging.info(
            'Imdbid not supplied or found, unable to update Movie status.')
        r = ''
    result['tasks']['update_movie_status'] = r

    # delete failed files
    if config['cleanupfailed']:
        result['tasks']['cleanup'] = {'enabled': True, 'path': data['path']}

        logging.info('Deleting leftover files from failed download.')
        if self.cleanup(data['path']) is True:
            result['tasks']['cleanup']['response'] = True
        else:
            result['tasks']['cleanup']['response'] = False
    else:
        result['tasks']['cleanup'] = {'enabled': False}

    # grab the next best release
    if core.CONFIG['Search']['autograb']:
        result['tasks']['autograb'] = {'enabled': True}
        logging.info('Grabbing the next best release.')
        if data.get('imdbid') and data.get('quality'):
            best_release = snatcher.get_best_release(data)
            if best_release and snatcher.download(best_release):
                r = True
            else:
                r = False
        else:
            r = False
        result['tasks']['autograb']['response'] = r
    else:
        result['tasks']['autograb'] = {'enabled': False}

    # all done!
    result['status'] = 'finished'
    return result
def search(imdbid, title, year, quality):
    ''' Executes backlog search for required movies
    imdbid (str): imdb identification number
    title (str): movie title
    year (str/int): year of movie release
    quality (str): name of quality profile

    Pulls fresh results from the enabled newznab/torrent providers, then
    merges in previously stored results (matched by guid) so found_date is
    preserved and scores can be refreshed if the quality profile changed.

    All results are passed through searchresults.score() for (re-)scoring
    and filtering, statuses are applied from MARKEDRESULTS where guids
    match (default status Available), and everything is written back to
    SEARCHRESULTS.

    Returns Bool if movie is found.
    '''
    logging.info('Performing backlog search for {} {}.'.format(title, year))

    proxy.create()

    sources = core.CONFIG['Downloader']['Sources']
    fresh = []
    if sources['usenetenabled']:
        fresh.extend(nn.search_all(imdbid))
    if sources['torrentenabled']:
        fresh.extend(torrent.search_all(imdbid, title, year))

    proxy.destroy()

    stored = core.sql.get_search_results(imdbid, quality)

    # imported releases are always carried forward
    fresh.extend(entry for entry in stored if entry['type'] == 'import')

    still_active = remove_inactive(stored)

    # overwrite fresh entries with their stored counterpart when guids match
    for entry in fresh:
        for previous in still_active:
            if previous['guid'] == entry['guid']:
                entry.update(previous)

    for entry in fresh:
        entry['resolution'] = get_source(entry, year)

    scored = searchresults.score(fresh, imdbid=imdbid)

    # sets result status based off marked results table
    marked = core.sql.get_marked_results(imdbid)
    if marked:
        for entry in scored:
            if entry['guid'] in marked:
                entry['status'] = marked[entry['guid']]

    if not store_results(scored, imdbid, backlog=True):
        logging.error('Unable to store search results for {}'.format(imdbid))
        return False

    if not Manage.movie_status(imdbid):
        logging.error('Unable to update movie status for {}'.format(imdbid))
        return False

    if not core.sql.update('MOVIES', 'backlog', '1', 'imdbid', imdbid):
        logging.error(
            'Unable to flag backlog search as complete for {}'.format(imdbid))
        return False

    return True