def searchresults(guid, status, movie_info=None):
    ''' Marks searchresults status

    guid (str): download link guid
    status (str): status to set
    movie_info (dict): of movie metadata    <optional - default None>

    If guid is in SEARCHRESULTS table, marks it as status.

    If guid not in SEARCHRESULTS, uses movie_info to build and store a new
    result row (via searchresults.generate_simulacrum / score).

    Returns bool
    '''
    # NOTE(review): this function's name shadows the `searchresults` module it
    # calls below (generate_simulacrum / score) — works only if this def lives
    # inside a class or the module is aliased; confirm against the imports.
    TABLE = 'SEARCHRESULTS'

    logging.info('Marking guid {} as {}.'.format(guid.split('&')[0], status))

    if core.sql.row_exists(TABLE, guid=guid):
        # Known result: just flip its status in place.
        logging.info('Marking {} as {} in SEARCHRESULTS.'.format(guid.split('&')[0], status))
        if not core.sql.update(TABLE, 'status', status, 'guid', guid):
            logging.error('Setting SEARCHRESULTS status of {} to {} failed.'.format(guid.split('&')[0], status))
            return False
        else:
            logging.info('Successfully marked {} as {} in SEARCHRESULTS.'.format(guid.split('&')[0], status))
            return True
    else:
        # Unknown guid: synthesize a result row from the supplied metadata.
        logging.info('Guid {} not found in SEARCHRESULTS, attempting to create entry.'.format(guid.split('&')[0]))
        if movie_info is None:
            logging.warning('Movie metadata not supplied, unable to create SEARCHRESULTS entry.')
            return False

        search_result = searchresults.generate_simulacrum(movie_info)
        search_result['indexer'] = 'Post-Processing Import'
        if not search_result.get('title'):
            search_result['title'] = movie_info['title']
        # Falls back to '.' (size of cwd) when orig_filename is missing —
        # presumably just to avoid an exception; TODO confirm 0 wouldn't be better.
        search_result['size'] = os.path.getsize(movie_info.get('orig_filename') or '.')
        # .get() here: a simulacrum without a 'resolution' key previously
        # raised KeyError instead of defaulting to 'Unknown'.
        if not search_result.get('resolution'):
            search_result['resolution'] = 'Unknown'

        search_result = searchresults.score([search_result], imported=True)[0]

        # Keep only the columns SEARCHRESULTS actually stores.
        required_keys = ('score', 'size', 'status', 'pubdate', 'title',
                         'imdbid', 'indexer', 'date_found', 'info_link',
                         'guid', 'torrentfile', 'resolution', 'type',
                         'downloadid', 'freeleech')

        search_result = {k: v for k, v in search_result.items() if k in required_keys}

        return bool(core.sql.write('SEARCHRESULTS', search_result))
def rss_sync(movies):
    ''' Gets latests RSS feed from all indexers
    movies (list): dicts of movies to look for

    Pulls the current RSS feed from every enabled indexer, then matches
    feed entries against each movie in 'movies'.

    Only stores new results. If you need to update scores or old results
    force a backlog search.

    Finally stores results in SEARCHRESULTS

    Returns bool
    '''
    logging.info('Syncing indexer RSS feeds.')

    usenet_feed = []
    torrent_feed = []

    proxy.create()
    if core.CONFIG['Downloader']['Sources']['usenetenabled']:
        usenet_feed = nn.get_rss()
    if core.CONFIG['Downloader']['Sources']['torrentenabled']:
        torrent_feed = torrent.get_rss()
    proxy.destroy()

    for movie in movies:
        imdbid = movie['imdbid']
        title = movie['title']
        year = movie['year']
        english_title = movie.get('english_title')

        logging.info('Parsing RSS for {} {}'.format(title, year))

        # Usenet entries carry an imdbid directly; torrents are matched by name.
        usenet_matches = [entry for entry in usenet_feed if entry['imdbid'] == imdbid]

        torrent_matches = []
        for entry in torrent_feed:
            if _match_torrent_name(title, year, entry['title']) or \
                    (english_title and _match_torrent_name(english_title, year, entry['title'])):
                torrent_matches.append(entry)
        for entry in torrent_matches:
            entry['imdbid'] = imdbid

        candidates = usenet_matches + torrent_matches
        if not candidates:
            logging.info('Nothing found in RSS for {} {}'.format(title, year))
            continue

        # Ignore results we've already stored
        known_guids = {row['guid'] for row in core.sql.get_search_results(imdbid, rejected=True)}
        fresh = [c for c in candidates if c['guid'] not in known_guids]

        logging.info('Found {} new results for {} {}.'.format(len(fresh), title, year))

        # Get source media and resolution
        for c in fresh:
            logging.debug('Parse {}'.format(c['title']))
            c['ptn'] = PTN.parse(c['title'])
            c['resolution'] = get_source(c['ptn'])

        scored = searchresults.score(fresh, imdbid=imdbid)

        if not scored:
            logging.info('No acceptable results found for {}'.format(imdbid))
            continue

        if not store_results(scored, imdbid):
            return False

        if not Manage.movie_status(imdbid):
            return False

    return True
def search(movie):
    ''' Executes backlog search for required movies
    movie (dict): movie to run search for

    Fetches fresh results from the enabled providers, merges in existing
    SEARCHRESULTS rows (so found_date survives and scores can be refreshed
    if the quality profile changed since the last search), re-scores
    everything via searchresults.score(), applies any MARKEDRESULTS
    statuses (default status Available), and stores the outcome in
    SEARCHRESULTS.

    Returns Bool if movie is found.
    '''
    imdbid = movie['imdbid']
    title = movie['title']
    year = movie['year']
    quality = movie['quality']
    english_title = movie.get('english_title', '')
    language = movie.get('download_language', '')

    logging.info('Performing backlog search for {} {}.'.format(title, year))

    results = []

    proxy.create()
    if core.CONFIG['Downloader']['Sources']['usenetenabled']:
        results.extend(nn.search_all(imdbid))
    if core.CONFIG['Downloader']['Sources']['torrentenabled']:
        # Search under the local title unless it's identical to the english
        # one; optionally repeat under the english title.
        if title != english_title:
            results.extend(torrent.search_all(imdbid, title, year))
        if english_title and language:
            results.extend(torrent.search_all(imdbid, english_title, year, title != english_title))
    proxy.destroy()

    old_results = core.sql.get_search_results(imdbid, quality)

    # Imported rows never come back from providers; carry them forward.
    results.extend(old for old in old_results if old['type'] == 'import')

    active_old_results = remove_inactive(old_results)

    # update results with old info if guids match
    for result in results:
        for old in active_old_results:
            if old['guid'] == result['guid']:
                if 'seeders' in result:
                    old['seeders'] = result['seeders']
                if 'leechers' in result:
                    old['leechers'] = result['leechers']
                result.update(old)

    for result in results:
        logging.debug('Parse {}'.format(result['title']))
        result['ptn'] = PTN.parse(result['title'])
        result['resolution'] = get_source(result['ptn'])

    scored_results = searchresults.score(results, imdbid=imdbid)

    # sets result status based off marked results table
    marked_results = core.sql.get_marked_results(imdbid)
    if marked_results:
        for result in scored_results:
            if result['guid'] in marked_results:
                result['status'] = marked_results[result['guid']]

    if not store_results(scored_results, imdbid, backlog=True):
        logging.error('Unable to store search results for {}'.format(imdbid))
        return False

    if not Manage.movie_status(imdbid):
        logging.error('Unable to update movie status for {}'.format(imdbid))
        return False

    if not core.sql.update('MOVIES', 'backlog', '1', 'imdbid', imdbid):
        logging.error('Unable to flag backlog search as complete for {}'.format(imdbid))
        return False

    return True
def search(imdbid, title, year, quality):
    ''' Executes backlog search for required movies
    imdbid (str): imdb identification number
    title (str): movie title
    year (str/int): year of movie release
    quality (str): name of quality profile

    Fetches fresh results from the enabled providers, merges in existing
    SEARCHRESULTS rows (so found_date survives and scores can be refreshed
    if the quality profile changed since the last search), re-scores
    everything via searchresults.score(), applies any MARKEDRESULTS
    statuses (default status Available), and stores the outcome in
    SEARCHRESULTS.

    Returns Bool if movie is found.
    '''
    # NOTE(review): a second `search` with a different signature also exists in
    # this source; if both are top-level in one module the later def wins.
    logging.info('Performing backlog search for {} {}.'.format(title, year))

    results = []

    proxy.create()
    if core.CONFIG['Downloader']['Sources']['usenetenabled']:
        results.extend(nn.search_all(imdbid))
    if core.CONFIG['Downloader']['Sources']['torrentenabled']:
        results.extend(torrent.search_all(imdbid, title, year))
    proxy.destroy()

    old_results = core.sql.get_search_results(imdbid, quality)

    # Imported rows never come back from providers; carry them forward.
    results.extend(old for old in old_results if old['type'] == 'import')

    active_old_results = remove_inactive(old_results)

    # update results with old info if guids match
    for result in results:
        for old in active_old_results:
            if old['guid'] == result['guid']:
                result.update(old)

    for result in results:
        result['resolution'] = get_source(result, year)

    scored_results = searchresults.score(results, imdbid=imdbid)

    # sets result status based off marked results table
    marked_results = core.sql.get_marked_results(imdbid)
    if marked_results:
        for result in scored_results:
            if result['guid'] in marked_results:
                result['status'] = marked_results[result['guid']]

    if not store_results(scored_results, imdbid, backlog=True):
        logging.error('Unable to store search results for {}'.format(imdbid))
        return False

    if not Manage.movie_status(imdbid):
        logging.error('Unable to update movie status for {}'.format(imdbid))
        return False

    if not core.sql.update('MOVIES', 'backlog', '1', 'imdbid', imdbid):
        logging.error('Unable to flag backlog search as complete for {}'.format(imdbid))
        return False

    return True