def check_torrents():
    ''' Checks active torrents for every enabled torrent client

    Removes finished torrents that have been post-processed, cancels and
    replaces stalled torrents, and records download progress for torrents
    that are still downloading.

    Does not return
    '''
    for client, config in core.CONFIG['Downloader']['Torrent'].items():
        if config['enabled']:
            progress = {}
            now = int(datetime.datetime.timestamp(datetime.datetime.now()))
            if config.get('removestalledfor'):
                progress = core.sql.get_download_progress(client)
            downloader = getattr(downloaders, client)
            for torrent in downloader.get_torrents_status(stalled_for=config.get('removestalledfor'), progress=progress):
                progress_update = None
                if torrent['status'] == 'finished' and config.get('removetorrents'):
                    logging.info('Checking if finished torrent {} is known and post-processed ({})'.format(torrent['hash'], torrent['name']))
                    if core.sql.row_exists('MARKEDRESULTS', guid=str(torrent['hash']), status='Finished'):
                        logging.info('Torrent is post-processed, removing it from the client.')
                        downloader.cancel_download(torrent['hash'])
                    else:
                        logging.info('Torrent not found as Finished, leaving it alone.')
                if torrent['status'] == 'stalled':
                    logging.info('Checking if stalled torrent {} is known and snatched ({})'.format(torrent['hash'], torrent['name']))
                    if torrent['hash'] in progress:
                        result = core.sql.get_single_search_result('downloadid', str(torrent['hash']))
                        movie = core.sql.get_movie_details('imdbid', result['imdbid'])
                        best_release = snatcher.get_best_release(movie, ignore_guid=result['guid'])
                        # get_best_release returns {} if the top-scored release is already downloading;
                        # the stalled torrent will still be cancelled, but nothing new will be snatched
                        if best_release is not None:
                            logging.info('Torrent {} is stalled, download will be cancelled and marked as Bad'.format(torrent['hash']))
                            Manage.searchresults(result['guid'], 'Bad')
                            Manage.markedresults(result['guid'], 'Bad', imdbid=result['imdbid'])
                            downloader.cancel_download(torrent['hash'])
                        if best_release:
                            logging.info('Snatch {} {}'.format(best_release['guid'], best_release['title']))
                            snatcher.download(best_release)
                elif config.get('removestalledfor') and 'progress' in torrent and torrent['hash'] in progress:
                    if torrent['status'] == 'downloading':
                        if progress[torrent['hash']]['progress'] is None or torrent['progress'] != progress[torrent['hash']]['progress']:
                            progress_update = {'download_progress': torrent['progress'], 'download_time': now}
                    elif progress[torrent['hash']]['progress']:
                        progress_update = {'download_progress': None, 'download_time': None}
                    if progress_update and core.sql.row_exists('SEARCHRESULTS', downloadid=str(torrent['hash']), status='Snatched'):
                        core.sql.update_multiple_values('SEARCHRESULTS', progress_update, 'downloadid', torrent['hash'])
    return
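
# Illustrative sketch only, not called anywhere: a hypothetical example of the
# torrent-status dicts the loop in check_torrents() consumes, inferred from the
# keys accessed above ('hash', 'name', 'status', 'progress'). The actual shape
# and values are defined by each downloader module's get_torrents_status().
def _example_torrent_status():
    return [
        {'hash': 'c39fe3eefbdb62da9c27eb6398ff4a7d2e26e7ab',  # hypothetical torrent hash
         'name': 'Example.Movie.2018.1080p',                  # hypothetical release name
         'status': 'downloading',                             # 'downloading', 'stalled', or 'finished'
         'progress': 42.5}                                    # percent complete, when the client reports it
    ]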
def update_status_snatched(guid, imdbid):
    ''' Sets status to Snatched
    guid (str): guid for download link
    imdbid (str): imdb id

    Updates MOVIES, SEARCHRESULTS, and MARKEDRESULTS to 'Snatched'

    Returns bool
    '''
    logging.info('Updating {} to Snatched.'.format(imdbid))
    if not Manage.searchresults(guid, 'Snatched'):
        logging.error('Unable to update search result status to Snatched.')
        return False
    if not Manage.markedresults(guid, 'Snatched', imdbid=imdbid):
        logging.error('Unable to store marked search result as Snatched.')
        return False
    if not Manage.movie_status(imdbid):
        logging.error('Unable to update movie status to Snatched.')
        return False
    return True
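
# Illustrative sketch only, assuming a release dict shaped like a SEARCHRESULTS
# row (the 'release' argument and its keys are hypothetical). Shows how a snatch
# flow would typically record a successful send to a download client using
# update_status_snatched() above.
def _example_record_snatch(release):
    if update_status_snatched(release['guid'], release['imdbid']):
        logging.info('Marked {} as Snatched.'.format(release['title']))
        return True
    logging.warning('Could not mark {} as Snatched.'.format(release['title']))
    return False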
def complete(self, data):
    ''' Post-processes a complete, successful download
    data (dict): all gathered file information and metadata

    data must include the following keys:
        path (str): path to downloaded item. Can be file or directory
        guid (str): nzb guid or torrent hash
        downloadid (str): download id from download client

    All params can be empty strings if unknown

    In SEARCHRESULTS marks guid as Finished

    In MARKEDRESULTS:
        Creates or updates entry for guid and optional guid2 with status=Finished

    In MOVIES updates finished_score and finished_date
    Updates MOVIES status

    Checks to see if we found a movie file. If not, ends here.

    If Renamer is enabled, renames movie file according to core.CONFIG
    If Mover is enabled, moves file to location in core.CONFIG, then...
    If Clean Up is enabled, deletes path after Mover finishes.
        Clean Up will not execute without Mover success.

    Returns dict of post-processing results
    '''
    config = core.CONFIG['Postprocessing']

    # dict we will json.dump and send back to downloader
    result = {}
    result['status'] = 'incomplete'
    result['data'] = data
    result['data']['finished_date'] = str(datetime.date.today())
    result['tasks'] = {}

    # mark guid in both results tables
    logging.info('Marking guid as Finished.')
    data['guid'] = data['guid'].lower()
    guid_result = {}
    if data['guid'] and data.get('imdbid'):
        if Manage.searchresults(data['guid'], 'Finished', movie_info=data):
            guid_result['update_SEARCHRESULTS'] = True
        else:
            guid_result['update_SEARCHRESULTS'] = False

        if Manage.markedresults(data['guid'], 'Finished', imdbid=data['imdbid']):
            guid_result['update_MARKEDRESULTS'] = True
        else:
            guid_result['update_MARKEDRESULTS'] = False

        # create result entry for guid
        result['tasks'][data['guid']] = guid_result

    # if we have a guid2, do it all again
    if data.get('guid2') and data.get('imdbid'):
        logging.info('Marking guid2 as Finished.')
        guid2_result = {}
        if Manage.searchresults(data['guid2'], 'Finished', movie_info=data):
            guid2_result['update_SEARCHRESULTS'] = True
        else:
            guid2_result['update_SEARCHRESULTS'] = False

        if Manage.markedresults(data['guid2'], 'Finished', imdbid=data['imdbid']):
            guid2_result['update_MARKEDRESULTS'] = True
        else:
            guid2_result['update_MARKEDRESULTS'] = False

        # create result entry for guid2
        result['tasks'][data['guid2']] = guid2_result

    # set movie status and add finished date/score
    if data.get('imdbid'):
        if core.sql.row_exists('MOVIES', imdbid=data['imdbid']):
            data['category'] = core.sql.get_movie_details('imdbid', data['imdbid'])['category']
        else:
            logging.info('{} not found in library, adding now.'.format(data.get('title')))
            data['status'] = 'Disabled'
            Manage.add_movie(data)

        logging.info('Setting MOVIE status.')
        r = Manage.movie_status(data['imdbid'])
        db_update = {'finished_date': result['data']['finished_date'], 'finished_score': result['data'].get('finished_score')}
        core.sql.update_multiple_values('MOVIES', db_update, 'imdbid', data['imdbid'])
    else:
        logging.info('Imdbid not supplied or found, unable to update Movie status.')
        r = ''
    result['tasks']['update_movie_status'] = r

    data.update(Metadata.convert_to_db(data))

    # mover: sets data['finished_file']
    if config['moverenabled']:
        result['tasks']['mover'] = {'enabled': True}
        response = self.mover(data)
        if not response:
            result['tasks']['mover']['response'] = False
        else:
            data['finished_file'] = response
            result['tasks']['mover']['response'] = True
    else:
        logging.info('Mover disabled.')
        data['finished_file'] = data.get('original_file')
        result['tasks']['mover'] = {'enabled': False}

    # renamer
    if config['renamerenabled']:
        result['tasks']['renamer'] = {'enabled': True}
        new_file_name = self.renamer(data)
        if new_file_name == '':
            result['tasks']['renamer']['response'] = False
        else:
            path = os.path.split(data['finished_file'])[0]
            data['finished_file'] = os.path.join(path, new_file_name)
            result['tasks']['renamer']['response'] = True
    else:
        logging.info('Renamer disabled.')
        result['tasks']['renamer'] = {'enabled': False}

    if data.get('imdbid') and data['imdbid'] != 'N/A':
        core.sql.update('MOVIES', 'finished_file', result['data'].get('finished_file'), 'imdbid', data['imdbid'])

    # Delete leftover dir. Skip if file links are enabled or if mover disabled/failed
    if config['cleanupenabled']:
        result['tasks']['cleanup'] = {'enabled': True}
        if config['movermethod'] in ('copy', 'hardlink', 'symboliclink'):
            logging.info('File copy or linking enabled -- skipping Cleanup.')
            result['tasks']['cleanup']['response'] = None
            return result
        elif os.path.isfile(data['path']):
            logging.info('Download is file, not directory -- skipping Cleanup.')
            result['tasks']['cleanup']['response'] = None
            return result

        # fail if mover disabled or failed
        if config['moverenabled'] is False or result['tasks']['mover']['response'] is False:
            logging.info('Mover either disabled or failed -- skipping Cleanup.')
            result['tasks']['cleanup']['response'] = None
        else:
            if self.cleanup(data['path']):
                r = True
            else:
                r = False
            result['tasks']['cleanup']['response'] = r
    else:
        result['tasks']['cleanup'] = {'enabled': False}

    # all done!
    result['status'] = 'finished'
    return result
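
# Illustrative sketch only: a minimal, hypothetical 'data' payload for
# complete(), based on the keys its docstring requires (path, guid, downloadid)
# plus imdbid, which the method uses to update the library. All values below
# are made up.
_EXAMPLE_COMPLETE_PAYLOAD = {
    'path': '/downloads/Example.Movie.2018.1080p',          # downloaded file or directory
    'guid': 'c39fe3eefbdb62da9c27eb6398ff4a7d2e26e7ab',     # nzb guid or torrent hash
    'downloadid': '12345',                                  # id assigned by the download client
    'imdbid': 'tt0000000'                                   # empty string if unknown
}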
def failed(self, data):
    ''' Post-processes a failed download
    data (dict): gathered data from downloader and localdb/tmdb

    In SEARCHRESULTS marks guid as Bad

    In MARKEDRESULTS:
        Creates or updates entry for guid and optional guid2 with status=Bad

    Updates MOVIES status

    If Clean Up is enabled, deletes path and contents.
    If Auto Grab is enabled, grabs next best release.

    Returns dict of post-processing results
    '''
    config = core.CONFIG['Postprocessing']

    # dict we will json.dump and send back to downloader
    result = {}
    result['status'] = 'finished'
    result['data'] = data
    result['tasks'] = {}

    # mark guid in both results tables
    logging.info('Marking guid as Bad.')
    guid_result = {'url': data['guid']}
    if data['guid']:  # guid can be empty string
        if Manage.searchresults(data['guid'], 'Bad'):
            guid_result['update_SEARCHRESULTS'] = True
        else:
            guid_result['update_SEARCHRESULTS'] = False

        if Manage.markedresults(data['guid'], 'Bad', imdbid=data['imdbid']):
            guid_result['update_MARKEDRESULTS'] = True
        else:
            guid_result['update_MARKEDRESULTS'] = False

    # create result entry for guid
    result['tasks']['guid'] = guid_result

    # if we have a guid2, do it all again
    if 'guid2' in data.keys():
        logging.info('Marking guid2 as Bad.')
        guid2_result = {'url': data['guid2']}
        if Manage.searchresults(data['guid2'], 'Bad'):
            guid2_result['update_SEARCHRESULTS'] = True
        else:
            guid2_result['update_SEARCHRESULTS'] = False

        if Manage.markedresults(data['guid2'], 'Bad', imdbid=data['imdbid']):
            guid2_result['update_MARKEDRESULTS'] = True
        else:
            guid2_result['update_MARKEDRESULTS'] = False

        # create result entry for guid2
        result['tasks']['guid2'] = guid2_result

    # set movie status
    if data['imdbid']:
        logging.info('Setting MOVIE status.')
        r = Manage.movie_status(data['imdbid'])
    else:
        logging.info('Imdbid not supplied or found, unable to update Movie status.')
        r = ''
    result['tasks']['update_movie_status'] = r

    # delete failed files
    if config['cleanupfailed']:
        result['tasks']['cleanup'] = {'enabled': True, 'path': data['path']}
        logging.info('Deleting leftover files from failed download.')
        if self.cleanup(data['path']) is True:
            result['tasks']['cleanup']['response'] = True
        else:
            result['tasks']['cleanup']['response'] = False
    else:
        result['tasks']['cleanup'] = {'enabled': False}

    # grab the next best release
    if core.CONFIG['Search']['autograb']:
        result['tasks']['autograb'] = {'enabled': True}
        logging.info('Grabbing the next best release.')
        if data.get('imdbid') and data.get('quality'):
            best_release = snatcher.get_best_release(data)
            if best_release and snatcher.download(best_release):
                r = True
            else:
                r = False
        else:
            r = False
        result['tasks']['autograb']['response'] = r
    else:
        result['tasks']['autograb'] = {'enabled': False}

    # all done!
    result['status'] = 'finished'
    return result
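
# Illustrative sketch only: roughly what failed() returns when cleanup and
# autograb are both enabled. All values are hypothetical, including the
# 'update_movie_status' value, which stands in for whatever Manage.movie_status
# returns. The downloader that posted the failure receives this structure as JSON.
_EXAMPLE_FAILED_RESULT = {
    'status': 'finished',
    'data': {},  # echoes the payload passed in (omitted here)
    'tasks': {
        'guid': {'url': 'c39fe3eefbdb62da9c27eb6398ff4a7d2e26e7ab',
                 'update_SEARCHRESULTS': True,
                 'update_MARKEDRESULTS': True},
        'update_movie_status': 'Wanted',
        'cleanup': {'enabled': True, 'path': '/downloads/Example.Movie.2018.1080p', 'response': True},
        'autograb': {'enabled': True, 'response': True}
    }
}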
def putio_process(self, *args, **transfer_data):
    ''' Handles post-processing callbacks from Put.io

    Called by Put.io when a download completes, via a POST request that
    includes the download metadata as the transfer_data kwargs.

    Sample kwargs:
    {
        "apikey": "APIKEY",
        "percent_done": "100",
        "peers_getting_from_us": "0",
        "completion_percent": "0",
        "seconds_seeding": "0",
        "current_ratio": "0.00",
        "created_torrent": "False",
        "size": "507637",
        "up_speed": "0",
        "callback_url": "http://MYDDNS/watcher/postprocessing/putio_process?apikey=APIKEY",
        "source": "<full magnet uri including trackers>",
        "peers_connected": "0",
        "down_speed": "0",
        "is_private": "False",
        "id": "45948956",            # Download ID
        "simulated": "True",
        "type": "TORRENT",
        "save_parent_id": "536510251",
        "file_id": "536514172",      # Put.io file ID
        "download_id": "21596709",
        "torrent_link": "https://api.put.io/v2/transfers/<transferid>/torrent",
        "finished_at": "2018-04-09 04:13:58",
        "status": "COMPLETED",
        "downloaded": "0",
        "extract": "False",
        "name": "<download name>",
        "status_message": "Completed",
        "created_at": "2018-04-09 04:13:57",
        "uploaded": "0",
        "peers_sending_to_us": "0"
    }
    '''
    logging.info('########################################')
    logging.info('PUT.IO Post-processing request received.')
    logging.info('########################################')

    conf = core.CONFIG['Downloader']['Torrent']['PutIO']

    data = {'downloadid': str(transfer_data['id'])}

    if transfer_data['source'].startswith('magnet'):
        data['guid'] = transfer_data['source'].split('btih:')[1].split('&')[0]
    else:
        data['guid'] = None

    data.update(self.get_movie_info(data))

    if conf['downloadwhencomplete']:
        logging.info('Downloading Put.IO files and processing locally.')
        download = PutIO.download(transfer_data['file_id'])
        if not download['response']:
            logging.error('PutIO processing failed.')
            return
        data['path'] = download['path']
        data['original_file'] = self.get_movie_file(data['path'])

        data.update(self.complete(data))

        if data['status'] == 'finished' and conf['deleteafterdownload']:
            data['tasks']['delete_putio'] = PutIO.delete(transfer_data['file_id'])
    else:
        logging.info('Marking guid as Finished.')
        guid_result = {}
        if data['guid']:
            if Manage.searchresults(data['guid'], 'Finished'):
                guid_result['update_SEARCHRESULTS'] = True
            else:
                guid_result['update_SEARCHRESULTS'] = False

            if Manage.markedresults(data['guid'], 'Finished', imdbid=data['imdbid']):
                guid_result['update_MARKEDRESULTS'] = True
            else:
                guid_result['update_MARKEDRESULTS'] = False

            # create result entry for guid
            data['tasks'][data['guid']] = guid_result

        # update MOVIES table
        if data.get('imdbid'):
            db_update = {'finished_file': 'https://app.put.io/files/{}'.format(transfer_data['file_id']), 'status': 'finished'}
            core.sql.update_multiple_values('MOVIES', db_update, 'imdbid', data['imdbid'])

    title = data['data'].get('title')
    year = data['data'].get('year')
    imdbid = data['data'].get('imdbid')
    resolution = data['data'].get('resolution')
    rated = data['data'].get('rated')
    original_file = data['data'].get('original_file')
    finished_file = data['data'].get('finished_file')
    downloadid = data['data'].get('downloadid')
    finished_date = data['data'].get('finished_date')
    quality = data['data'].get('quality')

    plugins.finished(title, year, imdbid, resolution, rated, original_file, finished_file, downloadid, finished_date, quality)

    logging.info('#################################')
    logging.info('Post-processing complete.')
    logging.info(data)
    logging.info('#################################')