def putio_process(self, *args, **transfer_data) -> None:
    ''' Method to handle postprocessing callbacks from Put.io

    Gets called from Put.IO when download completes via POST request
    including download metadata as transfer_data kwargs.

    Sample kwargs:
    {
        "apikey": "APIKEY",
        "percent_done": "100",
        "peers_getting_from_us": "0",
        "completion_percent": "0",
        "seconds_seeding": "0",
        "current_ratio": "0.00",
        "created_torrent": "False",
        "size": "507637",
        "up_speed": "0",
        "callback_url": "http://MYDDNS/watcher/postprocessing/putio_process?apikey=APIKEY",
        "source": "<full magnet uri including trackers>",
        "peers_connected": "0",
        "down_speed": "0",
        "is_private": "False",
        "id": "45948956",                   # Download ID
        "simulated": "True",
        "type": "TORRENT",
        "save_parent_id": "536510251",
        "file_id": "536514172",             # Put.io file ID #
        "download_id": "21596709",
        "torrent_link": "https://api.put.io/v2/transfers/<transferid>/torrent",
        "finished_at": "2018-04-09 04:13:58",
        "status": "COMPLETED",
        "downloaded": "0",
        "extract": "False",
        "name": "<download name>",
        "status_message": "Completed",
        "created_at": "2018-04-09 04:13:57",
        "uploaded": "0",
        "peers_sending_to_us": "0"
    }

    Side effects: downloads files from Put.io (optionally deleting them
    afterwards), or marks the release finished in the SEARCHRESULTS /
    MARKEDRESULTS / MOVIES tables, then fires the 'finished' plugin hook.

    Returns None (early-returns if the Put.io download fails).
    '''
    logging.info('########################################')
    logging.info('PUT.IO Post-processing request received.')
    logging.info('########################################')

    conf = core.CONFIG['Downloader']['Torrent']['PutIO']

    # Seed the working dict with the Put.io transfer id as our downloadid.
    data = {'downloadid': str(transfer_data['id'])}

    # Derive the guid (torrent infohash) from the magnet uri when present;
    # non-magnet sources carry no recoverable guid.
    if transfer_data['source'].startswith('magnet'):
        data['guid'] = transfer_data['source'].split('btih:')[1].split(
            '&')[0]
    else:
        data['guid'] = None

    # Merge in movie metadata resolved from downloadid/guid.
    data.update(self.get_movie_info(data))

    if conf['downloadwhencomplete']:
        # Pull the files down from Put.io and run the normal local
        # post-processing pipeline on them.
        logging.info('Downloading Put.IO files and processing locally.')
        download = PutIO.download(transfer_data['file_id'])
        if not download['response']:
            logging.error('PutIO processing failed.')
            return
        data['path'] = download['path']
        data['original_file'] = self.get_movie_file(data['path'])

        # self.complete() performs renaming/moving and merges its result
        # (including 'status', 'tasks', 'data') into our dict.
        data.update(self.complete(data))

        if data['status'] == 'finished' and conf['deleteafterdownload']:
            # Local copy succeeded; remove the remote copy from Put.io.
            data['tasks']['delete_putio'] = PutIO.delete(
                transfer_data['file_id'])
    else:
        # Leave the files on Put.io; just record the release as Finished.
        logging.info('Marking guid as Finished.')
        guid_result = {}
        # NOTE(review): data['imdbid'] is assumed to be provided by
        # get_movie_info() — the later branch uses data.get('imdbid')
        # defensively, so confirm this indexing cannot KeyError.
        if data['guid']:
            if Manage.searchresults(data['guid'], 'Finished'):
                guid_result['update_SEARCHRESULTS'] = True
            else:
                guid_result['update_SEARCHRESULTS'] = False
            if Manage.markedresults(data['guid'], 'Finished',
                                    imdbid=data['imdbid']):
                guid_result['update_MARKEDRESULTS'] = True
            else:
                guid_result['update_MARKEDRESULTS'] = False
            # create result entry for guid
            # NOTE(review): 'tasks' is not created in this branch
            # (self.complete() was not called) — verify get_movie_info()
            # supplies it, otherwise this raises KeyError.
            data['tasks'][data['guid']] = guid_result

        # update MOVIES table
        if data.get('imdbid'):
            db_update = {
                'finished_file': 'https://app.put.io/files/{}'.format(
                    transfer_data['file_id']),
                'status': 'finished'
            }
            core.sql.update_multiple_values('MOVIES', db_update, 'imdbid',
                                            data['imdbid'])

    # Unpack the final release record and notify plugins.
    # NOTE(review): presumes data['data'] exists in both branches — in the
    # non-download branch it must come from get_movie_info(); confirm.
    title = data['data'].get('title')
    year = data['data'].get('year')
    imdbid = data['data'].get('imdbid')
    resolution = data['data'].get('resolution')
    rated = data['data'].get('rated')
    original_file = data['data'].get('original_file')
    finished_file = data['data'].get('finished_file')
    downloadid = data['data'].get('downloadid')
    finished_date = data['data'].get('finished_date')
    quality = data['data'].get('quality')

    plugins.finished(title, year, imdbid, resolution, rated, original_file,
                     finished_file, downloadid, finished_date, quality)

    logging.info('#################################')
    logging.info('Post-processing complete.')
    logging.info(data)
    logging.info('#################################')
def default(self, **data) -> dict:
    ''' Handles post-processing requests.

    **data: keyword params sent through POST request payload

    Required kw params:
        apikey (str): Watcher api key
        mode (str): post-processing mode (complete, failed)
        guid (str): download link of file. Can be url or magnet link.
        path (str): absolute path to downloaded files. Can be single file or dir

    Optional kw params:
        imdbid (str): imdb identification number (tt123456)
        downloadid (str): id number from downloader

    While processing many variables are produced to track files through
    renaming, moving, etc. Perhaps the most important name is
    data['finished_file'], which is the current name/location of the file
    being processed. This is updated when renamed, moved, etc.

    Returns dict of post-processing tasks and data, or a
    {'response': False, 'error': ...} dict when validation fails.
    '''
    logging.info('#################################')
    logging.info('Post-processing request received.')
    logging.info('#################################')

    # check for required keys
    for key in ('apikey', 'mode', 'guid', 'path'):
        if key not in data:
            logging.warning('Missing key {}'.format(key))
            return {
                'response': False,
                'error': 'missing key: {}'.format(key)
            }

    # check if api key is correct
    if data['apikey'] != core.CONFIG['Server']['apikey']:
        # Bug fix: the original called '.format(key)' on a string with no
        # placeholder, silently relying on the loop variable leaking out
        # of the for-loop above. The message needs no formatting at all.
        logging.warning('Incorrect API key.')
        return {'response': False, 'error': 'incorrect api key'}

    # check if mode is valid
    if data['mode'] not in ('failed', 'complete'):
        logging.warning('Invalid mode value: {}.'.format(data['mode']))
        return {'response': False, 'error': 'invalid mode value'}

    logging.debug(data)

    # modify path based on remote mapping
    data['path'] = self.map_remote(data['path'])

    # get the actual movie file name; skip the size sanity-check for
    # failed downloads since their files may be incomplete.
    data['original_file'] = self.get_movie_file(
        data['path'], check_size=data['mode'] != 'failed')
    data['parent_dir'] = os.path.basename(
        os.path.dirname(
            data['original_file'])) if data.get('original_file') else ''

    if not data['original_file']:
        logging.warning('Movie file not found')
        data['mode'] = 'failed'

    # Get possible local data or get TMDB data to merge with self.params.
    logging.info('Gathering release information.')
    data.update(self.get_movie_info(data))

    # At this point we have all of the information we're going to get.
    if data['mode'] == 'failed':
        logging.warning('Post-processing as Failed.')
        response = self.failed(data)
    elif data['mode'] == 'complete':
        logging.info('Post-processing as Complete.')

        # 'task' is absent when the request came from an external caller
        # rather than the internal scanner task; remember the path so the
        # scanner does not re-process it.
        if 'task' not in data:
            directory = core.CONFIG['Postprocessing']['Scanner'][
                'directory']
            if data['path'] == directory:
                core.sql.save_postprocessed_path(data['original_file'])
            else:
                core.sql.save_postprocessed_path(data['path'])

        response = self.complete(data)

        # Strip internal-only bookkeeping before notifying plugins.
        response['data'].pop('backlog', '')
        response['data'].pop('predb', '')
        response['data'].pop('source', '')

        title = response['data'].get('title')
        year = response['data'].get('year')
        imdbid = response['data'].get('imdbid')
        resolution = response['data'].get('resolution')
        rated = response['data'].get('rated')
        original_file = response['data'].get('original_file')
        finished_file = response['data'].get('finished_file')
        downloadid = response['data'].get('downloadid')
        finished_date = response['data'].get('finished_date')
        quality = response['data'].get('quality')

        plugins.finished(title, year, imdbid, resolution, rated,
                         original_file, finished_file, downloadid,
                         finished_date, quality)
    else:
        logging.warning('Invalid mode value: {}.'.format(data['mode']))
        return {'response': False, 'error': 'invalid mode value'}

    logging.info('#################################')
    logging.info('Post-processing complete.')
    logging.info(json.dumps(response, indent=2, sort_keys=True))
    logging.info('#################################')

    return response