def resource_get_failed(self):
    """Get data from the failed.db/failed table.

    Reads an optional ``limit`` query argument; when it parses to a
    positive integer the query is limited to that many rows, otherwise
    all rows are returned.

    :returns: an http response with a list of failed-download rows, each
        enriched with provider display information.
    """
    # Bug fix: the original `get_argument('limit' '')` was missing a comma,
    # so Python concatenated the literals and no default was supplied —
    # a request without ?limit= raised MissingArgumentError, and an empty
    # value crashed `int('')`.
    limit = self.get_argument('limit', default='').strip()

    failed_db_con = db.DBConnection('failed.db')

    try:
        limit_value = int(limit)
    except ValueError:
        # Empty or non-numeric limit: fall back to "no limit".
        limit_value = 0

    if limit_value:
        sql_results = failed_db_con.select(
            'SELECT ROWID AS id, release, size, provider '
            'FROM failed '
            'LIMIT ?', [limit_value])
    else:
        sql_results = failed_db_con.select(
            'SELECT ROWID AS id, release, size, provider '
            'FROM failed')

    results = []
    for result in sql_results:
        # NOTE(review): assumes the stored provider id still resolves to a
        # provider class; a removed provider would raise here — confirm
        # upstream guarantees.
        provider = providers.get_provider_class(
            GenericProvider.make_id(result['provider']))
        results.append({
            'id': result['id'],
            'release': result['release'],
            'size': result['size'],
            'provider': {
                'id': provider.get_id(),
                'name': provider.name,
                'imageName': provider.image_name()
            }
        })

    return self._ok(data=results)
def _test_provider(self, data):
    """Test provider on returning results."""
    provider_id = data.get('providerId')
    provider = providers.get_provider_class(provider_id)

    # Unknown id: nothing to test.
    if not provider:
        return self._not_found(f'Could not locate provider by id {provider_id}')

    # Run a bare RSS search; a non-empty result list proves the provider works.
    search_results = provider.search({'RSS': ['']})
    if search_results and len(search_results):
        return self._created(f'{provider.name} returned {len(search_results)} results')

    return self._not_found('No results found')
def patch(self, identifier, **kwargs):
    """Patch provider config."""
    payload = json_decode(self.request.body)

    if not identifier:
        return self._bad_request(
            'You should provide the provider you want to patch')

    provider = get_provider_class(identifier)
    if not provider:
        return self._bad_request('Could not locate provider by id')

    # Apply shared settings first, then torrent-only settings when applicable.
    self._set_common_settings(provider, payload)
    if isinstance(provider, TorrentProvider):
        self._set_torrent_settings(provider, payload)

    # Persist the updated configuration to disk.
    app.instance.save_config()
    return self._ok()
def create_history_item(history_row, compact=False):
    """
    Create a history object, using the data from a history db row item.

    Calculate additional data, where needed.

    :param history_row: a main.db history row.
    :param compact: A boolean indicating if this is used for a compact layout.

    :returns: A dict with history information.
    """
    from medusa.providers import get_provider_class
    from medusa.providers.generic_provider import GenericProvider
    from medusa.tv.series import Series, SeriesIdentifier

    provider = {}
    release_group = None
    release_name = None
    file_name = None
    subtitle_language = None
    client_status = None
    # Fix: show_slug was assigned None twice in the original; once is enough.
    show_slug = None
    show_title = 'Missing Show'

    if history_row['action'] in (SNATCHED, FAILED):
        provider_id = GenericProvider.make_id(history_row['provider'])
        provider_class = get_provider_class(provider_id)

        if provider_class:
            provider.update({
                'id': provider_class.get_id(),
                'name': provider_class.name,
                'imageName': provider_class.image_name()
            })
        else:
            # Provider no longer exists; synthesize display info from the id.
            provider.update({
                'id': provider_id,
                'name': history_row['provider'],
                'imageName': f'{provider_id}.png'
            })

        release_name = history_row['resource']

    if history_row['action'] == DOWNLOADED:
        # For downloads the provider column holds the release group.
        release_group = history_row['provider']
        file_name = history_row['resource']

    if history_row['action'] == SUBTITLED:
        subtitle_language = history_row['resource']
        provider['name'] = history_row['provider']

    if history_row['client_status'] is not None:
        status = ClientStatus(status=history_row['client_status'])
        client_status = {
            'status': [s.value for s in status],
            'string': status.status_to_array_string()
        }

    if history_row['indexer_id'] and history_row['showid']:
        identifier = SeriesIdentifier.from_id(history_row['indexer_id'], history_row['showid'])
        show_slug = identifier.slug
        show = Series.find_by_identifier(identifier)
        if show:
            show_title = show.title

    # NOTE: mutates the passed-in row; kept because the return dict reads it back.
    history_row['episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
        show_title, history_row['season'], history_row['episode'])

    return {
        'series': show_slug,
        'status': history_row['action'],
        'statusName': statusStrings.get(history_row['action']),
        'actionDate': history_row['date'],
        'quality': history_row['quality'],
        'resource': basename(history_row['resource']),
        'size': history_row['size'],
        'properTags': history_row['proper_tags'],
        'season': history_row['season'],
        'episode': history_row['episode'],
        'episodeTitle': history_row['episodeTitle'],
        'manuallySearched': bool(history_row['manually_searched']),
        'infoHash': history_row['info_hash'],
        'provider': provider,
        'releaseName': release_name,
        'releaseGroup': release_group,
        'fileName': file_name,
        'subtitleLanguage': subtitle_language,
        'showSlug': show_slug,
        'showTitle': show_title,
        'providerType': history_row['provider_type'],
        'clientStatus': client_status,
        'partOfBatch': bool(history_row['part_of_batch'])
    }
def _check_torrent_ratio(self, client):
    """Perform configured action after seed ratio reached (or by configuration).

    Walks completed/postprocessed torrent history rows, resolves each row's
    provider, computes the desired seed ratio (provider override beats the
    global app.TORRENT_SEED_RATIO), queries the download client for the
    torrent's current ratio, and performs the configured action
    (remove / pause / remove_with_data) when the ratio is reached.

    :param client: a download client object exposing get_status,
        remove_torrent, pause_torrent and remove_torrent_data.
    """
    # No action configured at all: nothing to do for this client.
    if app.TORRENT_SEED_ACTION == '':
        log.debug(
            'No global ratio or provider ratio configured for {client}, skipping actions.',
            {'client': client.name})
        return

    # The base ClientStatus values to include in the history query.
    include = [
        ClientStatusEnum.COMPLETED.value | ClientStatusEnum.POSTPROCESSED.value
    ]

    # Imported here (not at module level) — presumably to avoid an import
    # cycle; TODO confirm.
    from medusa.providers import get_provider_class
    from medusa.providers.generic_provider import GenericProvider

    for history_result in self._get_history_results_from_db(
        'torrent',
        include_status=include,
    ):
        provider_id = GenericProvider.make_id(history_result['provider'])
        provider = get_provider_class(provider_id)
        if not provider:
            # Provider was removed/renamed since the snatch; skip the row.
            log.debug(
                'Skip provider {provider} with id: {provider_id}', {
                    'provider': history_result['provider'],
                    'provider_id': provider_id
                })
            continue

        # An empty provider ratio means "no provider override" (-1).
        provider_ratio = -1 if provider.ratio == '' else provider.ratio
        try:
            # NOTE(review): the comparison itself can raise TypeError when
            # provider.ratio is a non-numeric string — that is what the
            # except below catches; confirm how ratio is stored.
            desired_ratio = provider_ratio if provider_ratio > -1 else app.TORRENT_SEED_RATIO
        except TypeError:
            log.warning(
                'could not get provider ratio {ratio} for provider {provider}', {
                    'ratio': provider_ratio, 'provider': provider_id
                })
            desired_ratio = app.TORRENT_SEED_RATIO

        if desired_ratio == -1:
            # Not sure if this option is of use.
            continue

        try:
            status = client.get_status(history_result['info_hash'])
        except DownloadClientConnectionException as error:
            log.warning(
                'The client cannot be reached or authentication is failing.'
                '\nAbandon check torrent ratio. error: {error}', {'error': error})
            continue

        if not status:
            # Torrent unknown to the client; nothing to act on.
            continue

        # True when a positive ratio threshold is configured.
        action_after_seeding = desired_ratio * 1.0 > 0.0
        # Bool multiply: threshold becomes 0.0 when seeding action is
        # disabled, so the `continue` below is effectively skipped.
        if status.ratio < desired_ratio * action_after_seeding:
            continue

        # NOTE(review): when action_after_seeding is False this only logs
        # and still falls through to perform the action — looks like a
        # missing `continue`; confirm intended behavior before changing.
        if not action_after_seeding:
            log.debug('Action after seeding disabled')

        log.debug(
            'Ratio of ({ratio}) reached for torrent {info_hash}, starting action: {action}.', {
                'ratio': status.ratio,
                'info_hash': history_result['info_hash'],
                'action': app.TORRENT_SEED_ACTION
            })

        # NOTE(review): `hash` shadows the builtin; left unchanged here.
        hash = history_result['info_hash']
        # Perform configured action.
        if app.TORRENT_SEED_ACTION == 'remove':
            # Remove torrent from client
            client.remove_torrent(hash)
        elif app.TORRENT_SEED_ACTION == 'pause':
            # Pause torrent on client
            client.pause_torrent(hash)
        elif app.TORRENT_SEED_ACTION == 'remove_with_data':
            # Remove torrent and all files from disk (not implemented for each client!)
            client.remove_torrent_data(hash)
        else:
            log.debug('Invalid action {action}', {'action': app.TORRENT_SEED_ACTION})
            continue

        # Record that the post-seeding action ran for this history row.
        self.save_status_to_history(
            history_result, ClientStatus(status_string='SeededAction'))
def get(self, identifier, path_param=None):
    """
    Query provider information.

    Return a list of provider id's.

    :param identifier: provider id. E.g.: myawesomeprovider
    :param path_param:
    """
    show_slug = self._parse(self.get_argument('showslug', default=None), str)
    season = self._parse(self.get_argument('season', default=None), str)
    episode = self._parse(self.get_argument('episode', default=None), str)

    # Without an identifier, respond with every configured provider.
    if not identifier:
        # return a list of provider id's
        all_providers = providers.sorted_provider_list()
        return self._ok([prov.to_json() for prov in all_providers])

    provider = providers.get_provider_class(identifier)
    if not provider:
        return self._not_found('Provider not found')

    # Any path other than 'results' just returns the provider itself.
    if path_param != 'results':
        return self._ok(provider.to_json())

    provider_results = provider.cache.get_results(
        show_slug=show_slug, season=season, episode=episode)

    arg_page = self._get_page()
    arg_limit = self._get_limit(default=50)

    def cache_rows():
        """Yield one page of cached provider results."""
        offset = arg_limit * (arg_page - 1)

        for item in provider_results[offset:offset + arg_limit]:
            episodes = [int(ep) for ep in item['episodes'].strip('|').split('|') if ep != '']
            yield {
                'identifier': item['identifier'],
                'release': item['name'],
                'season': item['season'],
                'episodes': episodes,
                # For now if episodes is 0 or (multiepisode) mark as season pack.
                'seasonPack': len(episodes) != 1,
                'indexer': item['indexer'],
                'seriesId': item['indexerid'],
                'showSlug': show_slug,
                'url': item['url'],
                'time': datetime.fromtimestamp(item['time']),
                'quality': item['quality'],
                'releaseGroup': item['release_group'],
                'dateAdded': datetime.fromtimestamp(item['date_added']),
                'version': item['version'],
                'seeders': item['seeders'],
                'size': item['size'],
                'leechers': item['leechers'],
                'pubdate': parser.parse(item['pubdate']).replace(microsecond=0) if item['pubdate'] else None,
                'provider': {
                    'id': provider.get_id(),
                    'name': provider.name,
                    'imageName': provider.image_name()
                }
            }

    if not provider_results:
        return self._not_found('Provider cache results not found')

    return self._paginate(data_generator=cache_rows)
def pickManualSearch(self, provider=None, identifier=None): """ Tries to Perform the snatch for a manualSelected episode, episodes or season pack. @param provider: The provider id, passed as usenet_crawler and not the provider name (Usenet-Crawler) @param identifier: The provider's cache table's identifier (unique). @return: A json with a {'success': true} or false. """ # Try to retrieve the cached result from the providers cache table. provider_obj = providers.get_provider_class(provider) try: cached_result = Cache(provider_obj).load_from_row(identifier) except Exception as msg: error_message = "Couldn't read cached results. Error: {error}".format( error=msg) logger.log(error_message) return self._genericMessage('Error', error_message) if not cached_result or not all([ cached_result['url'], cached_result['quality'], cached_result['name'], cached_result['indexer'], cached_result['indexerid'], cached_result['season'] is not None, provider ]): return self._genericMessage( 'Error', "Cached result doesn't have all needed info to snatch episode") try: series_obj = Show.find_by_id(app.showList, cached_result['indexer'], cached_result['indexerid']) except (ValueError, TypeError): return self._genericMessage( 'Error', 'Invalid show ID: {0}'.format(cached_result['indexerid'])) if not series_obj: return self._genericMessage( 'Error', 'Could not find a show with id {0} in the list of shows, ' 'did you remove the show?'.format(cached_result['indexerid'])) search_result = provider_obj.get_result(series=series_obj, cache=cached_result) search_result.search_type = SearchType.MANUAL_SEARCH # Create the queue item snatch_queue_item = SnatchQueueItem(search_result.series, search_result.episodes, search_result) # Add the queue item to the queue app.manual_snatch_scheduler.action.add_item(snatch_queue_item) while snatch_queue_item.success is not False: if snatch_queue_item.started and snatch_queue_item.success: # If the snatch was successfull we'll need to update the original searched 
segment, # with the new status: SNATCHED (2) update_finished_search_queue_item(snatch_queue_item) return json.dumps({ 'result': 'success', }) time.sleep(1) return json.dumps({ 'result': 'failure', })
def run(self):
    """Run manual snatch job."""
    generic_queue.QueueItem.run(self)
    self.started = True

    cached = self.cached_result

    # Build a search result object and hydrate it from the cached row.
    search_result = providers.get_provider_class(self.provider).get_result(self.segment)
    search_result.series = self.show
    search_result.url = cached['url']
    search_result.quality = int(cached['quality'])
    search_result.name = cached['name']
    search_result.size = int(cached['size'])
    search_result.seeders = int(cached['seeders'])
    search_result.leechers = int(cached['leechers'])
    search_result.release_group = cached['release_group']
    search_result.version = int(cached['version'])
    if cached['proper_tags']:
        search_result.proper_tags = cached['proper_tags'].split('|')
    else:
        search_result.proper_tags = ''
    search_result.manually_searched = True

    try:
        log.info('Beginning to manual snatch release: {name}',
                 {'name': search_result.name})

        if search_result:
            has_peer_counts = (search_result.seeders not in (-1, None)
                               and search_result.leechers not in (-1, None))
            if has_peer_counts:
                log.info(
                    'Downloading {name} with {seeders} seeders and {leechers} leechers'
                    ' and size {size} from {provider}', {
                        'name': search_result.name,
                        'seeders': search_result.seeders,
                        'leechers': search_result.leechers,
                        'size': pretty_file_size(search_result.size),
                        'provider': search_result.provider.name,
                    }
                )
            else:
                log.info(
                    'Downloading {name} with size: {size} from {provider}', {
                        'name': search_result.name,
                        'size': pretty_file_size(search_result.size),
                        'provider': search_result.provider.name,
                    }
                )
            self.success = snatch_episode(search_result)
        else:
            log.info('Unable to snatch release: {name}',
                     {'name': search_result.name})

        # give the CPU a break
        time.sleep(common.cpu_presets[app.CPU_PRESET])
    except Exception:
        self.success = False
        log.exception('Manual snatch failed! For result: {name}',
                      {'name': search_result.name})
        ui.notifications.message(
            'Error while snatching selected result',
            'Unable to snatch the result for <i>{name}</i>'.format(name=search_result.name))

    if self.success is None:
        self.success = False

    self.finish()
def data_generator_compact():
    """
    Read and paginate history records.

    Results are provided grouped per showid+season+episode.

    The results are flattened into a structure of [{'actionDate': .., 'showSlug':.., 'rows':Array(history_items)},]
    """
    # Page offset; `results`, `arg_limit` and `arg_page` come from the
    # enclosing scope.
    start = arg_limit * (arg_page - 1)

    for compact_item in list(results.values())[start:start + arg_limit]:
        return_item = {'rows': []}
        for item in compact_item:
            provider = {}
            release_group = None
            release_name = None
            file_name = None
            subtitle_language = None

            if item['action'] in (SNATCHED, FAILED):
                provider_id = GenericProvider.make_id(item['provider'])
                provider_class = get_provider_class(provider_id)
                if provider_class:
                    provider.update({
                        'id': provider_class.get_id(),
                        'name': provider_class.name,
                        'imageName': provider_class.image_name()
                    })
                else:
                    # Provider no longer exists; fall back to the raw id.
                    provider.update({
                        'id': provider_id,
                        'name': item['provider'],
                        'imageName': f'{provider_id}.png'
                    })

                release_name = item['resource']

            if item['action'] == DOWNLOADED:
                # For downloads the provider column holds the release group.
                release_group = item['provider']
                file_name = item['resource']

            if item['action'] == SUBTITLED:
                subtitle_language = item['resource']
                provider['name'] = item['provider']

            item['showSlug'] = None
            item['showTitle'] = 'Missing Show'

            if item['indexer_id'] and item['showid']:
                identifier = SeriesIdentifier.from_id(
                    item['indexer_id'], item['showid'])
                item['showSlug'] = identifier.slug
                show = Series.find_by_identifier(identifier)
                if show:
                    item['showTitle'] = show.title

            # Group-level fields end up reflecting the last row in the group.
            return_item['actionDate'] = item['date']
            # NOTE(review): reads key 'showslug' while the assignments above
            # write 'showSlug' — works only if `item` resolves keys
            # case-insensitively (e.g. a db row); confirm against the schema.
            return_item['showSlug'] = item['showslug']
            return_item[
                'episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
                    item['showTitle'], item['season'], item['episode'])
            return_item['quality'] = item['quality']

            return_item['rows'].append({
                'actionDate': item['date'],
                'id': item['rowid'],
                'series': item['showSlug'],
                'status': item['action'],
                'statusName': statusStrings.get(item['action']),
                'quality': item['quality'],
                'resource': basename(item['resource']),
                'size': item['size'],
                'properTags': item['proper_tags'],
                'season': item['season'],
                'episode': item['episode'],
                'manuallySearched': bool(item['manually_searched']),
                'infoHash': item['info_hash'],
                'provider': provider,
                # NOTE(review): snake_case key, unlike 'releaseName' used by
                # the non-compact generator — confirm the frontend expects it.
                'release_name': release_name,
                'releaseGroup': release_group,
                'fileName': file_name,
                'subtitleLanguage': subtitle_language,
                'showSlug': item['showslug'],
                'showTitle': item['showTitle']
            })
        yield return_item
def data_generator():
    """Read and paginate history records.

    Yields one dict per history row, enriched with provider, show and
    client-status details. `results`, `arg_limit` and `arg_page` come
    from the enclosing scope.
    """
    start = arg_limit * (arg_page - 1)

    for item in results[start:start + arg_limit]:
        provider = {}
        release_group = None
        release_name = None
        file_name = None
        subtitle_language = None
        client_status = None
        # Fix: show_slug was assigned None twice in the original; once is enough.
        show_slug = None
        show_title = 'Missing Show'

        if item['action'] in (SNATCHED, FAILED):
            provider_id = GenericProvider.make_id(item['provider'])
            provider_class = get_provider_class(provider_id)
            if provider_class:
                provider.update({
                    'id': provider_class.get_id(),
                    'name': provider_class.name,
                    'imageName': provider_class.image_name()
                })
            else:
                # Provider no longer exists; synthesize display info from the id.
                provider.update({
                    'id': provider_id,
                    'name': item['provider'],
                    'imageName': f'{provider_id}.png'
                })

            release_name = item['resource']

        if item['action'] == DOWNLOADED:
            # For downloads the provider column holds the release group.
            release_group = item['provider']
            file_name = item['resource']

        if item['action'] == SUBTITLED:
            subtitle_language = item['resource']
            provider['name'] = item['provider']

        if item['client_status'] is not None:
            status = ClientStatus(status=item['client_status'])
            client_status = {
                'status': [s.value for s in status],
                'string': status.status_to_array_string()
            }

        if item['indexer_id'] and item['showid']:
            identifier = SeriesIdentifier.from_id(
                item['indexer_id'], item['showid'])
            show_slug = identifier.slug
            show = Series.find_by_identifier(identifier)
            if show:
                show_title = show.title

        # NOTE: mutates the row dict; the yield below reads it back.
        item['episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
            show_title, item['season'], item['episode'])

        yield {
            'id': item['rowid'],
            'series': show_slug,
            'status': item['action'],
            'statusName': statusStrings.get(item['action']),
            'actionDate': item['date'],
            'quality': item['quality'],
            'resource': basename(item['resource']),
            'size': item['size'],
            'properTags': item['proper_tags'],
            'season': item['season'],
            'episode': item['episode'],
            'episodeTitle': item['episodeTitle'],
            'manuallySearched': bool(item['manually_searched']),
            'infoHash': item['info_hash'],
            'provider': provider,
            'releaseName': release_name,
            'releaseGroup': release_group,
            'fileName': file_name,
            'subtitleLanguage': subtitle_language,
            'showSlug': show_slug,
            'showTitle': show_title,
            'providerType': item['provider_type'],
            'clientStatus': client_status,
            'partOfBatch': bool(item['part_of_batch'])
        }