def test_create_search_string_sports(p, create_tvshow, create_tvepisode):
    """Verify the sports search strings built for the show name and each alias."""
    expected = p['expected']

    # Wire a mock show into a provider using the parametrized settings.
    show = create_tvshow(indexer=1, name=p['series_name'])
    provider = GenericProvider('mock_provider')
    provider.series = show
    provider.search_separator = p['separator']

    # A sports episode is identified by its air date.
    episode = create_tvepisode(show, 1, 12)
    episode.airdate = date(2018, 1, 10)

    search_string = {'Episode': []}
    for title in [p['series_name']] + p['series_alias']:
        provider._create_sports_search_string(
            title, episode, search_string, add_string=p['add_string'])

    assert expected == search_string['Episode']
def test_create_search_string_anime(p, create_tvshow, create_tvepisode, monkeypatch_function_return):
    """Verify the anime search strings built for the show name and each alias."""
    expected = p['expected']

    # Stub out the scene-exception lookup with the parametrized value.
    monkeypatch_function_return([(
        'medusa.scene_exceptions.get_season_scene_exceptions',
        p['season_scene_name_exceptions']
    )])

    show = create_tvshow(indexer=1, name=p['series_name'])
    provider = GenericProvider('mock_provider')
    provider.series = show
    provider.search_separator = p['separator']

    # Anime lookups use scene numbering, so seed the scene fields.
    episode = create_tvepisode(show, 1, 12)
    episode.scene_episode = p['scene_episode']
    episode.scene_absolute_number = p['scene_episode_absolute']
    episode.scene_season = p['scene_season']

    search_string = {'Episode': []}
    for title in [p['series_name']] + p['series_alias']:
        provider._create_anime_search_string(
            title, episode, search_string, add_string=p['add_string'])

    assert expected == search_string['Episode']
def test__get_title_and_url(self):
    """Test _get_title_and_url.

    Missing/empty items yield ('', ''); a populated item yields the title
    with dots for spaces and the link with percent-escapes decoded.
    """
    decoded_url = 'http://www.google.com/&foo=bar&tr=test'

    cases = [
        (None, ('', '')),
        ({}, ('', '')),
        ({'link': None, 'title': None}, ('', '')),
        ({'link': '', 'title': ''}, ('', '')),
        ({'link': 'http://www.google.com/&foo=bar%26tr%3Dtest', 'title': 'Some Title'},
         ('Some.Title', decoded_url)),
    ]
    unicode_cases = [
        ({'link': u'', 'title': u''}, ('', '')),
        ({'link': u'http://www.google.com/&foo=bar%26tr%3Dtest', 'title': u'Some Title'},
         ('Some.Title', decoded_url)),
    ]

    for item, expected in cases + unicode_cases:
        self.assertEqual(
            GenericProvider('Test Provider')._get_title_and_url(item), expected)
def test_search(self):
    """search() must return an empty list for each of these search params."""
    bogus_params = [
        None, 123, 12.3, -123, -12.3,
        '', '123', '12.3', '-123', '-12.3',
        u'', u'123', u'12.3', u'-123', u'-12.3',
    ]
    for params in bogus_params:
        self.assertEqual(GenericProvider('Test Provider').search(params), [])
def resource_get_failed(self):
    """Get data from the failed.db/failed table.

    An optional ``limit`` query argument caps the number of rows returned;
    a missing, empty or zero limit returns every row.

    :returns: the handler's OK response wrapping the list of failed releases.
    """
    # Bug fix: the original read ``self.get_argument('limit' '')`` -- implicit
    # string concatenation, i.e. argument name 'limit' with NO default -- which
    # made the query argument mandatory. Supply the intended '' default.
    limit = self.get_argument('limit', '').strip()

    failed_db_con = db.DBConnection('failed.db')
    # Only apply LIMIT for a non-empty, non-zero value ('' or '0' -> no limit).
    if limit and int(limit):
        sql_results = failed_db_con.select(
            'SELECT ROWID AS id, release, size, provider '
            'FROM failed '
            'LIMIT ?', [limit])
    else:
        sql_results = failed_db_con.select(
            'SELECT ROWID AS id, release, size, provider '
            'FROM failed')

    results = []
    for result in sql_results:
        provider_id = GenericProvider.make_id(result['provider'])
        provider = providers.get_provider_class(provider_id)
        if provider:
            provider_data = {
                'id': provider.get_id(),
                'name': provider.name,
                'imageName': provider.image_name()
            }
        else:
            # Robustness: the provider may have been removed since the failure
            # was recorded; fall back to the stored name (matches the history
            # generators elsewhere in this codebase).
            provider_data = {
                'id': provider_id,
                'name': result['provider'],
                'imageName': '{0}.png'.format(provider_id)
            }

        results.append({
            'id': result['id'],
            'release': result['release'],
            'size': result['size'],
            'provider': provider_data
        })

    return self._ok(data=results)
def test_get_id(self):
    """get_id() lower-cases and strips the name, mapping other chars to '_'."""
    non_string_cases = [
        (None, ''), (123, '123'), (12.3, '12_3'), (0, ''),
        (-123, '_123'), (-12.3, '_12_3'),
    ]
    # Each (name, expected) pair is exercised as both str and unicode input.
    string_cases = [
        ('', ''), (' ', ''), ('123', '123'), (' 123 ', '123'),
        ('12.3', '12_3'), (' 12.3 ', '12_3'), ('0', '0'), (' 0 ', '0'),
        ('-123', '_123'), (' -123 ', '_123'),
        ('-12.3', '_12_3'), (' -12.3 ', '_12_3'),
        ('abc', 'abc'), (' abc ', 'abc'), ('ABC', 'abc'), (' ABC ', 'abc'),
        ('.def', '_def'), ('g,hi', 'g_hi'), ('jk!l', 'jk_l'),
        ('mno?', 'mno_'), ('_pqr$', '_pqr_'),
    ]

    for name, expected in non_string_cases + string_cases:
        self.assertEqual(GenericProvider(name).get_id(), expected)
    for name, expected in string_cases:
        self.assertEqual(GenericProvider(u'' + name).get_id(), expected)
def test_image_name(self):
    """image_name() is the sanitized provider id with a '.png' extension."""
    non_string_cases = [
        (None, ''), (123, '123'), (12.3, '12_3'), (0, ''),
        (-123, '_123'), (-12.3, '_12_3'),
    ]
    # Each (name, sanitized-id) pair is exercised as both str and unicode input.
    string_cases = [
        ('', ''), (' ', ''), ('123', '123'), (' 123 ', '123'),
        ('12.3', '12_3'), (' 12.3 ', '12_3'), ('0', '0'), (' 0 ', '0'),
        ('-123', '_123'), (' -123 ', '_123'),
        ('-12.3', '_12_3'), (' -12.3 ', '_12_3'),
        ('abc', 'abc'), (' abc ', 'abc'), ('ABC', 'abc'), (' ABC ', 'abc'),
        ('.def', '_def'), ('g,hi', 'g_hi'), ('jk!l', 'jk_l'),
        ('mno?', 'mno_'), ('_pqr$', '_pqr_'),
    ]

    for name, sanitized in non_string_cases + string_cases:
        self.assertEqual(GenericProvider(name).image_name(), sanitized + '.png')
    for name, sanitized in string_cases:
        self.assertEqual(GenericProvider(u'' + name).image_name(), sanitized + '.png')
def test_make_id(self):
    """make_id() (static) sanitizes an arbitrary name into a provider id."""
    non_string_cases = [
        (None, ''), (123, '123'), (12.3, '12_3'), (0, ''),
        (-123, '_123'), (-12.3, '_12_3'),
    ]
    # Each (name, expected) pair is exercised as both str and unicode input.
    string_cases = [
        ('', ''), (' ', ''), ('123', '123'), (' 123 ', '123'),
        ('12.3', '12_3'), (' 12.3 ', '12_3'), ('0', '0'), (' 0 ', '0'),
        ('-123', '_123'), (' -123 ', '_123'),
        ('-12.3', '_12_3'), (' -12.3 ', '_12_3'),
        ('abc', 'abc'), (' abc ', 'abc'), ('ABC', 'abc'), (' ABC ', 'abc'),
        ('.def', '_def'), ('g,hi', 'g_hi'), ('jk!l', 'jk_l'),
        ('mno?', 'mno_'), ('_pqr$', '_pqr_'),
    ]

    for name, expected in non_string_cases + string_cases:
        self.assertEqual(GenericProvider.make_id(name), expected)
    for name, expected in string_cases:
        self.assertEqual(GenericProvider.make_id(u'' + name), expected)
def test_create_search_string_anime(p, create_tvshow, create_tvepisode, monkeypatch_function_return):
    """Verify anime search strings for the show name and every alias."""
    expected = p['expected']

    # Stub out the scene-exception lookup with the parametrized value.
    monkeypatch_function_return([(
        'medusa.scene_exceptions.get_season_scene_exceptions',
        p['season_scene_name_exceptions']
    )])

    show = create_tvshow(indexer=1, name=p['series_name'])
    show.scene = p['series_scene']

    provider = GenericProvider('mock_provider')
    provider.series = show
    provider.search_separator = p['separator']

    # Seed both indexer and scene numbering on the episode.
    episode = create_tvepisode(show, 1, 12)
    episode.scene_episode = p['scene_episode']
    episode.scene_season = p['scene_season']
    episode.absolute_number = p['absolute_number']
    episode.scene_absolute_number = p['scene_absolute_number']

    search_string = {'Episode': []}
    for title in [p['series_name']] + p['series_alias']:
        provider._create_anime_search_string(
            title, episode, search_string, add_string=p['add_string'])

    assert expected == search_string['Episode']
def test_download_file(self, df_mock):
    """Exercise download_result() through its login, make-url and verify paths."""
    domain = 'domain'
    filename = 'TestFilename.nzb'
    urls = [
        'http://{0}/{1}.torrentNO_DOWNLOAD_NAME'.format(domain, filename),
        'http://{0}/{1}.torrent'.format(domain, filename),
    ]

    # Case 1: a failing login() short-circuits the download.
    provider = GenericProvider('Test Provider 1')
    login_mock = MagicMock()
    login_mock.name = 'result 1'
    login_mock.return_value = False
    with patch.object(provider, 'login', login_mock):
        self.assertFalse(provider.download_result(login_mock))
        self.assertTrue(login_mock.called)

    # Case 2: a successful download sets the Referer header from the url domain.
    provider = GenericProvider('Test Provider 2')
    make_url_mock = MagicMock()
    make_url_mock.name = 'result 2'
    make_url_mock.return_value = (urls, filename)
    df_mock.return_value = True
    with patch.object(provider, '_make_url', make_url_mock):
        self.assertTrue(provider.download_result(make_url_mock))
        self.assertTrue('Referer' in provider.headers)
        self.assertTrue(domain in provider.headers['Referer'])
        self.assertTrue(df_mock.called)

    # Case 3: a failing _verify_download() turns the result into a failure.
    provider = GenericProvider('Test Provider 3')
    make_url_mock = MagicMock()
    make_url_mock.name = 'result 3'
    make_url_mock.return_value = (urls, filename)
    verify_download_mock = MagicMock()
    verify_download_mock.return_value = False
    df_mock.return_value = True
    with patch.object(provider, '_make_url', make_url_mock):
        with patch.object(provider, '_verify_download', verify_download_mock):
            self.assertFalse(provider.download_result(make_url_mock))
def data_generator():
    """Read and paginate history records.

    Yields one dict per history row in the requested page; ``results``,
    ``arg_limit`` and ``arg_page`` come from the enclosing scope.
    """
    start = arg_limit * (arg_page - 1)

    for item in results[start:start + arg_limit]:
        provider = {}
        release_group = None
        release_name = None
        file_name = None
        subtitle_language = None

        if item['action'] in (SNATCHED, FAILED):
            provider.update({
                'id': GenericProvider.make_id(item['provider']),
                'name': item['provider']
            })
            release_name = item['resource']

        if item['action'] == DOWNLOADED:
            # For downloads the 'provider' column stores the release group.
            release_group = item['provider']
            file_name = item['resource']

        # Bug fix: this SUBTITLED check was duplicated verbatim in the original.
        if item['action'] == SUBTITLED:
            subtitle_language = item['resource']

        yield {
            'id': item['rowid'],
            'series': SeriesIdentifier.from_id(item['indexer_id'], item['showid']).slug,
            'status': item['action'],
            'statusName': statusStrings.get(item['action']),
            'actionDate': item['date'],
            'quality': item['quality'],
            'resource': basename(item['resource']),
            'size': item['size'],
            'properTags': item['proper_tags'],
            'season': item['season'],
            'episode': item['episode'],
            'manuallySearched': bool(item['manually_searched']),
            'infoHash': item['info_hash'],
            'provider': provider,
            'release_name': release_name,
            'releaseGroup': release_group,
            'fileName': file_name,
            'subtitleLanguage': subtitle_language
        }
def create_history_item(history_row, compact=False):
    """
    Create a history object, using the data from a history db row item.

    Calculate additional data, where needed.

    :param history_row: a main.db history row.
    :param compact: A boolean indicating if this is used for a compact layout.
    :returns: A dict with history information.
    """
    from medusa.providers import get_provider_class
    from medusa.providers.generic_provider import GenericProvider
    from medusa.tv.series import Series, SeriesIdentifier

    provider = {}
    release_group = None
    release_name = None
    file_name = None
    subtitle_language = None
    client_status = None
    # Bug fix: ``show_slug`` was initialized to None twice in the original.
    show_slug = None
    show_title = 'Missing Show'

    if history_row['action'] in (SNATCHED, FAILED):
        provider_id = GenericProvider.make_id(history_row['provider'])
        provider_class = get_provider_class(provider_id)
        if provider_class:
            provider.update({
                'id': provider_class.get_id(),
                'name': provider_class.name,
                'imageName': provider_class.image_name()
            })
        else:
            # Provider no longer configured; fall back to the stored name.
            provider.update({
                'id': provider_id,
                'name': history_row['provider'],
                'imageName': f'{provider_id}.png'
            })
        release_name = history_row['resource']

    if history_row['action'] == DOWNLOADED:
        # For downloads the 'provider' column stores the release group.
        release_group = history_row['provider']
        file_name = history_row['resource']

    if history_row['action'] == SUBTITLED:
        subtitle_language = history_row['resource']
        provider['name'] = history_row['provider']

    if history_row['client_status'] is not None:
        status = ClientStatus(status=history_row['client_status'])
        client_status = {
            'status': [s.value for s in status],
            'string': status.status_to_array_string()
        }

    if history_row['indexer_id'] and history_row['showid']:
        identifier = SeriesIdentifier.from_id(history_row['indexer_id'], history_row['showid'])
        show_slug = identifier.slug
        show = Series.find_by_identifier(identifier)
        if show:
            show_title = show.title

    # Uses the 'Missing Show' fallback when the show could not be resolved.
    history_row['episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
        show_title, history_row['season'], history_row['episode'])

    return {
        'series': show_slug,
        'status': history_row['action'],
        'statusName': statusStrings.get(history_row['action']),
        'actionDate': history_row['date'],
        'quality': history_row['quality'],
        'resource': basename(history_row['resource']),
        'size': history_row['size'],
        'properTags': history_row['proper_tags'],
        'season': history_row['season'],
        'episode': history_row['episode'],
        'episodeTitle': history_row['episodeTitle'],
        'manuallySearched': bool(history_row['manually_searched']),
        'infoHash': history_row['info_hash'],
        'provider': provider,
        'releaseName': release_name,
        'releaseGroup': release_group,
        'fileName': file_name,
        'subtitleLanguage': subtitle_language,
        'showSlug': show_slug,
        'showTitle': show_title,
        'providerType': history_row['provider_type'],
        'clientStatus': client_status,
        'partOfBatch': bool(history_row['part_of_batch'])
    }
def _check_torrent_ratio(self, client):
    """Perform configured action after seed ratio reached (or by configuration).

    Walks the torrent history for completed/post-processed snatches, compares
    each torrent's seed ratio (as reported by *client*) against the desired
    ratio (provider-specific if set, otherwise the global setting), and runs
    the configured TORRENT_SEED_ACTION (remove / pause / remove_with_data).

    :param client: a download-client instance exposing ``get_status``,
        ``remove_torrent``, ``pause_torrent`` and ``remove_torrent_data``.
    """
    # Nothing to do when no action is configured at all.
    if app.TORRENT_SEED_ACTION == '':
        log.debug(
            'No global ratio or provider ratio configured for {client}, skipping actions.',
            {'client': client.name})
        return

    # The base ClientStatus values to include in the history query.
    # NOTE(review): the two flags are OR-ed into ONE combined value, so the
    # query filter receives a single bitmask rather than two separate
    # statuses -- confirm this matches what include_status expects.
    include = [
        ClientStatusEnum.COMPLETED.value | ClientStatusEnum.POSTPROCESSED.value
    ]

    # Local imports -- presumably to avoid a circular import at module load.
    from medusa.providers import get_provider_class
    from medusa.providers.generic_provider import GenericProvider

    for history_result in self._get_history_results_from_db(
        'torrent',
        include_status=include,
    ):
        provider_id = GenericProvider.make_id(history_result['provider'])
        provider = get_provider_class(provider_id)
        if not provider:
            # Provider may have been removed or renamed since the snatch.
            log.debug(
                'Skip provider {provider} with id: {provider_id}', {
                    'provider': history_result['provider'],
                    'provider_id': provider_id
                })
            continue

        # An empty string means "no provider-specific ratio configured".
        provider_ratio = -1 if provider.ratio == '' else provider.ratio
        try:
            # Prefer the provider ratio; fall back to the global seed ratio.
            desired_ratio = provider_ratio if provider_ratio > -1 else app.TORRENT_SEED_RATIO
        except TypeError:
            # provider.ratio was not comparable to a number; use the global.
            log.warning(
                'could not get provider ratio {ratio} for provider {provider}', {
                    'ratio': provider_ratio, 'provider': provider_id
                })
            desired_ratio = app.TORRENT_SEED_RATIO

        if desired_ratio == -1:
            # Not sure if this option is of use.
            continue

        try:
            status = client.get_status(history_result['info_hash'])
        except DownloadClientConnectionException as error:
            log.warning(
                'The client cannot be reached or authentication is failing.'
                '\nAbandon check torrent ratio. error: {error}',
                {'error': error})
            continue

        if not status:
            continue

        # True when a positive ratio is configured. Multiplying the threshold
        # by this bool below zeroes it when False, so the ratio gate is
        # effectively disabled in that case.
        action_after_seeding = desired_ratio * 1.0 > 0.0
        if status.ratio < desired_ratio * action_after_seeding:
            continue

        if not action_after_seeding:
            log.debug('Action after seeding disabled')

        log.debug(
            'Ratio of ({ratio}) reached for torrent {info_hash}, starting action: {action}.', {
                'ratio': status.ratio,
                'info_hash': history_result['info_hash'],
                'action': app.TORRENT_SEED_ACTION
            })

        hash = history_result['info_hash']

        # Perform configured action.
        if app.TORRENT_SEED_ACTION == 'remove':
            # Remove torrent from client
            client.remove_torrent(hash)
        elif app.TORRENT_SEED_ACTION == 'pause':
            # Pause torrent on client
            client.pause_torrent(hash)
        elif app.TORRENT_SEED_ACTION == 'remove_with_data':
            # Remove torrent and all files from disk (not implemented for each client!)
            client.remove_torrent_data(hash)
        else:
            log.debug('Invalid action {action}', {'action': app.TORRENT_SEED_ACTION})
            continue

        # Record that the post-seed action ran for this history row.
        self.save_status_to_history(
            history_result, ClientStatus(status_string='SeededAction'))
def test_is_active(self):
    """A freshly constructed provider must not report itself as active."""
    provider = GenericProvider('Test Provider')
    self.assertFalse(provider.is_active())
def test_is_enabled(self):
    """A freshly constructed provider must not report itself as enabled."""
    provider = GenericProvider('Test Provider')
    self.assertFalse(provider.is_enabled())
def test_seed_ratio(self):
    """The default seed ratio of a generic provider is the empty string."""
    provider = GenericProvider('Test Provider')
    self.assertEqual(provider.seed_ratio(), '')
def test__check_auth(self):
    """The generic provider performs no authentication, so the check passes."""
    provider = GenericProvider('Test Provider')
    self.assertTrue(provider._check_auth())
# coding=utf-8 """Provider test code for Generic Provider.""" from __future__ import unicode_literals from collections import namedtuple from datetime import date, datetime, timedelta from dateutil import tz from medusa.providers.generic_provider import GenericProvider import pytest ExceptionTitle = namedtuple('ExceptionTitle', 'title') sut = GenericProvider('FakeProvider') @pytest.mark.parametrize('p', [ { # p0: None 'pubdate': None, 'expected': None }, { # p1: date and time 'pubdate': '2017-05-18 15:00:15', 'expected': datetime(2017, 5, 18, 15, 0, 15, tzinfo=tz.gettz('UTC')) }, { # p2: date, time and timezone 'pubdate': '2017-05-16 17:12:25+02:00', 'expected': datetime(2017, 5, 16, 17, 12, 25, tzinfo=tz.tzoffset(None, 7200)) }, { # p3: human time and minutes
def create(name='AwesomeProvider', **kwargs):
    """Build a GenericProvider and apply any keyword overrides as attributes."""
    provider = GenericProvider(name=name)
    for attribute in kwargs:
        setattr(provider, attribute, kwargs[attribute])
    return provider
def data_generator_compact():
    """
    Read and paginate history records.

    Results are provided grouped per showid+season+episode.
    The results are flattened into a structure of
    [{'actionDate': .., 'showSlug':.., 'rows':Array(history_items)},]
    """
    start = arg_limit * (arg_page - 1)

    for compact_item in list(results.values())[start:start + arg_limit]:
        return_item = {'rows': []}
        for item in compact_item:
            provider = {}
            release_group = None
            release_name = None
            file_name = None
            subtitle_language = None

            if item['action'] in (SNATCHED, FAILED):
                provider_id = GenericProvider.make_id(item['provider'])
                provider_class = get_provider_class(provider_id)
                if provider_class:
                    provider.update({
                        'id': provider_class.get_id(),
                        'name': provider_class.name,
                        'imageName': provider_class.image_name()
                    })
                else:
                    # Provider no longer configured; fall back to stored name.
                    provider.update({
                        'id': provider_id,
                        'name': item['provider'],
                        'imageName': f'{provider_id}.png'
                    })
                release_name = item['resource']

            if item['action'] == DOWNLOADED:
                # For downloads the 'provider' column stores the release group.
                release_group = item['provider']
                file_name = item['resource']

            if item['action'] == SUBTITLED:
                subtitle_language = item['resource']
                provider['name'] = item['provider']

            item['showSlug'] = None
            item['showTitle'] = 'Missing Show'
            if item['indexer_id'] and item['showid']:
                identifier = SeriesIdentifier.from_id(
                    item['indexer_id'], item['showid'])
                item['showSlug'] = identifier.slug
                show = Series.find_by_identifier(identifier)
                if show:
                    item['showTitle'] = show.title

            # The group header reflects the last row of the compact group.
            return_item['actionDate'] = item['date']
            # Bug fix: the original read item['showslug'] (all lower case),
            # but the key assigned above is 'showSlug'.
            return_item['showSlug'] = item['showSlug']
            return_item['episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
                item['showTitle'], item['season'], item['episode'])
            return_item['quality'] = item['quality']

            return_item['rows'].append({
                'actionDate': item['date'],
                'id': item['rowid'],
                'series': item['showSlug'],
                'status': item['action'],
                'statusName': statusStrings.get(item['action']),
                'quality': item['quality'],
                'resource': basename(item['resource']),
                'size': item['size'],
                'properTags': item['proper_tags'],
                'season': item['season'],
                'episode': item['episode'],
                'manuallySearched': bool(item['manually_searched']),
                'infoHash': item['info_hash'],
                'provider': provider,
                'release_name': release_name,
                'releaseGroup': release_group,
                'fileName': file_name,
                'subtitleLanguage': subtitle_language,
                'showSlug': item['showSlug'],
                'showTitle': item['showTitle']
            })

        yield return_item
def data_generator():
    """Read and paginate history records.

    Yields one dict per history row in the requested page; ``results``,
    ``arg_limit`` and ``arg_page`` come from the enclosing scope.
    """
    start = arg_limit * (arg_page - 1)

    for item in results[start:start + arg_limit]:
        provider = {}
        release_group = None
        release_name = None
        file_name = None
        subtitle_language = None
        client_status = None
        # Bug fix: ``show_slug`` was initialized to None twice in the original.
        show_slug = None
        show_title = 'Missing Show'

        if item['action'] in (SNATCHED, FAILED):
            provider_id = GenericProvider.make_id(item['provider'])
            provider_class = get_provider_class(provider_id)
            if provider_class:
                provider.update({
                    'id': provider_class.get_id(),
                    'name': provider_class.name,
                    'imageName': provider_class.image_name()
                })
            else:
                # Provider no longer configured; fall back to the stored name.
                provider.update({
                    'id': provider_id,
                    'name': item['provider'],
                    'imageName': f'{provider_id}.png'
                })
            release_name = item['resource']

        if item['action'] == DOWNLOADED:
            # For downloads the 'provider' column stores the release group.
            release_group = item['provider']
            file_name = item['resource']

        if item['action'] == SUBTITLED:
            subtitle_language = item['resource']
            provider['name'] = item['provider']

        if item['client_status'] is not None:
            status = ClientStatus(status=item['client_status'])
            client_status = {
                'status': [s.value for s in status],
                'string': status.status_to_array_string()
            }

        if item['indexer_id'] and item['showid']:
            identifier = SeriesIdentifier.from_id(
                item['indexer_id'], item['showid'])
            show_slug = identifier.slug
            show = Series.find_by_identifier(identifier)
            if show:
                show_title = show.title

        # Uses the 'Missing Show' fallback when the show could not be resolved.
        item['episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
            show_title, item['season'], item['episode'])

        yield {
            'id': item['rowid'],
            'series': show_slug,
            'status': item['action'],
            'statusName': statusStrings.get(item['action']),
            'actionDate': item['date'],
            'quality': item['quality'],
            'resource': basename(item['resource']),
            'size': item['size'],
            'properTags': item['proper_tags'],
            'season': item['season'],
            'episode': item['episode'],
            'episodeTitle': item['episodeTitle'],
            'manuallySearched': bool(item['manually_searched']),
            'infoHash': item['info_hash'],
            'provider': provider,
            'releaseName': release_name,
            'releaseGroup': release_group,
            'fileName': file_name,
            'subtitleLanguage': subtitle_language,
            'showSlug': show_slug,
            'showTitle': show_title,
            'providerType': item['provider_type'],
            'clientStatus': client_status,
            'partOfBatch': bool(item['part_of_batch'])
        }
def test_login(self):
    """The generic provider requires no login, so login() always succeeds."""
    provider = GenericProvider('Test Provider')
    self.assertTrue(provider.login())
def test__get_size(self):
    """_get_size() returns the -1 sentinel for an item it cannot measure."""
    provider = GenericProvider('Test Provider')
    self.assertEqual(provider._get_size(None), -1)
def test__verify_download(self):
    """The generic provider's download verification always succeeds."""
    provider = GenericProvider('Test Provider')
    self.assertTrue(provider._verify_download())
def test__get_storage_dir(self):
    """The generic provider has no storage directory, so '' is returned."""
    provider = GenericProvider('Test Provider')
    self.assertEqual(provider._get_storage_dir(), '')