def api_search(args):
    """Handle a newznab-style API search request (t=search/tvsearch/movie).

    Builds a SearchRequest from the parsed newznab arguments, runs the
    external (non-internal) search and renders the results as newznab XML.
    """
    search_request = SearchRequest(category=args["cat"], offset=args["offset"], limit=args["limit"], query=args["q"])
    if args["t"] == "search":
        search_request.type = "general"
        # Removed a leftover logger.info("") that logged an empty line.
    elif args["t"] == "tvsearch":
        search_request.type = "tv"
        # TVRage id is preferred over TVDB id when both are supplied
        identifier_key = "rid" if args["rid"] else "tvdbid" if args["tvdbid"] else None
        if identifier_key is not None:
            identifier_value = args[identifier_key]
            search_request.identifier_key = identifier_key
            search_request.identifier_value = identifier_value
        search_request.season = int(args["season"]) if args["season"] else None
        search_request.episode = int(args["episode"]) if args["episode"] else None
    elif args["t"] == "movie":
        search_request.type = "movie"
        search_request.identifier_key = "imdbid" if args["imdbid"] is not None else None
        search_request.identifier_value = args["imdbid"] if args["imdbid"] is not None else None
    logger.info("API search request: %s" % search_request)
    result = search.search(False, search_request)
    results = process_for_external_api(result)
    content = render_search_results_for_api(results, result["total"], result["offset"])
    response = make_response(content)
    response.headers["Content-Type"] = "application/xml"
    # Return the response object so the Content-Type header set above is
    # actually sent; returning the bare content discarded the header.
    return response
def testGetEbookUrls(self):
    """Ebook URL generation for newznab indexers with and without native book search."""
    # Plain query search uses the general search type with both book categories.
    searchRequest = SearchRequest(query="novel")
    urls = self.n1.get_ebook_urls(searchRequest)
    self.assertEqual(1, len(urls))
    self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&cat=7020,8010&limit=100&t=search&extended=1&offset=0&q=novel", urls[0])
    # Author/title search without native "book" support joins them into one query.
    self.args = SearchRequest(author="anauthor", title="atitle", category=getCategoryByAnyInput(7020))
    queries = self.n1.get_ebook_urls(self.args)
    # Fixed: the original asserted len(urls) (the stale list from the first
    # call) here and below instead of the freshly generated `queries`.
    self.assertEqual(1, len(queries))
    self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&cat=7020&extended=1&limit=100&offset=0&q=anauthor+atitle&t=search", queries[0])
    # With native "book" search support the author/title parameters are used.
    self.newznab1.searchTypes = ["book"]
    self.n1 = NewzNab(self.newznab1)
    self.args = SearchRequest(author="anauthor", title="atitle", category=getCategoryByAnyInput(7020))
    queries = self.n1.get_ebook_urls(self.args)
    self.assertEqual(1, len(queries))
    self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&author=anauthor&cat=7020&extended=1&limit=100&offset=0&t=book&title=atitle", queries[0])
def testGetShowSearchUrls(self):
    """Show-search URL generation for omgwtfnzbs across query, category, season and episode combinations."""
    cases = [
        (SearchRequest(query="aquery"),
         "https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery&catid=19,20,21"),
        (SearchRequest(query="aquery", category="TV HD"),
         "https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery&catid=20"),
        (SearchRequest(query="aquery", season=1),
         "https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery+s01&catid=19,20,21"),
        (SearchRequest(query="aquery", season=1, episode=2),
         "https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery+s01e02&catid=19,20,21"),
        # Without a query the RSS download feed is used instead of the API.
        (SearchRequest(),
         "https://rss.omgwtfnzbs.org/rss-download.php?api=apikey&user=anuser&catid=19,20,21"),
    ]
    for request, expected_url in cases:
        self.args = request
        urls = self.omgwtf.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual(expected_url, urls[0])
def testUrlGeneration(self):
    """NzbIndex show-search query strings for episode, season-only, date-based and word-exclusion searches."""
    w = NzbIndex(getIndexerSettingByName("nzbindex"))
    # Season+episode: both sXXeYY and XxYY notations are ORed together.
    self.args = SearchRequest(query="a showtitle", season=1, episode=2)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    # Removed a leftover debug print(urls[0]).
    self.assertEqual('a showtitle s01e02 | 1x02', furl(urls[0]).args["q"])
    # Season only.
    self.args = SearchRequest(query="a showtitle", season=1, episode=None)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual('a showtitle s01 | "season 1"', furl(urls[0]).args["q"])
    # Date-based (daily show) search quotes the date.
    self.args = SearchRequest(query="a showtitle", season="2016", episode="08/08")
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual('a showtitle "2016 08 08"', furl(urls[0]).args["q"])
    # Forbidden words are appended as exclusions.
    self.args = SearchRequest(query="aquery", forbiddenWords=["ignorethis"])
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual("https://nzbindex.com/search?max=100&hidecross=1&more=1&q=aquery+-ignorethis", urls[0])
def testIgnoreCategories(self):
    # Verifies that a category's ignoreResults setting ("always" / "internal" /
    # "external") controls whether results in that category are accepted,
    # depending on whether the search is internal or from the external API.
    sm = SearchModule(None)
    cat = categories.getCategoryByName("movies")
    sr = SearchRequest(category=Bunch({"category": cat}))
    nsr = NzbSearchResult(pubdate_utc="", category=cat)
    # No ignore setting: accepted.
    accepted, reason = sm.accept_result(nsr, sr, None)
    self.assertTrue(accepted)
    # "always": rejected regardless of search origin.
    cat.ignoreResults = "always"
    accepted, reason = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    self.assertTrue("always" in reason)
    # "internal" + internal search: rejected.
    cat.ignoreResults = "internal"
    sr = SearchRequest(internal=True, category=Bunch({"category": cat}))
    accepted, reason = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    self.assertTrue("internal" in reason)
    # "external" + internal search: accepted.
    cat.ignoreResults = "external"
    sr = SearchRequest(internal=True, category=Bunch({"category": cat}))
    accepted, reason = sm.accept_result(nsr, sr, None)
    self.assertTrue(accepted)
    # "internal" + external (API) search: accepted.
    cat.ignoreResults = "internal"
    sr = SearchRequest(internal=False, category=Bunch({"category": cat}))
    accepted, reason = sm.accept_result(nsr, sr, None)
    self.assertTrue(accepted)
    # "external" + external search: rejected.
    cat.ignoreResults = "external"
    sr = SearchRequest(internal=False, category=Bunch({"category": cat}))
    accepted, reason = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    self.assertTrue("API" in reason)
def testDuplicateRemovalForExternalApi(self):
    # External-API duplicate removal: of several identical results the newest
    # one is kept, unless an indexer has a higher score.
    config.settings.searching.removeDuplicatesExternal = True
    with self.app.test_request_context('/'):
        with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
            # Four indexers each return one result with identical title/size
            # but different publication dates (epochs 0, 1000, 3000, 2000).
            newznabItems = [
                [mockbuilder.buildNewznabItem(title="title", pubdate=arrow.get(0000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab1")],
                [mockbuilder.buildNewznabItem(title="title", pubdate=arrow.get(1000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab2")],
                [mockbuilder.buildNewznabItem(title="title", pubdate=arrow.get(3000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab3")],
                [mockbuilder.buildNewznabItem(title="title", pubdate=arrow.get(2000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab4")]]
            self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)
            # Test that the newest result is chosen if all scores are equal
            searchRequest = SearchRequest(type="search", internal=False)
            result = search.search(searchRequest)
            results = result["results"]
            self.assertEqual(1, len(results))
            self.assertEqual("newznab3", results[0].indexer)
            # Test that results from an indexer with a higher score are preferred
            self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)
            getIndexerSettingByName("newznab2").score = 99
            searchRequest = SearchRequest(type="search", internal=False)
            result = search.search(searchRequest)
            results = result["results"]
            self.assertEqual(1, len(results))
            self.assertEqual("newznab2", results[0].indexer)
def testSearchCategoryWords(self):
    # Verifies that category-specific and globally configured forbidden /
    # required words are applied to the search request, but only when the
    # requested category matches.
    # Save the real module functions so they can be restored at the end.
    pi = search.pick_indexers
    sahd = search.search_and_handle_db
    # Set forbidden and required category words if configured and the category matches
    config.settings.categories.categories["movies"].forbiddenWords = "forbidden1, forbidden2"
    config.settings.categories.categories["movies"].requiredWords = "required1, required2"
    searchRequest = SearchRequest(type="search", category="movies")
    search.pick_indexers = MagicMock(return_value=[NewzNab(self.newznab1)])
    search.search_and_handle_db = MagicMock(return_value={"results": {}})
    search.search(searchRequest)
    # Pull the SearchRequest that search() passed on to search_and_handle_db.
    # NOTE(review): .values()[0] assumes dict.values() returns a list
    # (Python 2 semantics) -- confirm target Python version.
    updatedSearchRequest = search.search_and_handle_db.mock_calls[0][1][1].values()[0]
    self.assertEqual(["forbidden1", "forbidden2"], updatedSearchRequest.forbiddenWords)
    self.assertEqual(["required1", "required2"], updatedSearchRequest.requiredWords)
    # Don't set if category doesn't match
    searchRequest = SearchRequest(type="search", category="audio")
    search.search_and_handle_db.reset_mock()
    search.search(searchRequest)
    updatedSearchRequest = search.search_and_handle_db.mock_calls[0][1][1].values()[0]
    self.assertEqual(0, len(updatedSearchRequest.forbiddenWords))
    self.assertEqual(0, len(updatedSearchRequest.requiredWords))
    # Don't set when fallback to "all" category
    searchRequest = SearchRequest(type="search", category="7890")
    search.search_and_handle_db.reset_mock()
    search.search(searchRequest)
    updatedSearchRequest = search.search_and_handle_db.mock_calls[0][1][1].values()[0]
    self.assertEqual(0, len(updatedSearchRequest.forbiddenWords))
    self.assertEqual(0, len(updatedSearchRequest.requiredWords))
    self.assertEqual("na", updatedSearchRequest.category.category.name)
    # Use globally configured words and category words
    config.settings.searching.forbiddenWords = "globalforbidden1, globalforbidden2"
    config.settings.searching.requiredWords = "globalrequired1, globalrequired2"
    config.settings.categories.categories["movies"].forbiddenWords = "forbidden1, forbidden2"
    config.settings.categories.categories["movies"].requiredWords = "required1, required2"
    searchRequest = SearchRequest(type="search", category="movies")
    search.search_and_handle_db.reset_mock()
    search.pick_indexers = MagicMock(return_value=[NewzNab(self.newznab1)])
    search.search_and_handle_db = MagicMock(return_value={"results": {}})
    search.search(searchRequest)
    updatedSearchRequest = search.search_and_handle_db.mock_calls[0][1][1].values()[0]
    # Global words come first, then the category-specific ones.
    self.assertEqual(["globalforbidden1", "globalforbidden2", "forbidden1", "forbidden2"], updatedSearchRequest.forbiddenWords)
    self.assertEqual(["globalrequired1", "globalrequired2", "required1", "required2"], updatedSearchRequest.requiredWords)
    # Restore the patched module functions.
    search.pick_indexers = pi
    search.search_and_handle_db = sahd
def testGetMovieSearchUrls(self):
    """Movie-search URLs: IMDB ids are passed through; TMDB ids are resolved to IMDB ids first."""
    self.args = SearchRequest(identifier_key="imdb", identifier_value="0169547")
    generated = self.omgwtf.get_moviesearch_urls(self.args)
    self.assertEqual(1, len(generated))
    self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=tt0169547&catid=15,16,17,18", generated[0])
    # The TMDB id is expected to resolve to the same tt0169547; the category
    # narrows catid to the HD movie category.
    self.args = SearchRequest(identifier_key="tmdb", identifier_value="14", category="Movies HD")
    generated = self.omgwtf.get_moviesearch_urls(self.args)
    self.assertEqual(1, len(generated))
    self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=tt0169547&catid=16", generated[0])
def internalapi_tvsearch(args):
    """Build a TV SearchRequest from internal API args and run the search."""
    logger.debug("TV search request with args %s" % args)
    search_request = SearchRequest(type="tv",
                                   query=args["query"],
                                   offset=args["offset"],
                                   category=args["category"],
                                   minsize=args["minsize"],
                                   maxsize=args["maxsize"],
                                   minage=args["minage"],
                                   maxage=args["maxage"],
                                   episode=args["episode"],
                                   season=args["season"],
                                   title=args["title"],
                                   indexers=args["indexers"])
    # The TVDB id takes precedence over the TVRage id.
    for key in ("tvdbid", "rid"):
        if args[key]:
            search_request.identifier_key = key
            search_request.identifier_value = args[key]
            break
    results = search.search(True, search_request)
    return process_and_jsonify_for_internalapi(results)
def testUrlGeneration(self):
    """NzbIndex show-search query strings for episode and season-only searches."""
    w = NzbIndex(config.settings.indexers.nzbindex)
    # Season+episode: both sXXeYY and XxYY notations are ORed together.
    self.args = SearchRequest(query="a showtitle", season=1, episode=2)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    # Removed a leftover debug print(urls[0]).
    self.assertEqual('a showtitle s01e02 | 1x02', furl(urls[0]).args["q"])
    # Season only.
    self.args = SearchRequest(query="a showtitle", season=1, episode=None)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual('a showtitle s01 | "season 1"', furl(urls[0]).args["q"])
def internalapi_tvsearch(args):
    """Build a TV SearchRequest from internal API args and start the search."""
    logger.debug("TV search request with args %s" % args)
    # The indexer list arrives URL-encoded.
    indexers = urllib.unquote(args["indexers"]) if args["indexers"] is not None else None
    search_request = SearchRequest(type="tv",
                                   query=args["query"],
                                   offset=args["offset"],
                                   category=args["category"],
                                   minsize=args["minsize"],
                                   maxsize=args["maxsize"],
                                   minage=args["minage"],
                                   maxage=args["maxage"],
                                   episode=args["episode"],
                                   season=args["season"],
                                   title=args["title"],
                                   indexers=indexers)
    # The TVDB id takes precedence over the TVRage id.
    for key in ("tvdbid", "rid"):
        if args[key]:
            search_request.identifier_key = key
            search_request.identifier_value = args[key]
            break
    return startSearch(search_request)
def testUrlGeneration(self):
    """Binsearch show searches produce two alternative query notations per request."""
    w = Binsearch(config.settings.indexers.binsearch)
    cases = [
        (SearchRequest(query="a showtitle", season=1, episode=2),
         ['a showtitle s01e02', 'a showtitle 1x02']),
        (SearchRequest(query="a showtitle", season=1, episode=None),
         ['a showtitle s01', 'a showtitle "season 1"']),
    ]
    for request, expected_queries in cases:
        self.args = request
        urls = w.get_showsearch_urls(self.args)
        self.assertEqual(2, len(urls))
        self.assertEqual(expected_queries[0], furl(urls[0]).args["q"])
        self.assertEqual(expected_queries[1], furl(urls[1]).args["q"])
def testGetShowSearchUrls(self):
    """Newznab show searches pass the configured search id (rid / tvdbid) through to the URL."""
    self.newznab1.search_ids = ["tvdbid", "rid"]
    for key, value in (("rid", "47566"), ("tvdbid", "299350")):
        self.args = SearchRequest(identifier_value=value, identifier_key=key)
        urls = self.n1.get_showsearch_urls(self.args)
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&limit=100&t=tvsearch&extended=1&offset=0&cat=5000&%s=%s" % (key, value),
            urls[0])
def testEbookUrlGeneration(self):
    """Without a dedicated book search type, ebook searches append format keywords to the query."""
    getIndexerSettingByName("binsearch").searchTypes = []
    w = Binsearch(getIndexerSettingByName("binsearch"))
    base = "https://binsearch.info/index.php?max=100&postdate=date&min=0&adv_sort=date&adv_col=on&q=anauthor+atitle+"
    # A free-form query and an author/title pair must generate the same URLs.
    for request in (SearchRequest(query="anauthor atitle"),
                    SearchRequest(author="anauthor", title="atitle")):
        self.args = request
        urls = w.get_ebook_urls(self.args)
        self.assertEqual(4, len(urls))
        self.assertEqual(base + "ebook", urls[0])
        self.assertEqual(base + "mobi", urls[1])
def internalapi_moviesearch(args):
    """Build a movie SearchRequest from internal API args and run the search.

    An IMDB id is used directly; a TMDB id is first converted to an IMDB id.
    """
    logger.debug("Movie search request with args %s" % args)
    search_request = SearchRequest(type="movie", query=args["query"], offset=args["offset"], category=args["category"], minsize=args["minsize"], maxsize=args["maxsize"], minage=args["minage"], maxage=args["maxage"], indexers=args["indexers"])
    if args["imdbid"]:
        search_request.identifier_key = "imdbid"
        search_request.identifier_value = args["imdbid"]
    elif args["tmdbid"]:
        logger.debug("Need to get IMDB id from TMDB id %s" % args["tmdbid"])
        imdbid = infos.get_imdbid_from_tmdbid(args["tmdbid"])
        if imdbid is None:
            # Fail loudly instead of silently searching with
            # identifier_value=None (matches the TMDB handling in the other
            # internal movie-search handlers in this project).
            raise AttributeError("Unable to convert TMDB id %s" % args["tmdbid"])
        search_request.identifier_key = "imdbid"
        search_request.identifier_value = imdbid
    results = search.search(True, search_request)
    return process_and_jsonify_for_internalapi(results)
def testProcess_results_totalknown(self):
    """For a small result set binsearch reports an exact total and no further pages."""
    w = Binsearch(getIndexerSettingByName("binsearch"))
    with open("mock/binsearch--q-testtitle3results.html", encoding="latin-1") as f:
        page = f.read()
    outcome = w.process_query_result(page, SearchRequest())
    self.assertFalse(outcome.has_more)
    self.assertEqual(3, outcome.total)
def testProcess_results(self):
    # Parses an nzbindex HTML result page and checks the extracted fields of
    # the first two entries plus the paging metadata.
    w = NzbIndex(getIndexerSettingByName("nzbindex"))
    with open("mock/nzbindex--q-testtitle.html") as f:
        processing_result = w.process_query_result(f.read(), SearchRequest())
    entries = processing_result.entries
    self.assertEqual('114143855', entries[0].indexerguid)
    self.assertEqual('testtitle1', entries[0].title)
    self.assertFalse(entries[0].has_nfo)
    self.assertEqual('[email protected] (senior)', entries[0].poster)
    self.assertEqual('alt.binaries.mom', entries[0].group)
    self.assertEqual("https://nzbindex.com/download/114143855/testtitle1-testtitle1.nzb", entries[0].link)
    self.assertEqual(169103851, entries[0].size)
    self.assertEqual("2014-11-04T10:39:00+01:00", entries[0].pubdate_utc)  # would be perfect, that is the exact pubdate
    self.assertEqual("Tue, 04 Nov 2014 10:39:00 +0100", entries[0].pubDate)
    self.assertEqual(1415093940, entries[0].epoch)
    self.assertEqual(333, entries[0].age_days)
    self.assertEqual("https://nzbindex.com/release/114143855/testtitle1-testtitle1.nzb", entries[0].details_link)
    self.assertTrue(entries[0].passworded)
    self.assertEqual(0, entries[1].age_days)
    self.assertEqual("2015-10-03T20:15:00+01:00", entries[1].pubdate_utc)  # would be perfect, that is the exact pubdate
    self.assertEqual(1443899700, entries[1].epoch)
    # NOTE(review): 1000 looks like a reported/maximum total rather than an
    # exact count -- confirm against the mock page.
    self.assertEqual(1000, processing_result.total)
    self.assertTrue(processing_result.has_more)
def testParseSearchResult(self):
    """Parse a newznab XML result with three items and verify the extracted fields."""
    # nzbsorg
    with open("mock/indexercom_q_testtitle_3results.xml") as f:
        body = f.read()
    # Read the mock file only once: the original called f.read() twice (once
    # for a redundant parseXml call), so the second read handed an empty
    # string to process_query_result.
    entries = self.n1.process_query_result(body, SearchRequest()).entries
    self.assertEqual(3, len(entries))
    self.assertEqual(entries[0].title, "testtitle1")
    self.assertEqual(2893890900, entries[0].size)
    self.assertEqual("eff551fbdb69d6777d5030c209ee5d4b", entries[0].indexerguid)
    self.assertEqual(entries[0].age_days, 1)
    self.assertEqual(entries[0].epoch, 1444584857)
    self.assertEqual(entries[0].pubdate_utc, "2015-10-11T17:34:17+00:00")
    self.assertEqual(entries[0].poster, "*****@*****.**")
    self.assertEqual(entries[0].group, "alt.binaries.mom")
    self.assertEqual(entries[0].details_link, "https://indexer.com/details/eff551fbdb69d6777d5030c209ee5d4b")
    # Pull group from description
    self.assertEqual(entries[1].group, "alt.binaries.hdtv.x264")
    # Use "usenetdate" attribute if available
    self.assertEqual(entries[1].pubdate_utc, "2015-10-03T22:22:22+00:00")  # Sat, 03 Oct 2015 22:22:22 +0000
    # Use "info" attribute if available
    self.assertEqual(entries[0].details_link, "https://indexer.com/details/eff551fbdb69d6777d5030c209ee5d4b")
    # Don't use "not available" as group
    self.assertIsNone(entries[2].group)
    self.assertEqual("English testtitle2", entries[1].title)
    self.assertEqual("testtitle3", entries[2].title)
def testGetEbookUrls(self):
    """A plain ebook query searches both book categories via a general search."""
    request = SearchRequest(query="novel")
    generated = self.n1.get_ebook_urls(request)
    self.assertEqual(1, len(generated))
    self.assertUrlEqual(
        "https://indexer.com/api?apikey=apikeyindexer.com&cat=7020,8010&limit=100&t=search&extended=1&offset=0&q=novel",
        generated[0])
def testProcess_results(self):
    # Parses a binsearch HTML result page and checks the extracted fields of
    # the first entry plus the paging metadata.
    w = Binsearch(config.settings.indexers.binsearch)
    with open("mock/binsearch--q-testtitle.html", encoding="latin-1") as f:
        body = f.read()
    result = w.process_query_result(body, SearchRequest())
    entries = list(result.entries)
    self.assertEqual('testtitle1.TrueFrench.1080p.X264.AC3.5.1-JKF.mkv', entries[0].title)
    self.assertEqual("https://www.binsearch.info/fcgi/nzb.fcgi?q=176073735", entries[0].link)
    self.assertEqual(13110387671, entries[0].size)
    self.assertEqual("176073735", entries[0].indexerguid)
    self.assertEqual(1443312000, entries[0].epoch)
    self.assertEqual("2015-09-27T00:00:00+00:00", entries[0].pubdate_utc)
    self.assertEqual("Sun, 27 Sep 2015 00:00:00 -0000", entries[0].pubDate)
    self.assertEqual(3, entries[0].age_days)
    # The parsed pubdate carries no time component (midnight above), so the
    # age is flagged as imprecise.
    self.assertFalse(entries[0].age_precise)
    self.assertEqual("[email protected] (Clown_nez)", entries[0].poster)
    self.assertEqual("alt.binaries.movies.mkv", entries[0].group)
    self.assertUrlEqual("https://binsearch.info/?b=testtitle1.3D.TOPBOT.TrueFrench.1080p.X264.A&g=alt.binaries.movies.mkv&p=Ramer%40marmer.com+%28Clown_nez%29&max=250", entries[0].details_link)
    # More pages exist but no exact total is reported.
    self.assertTrue(result.has_more)
    self.assertFalse(result.total_known)
def internalapi_moviesearch(args):
    """Build a movie SearchRequest from internal API args and start the search.

    An IMDB id is used directly; a TMDB id is converted to an IMDB id first
    and the request fails if the conversion is not possible.
    """
    logger.debug("Movie search request with args %s" % args)
    indexers = urllib.unquote(args["indexers"]) if args["indexers"] is not None else None
    search_request = SearchRequest(type="movie",
                                   query=args["query"],
                                   offset=args["offset"],
                                   category=args["category"],
                                   minsize=args["minsize"],
                                   maxsize=args["maxsize"],
                                   minage=args["minage"],
                                   maxage=args["maxage"],
                                   indexers=indexers)
    imdbid = args["imdbid"]
    if not imdbid and args["tmdbid"]:
        logger.debug("Need to get IMDB id from TMDB id %s" % args["tmdbid"])
        imdbid = infos.convertId("tmdb", "imdb", args["tmdbid"])
        if imdbid is None:
            raise AttributeError("Unable to convert TMDB id %s" % args["tmdbid"])
    if imdbid:
        search_request.identifier_key = "imdbid"
        search_request.identifier_value = imdbid
    return startSearch(search_request)
def testGetTvRssUrls(self):
    """Query-less womble TV searches return the RSS feed; categories expand to multiple feed URLs."""
    request = SearchRequest(type="tv")
    feeds = self.womble.get_showsearch_urls(request)
    self.assertEqual(1, len(feeds))
    self.assertEqual("https://newshost.co.za/rss?fr=false", feeds[0])
    # Each category maps to a fixed number of per-section feeds.
    for category, expected_count in (("TV", 4), ("TV HD", 2), ("TV SD", 2)):
        request.category = category
        self.assertEqual(expected_count, len(self.womble.get_showsearch_urls(request)))
def testLimits(self):
    """Size and age limits on the search request reject results outside the bounds."""
    sm = SearchModule(None)
    cat = categories.getCategoryByName("movies")
    MB = 1024 * 1024
    # (result attributes, request attributes, expect accepted, reason substring)
    cases = [
        (dict(size=90 * MB), dict(minsize=100), False, "Smaller than"),
        (dict(size=110 * MB), dict(minsize=100), True, None),
        (dict(size=110 * MB), dict(maxsize=100), False, "Bigger than"),
        (dict(size=90 * MB), dict(maxsize=100), True, None),
        (dict(age_days=90), dict(minage=100), False, "Younger than"),
        (dict(age_days=110), dict(minage=100), True, None),
        (dict(age_days=110), dict(maxage=100), False, "Older than"),
        (dict(age_days=90), dict(maxage=100), True, None),
    ]
    for result_attrs, request_attrs, expect_accepted, reason_part in cases:
        nsr = NzbSearchResult(pubdate_utc="", category=cat, **result_attrs)
        sr = SearchRequest(category=Bunch({"category": cat}), **request_attrs)
        accepted, reason = sm.accept_result(nsr, sr, None)
        if expect_accepted:
            self.assertTrue(accepted)
        else:
            self.assertFalse(accepted)
            self.assertTrue(reason_part in reason)
def testUrlGeneration(self):
    """Binsearch show searches: two notations for episode/season searches, one for date-based ones."""
    w = Binsearch(getIndexerSettingByName("binsearch"))
    two_url_cases = [
        (SearchRequest(query="a showtitle", season=1, episode=2),
         ['a showtitle s01e02', 'a showtitle 1x02']),
        (SearchRequest(query="a showtitle", season=1, episode=None),
         ['a showtitle s01', 'a showtitle "season 1"']),
    ]
    for request, expected_queries in two_url_cases:
        self.args = request
        urls = w.get_showsearch_urls(self.args)
        self.assertEqual(2, len(urls))
        self.assertEqual(expected_queries[0], furl(urls[0]).args["q"])
        self.assertEqual(expected_queries[1], furl(urls[1]).args["q"])
    # Date-based (daily show) search yields a single quoted-date query.
    self.args = SearchRequest(query="a showtitle", season=2016, episode="08/08")
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual('a showtitle "2016 08 08"', furl(urls[0]).args["q"])
def testGetMovieSearchUrls(self):
    """Query-based movie searches use t=search with the movie category; id-based ones use t=movie."""
    self.newznab1.search_ids = ["imdbid"]
    # Doing a query based movie search uses regular search with the proper category
    request = SearchRequest(type="movie", query="atitle")
    generated = self.n1.get_moviesearch_urls(request)
    self.assertEqual(1, len(generated))
    self.assertUrlEqual(
        "https://indexer.com/api?apikey=apikeyindexer.com&limit=100&t=search&extended=1&offset=0&cat=2000&q=atitle",
        generated[0])
    # An IMDB id triggers the dedicated movie search endpoint.
    request = SearchRequest(type="movie", identifier_key="imdbid", identifier_value="123")
    generated = self.n1.get_moviesearch_urls(request)
    self.assertEqual(1, len(generated))
    self.assertUrlEqual(
        "https://indexer.com/api?apikey=apikeyindexer.com&limit=100&t=movie&extended=1&offset=0&cat=2000&imdbid=123",
        generated[0])
def api(args):
    """Newznab-compatible external API entry point.

    Dispatches on the "t" parameter: search/tvsearch/movie run a search and
    return the results as newznab XML, "get" downloads an NZB, "caps" returns
    the capabilities document. Raises Unauthorized on a missing/invalid API key.
    """
    logger.debug(request.url)
    logger.debug("API request: %s" % args)
    # Map newznab api parameters to internal
    args["category"] = args["cat"]
    args["episode"] = args["ep"]
    if args["q"] is not None and args["q"] != "":
        args["query"] = args["q"]  # Because internally we work with "query" instead of "q"
    if mainSettings.apikey.get_with_default(None) and ("apikey" not in args or args["apikey"] != mainSettings.apikey.get()):
        logger.error("Tried API access with invalid or missing API key")
        raise Unauthorized("API key not provided or invalid")
    elif args["t"] in ("search", "tvsearch", "movie"):
        search_request = SearchRequest(category=args["cat"], offset=args["offset"], limit=args["limit"], query=args["q"])
        if args["t"] == "search":
            search_request.type = "general"
        elif args["t"] == "tvsearch":
            search_request.type = "tv"
            # TVRage id is preferred over TVDB id when both are supplied
            identifier_key = "rid" if args["rid"] else "tvdbid" if args["tvdbid"] else None
            if identifier_key is not None:
                identifier_value = args[identifier_key]
                search_request.identifier_key = identifier_key
                search_request.identifier_value = identifier_value
            search_request.season = int(args["season"]) if args["season"] else None
            search_request.episode = int(args["episode"]) if args["episode"] else None
        elif args["t"] == "movie":
            search_request.type = "movie"
            search_request.identifier_key = "imdbid" if args["imdbid"] is not None else None
            search_request.identifier_value = args["imdbid"] if args["imdbid"] is not None else None
        result = search.search(False, search_request)
        results = process_for_external_api(result)
        content = render_search_results_for_api(results, result["total"], result["offset"])
        response = make_response(content)
        response.headers["Content-Type"] = "application/xml"
        # Return the response object so the Content-Type header set above is
        # actually sent; returning the bare content discarded the header.
        return response
    elif args["t"] == "get":
        # The id is a rison-encoded structure identifying the NZB to download
        args = rison.loads(urllib.parse.unquote(args["id"]))
        return extract_nzb_infos_and_return_response(args["indexer"], args["guid"], args["title"], args["searchid"])
    elif args["t"] == "caps":
        xml = render_template("caps.html")
        return Response(xml, mimetype="text/xml")
    else:
        pprint(request)
        return "Unknown API request. Supported functions: search, tvsearch, movie, get, caps", 500
def testDuplicateTaggingForInternalApi(self):
    # Runs an internal search over three mocked indexers that each return 249
    # results with overlapping titles.
    # NOTE(review): no assertions follow the search call -- presumably the
    # duplicate-tagging outcome on `result` should be verified; confirm intent.
    with self.app.test_request_context('/'):
        with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
            newznabItems = [
                [mockbuilder.buildNewznabItem(title="title%d" % i, pubdate=arrow.get(4000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=i, indexer_name="newznab1", guid="newznab1result%d" % i) for i in range(1, 250)],
                [mockbuilder.buildNewznabItem(title="title%d" % i, pubdate=arrow.get(4000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=i, indexer_name="newznab2", guid="newznab2result%d" % i) for i in range(1, 250)],
                [mockbuilder.buildNewznabItem(title="title%d" % i, pubdate=arrow.get(4000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=i, indexer_name="newznab3", guid="newznab3result%d" % i) for i in range(1, 250)]]
            self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)
            searchRequest = SearchRequest(type="search")
            result = search.search(searchRequest)
def testUrlGeneration(self):
    """NzbIndex show-search query strings; ignored words are appended as exclusions."""
    w = NzbIndex(config.settings.indexers.nzbindex)
    # Season+episode: both sXXeYY and XxYY notations are ORed together.
    self.args = SearchRequest(query="a showtitle", season=1, episode=2)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    # Removed a leftover debug print(urls[0]).
    self.assertEqual('a showtitle s01e02 | 1x02', furl(urls[0]).args["q"])
    # Season only.
    self.args = SearchRequest(query="a showtitle", season=1, episode=None)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual('a showtitle s01 | "season 1"', furl(urls[0]).args["q"])
    # Ignored words are appended as "-word" exclusions.
    self.args = SearchRequest(query="aquery", ignoreWords=["ignorethis"])
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual("https://nzbindex.com/search?max=100&hidecross=1&more=1&q=aquery+-ignorethis&minsize=1", urls[0])
def testLimitAndOffset(self):
    # Verifies that limit and offset on the search request page through the
    # combined result set of two mocked indexers (6 results each).
    with self.app.test_request_context('/'):
        # Only use newznab indexers
        with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
            # Prepare 12 results
            self.prepareSearchMocks(rsps, 2, 6)
            # Search with a limit of 6
            searchRequest = SearchRequest(limit=6, type="search", internal=False)
            result = search.search(searchRequest)
            results = result["results"]
            self.assertEqual(6, len(results), "Expected the limit of 6 to be respected")
            self.assertEqual("newznab1result1.title", results[0].title)
            self.assertEqual("newznab1result6.title", results[5].title)
            # Search again with an offset, expect the next (and last ) 6 results
            searchRequest = SearchRequest(offset=6, limit=100, type="search", internal=False)
            result = search.search(searchRequest)
            results = result["results"]
            self.assertEqual(6, len(results), "Expected the limit of 6 to be respected")
            self.assertEqual("newznab2result1.title", results[0].title)
def internalapi_search(args):
    """Build a general/ebook/audiobook SearchRequest from internal API args and run a cached search."""
    logger.debug("Search request with args %s" % args)
    # Map the category onto a search type; anything but (audio)book is general.
    # (Also avoids the original's local variable shadowing the builtin `type`.)
    search_type = {"ebook": "ebook", "audiobook": "audiobook"}.get(args["category"].lower(), "general")
    indexers = urllib.unquote(args["indexers"]) if args["indexers"] is not None else None
    search_request = SearchRequest(type=search_type,
                                   query=args["query"],
                                   offset=args["offset"],
                                   category=args["category"],
                                   minsize=args["minsize"],
                                   maxsize=args["maxsize"],
                                   minage=args["minage"],
                                   maxage=args["maxage"],
                                   indexers=indexers)
    return cached_search(search_request)
def testProcess_results(self):
    """Parse a womble RSS result and verify the first entry's fields."""
    with open("mock/womble--sec-tv-dvd.xml") as f:
        entries = self.womble.process_query_result(f.read(), SearchRequest()).entries
    self.assertEqual("testtitle1", entries[0].title)
    self.assertEqual("http://www.newshost.co.za/nzb/79d/testtitle1.nzb", entries[0].link)
    self.assertEqual(336592896, entries[0].size)
    self.assertEqual("TV SD", entries[0].category)
    # The NZB path serves as the indexer guid.
    self.assertEqual("79d/testtitle1.nzb", entries[0].indexerguid)
    self.assertEqual(1442790103, entries[0].epoch)
    self.assertEqual("2015-09-20T23:01:43+00:00", entries[0].pubdate_utc)
    self.assertEqual(0, entries[0].age_days)
    # Removed a duplicated indexerguid assertion that repeated the check above.
def internalapi_moviesearch(args):
    """Build a movie SearchRequest from internal API args and start the search.

    An IMDB id is used directly; a TMDB id is converted to an IMDB id first
    and the request fails if the conversion is not possible.
    """
    logger.debug("Movie search request with args %s" % args)
    indexers = urllib.unquote(args["indexers"]) if args["indexers"] is not None else None
    search_request = SearchRequest(type="movie",
                                   query=args["query"],
                                   offset=args["offset"],
                                   category=args["category"],
                                   minsize=args["minsize"],
                                   maxsize=args["maxsize"],
                                   minage=args["minage"],
                                   maxage=args["maxage"],
                                   indexers=indexers)
    imdbid = args["imdbid"]
    if not imdbid and args["tmdbid"]:
        logger.debug("Need to get IMDB id from TMDB id %s" % args["tmdbid"])
        imdbid = infos.convertId("tmdb", "imdb", args["tmdbid"])
        if imdbid is None:
            raise AttributeError("Unable to convert TMDB id %s" % args["tmdbid"])
    if imdbid:
        search_request.identifier_key = "imdbid"
        search_request.identifier_value = imdbid
    return startSearch(search_request)
def testUrlGeneration(self):
    """omgwtf search URL generation: query, category, retention limit and the query-less RSS fallback."""
    cases = [
        (SearchRequest(query="aquery"),
         "https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery"),
        (SearchRequest(query="aquery", category="TV HD"),
         "https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery&catid=20"),
        # maxage maps to the retention parameter.
        (SearchRequest(query="aquery", maxage=100),
         "https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery&retention=100"),
        (SearchRequest(query="aquery", category="TV HD", maxage=100),
         "https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery&retention=100&catid=20"),
        # Without a query the RSS download feed is used.
        (SearchRequest(),
         "https://rss.omgwtfnzbs.org/rss-download.php?api=apikey&user=anuser"),
        (SearchRequest(category="TV HD"),
         "https://rss.omgwtfnzbs.org/rss-download.php?api=apikey&user=anuser&catid=20"),
    ]
    for request, expected_url in cases:
        self.args = request
        urls = self.omgwtf.get_search_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual(expected_url, urls[0])
def testGetShowSearchUrls(self):
    """Verify show-search URL construction, including season/episode query tokens."""
    # (request kwargs, expected single generated URL)
    cases = [
        (dict(query="aquery"),
         "https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery&catid=19,20,21"),
        (dict(query="aquery", category=getCategoryByAnyInput("tvhd")),
         "https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery&catid=20"),
        (dict(query="aquery", season=1),
         "https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery+s01&catid=19,20,21"),
        (dict(query="aquery", season=1, episode=2),
         "https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery+s01e02&catid=19,20,21"),
        # Without a query the RSS download feed is used instead of the API.
        (dict(),
         "https://rss.omgwtfnzbs.org/rss-download.php?api=apikey&user=anuser&catid=19,20,21"),
    ]
    for request_kwargs, expected_url in cases:
        self.args = SearchRequest(**request_kwargs)
        urls = self.omgwtf.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual(expected_url, urls[0])
    # Daily-show style request: season carries the year and episode "month/day";
    # only the generated "search" query parameter is checked here.
    self.args = SearchRequest(query="a showtitle", season="2016", episode="08/08")
    urls = self.omgwtf.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual('a showtitle "2016 08 08"', furl(urls[0]).args["search"])
def testParseSpotwebSearchResult(self):
    """Parse a canned spotweb XML response (3 results) and verify the first entry's fields."""
    with open("mock/spotweb_q_testtitle_3results.xml") as mock_response:
        parsed = self.n1.process_query_result(mock_response.read(), SearchRequest())
    entries = parsed.entries
    self.assertEqual(3, len(entries))
    first = entries[0]
    self.assertEqual(first.title, "testtitle1")
    self.assertEqual(3960401206, first.size)
    self.assertEqual("*****@*****.**", first.indexerguid)
    self.assertEqual(first.age_days, 5)
    self.assertEqual(first.epoch, 1453663845)
    self.assertEqual(first.pubdate_utc, "2016-01-24T19:30:45+00:00")
    self.assertEqual(first.poster, "*****@*****.**")
    # Spotweb results carry no usenet group information.
    self.assertIsNone(first.group)
def testDuplicateTaggingForInternalApi(self):
    """Results with the same title and size from different indexers must share a duplicate hash."""
    def build_item(title, timestamp, indexer, guid):
        # All mocked results share size 1000 so only title decides duplicate grouping.
        return mockbuilder.buildNewznabItem(
            title=title,
            pubdate=arrow.get(timestamp).format("ddd, DD MMM YYYY HH:mm:ss Z"),
            size=1000,
            indexer_name=indexer,
            guid=guid)

    with self.app.test_request_context('/'):
        with responses.RequestsMock() as rsps:
            # Two indexers each return the same two titles at different ages.
            mocked_items = [
                [build_item("title1", 1000, "newznab1", "newznab1result1"),
                 build_item("title2", 3000, "newznab1", "newznab1result2")],
                [build_item("title1", 2000, "newznab2", "newznab1result1"),
                 build_item("title2", 4000, "newznab2", "newznab2result2")],
            ]
            self.prepareSearchMocks(rsps, indexerCount=len(mocked_items), newznabItems=mocked_items)
            searchRequest = SearchRequest(type="search")
            result = search.search(searchRequest)
            results = result["results"]
            self.assertEqual(4, len(results))
            # Sorting by hash groups duplicates next to each other: each
            # title pair must share one hash.
            results = sorted(results, key=lambda entry: entry.hash)
            self.assertEqual(results[0].hash, results[1].hash)
            self.assertEqual(results[2].hash, results[3].hash)
def testWords(self):
    # Exercises SearchModule.accept_result word/regex filtering:
    # required words, forbidden words, their combination, and the
    # applyRestrictions internal/external/both switches for both the
    # global searching config and the per-category config.
    sm = SearchModule(None)

    # --- Required words: a result is accepted when ANY required word occurs
    # in the title, delimited by spaces, dots or dashes.
    sr = SearchRequest(forbiddenWords=[], requiredWords=["rqa", "rqb", "rq-c", "rq.d"], category=Bunch({"category": categories.getCategoryByName("all")}))
    nsr = NzbSearchResult(pubdate_utc="", title="xyz rqa", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertTrue(accepted)
    nsr = NzbSearchResult(pubdate_utc="", title="rqa", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertTrue(accepted)
    nsr = NzbSearchResult(pubdate_utc="", title="a.title.rqa.xyz", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertTrue(accepted)
    nsr = NzbSearchResult(pubdate_utc="", title="a title rqa xyz", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertTrue(accepted)
    # Required words may themselves contain dashes or dots.
    nsr = NzbSearchResult(pubdate_utc="", title="a-title-rq-c", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertTrue(accepted)
    nsr = NzbSearchResult(pubdate_utc="", title="a title rq.d", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertTrue(accepted)
    # "rqatsch" only contains "rqa" as a substring, not a delimited word -> rejected.
    nsr = NzbSearchResult(pubdate_utc="", title="rqatsch", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    nsr = NzbSearchResult(pubdate_utc="", title="xyz.rqa", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertTrue(accepted)
    nsr = NzbSearchResult(pubdate_utc="", title="xyz rqa rqb", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertTrue(accepted)
    # No required word present at all -> rejected with an explanatory reason.
    nsr = NzbSearchResult(pubdate_utc="", title="xyz", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    self.assertTrue("None of the required" in reason)

    # --- Forbidden words: any forbidden word appearing as a delimited word rejects the result.
    sr = SearchRequest(forbiddenWords=["fba", "fbb", "fb-c", "fb.d"], requiredWords=[], category=Bunch({"category": categories.getCategoryByName("all")}))
    nsr = NzbSearchResult(pubdate_utc="", title="xyz fba")
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    self.assertTrue("\"fba\" is in the list" in reason)
    # Substring-only occurrence of a plain forbidden word does not reject.
    nsr = NzbSearchResult(pubdate_utc="", title="xyzfba")
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertTrue(accepted)
    # A forbidden word containing a dash matches even inside a larger token.
    nsr = NzbSearchResult(pubdate_utc="", title="xyzfb-ca")
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    self.assertTrue("\"fb-c\" is in the list" in reason)

    # --- Both lists set: forbidden words take precedence over required words.
    sr = SearchRequest(forbiddenWords=["fba", "fbb", "fb-c", "fb.d"], requiredWords=["rqa", "rqb", "rq-c", "rq.d"], category=Bunch({"category": categories.getCategoryByName("all")}))
    nsr = NzbSearchResult(pubdate_utc="", title="xyz fba rqa")
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    self.assertTrue("\"fba\" is in the list" in reason)
    # Matching is case-insensitive (title FBA vs forbidden word fba).
    nsr = NzbSearchResult(pubdate_utc="", title="xyz FBA rqb")
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    self.assertTrue("\"fba\" is in the list" in reason)
    nsr = NzbSearchResult(pubdate_utc="", title="xyz rqa.rqb.fbb")
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    self.assertTrue("\"fbb\" is in the list" in reason)
    nsr = NzbSearchResult(pubdate_utc="", title="xyz rqa.rqb.fba.fbc")
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    self.assertTrue("\"fba\" is in the list" in reason)
    # Case-insensitivity in the other direction (upper-case forbidden word).
    nsr = NzbSearchResult(pubdate_utc="", title="xyz acd")
    sr = SearchRequest(forbiddenWords=["ACD"])
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    self.assertTrue("\"ACD\" is in the list" in reason)

    # --- applyRestrictions switches. First reset every word/regex setting on
    # the global config and the request's category so only the values set
    # below take effect.
    sr = SearchRequest(forbiddenWords=[], requiredWords=[], category=Bunch({"category": categories.getCategoryByName("all")}))
    config.settings.searching.applyRestrictions = "both"
    config.settings.searching.requiredWords = ""
    config.settings.searching.forbiddenWords = ""
    config.settings.searching.requiredRegex = ""
    config.settings.searching.forbiddenRegex = ""
    sr.category.category.applyRestrictions = "both"
    sr.category.category.forbiddenWords = ""
    sr.category.category.requiredWords = ""
    sr.category.category.forbiddenRegex = ""
    sr.category.category.requiredRegex = ""
    sr.internal = True
    # Global forbiddenRegex "abc" against an internal search:
    # applied for "both" and "internal", skipped for "external".
    config.settings.searching.applyRestrictions = "both"
    config.settings.searching.forbiddenRegex = "abc"
    nsr = NzbSearchResult(pubdate_utc="", title="abc", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    config.settings.searching.applyRestrictions = "internal"
    nsr = NzbSearchResult(pubdate_utc="", title="abc", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    config.settings.searching.applyRestrictions = "external"
    nsr = NzbSearchResult(pubdate_utc="", title="abc", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertTrue(accepted)
    # Same regex against an external search:
    # applied for "both" and "external", skipped for "internal".
    sr.internal = False
    config.settings.searching.applyRestrictions = "both"
    nsr = NzbSearchResult(pubdate_utc="", title="abc", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    config.settings.searching.applyRestrictions = "internal"
    nsr = NzbSearchResult(pubdate_utc="", title="abc", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertTrue(accepted)
    config.settings.searching.applyRestrictions = "external"
    nsr = NzbSearchResult(pubdate_utc="", title="abc", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    # Repeat the same matrix for the CATEGORY-level forbiddenRegex, with the
    # global regex cleared. Internal search first.
    sr.internal = True
    config.settings.searching.forbiddenRegex = ""
    sr.category.category.forbiddenRegex = "abc"
    nsr = NzbSearchResult(pubdate_utc="", title="abc", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    sr.category.category.applyRestrictions = "internal"
    nsr = NzbSearchResult(pubdate_utc="", title="abc", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    sr.category.category.applyRestrictions = "external"
    nsr = NzbSearchResult(pubdate_utc="", title="abc", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertTrue(accepted)
    # External search against the category-level regex.
    sr.internal = False
    sr.category.category.applyRestrictions = "both"
    nsr = NzbSearchResult(pubdate_utc="", title="abc", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
    sr.category.category.applyRestrictions = "internal"
    nsr = NzbSearchResult(pubdate_utc="", title="abc", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertTrue(accepted)
    sr.category.category.applyRestrictions = "external"
    nsr = NzbSearchResult(pubdate_utc="", title="abc", category=categories.getCategoryByName("all"))
    accepted, reason, ri = sm.accept_result(nsr, sr, None)
    self.assertFalse(accepted)
def test_pick_indexers(self):
    # Exercises search.pick_indexers: which configured indexers are selected
    # for a given search request depending on query/identifier/category and
    # on each indexer's internal/external access type.
    config.settings.searching.generate_queries = []
    config.settings.indexers.extend([self.newznab1, self.newznab2])
    # Enable the two non-newznab indexers for both access types.
    getIndexerSettingByName("womble").enabled = True
    getIndexerSettingByName("womble").accessType = "both"
    getIndexerSettingByName("nzbclub").enabled = True
    getIndexerSettingByName("nzbclub").accessType = "both"
    read_indexers_from_config()
    # Empty request: three indexers are picked.
    search_request = SearchRequest()
    indexers = search.pick_indexers(search_request)
    self.assertEqual(3, len(indexers))
    # Indexers with tv search and which support queries (actually searching for particular releases)
    search_request.query = "bla"
    indexers = search.pick_indexers(search_request)
    self.assertEqual(3, len(indexers))
    # Indexers with tv search, including those that only provide a list of latest releases (womble) but excluding the one that needs a query (nzbclub)
    search_request.query = None
    indexers = search.pick_indexers(search_request)
    self.assertEqual(3, len(indexers))
    # A tvdbid identifier without query generation limits selection to the
    # two newznab indexers, which support id-based searching.
    search_request.identifier_key = "tvdbid"
    indexers = search.pick_indexers(search_request)
    self.assertEqual(2, len(indexers))
    self.assertEqual("newznab1", indexers[0].name)
    self.assertEqual("newznab2", indexers[1].name)
    # An imdbid movie search narrows further to newznab1 only.
    search_request.identifier_key = "imdbid"
    search_request.category = getCategoryByName("movies")
    indexers = search.pick_indexers(search_request)
    self.assertEqual(1, len(indexers))
    self.assertEqual("newznab1", indexers[0].name)
    # With query generation NZBClub should also be returned: the id can be
    # converted to a title query, so query-only indexers become eligible.
    infos.title_from_id = mock
    config.settings.searching.generate_queries = ["internal"]
    search_request.identifier_key = "tvdbid"
    search_request.query = None
    search_request.category = None
    indexers = search.pick_indexers(search_request)
    self.assertEqual(3, len(indexers))
    self.assertEqual("nzbclub", indexers[0].name)
    self.assertEqual("newznab1", indexers[1].name)
    self.assertEqual("newznab2", indexers[2].name)
    # Test picking depending on internal, external, both. Only the two
    # newznab indexers remain enabled from here on.
    getIndexerSettingByName("womble").enabled = False
    getIndexerSettingByName("nzbclub").enabled = False
    getIndexerSettingByName("newznab1").accessType = "both"
    # accessType "both": picked for internal and external searches alike.
    search_request.internal = True
    indexers = search.pick_indexers(search_request)
    self.assertEqual(2, len(indexers))
    search_request.internal = False
    indexers = search.pick_indexers(search_request)
    self.assertEqual(2, len(indexers))
    config.settings.indexers = [self.newznab1, self.newznab2]
    # accessType "external": excluded from internal searches only.
    getIndexerSettingByName("newznab1").accessType = "external"
    read_indexers_from_config()
    search_request.internal = True
    indexers = search.pick_indexers(search_request)
    self.assertEqual(1, len(indexers))
    search_request.internal = False
    indexers = search.pick_indexers(search_request)
    self.assertEqual(2, len(indexers))
    # accessType "internal": excluded from external searches only.
    getIndexerSettingByName("newznab1").accessType = "internal"
    read_indexers_from_config()
    search_request.internal = True
    indexers = search.pick_indexers(search_request)
    self.assertEqual(2, len(indexers))
    search_request.internal = False
    indexers = search.pick_indexers(search_request)
    self.assertEqual(1, len(indexers))