def testEbookUrlGeneration(self):
    getIndexerSettingByName("binsearch").searchTypes = []
    w = Binsearch(getIndexerSettingByName("binsearch"))
    self.args = SearchRequest(query="anauthor atitle")
    urls = w.get_ebook_urls(self.args)
    self.assertEqual(4, len(urls))
    self.assertEqual("https://binsearch.info/index.php?max=100&postdate=date&min=0&adv_sort=date&adv_col=on&q=anauthor+atitle+ebook", urls[0])
    self.assertEqual("https://binsearch.info/index.php?max=100&postdate=date&min=0&adv_sort=date&adv_col=on&q=anauthor+atitle+mobi", urls[1])

    self.args = SearchRequest(author="anauthor", title="atitle")
    urls = w.get_ebook_urls(self.args)
    self.assertEqual(4, len(urls))
    self.assertEqual("https://binsearch.info/index.php?max=100&postdate=date&min=0&adv_sort=date&adv_col=on&q=anauthor+atitle+ebook", urls[0])
    self.assertEqual("https://binsearch.info/index.php?max=100&postdate=date&min=0&adv_sort=date&adv_col=on&q=anauthor+atitle+mobi", urls[1])
def testProcess_results_totalknown(self):
    w = Binsearch(getIndexerSettingByName("binsearch"))
    with open("mock/binsearch--q-testtitle3results.html", encoding="latin-1") as f:
        body = f.read()
    result = w.process_query_result(body, SearchRequest())
    self.assertFalse(result.has_more)
    self.assertEqual(3, result.total)
def testUrlGeneration(self):
    w = NzbIndex(getIndexerSettingByName("nzbindex"))
    self.args = SearchRequest(query="a showtitle", season=1, episode=2)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual('a showtitle s01e02 | 1x02', furl(urls[0]).args["q"])

    self.args = SearchRequest(query="a showtitle", season=1, episode=None)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual('a showtitle s01 | "season 1"', furl(urls[0]).args["q"])

    self.args = SearchRequest(query="a showtitle", season="2016", episode="08/08")
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual('a showtitle "2016 08 08"', furl(urls[0]).args["q"])

    self.args = SearchRequest(query="aquery", forbiddenWords=["ignorethis"])
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual("https://nzbindex.com/search?max=100&hidecross=1&more=1&q=aquery+-ignorethis", urls[0])
def testProcess_results(self):
    w = NzbIndex(getIndexerSettingByName("nzbindex"))
    with open("mock/nzbindex--q-testtitle.html") as f:
        processing_result = w.process_query_result(f.read(), SearchRequest())
    entries = processing_result.entries
    self.assertEqual('114143855', entries[0].indexerguid)
    self.assertEqual('testtitle1', entries[0].title)
    self.assertFalse(entries[0].has_nfo)
    self.assertEqual('[email protected] (senior)', entries[0].poster)
    self.assertEqual('alt.binaries.mom', entries[0].group)
    self.assertEqual("https://nzbindex.com/download/114143855/testtitle1-testtitle1.nzb", entries[0].link)
    self.assertEqual(169103851, entries[0].size)
    self.assertEqual("2014-11-04T10:39:00+01:00", entries[0].pubdate_utc)  # would be perfect, that is the exact pubdate
    self.assertEqual("Tue, 04 Nov 2014 10:39:00 +0100", entries[0].pubDate)
    self.assertEqual(1415093940, entries[0].epoch)
    self.assertEqual(333, entries[0].age_days)
    self.assertEqual("https://nzbindex.com/release/114143855/testtitle1-testtitle1.nzb", entries[0].details_link)
    self.assertTrue(entries[0].passworded)

    self.assertEqual(0, entries[1].age_days)
    self.assertEqual("2015-10-03T20:15:00+01:00", entries[1].pubdate_utc)  # would be perfect, that is the exact pubdate
    self.assertEqual(1443899700, entries[1].epoch)

    self.assertEqual(1000, processing_result.total)
    self.assertTrue(processing_result.has_more)
def testDuplicateRemovalForExternalApi(self):
    config.settings.searching.removeDuplicatesExternal = True
    with self.app.test_request_context('/'):
        with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
            newznabItems = [
                [mockbuilder.buildNewznabItem(title="title", pubdate=arrow.get(0000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab1")],
                [mockbuilder.buildNewznabItem(title="title", pubdate=arrow.get(1000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab2")],
                [mockbuilder.buildNewznabItem(title="title", pubdate=arrow.get(3000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab3")],
                [mockbuilder.buildNewznabItem(title="title", pubdate=arrow.get(2000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab4")]
            ]

            self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)
            # Test that the newest result is chosen if all scores are equal
            searchRequest = SearchRequest(type="search", internal=False)
            result = search.search(searchRequest)
            results = result["results"]
            self.assertEqual(1, len(results))
            self.assertEqual("newznab3", results[0].indexer)

            # Test that results from an indexer with a higher score are preferred
            self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)
            getIndexerSettingByName("newznab2").score = 99
            searchRequest = SearchRequest(type="search", internal=False)
            result = search.search(searchRequest)
            results = result["results"]
            self.assertEqual(1, len(results))
            self.assertEqual("newznab2", results[0].indexer)
def setUp(self): set_and_drop() getIndexerSettingByName("binsearch").enabled = False getIndexerSettingByName("nzbindex").enabled = False getIndexerSettingByName("omgwtf").enabled = False getIndexerSettingByName("womble").enabled = False getIndexerSettingByName("nzbclub").enabled = False self.newznab1 = Bunch() self.newznab1.enabled = True self.newznab1.name = "newznab1" self.newznab1.host = "https://indexer.com" self.newznab1.apikey = "apikeyindexer.com" self.newznab1.timeout = None self.newznab1.hitLimit = None self.newznab1.score = 0 self.newznab1.type = "newznab" self.newznab1.accessType = "both" self.newznab1.search_ids = ["imdbid", "rid", "tvdbid"] self.newznab1.searchTypes = ["book", "tvsearch", "movie"] self.newznab2 = Bunch() self.newznab2.enabled = True self.newznab2.name = "newznab2" self.newznab2.host = "https://indexer.com" self.newznab2.apikey = "apikeyindexer.com" self.newznab2.timeout = None self.newznab2.hitLimit = None self.newznab2.accessType = "both" self.newznab2.score = 0 self.newznab2.type = "newznab" self.newznab2.search_ids = ["rid", "tvdbid"] self.newznab2.searchTypes = ["tvsearch", "movie"] # config.settings.indexers = [self.newznab1, self.newznab2] self.oldExecute_search_queries = search.start_search_futures database.IndexerStatus.delete().execute() database.IndexerSearch.delete().execute() infos.convertId = mock self.app = flask.Flask(__name__) self.response_callbacks = []
def setUp(self): set_and_drop() getIndexerSettingByName("binsearch").enabled = False getIndexerSettingByName("nzbindex").enabled = False getIndexerSettingByName("womble").enabled = False getIndexerSettingByName("nzbclub").enabled = False self.newznab1 = Bunch() self.newznab1.enabled = True self.newznab1.name = "newznab1" self.newznab1.host = "https://indexer.com" self.newznab1.apikey = "apikeyindexer.com" self.newznab1.timeout = None self.newznab1.hitLimit = None self.newznab1.score = 0 self.newznab1.type = "newznab" self.newznab1.accessType = "both" self.newznab1.search_ids = ["imdbid", "rid", "tvdbid"] self.newznab1.searchTypes = ["book", "tvsearch", "movie"] self.newznab2 = Bunch() self.newznab2.enabled = True self.newznab2.name = "newznab2" self.newznab2.host = "https://indexer.com" self.newznab2.apikey = "apikeyindexer.com" self.newznab2.timeout = None self.newznab2.hitLimit = None self.newznab2.accessType = "both" self.newznab2.score = 0 self.newznab2.type = "newznab" self.newznab2.search_ids = ["rid", "tvdbid"] self.newznab2.searchTypes = ["tvsearch", "movie"] # config.settings.indexers = [self.newznab1, self.newznab2] self.oldExecute_search_queries = search.start_search_futures database.IndexerStatus.delete().execute() database.IndexerSearch.delete().execute() infos.convertId = mock self.app = flask.Flask(__name__) self.response_callbacks = []
def testProcess_results(self):
    w = NzbClub(getIndexerSettingByName("nzbclub"))
    with open("mock/nzbclub--q-testtitle.xml", encoding="latin-1") as f:
        entries = w.process_query_result(f.read(), SearchRequest()).entries
    self.assertEqual('testtitle1', entries[0].title)
    self.assertEqual("http://www.nzbclub.com/nzb_get/60269450/testtitle1.nzb", entries[0].link)
    self.assertEqual(1075514926, entries[0].size)
    self.assertEqual("60269450", entries[0].indexerguid)
    self.assertEqual(1443019463, entries[0].epoch)
    self.assertEqual("2015-09-23T09:44:23-05:00", entries[0].pubdate_utc)
    self.assertEqual("Wed, 23 Sep 2015 09:44:23 -0500", entries[0].pubDate)
    self.assertEqual(0, entries[0].age_days)
    self.assertEqual("http://www.nzbclub.com/nzb_view/60269450/testtitle1", entries[0].details_link)
    self.assertEqual("[email protected] (YIFY)", entries[0].poster)
    self.assertEqual("alt.binaries.movies", entries[0].group)
def testGetEntryById(self):
    Indexer(name="nzbindex").save()
    n = NzbIndex(getIndexerSettingByName("nzbindex"))
    with open("mock/nzbindex--details.html", encoding="latin-1") as f:
        xml = f.read()
    with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
        url_re = re.compile(r'.*')
        rsps.add(responses.GET, url_re, body=xml, status=200, content_type='application/x-html')
        item = n.get_entry_by_id("aguid", "atitle")
        self.assertEqual("atitle", item.title)
        self.assertEqual(3816816, item.size)
        self.assertEqual("alt.binaries.pwp | alt.binaries.usenetrevolution", item.group)
        self.assertEqual("[email protected] (Janusch)", item.poster)
        self.assertEqual("https://nzbindex.com/download/aguid/atitle.nzb", item.link)
        self.assertEqual("https://nzbindex.com/release/aguid/atitle.nzb", item.details_link)
def testUrlGeneration(self):
    w = Binsearch(getIndexerSettingByName("binsearch"))
    self.args = SearchRequest(query="a showtitle", season=1, episode=2)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(2, len(urls))
    self.assertEqual('a showtitle s01e02', furl(urls[0]).args["q"])
    self.assertEqual('a showtitle 1x02', furl(urls[1]).args["q"])

    self.args = SearchRequest(query="a showtitle", season=1, episode=None)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(2, len(urls))
    self.assertEqual('a showtitle s01', furl(urls[0]).args["q"])
    self.assertEqual('a showtitle "season 1"', furl(urls[1]).args["q"])

    self.args = SearchRequest(query="a showtitle", season=2016, episode="08/08")
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual('a showtitle "2016 08 08"', furl(urls[0]).args["q"])
def testProcess_results(self):
    w = Binsearch(getIndexerSettingByName("binsearch"))
    with open("mock/binsearch--q-testtitle.html", encoding="latin-1") as f:
        body = f.read()
    result = w.process_query_result(body, SearchRequest())
    entries = list(result.entries)
    self.assertEqual('testtitle1.TrueFrench.1080p.X264.AC3.5.1-JKF.mkv', entries[0].title)
    self.assertEqual("https://binsearch.info?action=nzb&176073735=1", entries[0].link)
    self.assertEqual(13110387671, entries[0].size)
    self.assertEqual("176073735", entries[0].indexerguid)
    self.assertEqual(1443312000, entries[0].epoch)
    self.assertEqual("2015-09-27T00:00:00+00:00", entries[0].pubdate_utc)
    self.assertEqual("Sun, 27 Sep 2015 00:00:00 -0000", entries[0].pubDate)
    self.assertEqual(3, entries[0].age_days)
    self.assertFalse(entries[0].age_precise)
    self.assertEqual("Ramer@marmer.com (Clown_nez)", entries[0].poster)
    self.assertEqual("alt.binaries.movies.mkv", entries[0].group)
    self.assertUrlEqual("https://binsearch.info/?b=testtitle1.3D.TOPBOT.TrueFrench.1080p.X264.A&g=alt.binaries.movies.mkv&p=Ramer%40marmer.com+%28Clown_nez%29&max=250", entries[0].details_link)
    self.assertTrue(result.has_more)
    self.assertFalse(result.total_known)
def testGetNzbLink(self):
    n = Binsearch(getIndexerSettingByName("binsearch"))
    link = n.get_nzb_link("guid", "title")
    assert "action=nzb" in link
    assert "guid=1" in link
def testGetNzbLink(self):
    n = NzbIndex(getIndexerSettingByName("nzbindex"))
    link = n.get_nzb_link("guid", "title")
    self.assertEqual("https://nzbindex.com/download/guid/title.nzb", link)
def test_pick_indexers(self):
    config.settings.searching.generate_queries = []
    config.settings.indexers.extend([self.newznab1, self.newznab2])
    getIndexerSettingByName("womble").enabled = True
    getIndexerSettingByName("womble").accessType = "both"
    getIndexerSettingByName("nzbclub").enabled = True
    getIndexerSettingByName("nzbclub").accessType = "both"
    read_indexers_from_config()

    search_request = SearchRequest()
    indexers = search.pick_indexers(search_request)
    self.assertEqual(3, len(indexers))

    # Indexers with tv search and which support queries (actually searching for particular releases)
    search_request.query = "bla"
    indexers = search.pick_indexers(search_request)
    self.assertEqual(3, len(indexers))

    # Indexers with tv search, including those that only provide a list of latest releases (womble) but excluding the one that needs a query (nzbclub)
    search_request.query = None
    indexers = search.pick_indexers(search_request)
    self.assertEqual(3, len(indexers))

    search_request.identifier_key = "tvdbid"
    indexers = search.pick_indexers(search_request)
    self.assertEqual(2, len(indexers))
    self.assertEqual("newznab1", indexers[0].name)
    self.assertEqual("newznab2", indexers[1].name)

    search_request.identifier_key = "imdbid"
    search_request.category = getCategoryByName("movies")
    indexers = search.pick_indexers(search_request)
    self.assertEqual(1, len(indexers))
    self.assertEqual("newznab1", indexers[0].name)

    # With query generation NZBClub should also be returned
    infos.title_from_id = mock
    config.settings.searching.generate_queries = ["internal"]
    search_request.identifier_key = "tvdbid"
    search_request.query = None
    search_request.category = None
    indexers = search.pick_indexers(search_request)
    self.assertEqual(3, len(indexers))
    self.assertEqual("nzbclub", indexers[0].name)
    self.assertEqual("newznab1", indexers[1].name)
    self.assertEqual("newznab2", indexers[2].name)

    # Test picking depending on internal, external, both
    getIndexerSettingByName("womble").enabled = False
    getIndexerSettingByName("nzbclub").enabled = False

    getIndexerSettingByName("newznab1").accessType = "both"
    search_request.internal = True
    indexers = search.pick_indexers(search_request)
    self.assertEqual(2, len(indexers))
    search_request.internal = False
    indexers = search.pick_indexers(search_request)
    self.assertEqual(2, len(indexers))

    config.settings.indexers = [self.newznab1, self.newznab2]
    getIndexerSettingByName("newznab1").accessType = "external"
    read_indexers_from_config()
    search_request.internal = True
    indexers = search.pick_indexers(search_request)
    self.assertEqual(1, len(indexers))
    search_request.internal = False
    indexers = search.pick_indexers(search_request)
    self.assertEqual(2, len(indexers))

    getIndexerSettingByName("newznab1").accessType = "internal"
    read_indexers_from_config()
    search_request.internal = True
    indexers = search.pick_indexers(search_request)
    self.assertEqual(2, len(indexers))
    search_request.internal = False
    indexers = search.pick_indexers(search_request)
    self.assertEqual(1, len(indexers))
def setUp(self):
    set_and_drop()
    self.omgwtf = OmgWtf(getIndexerSettingByName("omgwtf"))
def setUp(self):
    set_and_drop()
    config.load("testsettings.cfg")
    self.womble = Womble(getIndexerSettingByName("womble"))
def testGetNzbLink(self):
    n = NzbClub(getIndexerSettingByName("nzbclub"))
    link = n.get_nzb_link("guid", "title")
    self.assertEqual("https://www.nzbclub.com/nzb_get/guid/title.nzb", link)
def testUrlGeneration(self):
    w = NzbClub(getIndexerSettingByName("nzbclub"))
    self.args = SearchRequest(query="a showtitle", season=1, episode=2)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual('a showtitle s01e02 or a showtitle 1x02', furl(urls[0]).args["q"])

    self.args = SearchRequest(query="a showtitle", season=1, episode=None)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual('a showtitle s01 or a showtitle "season 1"', furl(urls[0]).args["q"])

    self.args = SearchRequest(query="aquery", minage=4)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertUrlEqual("https://www.nzbclub.com/nzbrss.aspx?ig=2&ns=1&q=aquery&rpp=250&sn=1&st=5&ds=4", urls[0])

    self.args = SearchRequest(query="aquery", minage=18 * 31)  # Beyond the last defined limit of days
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertUrlEqual("https://www.nzbclub.com/nzbrss.aspx?ig=2&ns=1&q=aquery&rpp=250&sn=1&st=5&ds=27", urls[0])

    self.args = SearchRequest(query="aquery", minage=70)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertUrlEqual("https://www.nzbclub.com/nzbrss.aspx?ig=2&ns=1&q=aquery&rpp=250&sn=1&st=5&ds=12", urls[0])

    self.args = SearchRequest(query="aquery", maxage=18 * 31)  # Beyond the last defined limit of days, so don't limit
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertUrlEqual("https://www.nzbclub.com/nzbrss.aspx?ig=2&ns=1&q=aquery&rpp=250&sn=1&st=5", urls[0])

    self.args = SearchRequest(query="aquery", minage=4, maxage=70)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertUrlEqual("https://www.nzbclub.com/nzbrss.aspx?ig=2&ns=1&q=aquery&rpp=250&sn=1&st=5&de=13&ds=4", urls[0])

    self.args = SearchRequest(query="aquery", minsize=3)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertUrlEqual("https://www.nzbclub.com/nzbrss.aspx?ig=2&ns=1&q=aquery&rpp=250&sn=1&st=5&szs=8", urls[0])

    self.args = SearchRequest(query="aquery", minsize=2400)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertUrlEqual("https://www.nzbclub.com/nzbrss.aspx?ig=2&ns=1&q=aquery&rpp=250&sn=1&st=5&szs=23", urls[0])

    self.args = SearchRequest(query="aquery", maxsize=2400)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertUrlEqual("https://www.nzbclub.com/nzbrss.aspx?ig=2&ns=1&q=aquery&rpp=250&sn=1&st=5&sze=24", urls[0])

    self.args = SearchRequest(query="aquery", maxsize=30 * 1024 * 1024)  # Beyond the last defined limit of size, so don't limit
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertUrlEqual("https://www.nzbclub.com/nzbrss.aspx?ig=2&ns=1&q=aquery&rpp=250&sn=1&st=5", urls[0])

    self.args = SearchRequest(query="aquery", minsize=3, maxsize=2400)
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertUrlEqual("https://www.nzbclub.com/nzbrss.aspx?ig=2&ns=1&q=aquery&rpp=250&sn=1&st=5&sze=24&szs=8", urls[0])

    self.args = SearchRequest(query="aquery", forbiddenWords=["ignorethis"])
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual("https://www.nzbclub.com/nzbrss.aspx?rpp=250&ns=1&sn=1&ig=2&st=5&q=aquery+-ignorethis", urls[0])

    self.args = SearchRequest(query="a showtitle", season=2016, episode="08/08")
    urls = w.get_showsearch_urls(self.args)
    self.assertEqual(1, len(urls))
    self.assertEqual('a showtitle "2016 08 08"', furl(urls[0]).args["q"])