def testDuplicateRemovalForExternalApi(self):
    """External API duplicate removal: of several identical results the newest
    one wins when indexer scores are equal, otherwise the result from the
    highest-scored indexer wins."""
    # Consistency fix: explicitly enable external duplicate removal instead of
    # relying on the config default (the sibling variant of this test sets it).
    config.settings.searching.removeDuplicatesExternal = True
    with self.app.test_request_context('/'):
        with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
            # Four indexers, each returning the same title/size but with
            # different publication dates (newznab3 is the newest).
            newznabItems = [
                [mockbuilder.buildNewznabItem(title="title", pubdate=arrow.get(0000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab1")],
                [mockbuilder.buildNewznabItem(title="title", pubdate=arrow.get(1000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab2")],
                [mockbuilder.buildNewznabItem(title="title", pubdate=arrow.get(3000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab3")],
                [mockbuilder.buildNewznabItem(title="title", pubdate=arrow.get(2000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab4")]
            ]
            self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)

            # Test that the newest result is chosen if all scores are equal
            searchRequest = SearchRequest(type="search", internal=False)
            result = search.search(searchRequest)
            results = result["results"]
            self.assertEqual(1, len(results))
            self.assertEqual("newznab3", results[0].indexer)

            # Test that results from an indexer with a higher score are preferred
            self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)
            getIndexerSettingByName("newznab2").score = 99
            searchRequest = SearchRequest(type="search", internal=False)
            result = search.search(searchRequest)
            results = result["results"]
            self.assertEqual(1, len(results))
            self.assertEqual("newznab2", results[0].indexer)
def testDuplicateRemovalForExternalApi(self):
    """With external duplicate removal enabled, only one of several identical
    results survives: the newest on a score tie, otherwise the one from the
    indexer with the highest score."""
    config.settings.searching.removeDuplicatesExternal = True

    def duplicate(timestamp, indexer):
        # All results share title and size; only pubdate and indexer differ.
        return mockbuilder.buildNewznabItem(
            title="title",
            pubdate=arrow.get(timestamp).format("ddd, DD MMM YYYY HH:mm:ss Z"),
            size=1000,
            indexer_name=indexer)

    with self.app.test_request_context('/'):
        with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
            newznabItems = [
                [duplicate(0, "newznab1")],
                [duplicate(1000, "newznab2")],
                [duplicate(3000, "newznab3")],
                [duplicate(2000, "newznab4")],
            ]
            self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)

            # Equal scores: the newest result (newznab3, ts=3000) is kept.
            searchRequest = SearchRequest(type="search", internal=False)
            result = search.search(searchRequest)
            results = result["results"]
            self.assertEqual(1, len(results))
            self.assertEqual("newznab3", results[0].indexer)

            # Raise newznab2's score: its (older) result is now preferred.
            self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)
            getIndexerSettingByName("newznab2").score = 99
            searchRequest = SearchRequest(type="search", internal=False)
            result = search.search(searchRequest)
            results = result["results"]
            self.assertEqual(1, len(results))
            self.assertEqual("newznab2", results[0].indexer)
def api_search(args):
    """Run a newznab-API search ('search', 'tvsearch', 'movie', 'book') and
    return the rendered results as XML or JSON, depending on args['o'].
    Unknown output formats yield a 500 response."""
    search_request = SearchRequest(category=args["cat"], offset=args["offset"], limit=args["limit"], query=args["q"])
    request_type = args["t"]

    if request_type == "search":
        search_request.type = "general"
        logger.info("")
    elif request_type == "tvsearch":
        search_request.type = "tv"
        # Prefer the TVRage id over the TVDB id when both are supplied.
        identifier_key = "rid" if args["rid"] else "tvdbid" if args["tvdbid"] else None
        if identifier_key is not None:
            search_request.identifier_key = identifier_key
            search_request.identifier_value = args[identifier_key]
        search_request.season = int(args["season"]) if args["season"] else None
        search_request.episode = int(args["episode"]) if args["episode"] else None
    elif request_type == "movie":
        search_request.type = "movie"
        if args["imdbid"] is not None:
            search_request.identifier_key = "imdbid"
            search_request.identifier_value = args["imdbid"]
        else:
            search_request.identifier_key = None
            search_request.identifier_value = None
    elif request_type == "book":
        search_request.type = "ebook"
        search_request.author = args["author"]
        search_request.title = args["title"]

    logger.info("API search request: %s" % search_request)
    result = search.search(False, search_request)
    results = process_for_external_api(result)
    content = render_search_results_for_api(results, result["total"], result["offset"], output=args["o"])

    output_format = args["o"].lower()
    if output_format == "xml":
        response = make_response(content)
        response.headers["Content-Type"] = "application/xml"
    elif output_format == "json":
        response = jsonify(content)
    else:
        return "Unknown output format", 500
    return response
def api_search(args):
    """Run a newznab-API search ('search', 'tvsearch', 'movie') and return the
    results rendered as XML with Content-Type application/xml."""
    search_request = SearchRequest(category=args["cat"], offset=args["offset"], limit=args["limit"], query=args["q"])
    if args["t"] == "search":
        search_request.type = "general"
        logger.info("")
    elif args["t"] == "tvsearch":
        search_request.type = "tv"
        # Prefer the TVRage id over the TVDB id when both are supplied.
        identifier_key = "rid" if args["rid"] else "tvdbid" if args["tvdbid"] else None
        if identifier_key is not None:
            identifier_value = args[identifier_key]
            search_request.identifier_key = identifier_key
            search_request.identifier_value = identifier_value
        search_request.season = int(args["season"]) if args["season"] else None
        search_request.episode = int(args["episode"]) if args["episode"] else None
    elif args["t"] == "movie":
        search_request.type = "movie"
        search_request.identifier_key = "imdbid" if args["imdbid"] is not None else None
        search_request.identifier_value = args["imdbid"] if args["imdbid"] is not None else None
    logger.info("API search request: %s" % search_request)
    result = search.search(False, search_request)
    results = process_for_external_api(result)
    content = render_search_results_for_api(results, result["total"], result["offset"])
    response = make_response(content)
    response.headers["Content-Type"] = "application/xml"
    # Bug fix: previously this returned the raw content string, discarding the
    # response object and therefore the Content-Type header set above.
    return response
def testSearchCategoryWords(self):
    # Verifies that category-specific and globally configured forbidden/required
    # words are merged into the SearchRequest that search() hands to
    # search_and_handle_db. The module-level functions pick_indexers and
    # search_and_handle_db are monkeypatched and restored at the end.
    pi = search.pick_indexers
    sahd = search.search_and_handle_db

    # Set forbidden and required category words if configured and the category matches
    config.settings.categories.categories["movies"].forbiddenWords = "forbidden1, forbidden2"
    config.settings.categories.categories["movies"].requiredWords = "required1, required2"
    searchRequest = SearchRequest(type="search", category="movies")
    search.pick_indexers = MagicMock(return_value=[NewzNab(self.newznab1)])
    search.search_and_handle_db = MagicMock(return_value={"results": {}})
    search.search(searchRequest)
    # Second positional argument of the first recorded call is (presumably) a
    # dict mapping indexer -> updated SearchRequest; inspect the single entry.
    # NOTE(review): .values()[0] relies on Python 2 dict.values() returning a
    # list — confirm if this code base ever moves to Python 3.
    updatedSearchRequest = search.search_and_handle_db.mock_calls[0][1][1].values()[0]
    self.assertEqual(["forbidden1", "forbidden2"], updatedSearchRequest.forbiddenWords)
    self.assertEqual(["required1", "required2"], updatedSearchRequest.requiredWords)

    # Don't set if category doesn't match
    searchRequest = SearchRequest(type="search", category="audio")
    search.search_and_handle_db.reset_mock()
    search.search(searchRequest)
    updatedSearchRequest = search.search_and_handle_db.mock_calls[0][1][1].values()[0]
    self.assertEqual(0, len(updatedSearchRequest.forbiddenWords))
    self.assertEqual(0, len(updatedSearchRequest.requiredWords))

    # Don't set when fallback to "all" category
    searchRequest = SearchRequest(type="search", category="7890")
    search.search_and_handle_db.reset_mock()
    search.search(searchRequest)
    updatedSearchRequest = search.search_and_handle_db.mock_calls[0][1][1].values()[0]
    self.assertEqual(0, len(updatedSearchRequest.forbiddenWords))
    self.assertEqual(0, len(updatedSearchRequest.requiredWords))
    # Unknown numeric category falls back to the "na" category
    self.assertEqual("na", updatedSearchRequest.category.category.name)

    # Use globally configured words and category words
    config.settings.searching.forbiddenWords = "globalforbidden1, globalforbidden2"
    config.settings.searching.requiredWords = "globalrequired1, globalrequired2"
    config.settings.categories.categories["movies"].forbiddenWords = "forbidden1, forbidden2"
    config.settings.categories.categories["movies"].requiredWords = "required1, required2"
    searchRequest = SearchRequest(type="search", category="movies")
    # NOTE(review): this reset_mock() is redundant — the mock object is
    # replaced entirely two lines below.
    search.search_and_handle_db.reset_mock()
    search.pick_indexers = MagicMock(return_value=[NewzNab(self.newznab1)])
    search.search_and_handle_db = MagicMock(return_value={"results": {}})
    search.search(searchRequest)
    updatedSearchRequest = search.search_and_handle_db.mock_calls[0][1][1].values()[0]
    # Global words come first, then the category-specific ones
    self.assertEqual(["globalforbidden1", "globalforbidden2", "forbidden1", "forbidden2"], updatedSearchRequest.forbiddenWords)
    self.assertEqual(["globalrequired1", "globalrequired2", "required1", "required2"], updatedSearchRequest.requiredWords)

    # Restore the patched module-level functions
    search.pick_indexers = pi
    search.search_and_handle_db = sahd
def testLimitAndOffset(self):
    """Paging: 'limit' caps the number of returned results and 'offset'
    skips past the results already delivered."""
    with self.app.test_request_context('/'):
        # Only use newznab indexers
        with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
            # Prepare 12 results
            self.prepareSearchMocks(rsps, 2, 6)

            # First page: ask for at most 6 of the 12 results.
            firstPage = search.search(SearchRequest(limit=6, type="search", internal=False))["results"]
            self.assertEqual(6, len(firstPage), "Expected the limit of 6 to be respected")
            self.assertEqual("newznab1result1.title", firstPage[0].title)
            self.assertEqual("newznab1result6.title", firstPage[5].title)

            # Second page: the offset skips the first 6, leaving the last 6.
            secondPage = search.search(SearchRequest(offset=6, limit=100, type="search", internal=False))["results"]
            self.assertEqual(6, len(secondPage), "Expected the limit of 6 to be respected")
            self.assertEqual("newznab2result1.title", secondPage[0].title)
def testDuplicateTaggingForInternalApi(self):
    """Run an internal search across three indexers that each return 249
    results sharing titles and pubdates (duplicates across indexers)."""
    with self.app.test_request_context('/'):
        with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
            sharedPubdate = arrow.get(4000).format("ddd, DD MMM YYYY HH:mm:ss Z")

            def resultsFor(indexer):
                # 249 results per indexer; only size and guid vary per item.
                return [mockbuilder.buildNewznabItem(title="title%d" % i, pubdate=sharedPubdate, size=i, indexer_name=indexer, guid="%sresult%d" % (indexer, i)) for i in range(1, 250)]

            newznabItems = [resultsFor("newznab1"), resultsFor("newznab2"), resultsFor("newznab3")]
            self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)
            searchRequest = SearchRequest(type="search")
            result = search.search(searchRequest)
def internalapi_tvsearch(args):
    """Build a TV SearchRequest from the internal-API args, run the search and
    return the JSONified results."""
    logger.debug("TV search request with args %s" % args)
    search_request = SearchRequest(
        type="tv",
        query=args["query"],
        offset=args["offset"],
        category=args["category"],
        minsize=args["minsize"],
        maxsize=args["maxsize"],
        minage=args["minage"],
        maxage=args["maxage"],
        episode=args["episode"],
        season=args["season"],
        title=args["title"],
        indexers=args["indexers"])
    # Prefer the TVDB id; fall back to the TVRage id.
    for key in ("tvdbid", "rid"):
        if args[key]:
            search_request.identifier_key = key
            search_request.identifier_value = args[key]
            break
    return process_and_jsonify_for_internalapi(search.search(True, search_request))
def internalapi_moviesearch(args):
    """Build a movie SearchRequest from the internal-API args, run the search
    and return the JSONified results. A TMDB id is translated to an IMDB id."""
    logger.debug("Movie search request with args %s" % args)
    search_request = SearchRequest(
        type="movie",
        query=args["query"],
        offset=args["offset"],
        category=args["category"],
        minsize=args["minsize"],
        maxsize=args["maxsize"],
        minage=args["minage"],
        maxage=args["maxage"],
        indexers=args["indexers"])
    if args["imdbid"]:
        search_request.identifier_key = "imdbid"
        search_request.identifier_value = args["imdbid"]
    elif args["tmdbid"]:
        logger.debug("Need to get IMDB id from TMDB id %s" % args["tmdbid"])
        # Only a TMDB id was supplied; resolve it to the IMDB id we search by.
        search_request.identifier_key = "imdbid"
        search_request.identifier_value = infos.get_imdbid_from_tmdbid(args["tmdbid"])
    return process_and_jsonify_for_internalapi(search.search(True, search_request))
def testDuplicateTaggingForInternalApi(self):
    """Two indexers report overlapping results; internal searches keep all
    four results, and duplicates end up sharing the same hash."""
    dateFormat = "ddd, DD MMM YYYY HH:mm:ss Z"
    with self.app.test_request_context('/'):
        with responses.RequestsMock() as rsps:
            newznabItems = [
                [
                    mockbuilder.buildNewznabItem(title="title1", pubdate=arrow.get(1000).format(dateFormat), size=1000, indexer_name="newznab1", guid="newznab1result1"),
                    mockbuilder.buildNewznabItem(title="title2", pubdate=arrow.get(3000).format(dateFormat), size=1000, indexer_name="newznab1", guid="newznab1result2"),
                ],
                [
                    mockbuilder.buildNewznabItem(title="title1", pubdate=arrow.get(2000).format(dateFormat), size=1000, indexer_name="newznab2", guid="newznab1result1"),
                    mockbuilder.buildNewznabItem(title="title2", pubdate=arrow.get(4000).format(dateFormat), size=1000, indexer_name="newznab2", guid="newznab2result2"),
                ],
            ]
            self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)

            searchRequest = SearchRequest(type="search")
            results = search.search(searchRequest)["results"]
            self.assertEqual(4, len(results))
            # Duplicates share a hash, so after sorting by hash they are adjacent.
            results = sorted(results, key=lambda x: x.hash)
            self.assertEqual(results[0].hash, results[1].hash)
            self.assertEqual(results[2].hash, results[3].hash)
def api(args):
    """Newznab-compatible API entry point.

    Dispatches on the 't' parameter: 'search'/'tvsearch'/'movie' run a search
    and return the results as XML, 'get' resolves and returns NZB details,
    'caps' returns the capabilities document. Raises Unauthorized when an API
    key is configured but missing or wrong; unknown functions yield a 500.
    """
    logger.debug(request.url)
    logger.debug("API request: %s" % args)
    # Map newznab api parameters to internal
    args["category"] = args["cat"]
    args["episode"] = args["ep"]
    if args["q"] is not None and args["q"] != "":
        args["query"] = args["q"]  # Because internally we work with "query" instead of "q"

    if mainSettings.apikey.get_with_default(None) and ("apikey" not in args or args["apikey"] != mainSettings.apikey.get()):
        logger.error("Tried API access with invalid or missing API key")
        raise Unauthorized("API key not provided or invalid")
    elif args["t"] in ("search", "tvsearch", "movie"):
        search_request = SearchRequest(category=args["cat"], offset=args["offset"], limit=args["limit"], query=args["q"])
        if args["t"] == "search":
            search_request.type = "general"
        elif args["t"] == "tvsearch":
            search_request.type = "tv"
            # Prefer the TVRage id over the TVDB id when both are supplied.
            identifier_key = "rid" if args["rid"] else "tvdbid" if args["tvdbid"] else None
            if identifier_key is not None:
                identifier_value = args[identifier_key]
                search_request.identifier_key = identifier_key
                search_request.identifier_value = identifier_value
            search_request.season = int(args["season"]) if args["season"] else None
            search_request.episode = int(args["episode"]) if args["episode"] else None
        elif args["t"] == "movie":
            search_request.type = "movie"
            search_request.identifier_key = "imdbid" if args["imdbid"] is not None else None
            search_request.identifier_value = args["imdbid"] if args["imdbid"] is not None else None
        result = search.search(False, search_request)
        results = process_for_external_api(result)
        content = render_search_results_for_api(results, result["total"], result["offset"])
        response = make_response(content)
        response.headers["Content-Type"] = "application/xml"
        # Bug fix: previously this returned the raw content string, discarding
        # the response object and therefore the Content-Type header set above.
        return response
    elif args["t"] == "get":
        # The id parameter is a rison-encoded bundle of indexer/guid/title/searchid.
        args = rison.loads(urllib.parse.unquote(args["id"]))
        return extract_nzb_infos_and_return_response(args["indexer"], args["guid"], args["title"], args["searchid"])
    elif args["t"] == "caps":
        xml = render_template("caps.html")
        return Response(xml, mimetype="text/xml")
    else:
        pprint(request)
        return "Unknown API request. Supported functions: search, tvsearch, movie, get, caps", 500
def testDuplicateTaggingForInternalApi(self):
    """Internal searches keep all results from overlapping indexers; the two
    duplicate pairs can be recognised by their shared hashes."""
    with self.app.test_request_context('/'):
        with responses.RequestsMock() as rsps:
            # (title, timestamp, indexer, guid) for every mocked result,
            # grouped per indexer.
            specs = [
                [("title1", 1000, "newznab1", "newznab1result1"), ("title2", 3000, "newznab1", "newznab1result2")],
                [("title1", 2000, "newznab2", "newznab1result1"), ("title2", 4000, "newznab2", "newznab2result2")],
            ]
            newznabItems = [
                [mockbuilder.buildNewznabItem(title=t, pubdate=arrow.get(ts).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name=indexer, guid=guid) for t, ts, indexer, guid in group]
                for group in specs
            ]
            self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)

            searchRequest = SearchRequest(type="search")
            result = search.search(searchRequest)
            results = result["results"]
            self.assertEqual(4, len(results))
            # Sorting by hash puts each duplicate pair next to each other.
            results = sorted(results, key=lambda x: x.hash)
            self.assertEqual(results[0].hash, results[1].hash)
            self.assertEqual(results[2].hash, results[3].hash)
def testThatResultsAreSortedByAgeDescending(self):
    """Results are ordered newest-first, regardless of which indexer
    delivered them."""
    with self.app.test_request_context('/'):
        with responses.RequestsMock() as rsps:
            # One result per indexer, with deliberately shuffled timestamps.
            titlesAndTimestamps = [("title1", 1000), ("title2", 0), ("title3", 3000), ("title4", 4000), ("title5", 2000)]
            newznabItems = [
                [mockbuilder.buildNewznabItem(title=title, pubdate=arrow.get(ts).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab%d" % (i + 1))]
                for i, (title, ts) in enumerate(titlesAndTimestamps)
            ]
            self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)

            searchRequest = SearchRequest(type="search")
            results = search.search(searchRequest)["results"]
            # Expected order: timestamps 4000, 3000, 2000, 1000, 0.
            for position, expectedTitle in enumerate(["title4", "title3", "title5", "title1", "title2"]):
                self.assertEqual(expectedTitle, results[position].title)
def testThatDatabaseValuesAreStored(self):
    # Verifies that one search writes the expected Search, IndexerSearch,
    # IndexerApiAccess and IndexerStatus rows, covering both a successful
    # indexer access (newznab1) and a failing one (newznab2).
    with self.app.test_request_context('/'):
        with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
            newznabItems = [
                [mockbuilder.buildNewznabItem(title="title1", pubdate=arrow.get(1000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab1")],
                [mockbuilder.buildNewznabItem(title="title2", pubdate=arrow.get(1000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab2")]
            ]
            self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)
            # Make the second access unsuccessful
            # NOTE(review): pops the second prepared URL from responses'
            # internal _urls list — depends on responses internals.
            rsps._urls.pop(1)
            rsps.add(responses.GET, r".*", body="an error message", status=500, content_type='application/x-html')

            searchRequest = SearchRequest(type="search", query="aquery", category="acategory", identifier_key="imdbid", identifier_value="animdbid", season=1, episode=2, indexers="newznab1|newznab2")
            result = search.search(searchRequest)
            results = result["results"]
            # Only the successful indexer contributes a result
            self.assertEqual(1, len(results))

            # The stored Search row mirrors the request (note: the unknown
            # category "acategory" is stored as "All")
            dbSearch = Search().get()
            self.assertEqual(True, dbSearch.internal)
            self.assertEqual("aquery", dbSearch.query)
            self.assertEqual("All", dbSearch.category)
            self.assertEqual("imdbid", dbSearch.identifier_key)
            self.assertEqual("animdbid", dbSearch.identifier_value)
            self.assertEqual("1", dbSearch.season)
            self.assertEqual("2", dbSearch.episode)
            self.assertEqual("search", dbSearch.type)
            # NOTE(review): hour == 18 presumably comes from a frozen/mocked
            # clock in the test fixture — confirm.
            self.assertEqual(18, dbSearch.time.hour)

            # One IndexerSearch row per indexer, both linked to the Search row
            indexerSearch1 = IndexerSearch.get(IndexerSearch.indexer == Indexer.get(Indexer.name == "newznab1"))
            self.assertEqual(indexerSearch1.search, dbSearch)
            self.assertEqual(18, indexerSearch1.time.hour)
            indexerSearch2 = IndexerSearch.get(IndexerSearch.indexer == Indexer.get(Indexer.name == "newznab2"))
            self.assertEqual(indexerSearch2.search, dbSearch)
            self.assertEqual(18, indexerSearch2.time.hour)

            calledUrls = sorted([x.request.url for x in rsps.calls])

            # Successful access: URL recorded, no error, zero response time
            indexerApiAccess1 = IndexerApiAccess.get(IndexerApiAccess.indexer == Indexer.get(Indexer.name == "newznab1"))
            self.assertEqual(indexerSearch1, indexerApiAccess1.indexer_search)
            self.assertEqual(18, indexerApiAccess1.time.hour)
            self.assertEqual("search", indexerApiAccess1.type)
            self.assertEqual(calledUrls[0], indexerApiAccess1.url)
            self.assertTrue(indexerApiAccess1.response_successful)
            self.assertEqual(0, indexerApiAccess1.response_time)
            self.assertIsNone(indexerApiAccess1.error)

            # Failed access: marked unsuccessful with an error message
            # NOTE(review): the mocked failure is an HTTP 500 but the stored
            # error says "Connection refused" — verify how the client maps
            # failures to error text.
            indexerApiAccess2 = IndexerApiAccess.get(IndexerApiAccess.indexer == Indexer.get(Indexer.name == "newznab2"))
            self.assertEqual(indexerSearch2, indexerApiAccess2.indexer_search)
            self.assertEqual(18, indexerApiAccess2.time.hour)
            self.assertEqual("search", indexerApiAccess2.type)
            self.assertEqual(calledUrls[1], indexerApiAccess2.url)
            self.assertFalse(indexerApiAccess2.response_successful)
            self.assertIsNone(indexerApiAccess2.response_time)
            self.assertTrue("Connection refused" in indexerApiAccess2.error)

            # The failing indexer's status level was raised with the same reason
            indexerStatus2 = IndexerStatus.get(IndexerStatus.indexer == Indexer.get(Indexer.name == "newznab2"))
            self.assertEqual(1, indexerStatus2.level)
            self.assertTrue("Connection refused" in indexerStatus2.reason)
def startSearch(search_request):
    """Run an internal search for the given request and return the results
    serialized for the internal API."""
    return process_and_jsonify_for_internalapi(search.search(True, search_request))
def internalapi_search(args):
    """Build a general SearchRequest from the internal-API args, run the
    search and return the JSONified results."""
    logger.debug("Search request with args %s" % args)
    # All request fields are taken verbatim from the parsed args.
    request_fields = {key: args[key] for key in ("query", "offset", "category", "minsize", "maxsize", "minage", "maxage", "indexers")}
    search_request = SearchRequest(type="general", **request_fields)
    return process_and_jsonify_for_internalapi(search.search(True, search_request))
def cached_search(search_request):
    """Execute an internal search for an already-built request and return the
    results serialized for the internal API."""
    search_results = search.search(True, search_request)
    return process_and_jsonify_for_internalapi(search_results)