Example #1
    def testGetEbookUrls(self):

        searchRequest = SearchRequest(query="novel")
        urls = self.n1.get_ebook_urls(searchRequest)
        self.assertEqual(1, len(urls))
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=7020,8010&limit=100&t=search&extended=1&offset=0&q=novel",
            urls[0],
        )

        self.args = SearchRequest(author="anauthor", title="atitle", category=getCategoryByAnyInput(7020))
        queries = self.n1.get_ebook_urls(self.args)
        self.assertEqual(1, len(queries))
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=7020&extended=1&limit=100&offset=0&q=anauthor+atitle&t=search",
            queries[0],
        )

        self.newznab1.searchTypes = ["book"]
        self.n1 = NewzNab(self.newznab1)
        self.args = SearchRequest(author="anauthor", title="atitle", category=getCategoryByAnyInput(7020))
        queries = self.n1.get_ebook_urls(self.args)
        self.assertEqual(1, len(queries))
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&author=anauthor&cat=7020&extended=1&limit=100&offset=0&t=book&title=atitle",
            queries[0],
        )
Example #2
    def testUrlGeneration(self):
        self.args = SearchRequest(query="aquery")
        urls = self.omgwtf.get_search_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery", urls[0])

        self.args = SearchRequest(query="aquery", category=getCategoryByAnyInput("tvhd"))
        urls = self.omgwtf.get_search_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery&catid=20", urls[0])

        self.args = SearchRequest(query="aquery", maxage=100)
        urls = self.omgwtf.get_search_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery&retention=100", urls[0])

        self.args = SearchRequest(query="aquery", category=getCategoryByAnyInput("tvhd"), maxage=100)
        urls = self.omgwtf.get_search_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery&retention=100&catid=20", urls[0])

        self.args = SearchRequest()
        urls = self.omgwtf.get_search_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://rss.omgwtfnzbs.org/rss-download.php?api=apikey&user=anuser", urls[0])
        
        self.args = SearchRequest(category=getCategoryByAnyInput("tvhd"))
        urls = self.omgwtf.get_search_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://rss.omgwtfnzbs.org/rss-download.php?api=apikey&user=anuser&catid=20", urls[0])
Example #3
    def testGetEbookUrls(self):

        searchRequest = SearchRequest(query="novel")
        urls = self.n1.get_ebook_urls(searchRequest)
        self.assertEqual(1, len(urls))
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=7020,8010&limit=100&t=search&extended=1&offset=0&q=novel",
            urls[0])

        self.args = SearchRequest(author="anauthor",
                                  title="atitle",
                                  category=getCategoryByAnyInput(7020))
        queries = self.n1.get_ebook_urls(self.args)
        self.assertEqual(1, len(queries))
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=7020&extended=1&limit=100&offset=0&q=anauthor+atitle&t=search",
            queries[0])

        self.newznab1.searchTypes = ["book"]
        self.n1 = NewzNab(self.newznab1)
        self.args = SearchRequest(author="anauthor",
                                  title="atitle",
                                  category=getCategoryByAnyInput(7020))
        queries = self.n1.get_ebook_urls(self.args)
        self.assertEqual(1, len(queries))
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&author=anauthor&cat=7020&extended=1&limit=100&offset=0&t=book&title=atitle",
            queries[0])
Example #4
    def testGetShowSearchUrls(self):
        self.args = SearchRequest(query="aquery")
        urls = self.omgwtf.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery&catid=19,20,21", urls[0])

        self.args = SearchRequest(query="aquery", category=getCategoryByAnyInput("tvhd"))
        urls = self.omgwtf.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery&catid=20", urls[0])

        self.args = SearchRequest(query="aquery", season=1)
        urls = self.omgwtf.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery+s01&catid=19,20,21", urls[0])

        self.args = SearchRequest(query="aquery", season=1, episode=2)
        urls = self.omgwtf.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery+s01e02&catid=19,20,21", urls[0])

        self.args = SearchRequest()
        urls = self.omgwtf.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://rss.omgwtfnzbs.org/rss-download.php?api=apikey&user=anuser&catid=19,20,21", urls[0])

        self.args = SearchRequest(query="a showtitle", season="2016", episode="08/08")
        urls = self.omgwtf.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual('a showtitle "2016 08 08"', furl(urls[0]).args["search"])
Example #5
    def testGetShowSearchUrls(self):
        self.args = SearchRequest(query="aquery")
        urls = self.omgwtf.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery&catid=19,20,21", urls[0])

        self.args = SearchRequest(query="aquery", category=getCategoryByAnyInput("tvhd"))
        urls = self.omgwtf.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery&catid=20", urls[0])

        self.args = SearchRequest(query="aquery", season=1)
        urls = self.omgwtf.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery+s01&catid=19,20,21", urls[0])

        self.args = SearchRequest(query="aquery", season=1, episode=2)
        urls = self.omgwtf.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=aquery+s01e02&catid=19,20,21", urls[0])

        self.args = SearchRequest()
        urls = self.omgwtf.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://rss.omgwtfnzbs.org/rss-download.php?api=apikey&user=anuser&catid=19,20,21", urls[0])
Example #6
 def get_comic_urls(self, search_request):
     if not search_request.category:
         search_request.category = getCategoryByAnyInput("comic")
     if hasattr(self.settings, "comicCategory") and self.settings.comicCategory:
         self.debug("Using %s as determined newznab comic category" % self.settings.comicCategory)
         search_request.category.category.newznabCategories = [self.settings.comicCategory]
     return self.get_search_urls(search_request)
Example #7
    def get_showsearch_urls(self, search_request):
        if search_request.category is None:
            search_request.category = getCategoryByAnyInput("tv")

        url = self.build_base_url("tvsearch", search_request.category, offset=search_request.offset)
        if search_request.identifier_key:
            canBeConverted, toType, id = infos.convertIdToAny(search_request.identifier_key, self.search_ids, search_request.identifier_value)
            if canBeConverted:
                search_request.identifier_key = toType.replace("tvrage", "rid").replace("tvdb", "tvdbid")
                search_request.identifier_value = id
            else:
                self.info("Unable to search using ID type %s" % search_request.identifier_key)
                return []

            url.add({search_request.identifier_key: search_request.identifier_value})
        if search_request.episode:
            url.add({"ep": search_request.episode})
        if search_request.season:
            url.add({"season": search_request.season})
        if search_request.query:
            url.add({"q": search_request.query})
        if search_request.maxage:
            url.add({"maxage": search_request.maxage})

        return [url.url]
Example #8
 def get_anime_urls(self, search_request):
     if not search_request.category:
         search_request.category = getCategoryByAnyInput("anime")
     if hasattr(self.settings, "animeCategory") and self.settings.animeCategory:
         self.debug("Using %s as determinted newznab anime category" % self.settings.animeCategory)
         search_request.category.category.newznabCategories = [self.settings.animeCategory]
     return self.get_search_urls(search_request)
Example #9
    def get_moviesearch_urls(self, search_request):
        if search_request.category is None:
            search_request.category = getCategoryByAnyInput("movies")

        # A lot of indexers seem to disregard the "q" parameter for "movie" search, so if we have a query use regular search instead
        if search_request.query:
            url = self.build_base_url("search",
                                      search_request.category,
                                      offset=search_request.offset)
            url.add({"q": search_request.query})
        else:
            url = self.build_base_url("movie",
                                      search_request.category,
                                      offset=search_request.offset)
            if search_request.identifier_key:
                canBeConverted, toType, id = infos.convertIdToAny(
                    search_request.identifier_key, self.search_ids,
                    search_request.identifier_value)
                if canBeConverted:
                    search_request.identifier_key = toType.replace(
                        "tvrage",
                        "rid").replace("tvdb",
                                       "tvdbid").replace("imdb", "imdbid")
                    search_request.identifier_value = id
                else:
                    self.info("Unable to search using ID type %s" %
                              search_request.identifier_key)
                    return []

                url.add({
                    search_request.identifier_key:
                    search_request.identifier_value
                })

        return [url.url]
Example #10
    def get_showsearch_urls(self, search_request):
        if search_request.category is None:
            search_request.category = getCategoryByAnyInput("tv")

        url = self.build_base_url("tvsearch",
                                  search_request.category,
                                  offset=search_request.offset)
        if search_request.identifier_key:
            canBeConverted, toType, id = infos.convertIdToAny(
                search_request.identifier_key, self.search_ids,
                search_request.identifier_value)
            if canBeConverted:
                search_request.identifier_key = toType.replace(
                    "tvrage", "rid").replace("tvdb", "tvdbid")
                search_request.identifier_value = id
            else:
                self.info("Unable to search using ID type %s" %
                          search_request.identifier_key)
                return []

            url.add({
                search_request.identifier_key:
                search_request.identifier_value
            })
        if search_request.episode:
            url.add({"ep": search_request.episode})
        if search_request.season:
            url.add({"season": search_request.season})
        if search_request.query:
            url.add({"q": search_request.query.replace(":", "")})
        if search_request.maxage:
            url.add({"maxage": search_request.maxage})

        return [url.url]
Example #11
 def get_ebook_urls(self, search_request):
     if not search_request.category:
         search_request.category = getCategoryByAnyInput("ebook")
     if hasattr(self.settings,
                "ebookCategory") and self.settings.ebookCategory:
         self.debug("Using %s as determined newznab ebook category" %
                    self.settings.ebookCategory)
         search_request.category.category.newznabCategories = [
             self.settings.ebookCategory
         ]
     if search_request.author or search_request.title:
         if "book" in self.searchTypes:
             # API search
             url = self.build_base_url("book",
                                       search_request.category,
                                       offset=search_request.offset)
             if search_request.author:
                 url.add({"author": search_request.author})
             if search_request.title:
                 url.add({"title": search_request.title.replace(":", "")})
             return [url.url]
         else:
             search_request.query = "%s %s" % (
                 search_request.author if search_request.author else "",
                 search_request.title.replace(":", "")
                 if search_request.title else "")
             return self.get_search_urls(search_request)
     else:
         # internal search
         return self.get_search_urls(search_request)
Example #12
    def testGetCategoryByAnyInput(self):
        config.settings = Bunch.fromDict(config.initialConfig)
        
        cats = "2000"
        result = categories.getCategoryByAnyInput(cats)
        self.assertEqual("newznab", result.type)
        self.assertEqual([2000], result.original)
        self.assertEqual("movies", result.category.name)

        cats = u"2000"
        result = categories.getCategoryByAnyInput(cats)
        self.assertEqual("newznab", result.type)
        self.assertEqual([2000], result.original)
        self.assertEqual("movies", result.category.name)

        cats = ""
        result = categories.getCategoryByAnyInput(cats)
        self.assertEqual("hydra", result.type)
        self.assertEqual("all", result.category.name)

        cats = []
        result = categories.getCategoryByAnyInput(cats)
        self.assertEqual("newznab", result.type)
        self.assertEqual("all", result.category.name)

        cats = None
        result = categories.getCategoryByAnyInput(cats)
        self.assertEqual("hydra", result.type)
        self.assertEqual("all", result.category.name)

        cats = 2000
        result = categories.getCategoryByAnyInput(cats)
        self.assertEqual("newznab", result.type)
        self.assertEqual([2000], result.original)
        self.assertEqual("movies", result.category.name)

        cats = [2000, 2010]
        result = categories.getCategoryByAnyInput(cats)
        self.assertEqual("newznab", result.type)
        self.assertEqual([2000, 2010], result.original)
        self.assertEqual("movies", result.category.name)

        cats = ["2000", "2010"]
        result = categories.getCategoryByAnyInput(cats)
        self.assertEqual("newznab", result.type)
        self.assertEqual([2000, 2010], result.original)
        self.assertEqual("movies", result.category.name)

        cats = "movies"
        result = categories.getCategoryByAnyInput(cats)
        self.assertEqual("hydra", result.type)
        self.assertEqual("movies", result.category.name)
Example #13
 def get_moviesearch_urls(self, search_request):
     if search_request.category is None:
         search_request.category = getCategoryByAnyInput("movies")
     if search_request.identifier_key:
         canBeConverted, toType, id = infos.convertIdToAny(search_request.identifier_key, ["imdb"], search_request.identifier_value)
         if canBeConverted:
             search_request.query = "tt%s" % id
         
     return self.get_search_urls(search_request)
Example #14
 def get_anime_urls(self, search_request):
     if not search_request.category:
         search_request.category = getCategoryByAnyInput("anime")
     if hasattr(self.settings,
                "animeCategory") and self.settings.animeCategory:
         self.debug("Using %s as determined newznab anime category" %
                    self.settings.animeCategory)
         search_request.category.category.newznabCategories = [
             self.settings.animeCategory
         ]
     return self.get_search_urls(search_request)
Example #15
 def get_comic_urls(self, search_request):
     if not search_request.category:
         search_request.category = getCategoryByAnyInput("comic")
     if hasattr(self.settings,
                "comicCategory") and self.settings.comicCategory:
         self.debug("Using %s as determinted newznab comic category" %
                    self.settings.comicCategory)
         search_request.category.category.newznabCategories = [
             self.settings.comicCategory
         ]
     return self.get_search_urls(search_request)
Example #16
 def get_showsearch_urls(self, search_request):
     if search_request.category is None:
         search_request.category = getCategoryByAnyInput("tv")
     # Should get the most results; apparently there is no way of doing "or" searches
     if search_request.query:
         query = search_request.query
     elif search_request.title:
         query = search_request.title
     else:
         query = ""
     if search_request.season:
         if search_request.episode:
             search_request.query = "{0} s{1:02d}e{2:02d}".format(query, int(search_request.season), int(search_request.episode))
         else:
             search_request.query = "{0} s{1:02d}".format(query, int(search_request.season))
         
     return self.get_search_urls(search_request)
Example #17
 def get_ebook_urls(self, search_request):
     if not search_request.category:
         search_request.category = getCategoryByAnyInput("ebook")
     if search_request.author or search_request.title:
         if "book" in self.searchTypes:
             #API search
             url = self.build_base_url("book", search_request.category, offset=search_request.offset)
             if search_request.author:
                 url.add({"author": search_request.author})
             if search_request.title:
                 url.add({"title": search_request.title})
             return [url.url]
         else:
             search_request.query = "%s %s" % (search_request.author if search_request.author else "", search_request.title if search_request.title else "")
             return self.get_search_urls(search_request)
     else:
         #internal search
         return self.get_search_urls(search_request)
Example #18
 def get_ebook_urls(self, search_request):
     if not search_request.category:
         search_request.category = getCategoryByAnyInput("ebook")
     if hasattr(self.settings, "ebookCategory") and self.settings.ebookCategory:
         self.debug("Using %s as determinted newznab ebook category" % self.settings.ebookCategory)
         search_request.category.category.newznabCategories = [self.settings.ebookCategory]
     if search_request.author or search_request.title:
         if "book" in self.searchTypes:
             # API search
             url = self.build_base_url("book", search_request.category, offset=search_request.offset)
             if search_request.author:
                 url.add({"author": search_request.author})
             if search_request.title:
                 url.add({"title": search_request.title})
             return [url.url]
         else:
             search_request.query = "%s %s" % (search_request.author if search_request.author else "", search_request.title if search_request.title else "")
             return self.get_search_urls(search_request)
     else:
         # internal search
         return self.get_search_urls(search_request)
Example #19
 def get_showsearch_urls(self, search_request):
     if search_request.category is None:
         search_request.category = getCategoryByAnyInput("tv")
     # Should get the most results; apparently there is no way of doing "or" searches
     if search_request.query:
         query = search_request.query
     elif search_request.title:
         query = search_request.title
     else:
         query = ""
     if search_request.season:
         if search_request.episode:
             if self.isNumber(search_request.episode):
                 search_request.query = "{0} s{1:02d}e{2:02d}".format(query, int(search_request.season), int(search_request.episode))
             else:
                 search_request.query = '%s "%s %s"' % (search_request.query, search_request.season, search_request.episode.replace("/", " "))
                 self.debug("Assuming we're searching for a daily show. Using query: " + search_request.query)
         elif self.isNumber(search_request.season):
             search_request.query = "{0} s{1:02d}".format(query, int(search_request.season))
         
     return self.get_search_urls(search_request)
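
The season/episode handling above can be exercised on its own. The following standalone sketch mirrors the three branches (numeric episode, daily-show style episode, season only); isNumber is assumed to behave like a plain int() check, and the output matches the format asserted in the show-search test earlier (e.g. a showtitle "2016 08 08").

def format_show_query(query, season, episode):
    # Assumed equivalent of self.isNumber() in the module above
    def is_number(value):
        try:
            int(value)
            return True
        except (TypeError, ValueError):
            return False

    if season:
        if episode:
            if is_number(episode):
                return "{0} s{1:02d}e{2:02d}".format(query, int(season), int(episode))
            # Daily show: season is the year and episode something like "08/08"
            return '%s "%s %s"' % (query, season, str(episode).replace("/", " "))
        if is_number(season):
            return "{0} s{1:02d}".format(query, int(season))
    return query

print(format_show_query("aquery", 1, 2))                  # aquery s01e02
print(format_show_query("a showtitle", "2016", "08/08"))  # a showtitle "2016 08 08"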
Example #20
    def get_moviesearch_urls(self, search_request):
        if search_request.category is None:
            search_request.category = getCategoryByAnyInput("movies")
        
        #A lot of indexers seem to disregard the "q" parameter for "movie" search, so if we have a query use regular search instead 
        if search_request.query:
            url = self.build_base_url("search", search_request.category, offset=search_request.offset)
            url.add({"q": search_request.query})
        else:
            url = self.build_base_url("movie", search_request.category, offset=search_request.offset)
            if search_request.identifier_key:
                canBeConverted, toType, id = infos.convertIdToAny(search_request.identifier_key, self.search_ids, search_request.identifier_value)
                if canBeConverted:
                    search_request.identifier_key = toType.replace("tvrage", "rid").replace("tvdb", "tvdbid").replace("imdb", "imdbid")
                    search_request.identifier_value = id
                else:
                    self.info("Unable to search using ID type %s" % search_request.identifier_key)
                    return []
                
                url.add({search_request.identifier_key: search_request.identifier_value})

        return [url.url]
Example #21
    def testProcessSearchResults(self):
        with open("mock/omgwtf_search.xml", encoding="latin-1") as f:
            body = f.read()
            result = self.omgwtf.process_query_result(body, SearchRequest())
            entries = result.entries
            self.assertEqual(3, len(entries))
            self.assertEqual('atvshow.S02E09.720p.HDTV.DD5.1.x264-NTb', entries[0].title)
            self.assertEqual("https://api.omgwtfnzbs.org/nzb/?id=x30FI&user=auser&api=apikey", entries[0].link)
            self.assertEqual(2396942366, entries[0].size)
            self.assertEqual("x30FI", entries[0].indexerguid)
            self.assertEqual(1449855118, entries[0].epoch)
            self.assertEqual("2015-12-11T17:31:58+00:00", entries[0].pubdate_utc)
            self.assertEqual(18, entries[0].age_days)
            self.assertTrue(entries[0].age_precise)
            self.assertEqual(NzbSearchResult.HAS_NFO_NO, entries[0].has_nfo)
            self.assertEqual("alt.binaries.hdtv", entries[0].group)
            self.assertEqual(getCategoryByAnyInput("tvhd").category.name, entries[0].category.name)
            self.assertEqual("https://omgwtfnzbs.org/details?id=x30FI", entries[0].details_link)
            self.assertFalse(result.has_more)
            self.assertTrue(result.total_known)
            self.assertEqual(3, result.total)

            self.assertEqual(NzbSearchResult.HAS_NFO_YES, entries[2].has_nfo)
Example #22
    def testGetMovieSearchUrls(self):
        self.args = SearchRequest(identifier_key="imdb", identifier_value="0169547")
        urls = self.omgwtf.get_moviesearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=tt0169547&catid=15,16,17,18", urls[0])

        self.args = SearchRequest(identifier_key="tmdb", identifier_value="14", category=getCategoryByAnyInput("movieshd"))
        urls = self.omgwtf.get_moviesearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=tt0169547&catid=16", urls[0])
Example #23
 def get_comic_urls(self, search_request):
     if not search_request.category:
         search_request.category = getCategoryByAnyInput("comic")
     return self.get_search_urls(search_request)
Example #24
    def testNewznabSearchQueries(self):
        self.args = SearchRequest(query="aquery")
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&q=aquery&t=search",
            query)

        self.args = SearchRequest(query=None)
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&t=search",
            query)

        self.args = SearchRequest(query="")
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&t=search",
            query)

        self.args = SearchRequest(category=getCategoryByAnyInput("audio"))
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=3000&extended=1&limit=100&offset=0&t=search",
            query)

        self.args = SearchRequest()
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&t=tvsearch",
            query)

        self.args = SearchRequest(query=None)
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&t=tvsearch",
            query)

        self.args = SearchRequest(query="")
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&t=tvsearch",
            query)

        self.args = SearchRequest(category=getCategoryByAnyInput("all"))
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&t=tvsearch",
            query)

        self.args = SearchRequest(identifier_value="8511",
                                  identifier_key="rid")
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&rid=8511&t=tvsearch",
            query)

        self.args = SearchRequest(identifier_value="8511",
                                  identifier_key="rid",
                                  season=1)
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&rid=8511&season=1&t=tvsearch",
            query)

        self.args = SearchRequest(identifier_value="8511",
                                  identifier_key="rid",
                                  season=1,
                                  episode=2)
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&ep=2&extended=1&limit=100&offset=0&rid=8511&season=1&t=tvsearch",
            query)

        self.args = SearchRequest(identifier_value="12345678",
                                  identifier_key="imdbid")
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=2000&extended=1&imdbid=12345678&limit=100&offset=0&t=movie",
            query)

        self.args = SearchRequest(identifier_value="12345678",
                                  identifier_key="imdbid",
                                  category=getCategoryByAnyInput("movieshd"))
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=2040,2050,2060&extended=1&imdbid=12345678&limit=100&offset=0&t=movie",
            query)

        self.args = SearchRequest(category=getCategoryByAnyInput("movies"))
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=2000&extended=1&limit=100&offset=0&t=movie",
            query)

        self.args = SearchRequest(category=getCategoryByAnyInput("movies"),
                                  query=None)
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=2000&extended=1&limit=100&offset=0&t=movie",
            query)

        self.args = SearchRequest(category=getCategoryByAnyInput("movies"),
                                  query="")
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=2000&extended=1&limit=100&offset=0&t=movie",
            query)

        config.settings.searching.forbiddenWords = "ignorethis"
        self.args = SearchRequest(query="aquery",
                                  forbiddenWords=["ignorethis"])
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&q=aquery !ignorethis&t=search",
            query)

        config.settings.searching.forbiddenWords = "ignorethis"
        self.args = SearchRequest(query="aquery",
                                  forbiddenWords=["ignorethis"])
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&q=aquery !ignorethis&t=search",
            query)
Example #25
def search(search_request):
    if search_request.maxage is None and config.settings.searching.maxAge:
        search_request.maxage = config.settings.searching.maxAge
        logger.info("Will ignore results older than %d days" % search_request.maxage)

    # Clean up cache
    for k in list(pseudo_cache.keys()):
        if pseudo_cache[k]["last_access"].replace(minutes=+5) < arrow.utcnow():
            pseudo_cache.pop(k)

    # Clean up old search results. We do this here because we don't have any background jobs and this is the function most regularly called
    keepFor = config.settings.main.keepSearchResultsForDays
    oldSearchResultsCount = SearchResult.select().where(SearchResult.firstFound < (datetime.date.today() - datetime.timedelta(days=keepFor))).count()
    if oldSearchResultsCount > 0:
        logger.info("Deleting %d search results from database that are older than %d days" % (oldSearchResultsCount, keepFor))
        SearchResult.delete().where(SearchResult.firstFound < (datetime.date.today() - datetime.timedelta(days=keepFor))).execute()
    else:
        if logger.getEffectiveLevel() == logging.DEBUG:
            logger.debug("%d search results stored in database" % SearchResult.select().count())

    limit = search_request.limit
    external_offset = int(search_request.offset)
    search_hash = search_request.search_hash
    categoryResult = categories.getCategoryByAnyInput(search_request.category)
    search_request.category = categoryResult
    if search_hash not in pseudo_cache.keys() or search_request.offset == 0:  # If it's a new search (which starts with offset 0) do it again instead of using the cached results
        logger.debug("Didn't find this query in cache or want to do a new search")
        cache_entry = {"results": [], "indexer_infos": {}, "total": 0, "last_access": arrow.utcnow(), "offset": 0}
        category = categoryResult.category
        indexers_to_call = pick_indexers(search_request)
        for p in indexers_to_call:
            cache_entry["indexer_infos"][p] = {"has_more": True, "search_request": search_request, "total_included": False}
        
        dbsearch = Search(internal=search_request.internal, query=search_request.query, category=categoryResult.category.pretty, identifier_key=search_request.identifier_key, identifier_value=search_request.identifier_value, season=search_request.season, episode=search_request.episode, type=search_request.type,
                          username=search_request.username)
        # dbsearch.save()
        cache_entry["dbsearch"] = dbsearch

        # Find ignored words and parse query for ignored words
        search_request.forbiddenWords = []
        search_request.requiredWords = []
        applyRestrictionsGlobal = config.settings.searching.applyRestrictions == "both" or (config.settings.searching.applyRestrictions == "internal" and search_request.internal) or (config.settings.searching.applyRestrictions == "external" and not search_request.internal)
        applyRestrictionsCategory = category.applyRestrictions == "both" or (category.applyRestrictions == "internal" and search_request.internal) or (search_request.category.category.applyRestrictions == "external" and not search_request.internal)
        if config.settings.searching.forbiddenWords and applyRestrictionsGlobal:
            logger.debug("Using configured global forbidden words: %s" % config.settings.searching.forbiddenWords)
            search_request.forbiddenWords.extend([x.lower().strip() for x in list(filter(bool, config.settings.searching.forbiddenWords.split(",")))])
        if config.settings.searching.requiredWords and applyRestrictionsGlobal:
            logger.debug("Using configured global required words: %s" % config.settings.searching.requiredWords)
            search_request.requiredWords.extend([x.lower().strip() for x in list(filter(bool, config.settings.searching.requiredWords.split(",")))])
        
        if category.forbiddenWords and applyRestrictionsCategory:
            logger.debug("Using configured forbidden words for category %s: %s" % (category.pretty, category.forbiddenWords))
            search_request.forbiddenWords.extend([x.lower().strip() for x in list(filter(bool, category.forbiddenWords.split(",")))])
        if category.requiredWords and applyRestrictionsCategory:
            logger.debug("Using configured required words for category %s: %s" % (category.pretty, category.requiredWords))
            search_request.requiredWords.extend([x.lower().strip() for x in list(filter(bool, category.requiredWords.split(",")))])
        
        
        if search_request.query:
            forbiddenWords = [str(x[1]) for x in re.findall(r"[\s|\b](\-\-|!)(?P<term>\w+)", search_request.query)]
            if len(forbiddenWords) > 0:
                logger.debug("Query before removing NOT terms: %s" % search_request.query)
                search_request.query = re.sub(r"[\s|\b](\-\-|!)(?P<term>\w+)", "", search_request.query)
                logger.debug("Query after removing NOT terms: %s" % search_request.query)
                logger.debug("Found NOT terms: %s" % ",".join(forbiddenWords))

                search_request.forbiddenWords.extend(forbiddenWords)

        pseudo_cache[search_hash] = cache_entry
    else:
        cache_entry = pseudo_cache[search_hash]
        indexers_to_call = [indexer for indexer, info in cache_entry["indexer_infos"].items() if info["has_more"]]
        dbsearch = cache_entry["dbsearch"]
        logger.debug("Found search in cache")

        logger.debug("Will search at indexers as long as we don't have enough results for the current offset+limit and any indexer has more results.")
    while len(cache_entry["results"]) < external_offset + limit and len(indexers_to_call) > 0:
        logger.debug("We want %d results but have only %d so far" % ((external_offset + limit), len(cache_entry["results"])))
        logger.debug("%d indexers still have results" % len(indexers_to_call))
        search_request.offset = cache_entry["offset"]
        
        logger.debug("Searching indexers with offset %d" % search_request.offset)
        result = search_and_handle_db(dbsearch, {x: search_request for x in indexers_to_call})
        logger.debug("All search calls to indexers completed")
        search_results = []
        indexers_to_call = []

        for indexer, queries_execution_result in result["results"].items():
            #Drastically improves db access time but means that if one database write fails all fail. That's a risk we need to take 
            with db.atomic():
                logger.debug("%s returned %d results. Writing them to database..." % (indexer, len(queries_execution_result.results)))
                for result in queries_execution_result.results:
                    if result.title is None or result.link is None or result.indexerguid is None:
                        logger.info("Skipping result with missing data: %s" % result)
                        continue
                    searchResult, _ = SearchResult.create_or_get(indexer=indexer.indexer, guid=result.indexerguid, title=result.title, link=result.link, details=result.details_link)
                    result.searchResultId = searchResult.id
                    search_results.append(result)
                logger.debug("Written results results to database")

                cache_entry["indexer_infos"][indexer].update(
                    {"did_search": queries_execution_result.didsearch, "indexer": indexer.name, "search_request": search_request, "has_more": queries_execution_result.has_more, "total": queries_execution_result.total, "total_known": queries_execution_result.total_known,
                     "indexer_search": queries_execution_result.indexerSearchEntry})
                if queries_execution_result.has_more:
                    indexers_to_call.append(indexer)
                    logger.debug("%s still has more results so we could use it the next round" % indexer)
        
                if queries_execution_result.total_known:
                    if not cache_entry["indexer_infos"][indexer]["total_included"]:
                        cache_entry["total"] += queries_execution_result.total
                        logger.debug("%s reports %d total results. We'll include in the total this time only" % (indexer, queries_execution_result.total))
                        cache_entry["indexer_infos"][indexer]["total_included"] = True
                elif queries_execution_result.has_more:
                    logger.debug("%s doesn't report an exact number of results so let's just add another 100 to the total" % indexer)
                    cache_entry["total"] += 100

        if search_request.internal or config.settings.searching.removeDuplicatesExternal:
            logger.debug("Searching for duplicates")
            countBefore = len(search_results)
            grouped_by_sameness = find_duplicates(search_results)
            allresults = []
            for group in grouped_by_sameness:
                if search_request.internal:
                    for i in group:
                        # We give each group of results a unique value by which they can be identified later
                        i.hash = hash(group[0].details_link)
                        allresults.append(i)

                else:
                    # We sort by age first and then by indexerscore so the newest result with the highest indexer score is chosen
                    group = sorted(group, key=lambda x: x.epoch, reverse=True)
                    group = sorted(group, key=lambda x: x.indexerscore, reverse=True)
                    allresults.append(group[0])
            search_results = allresults
            if not search_request.internal:
                countAfter = len(search_results)
                countRemoved = countBefore - countAfter
                logger.info("Removed %d duplicates from %d results" % (countRemoved, countBefore))
        search_results = sorted(search_results, key=lambda x: x.epoch, reverse=True)

        cache_entry["results"].extend(search_results)
        cache_entry["offset"] += limit

    if search_request.internal:
        logger.debug("We have %d cached results and return them all because we search internally" % len(cache_entry["results"]))
        nzb_search_results = copy.deepcopy(cache_entry["results"][external_offset:])
    else:
        logger.debug("We have %d cached results and return %d-%d of %d total available accounting for the limit set for the API search" % (len(cache_entry["results"]), external_offset, external_offset + limit, cache_entry["total"]))
        nzb_search_results = copy.deepcopy(cache_entry["results"][external_offset:(external_offset + limit)])
    cache_entry["last_access"] = arrow.utcnow()
    logger.info("Returning %d results" % len(nzb_search_results))
    return {"results": nzb_search_results, "indexer_infos": cache_entry["indexer_infos"], "dbsearchid": cache_entry["dbsearch"].id, "total": cache_entry["total"], "offset": external_offset}
Example #26
def search(search_request):
    logger.info("Starting new search: %s" % search_request)
    if search_request.maxage is None and config.settings.searching.maxAge:
        search_request.maxage = config.settings.searching.maxAge
        logger.info("Will ignore results older than %d days" % search_request.maxage)

    # Clean up cache
    for k in list(pseudo_cache.keys()):
        if pseudo_cache[k]["last_access"].replace(minutes=+5) < arrow.utcnow():
            pseudo_cache.pop(k)

    # Clean up old search results. We do this here because we don't have any background jobs and this is the function most regularly called
    keepFor = config.settings.main.keepSearchResultsForDays
    oldSearchResultsCount = countOldSearchResults(keepFor)
    if oldSearchResultsCount > 0:
        logger.info("Deleting %d search results from database that are older than %d days" % (oldSearchResultsCount, keepFor))
        SearchResult.delete().where(SearchResult.firstFound < (datetime.date.today() - datetime.timedelta(days=keepFor))).execute()
    else:
        if logger.getEffectiveLevel() == logging.DEBUG:
            logger.debug("%d search results stored in database" % SearchResult.select().count())

    limit = search_request.limit
    external_offset = int(search_request.offset)
    search_hash = search_request.search_hash
    categoryResult = categories.getCategoryByAnyInput(search_request.category)
    search_request.category = categoryResult
    if search_hash not in pseudo_cache.keys() or search_request.offset == 0:  # If it's a new search (which starts with offset 0) do it again instead of using the cached results
        logger.debug("Didn't find this query in cache or want to do a new search")
        cache_entry = {"results": [], "indexer_infos": {}, "total": 0, "last_access": arrow.utcnow(), "offset": 0, "rejected": SearchModule.getRejectedCountDict()}
        category = categoryResult.category
        indexers_to_call = pick_indexers(search_request)
        for p in indexers_to_call:
            cache_entry["indexer_infos"][p] = {"has_more": True, "search_request": search_request, "total_included": False}

        dbsearch = Search(internal=search_request.internal, query=search_request.query, category=categoryResult.category.pretty, identifier_key=search_request.identifier_key, identifier_value=search_request.identifier_value, season=search_request.season, episode=search_request.episode,
                          type=search_request.type, title=search_request.title, author=search_request.author, username=search_request.username)
        saveSearch(dbsearch)
        # dbsearch.save()
        cache_entry["dbsearch"] = dbsearch

        # Find ignored words and parse query for ignored words
        search_request.forbiddenWords = []
        search_request.requiredWords = []
        applyRestrictionsGlobal = config.settings.searching.applyRestrictions == "both" or (config.settings.searching.applyRestrictions == "internal" and search_request.internal) or (config.settings.searching.applyRestrictions == "external" and not search_request.internal)
        applyRestrictionsCategory = category.applyRestrictions == "both" or (category.applyRestrictions == "internal" and search_request.internal) or (search_request.category.category.applyRestrictions == "external" and not search_request.internal)
        if config.settings.searching.forbiddenWords and applyRestrictionsGlobal:
            logger.debug("Using configured global forbidden words: %s" % config.settings.searching.forbiddenWords)
            search_request.forbiddenWords.extend([x.lower().strip() for x in list(filter(bool, config.settings.searching.forbiddenWords.split(",")))])
        if config.settings.searching.requiredWords and applyRestrictionsGlobal:
            logger.debug("Using configured global required words: %s" % config.settings.searching.requiredWords)
            search_request.requiredWords.extend([x.lower().strip() for x in list(filter(bool, config.settings.searching.requiredWords.split(",")))])

        if category.forbiddenWords and applyRestrictionsCategory:
            logger.debug("Using configured forbidden words for category %s: %s" % (category.pretty, category.forbiddenWords))
            search_request.forbiddenWords.extend([x.lower().strip() for x in list(filter(bool, category.forbiddenWords.split(",")))])
        if category.requiredWords and applyRestrictionsCategory:
            logger.debug("Using configured required words for category %s: %s" % (category.pretty, category.requiredWords))
            search_request.requiredWords.extend([x.lower().strip() for x in list(filter(bool, category.requiredWords.split(",")))])

        if search_request.query:
            forbiddenWords = [str(x[1]) for x in re.findall(r"[\s|\b](\-\-|!)(?P<term>\w+)", search_request.query)]
            if len(forbiddenWords) > 0:
                logger.debug("Query before removing NOT terms: %s" % search_request.query)
                search_request.query = re.sub(r"[\s|\b](\-\-|!)(?P<term>\w+)", "", search_request.query)
                logger.debug("Query after removing NOT terms: %s" % search_request.query)
                logger.debug("Found NOT terms: %s" % ",".join(forbiddenWords))

                search_request.forbiddenWords.extend(forbiddenWords)
        cache_entry["forbiddenWords"] = search_request.forbiddenWords
        cache_entry["requiredWords"] = search_request.requiredWords
        cache_entry["query"] = search_request.query

        pseudo_cache[search_hash] = cache_entry
    else:
        cache_entry = pseudo_cache[search_hash]
        indexers_to_call = [indexer for indexer, info in cache_entry["indexer_infos"].items() if info["has_more"]]
        dbsearch = cache_entry["dbsearch"]
        search_request.forbiddenWords = cache_entry["forbiddenWords"]
        search_request.requiredWords = cache_entry["requiredWords"]
        search_request.query = cache_entry["query"]
        logger.debug("Found search in cache")

        logger.debug("Will search at indexers as long as we don't have enough results for the current offset+limit and any indexer has more results.")
    if search_request.loadAll:
        logger.debug("Requested to load all results. Will continue to search until all indexers are exhausted")
    while (len(cache_entry["results"]) < external_offset + limit or search_request.loadAll) and len(indexers_to_call) > 0:
        if len(cache_entry["results"]) < external_offset + limit:
            logger.debug("We want %d results but have only %d so far" % ((external_offset + limit), len(cache_entry["results"])))
        elif search_request.loadAll:
            logger.debug("All results requested. Continuing to search.")
        logger.debug("%d indexers still have results" % len(indexers_to_call))
        search_request.offset = cache_entry["offset"]

        logger.debug("Searching indexers with offset %d" % search_request.offset)
        result = search_and_handle_db(dbsearch, {x: search_request for x in indexers_to_call})
        logger.debug("All search calls to indexers completed")
        search_results = []
        indexers_to_call = []

        waslocked = False
        before = arrow.now()
        if databaseLock.locked():
            logger.debug("Database accesses locked by other search. Will wait for our turn.")
            waslocked = True
        databaseLock.acquire()
        if waslocked:
            after = arrow.now()
            took = (after - before).seconds * 1000 + (after - before).microseconds / 1000
            logger.debug("Waited %dms for database lock" % took)
        for indexer, queries_execution_result in result["results"].items():
            with db.atomic():
                logger.info("%s returned %d results" % (indexer, len(queries_execution_result.results)))
                for result in queries_execution_result.results:
                    if result.title is None or result.link is None or result.indexerguid is None:
                        logger.info("Skipping result with missing data: %s" % result)
                        continue
                    try:
                        searchResultId = hashlib.sha1(str(indexer.indexer.id) + result.indexerguid).hexdigest()
                        tryGetOrCreateSearchResultDbEntry(searchResultId, indexer.indexer.id, result)
                        result.searchResultId = searchResultId
                        search_results.append(result)
                    except (IntegrityError, OperationalError) as e:
                        logger.error("Error while trying to save search result to database. Skipping it. Error: %s" % e)

            cache_entry["indexer_infos"][indexer].update(
                {"did_search": queries_execution_result.didsearch, "indexer": indexer.name, "search_request": search_request, "has_more": queries_execution_result.has_more, "total": queries_execution_result.total, "total_known": queries_execution_result.total_known,
                 "indexer_search": queries_execution_result.indexerSearchEntry, "rejected": queries_execution_result.rejected, "processed_results": queries_execution_result.loaded_results})
            if queries_execution_result.has_more:
                indexers_to_call.append(indexer)
                logger.debug("%s still has more results so we could use it the next round" % indexer)

            if queries_execution_result.total_known:
                if not cache_entry["indexer_infos"][indexer]["total_included"]:
                    cache_entry["total"] += queries_execution_result.total
                    logger.debug("%s reports %d total results. We'll include in the total this time only" % (indexer, queries_execution_result.total))
                    cache_entry["indexer_infos"][indexer]["total_included"] = True
            elif queries_execution_result.has_more:
                logger.debug("%s doesn't report an exact number of results so let's just add another 100 to the total" % indexer)
                cache_entry["total"] += 100
            for rejectKey in cache_entry["rejected"].keys():
                if rejectKey in cache_entry["indexer_infos"][indexer]["rejected"].keys():
                    cache_entry["rejected"][rejectKey] += cache_entry["indexer_infos"][indexer]["rejected"][rejectKey]

        databaseLock.release()

        logger.debug("Searching for duplicates")
        numberResultsBeforeDuplicateRemoval = len(search_results)
        grouped_by_sameness, uniqueResultsPerIndexer = find_duplicates(search_results)
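        # find_duplicates groups results that are considered the same release; per the log
        # message further below, uniqueResultsPerIndexer maps an indexer name to the number
        # of results that only that indexer provided.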
        allresults = []
        for group in grouped_by_sameness:
            if search_request.internal:
                for i in group:
                    # We give each group of results a unique value by which they can be identified later
                    i.hash = hash(group[0].details_link)
                    allresults.append(i)

            else:
                # We sort by age first and then by indexerscore so the newest result with the highest indexer score is chosen
                group = sorted(group, key=lambda x: x.epoch, reverse=True)
                group = sorted(group, key=lambda x: x.indexerscore, reverse=True)
                allresults.append(group[0])
        search_results = allresults

        with databaseLock:
            for indexer, infos in cache_entry["indexer_infos"].iteritems():
                if indexer.name in uniqueResultsPerIndexer.keys():  # If the search failed it isn't contained in the duplicates list
                    uniqueResultsCount = uniqueResultsPerIndexer[infos["indexer"]]
                    processedResults = infos["processed_results"]
                    logger.debug("Indexer %s had a unique results share of %d%% (%d of %d total results were only provided by this indexer)" % (indexer.name, 100 / (numberResultsBeforeDuplicateRemoval / uniqueResultsCount), uniqueResultsCount, numberResultsBeforeDuplicateRemoval))
                    infos["indexer_search"].uniqueResults = uniqueResultsCount
                    infos["indexer_search"].processedResults = processedResults
                    infos["indexer_search"].save()

        if not search_request.internal:
            countAfter = len(search_results)
            countRemoved = numberResultsBeforeDuplicateRemoval - countAfter
            logger.info("Removed %d duplicates from %d results" % (countRemoved, numberResultsBeforeDuplicateRemoval))

        search_results = sorted(search_results, key=lambda x: x.epoch, reverse=True)

        cache_entry["results"].extend(search_results)
        cache_entry["offset"] += limit

    if len(indexers_to_call) == 0:
        logger.info("All indexers exhausted")
    elif len(cache_entry["results"]) >= external_offset + limit:
        logger.debug("Loaded a total of %d results which is enough for the %d requested. Stopping search." % (len(cache_entry["results"]), (external_offset + limit)))

    if search_request.internal:
        logger.debug("We have %d cached results and return them all because we search internally" % len(cache_entry["results"]))
        nzb_search_results = copy.deepcopy(cache_entry["results"][external_offset:])
    else:
        logger.debug("We have %d cached results and return %d-%d of %d total available accounting for the limit set for the API search" % (len(cache_entry["results"]), external_offset, external_offset + limit, cache_entry["total"]))
        nzb_search_results = copy.deepcopy(cache_entry["results"][external_offset:(external_offset + limit)])
    cache_entry["last_access"] = arrow.utcnow()
    for k, v in cache_entry["rejected"].items():
        if v > 0:
            logger.info("Rejected %d because: %s" % (v, k))
    logger.info("Returning %d results" % len(nzb_search_results))
    return {"results": nzb_search_results, "indexer_infos": cache_entry["indexer_infos"], "dbsearchid": cache_entry["dbsearch"].id, "total": cache_entry["total"], "offset": external_offset, "rejected": cache_entry["rejected"].items()}
Example No. 30
0
    def get_ebook_urls(self, search_request):
        if not search_request.query and (search_request.author or search_request.title):
            search_request.query = "%s %s" % (search_request.author if search_request.author else "", search_request.title if search_request.title else "")
        if search_request.category is None:
            search_request.category = getCategoryByAnyInput("ebook")
        return self.get_search_urls(search_request)
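A minimal usage sketch for the snippet above (hypothetical; it assumes the SearchRequest class and an indexer instance as used in the surrounding test examples):

    # Hypothetical call mirroring the test data used elsewhere in this document:
    search_request = SearchRequest(author="anauthor", title="atitle")
    urls = indexer.get_ebook_urls(search_request)
    # search_request.query is now "anauthor atitle" and search_request.category is the
    # "ebook" category, so the generated URLs carry the ebook category IDs.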
Example No. 31
0
    def get_audiobook_urls(self, search_request):
        if search_request.category is None:
            search_request.category = getCategoryByAnyInput("audiobook")
        return self.get_search_urls(search_request)
Example No. 32
0
def search(search_request):
    # type: (nzbhydra.search.SearchRequest) -> dict
    if search_request.maxage is None and config.settings.searching.maxAge:
        search_request.maxage = config.settings.searching.maxAge
        logger.info("Will ignore results older than %d days" % search_request.maxage)

    # Clean up cache
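    # Cached searches that have not been accessed for more than five minutes are evicted.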
    for k in list(pseudo_cache.keys()):
        if pseudo_cache[k]["last_access"].replace(minutes=+5) < arrow.utcnow():
            pseudo_cache.pop(k)

    # Clean up old search results. We do this here because we don't have any background jobs and this is the function most regularly called
    keepFor = config.settings.main.keepSearchResultsForDays
    oldSearchResultsCount = SearchResult.select().where(SearchResult.firstFound < (datetime.date.today() - datetime.timedelta(days=keepFor))).count()
    if oldSearchResultsCount > 0:
        logger.info("Deleting %d search results from database that are older than %d days" % (oldSearchResultsCount, keepFor))
        SearchResult.delete().where(SearchResult.firstFound < (datetime.date.today() - datetime.timedelta(days=keepFor))).execute()

    limit = search_request.limit
    external_offset = int(search_request.offset)
    search_hash = search_request.search_hash
    if search_hash not in pseudo_cache.keys() or search_request.offset == 0:  # If it's a new search (which starts with offset 0) do it again instead of using the cached results
        logger.debug("Didn't find this query in cache or want to do a new search")
        cache_entry = {"results": [], "indexer_infos": {}, "total": 0, "last_access": arrow.utcnow(), "offset": 0}
        indexers_to_call = pick_indexers(search_request)
        for p in indexers_to_call:
            cache_entry["indexer_infos"][p] = {"has_more": True, "search_request": search_request, "total_included": False}
        categoryResult = categories.getCategoryByAnyInput(search_request.category)
        search_request.category = categoryResult
        category = categoryResult.category
        dbsearch = Search(internal=search_request.internal, query=search_request.query, category=categoryResult.category.pretty, identifier_key=search_request.identifier_key, identifier_value=search_request.identifier_value, season=search_request.season, episode=search_request.episode, type=search_request.type,
                          username=search_request.username)
        # dbsearch.save()
        cache_entry["dbsearch"] = dbsearch

        # Collect configured forbidden and required words and parse the query for NOT terms
        search_request.forbiddenWords = []
        search_request.requiredWords = []
        if config.settings.searching.forbiddenWords:
            logger.debug("Using configured global forbidden words: %s" % config.settings.searching.forbiddenWords)
            search_request.forbiddenWords.extend([x.lower().strip() for x in list(filter(bool, config.settings.searching.forbiddenWords.split(",")))])
        if config.settings.searching.requiredWords:
            logger.debug("Using configured global required words: %s" % config.settings.searching.requiredWords)
            search_request.requiredWords.extend([x.lower().strip() for x in list(filter(bool, config.settings.searching.requiredWords.split(",")))])
        if category.applyRestrictions == "both" or (category.applyRestrictions == "internal" and search_request.internal) or (category.applyRestrictions == "external" and not search_request.internal):
            if category.forbiddenWords:
                logger.debug("Using configured forbidden words for category %s: %s" % (category.pretty, category.forbiddenWords))
                search_request.forbiddenWords.extend([x.lower().strip() for x in list(filter(bool, category.forbiddenWords.split(",")))])
            if category.requiredWords:
                logger.debug("Using configured required words for category %s: %s" % (category.pretty, category.requiredWords))
                search_request.requiredWords.extend([x.lower().strip() for x in list(filter(bool, category.requiredWords.split(",")))])
        
        
        if search_request.query:
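            # Terms prefixed with "--" or "!" in the query are treated as NOT terms.
            # Illustrative example: a query "linux --mint !ubuntu" yields forbiddenWords
            # ["mint", "ubuntu"] and the remaining query "linux".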
            forbiddenWords = [str(x[1]) for x in re.findall(r"[\s|\b](\-\-|!)(?P<term>\w+)", search_request.query)]
            if len(forbiddenWords) > 0:
                logger.debug("Query before removing NOT terms: %s" % search_request.query)
                search_request.query = re.sub(r"[\s|\b](\-\-|!)(?P<term>\w+)", "", search_request.query)
                logger.debug("Query after removing NOT terms: %s" % search_request.query)
                logger.debug("Found NOT terms: %s" % ",".join(forbiddenWords))

                search_request.forbiddenWords.extend(forbiddenWords)

        pseudo_cache[search_hash] = cache_entry
    else:
        cache_entry = pseudo_cache[search_hash]
        indexers_to_call = [indexer for indexer, info in cache_entry["indexer_infos"].items() if info["has_more"]]
        dbsearch = cache_entry["dbsearch"]
        logger.debug("Found search in cache")

        logger.debug("Will search at indexers as long as we don't have enough results for the current offset+limit and any indexer has more results.")
    while len(cache_entry["results"]) < external_offset + limit and len(indexers_to_call) > 0:
        logger.debug("We want %d results but have only %d so far" % ((external_offset + limit), len(cache_entry["results"])))
        logger.debug("%d indexers still have results" % len(indexers_to_call))
        search_request.offset = cache_entry["offset"]
        
        logger.debug("Searching indexers with offset %d" % search_request.offset)
        result = search_and_handle_db(dbsearch, {x: search_request for x in indexers_to_call})
        
        search_results = []
        indexers_to_call = []

        for indexer, queries_execution_result in result["results"].items():
            with db.atomic():
                for result in queries_execution_result.results:
                    if result.title is None or result.link is None or result.indexerguid is None:
                        logger.info("Skipping result with missing data: %s" % result)
                        continue
                    searchResult = SearchResult().get_or_create(indexer=indexer.indexer, title=result.title, link=result.link, details=result.details_link, guid=result.indexerguid)
                    searchResult = searchResult[0]  # Second is a boolean determining if the search result was created
                    result.searchResultId = searchResult.id
                    search_results.append(result)

            logger.debug("%s returned %d results" % (indexer, len(queries_execution_result.results)))
            cache_entry["indexer_infos"][indexer].update(
                {"did_search": queries_execution_result.didsearch, "indexer": indexer.name, "search_request": search_request, "has_more": queries_execution_result.has_more, "total": queries_execution_result.total, "total_known": queries_execution_result.total_known,
                 "indexer_search": queries_execution_result.indexerSearchEntry})
            if queries_execution_result.has_more:
                indexers_to_call.append(indexer)
                logger.debug("%s still has more results so we could use it the next round" % indexer)

            if queries_execution_result.total_known:
                if not cache_entry["indexer_infos"][indexer]["total_included"]:
                    cache_entry["total"] += queries_execution_result.total
                    logger.debug("%s reports %d total results. We'll include in the total this time only" % (indexer, queries_execution_result.total))
                    cache_entry["indexer_infos"][indexer]["total_included"] = True
            elif queries_execution_result.has_more:
                logger.debug("%s doesn't report an exact number of results so let's just add another 100 to the total" % indexer)
                cache_entry["total"] += 100

        if search_request.internal or config.settings.searching.removeDuplicatesExternal:
            countBefore = len(search_results)
            grouped_by_sameness = find_duplicates(search_results)
            allresults = []
            for group in grouped_by_sameness:
                if search_request.internal:
                    for i in group:
                        # We give each group of results a unique value by which they can be identified later
                        i.hash = hash(group[0].guid)
                        allresults.append(i)

                else:
                    # We sort by age first and then by indexerscore so the newest result with the highest indexer score is chosen
                    group = sorted(group, key=lambda x: x.epoch, reverse=True)
                    group = sorted(group, key=lambda x: x.indexerscore, reverse=True)
                    allresults.append(group[0])
            search_results = allresults
            if not search_request.internal:
                countAfter = len(search_results)
                countRemoved = countBefore - countAfter
                logger.info("Removed %d duplicates from %d results" % (countRemoved, countBefore))
        search_results = sorted(search_results, key=lambda x: x.epoch, reverse=True)

        cache_entry["results"].extend(search_results)
        cache_entry["offset"] += limit

    if search_request.internal:
        logger.debug("We have %d cached results and return them all because we search internally" % len(cache_entry["results"]))
        nzb_search_results = copy.deepcopy(cache_entry["results"][external_offset:])
    else:
        logger.debug("We have %d cached results and return %d-%d of %d total available accounting for the limit set for the API search" % (len(cache_entry["results"]), external_offset, external_offset + limit, cache_entry["total"]))
        nzb_search_results = copy.deepcopy(cache_entry["results"][external_offset:(external_offset + limit)])
    cache_entry["last_access"] = arrow.utcnow()

    return {"results": nzb_search_results, "indexer_infos": cache_entry["indexer_infos"], "dbsearchid": cache_entry["dbsearch"].id, "total": cache_entry["total"], "offset": external_offset}
Example No. 33
0
    def get_comic_urls(self, search_request):
        if not search_request.category:
            search_request.category = getCategoryByAnyInput("comic")
        logger.info("Searching for comics in ebook category")
        return self.get_search_urls(search_request)
Example No. 34
0
    def testNewznabSearchQueries(self):
        self.args = SearchRequest(query="aquery")
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&q=aquery&t=search", query
        )

        self.args = SearchRequest(query=None)
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&t=search", query
        )

        self.args = SearchRequest(query="")
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&t=search", query
        )

        self.args = SearchRequest(category=getCategoryByAnyInput("audio"))
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=3000&extended=1&limit=100&offset=0&t=search", query
        )

        self.args = SearchRequest()
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&t=tvsearch", query
        )

        self.args = SearchRequest(query=None)
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&t=tvsearch", query
        )

        self.args = SearchRequest(query="")
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&t=tvsearch", query
        )

        self.args = SearchRequest(category=getCategoryByAnyInput("all"))
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&t=tvsearch", query
        )

        self.args = SearchRequest(identifier_value="8511", identifier_key="rid")
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&rid=8511&t=tvsearch",
            query,
        )

        self.args = SearchRequest(identifier_value="8511", identifier_key="rid", season=1)
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&rid=8511&season=1&t=tvsearch",
            query,
        )

        self.args = SearchRequest(identifier_value="8511", identifier_key="rid", season=1, episode=2)
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&ep=2&extended=1&limit=100&offset=0&rid=8511&season=1&t=tvsearch",
            query,
        )

        self.args = SearchRequest(identifier_value="12345678", identifier_key="imdbid")
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=2000&extended=1&imdbid=12345678&limit=100&offset=0&t=movie",
            query,
        )

        self.args = SearchRequest(
            identifier_value="12345678", identifier_key="imdbid", category=getCategoryByAnyInput("movieshd")
        )
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=2040,2050,2060&extended=1&imdbid=12345678&limit=100&offset=0&t=movie",
            query,
        )

        self.args = SearchRequest(category=getCategoryByAnyInput("movies"))
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=2000&extended=1&limit=100&offset=0&t=movie", query
        )

        self.args = SearchRequest(category=getCategoryByAnyInput("movies"), query=None)
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=2000&extended=1&limit=100&offset=0&t=movie", query
        )

        self.args = SearchRequest(category=getCategoryByAnyInput("movies"), query="")
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=2000&extended=1&limit=100&offset=0&t=movie", query
        )

        config.settings.searching.forbiddenWords = "ignorethis"
        self.args = SearchRequest(query="aquery", forbiddenWords=["ignorethis"])
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&q=aquery !ignorethis&t=search",
            query,
        )

Example No. 35
0
    def testGetMovieSearchUrls(self):
        self.args = SearchRequest(identifier_key="imdb", identifier_value="0169547")
        urls = self.omgwtf.get_moviesearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=tt0169547&catid=15,16,17,18", urls[0])

        self.args = SearchRequest(identifier_key="tmdb", identifier_value="14", category=getCategoryByAnyInput("movieshd"))
        urls = self.omgwtf.get_moviesearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertEqual("https://api.omgwtfnzbs.org/xml/?api=apikey&user=anuser&search=tt0169547&catid=16", urls[0])