async def search_image(self, url):
    """Stubbed image search used for testing.

    :param url: Image URL (ignored by the stub).
    :return: A canned :class:`ImageResult` when ``self.fail`` is falsy.
    :raises Exception: When ``self.fail`` is set, to simulate a failed task.
    """
    if not self.fail:
        return ImageResult(["test"], ["test"], "safe")
    raise Exception("Task failed (intentional)")
async def index_parser(self, json):
    """Parse the output from a successful SauceNao search to retrieve data from specific indexes.

    :param json: JSON output from the API.
    :type json: dict
    :return: :class:`ImageResult` holding the collected tags, the best source URL
        (or ``None`` if no source index matched), and ``None`` for the rating.
    :rtype: ImageResult
    """
    # Minimum similarity SauceNao advises; going lower is generally gonna give
    # false positives. Cast defensively: the API stores entry similarity as a
    # str (see below), so don't trust this field's type either.
    base_similarity = float(json["header"]["minimum_similarity"])

    # Entries from the wanted source indexes that clear the similarity bar.
    # Entry similarity is cast to a float since somehow it's stored as an str.
    # Damn API inaccuracy.
    source_results = [
        entry for entry in json["results"]
        if entry["header"]["index_id"] in self.source_indexes
        and float(entry["header"]["similarity"]) > base_similarity
    ]

    # Pick the source whose index appears earliest in self.source_indexes
    # (lower list position == higher priority). Start one past the last valid
    # priority so any match beats "no result".
    source = None
    source_priority = len(self.source_indexes)
    for entry in source_results:
        list_index = self.source_indexes.index(entry["header"]["index_id"])
        if list_index < source_priority:
            source = entry["data"]["ext_urls"][0]
            source_priority = list_index

    # Same similarity filtering for the tag indexes.
    tag_results_list = [
        entry for entry in json["results"]
        if entry["header"]["index_id"] in self.tag_indexes
        and float(entry["header"]["similarity"]) > base_similarity
    ]
    # Unlike the source, these require specific lookups based on their ID,
    # so rearrange the results into a dict keyed by index id.
    tag_results = {entry["header"]["index_id"]: entry for entry in tag_results_list}

    # get_event_loop() is deprecated inside coroutines; in an async def a
    # running loop is guaranteed.
    loop = asyncio.get_running_loop()

    async def _get_json(url, params=None):
        # Run the blocking HTTP client off the event loop thread and decode
        # the JSON body.
        r = await loop.run_in_executor(
            None, functools.partial(fuck_aiohttp.get, url, params=params))
        return r.json()

    tags = []
    for index_id, entry in tag_results.items():
        if index_id == 9:  # Danbooru
            j = await _get_json("https://danbooru.donmai.us/posts/"
                                + str(entry["data"]["danbooru_id"]) + ".json")
            tags += j["tag_string"].split()
        elif index_id == 12:  # Yande.re # pragma: no cover
            j = await _get_json(
                "https://yande.re/post.json",
                params={"tags": "id:" + str(entry["data"]["yandere_id"])})
            tags += j[0]["tags"].split()
        elif index_id == 26:  # Konachan # pragma: no cover
            j = await _get_json(
                "http://konachan.com/post.json",
                params={"tags": "id:" + str(entry["data"]["konachan_id"])})
            tags += j[0]["tags"].split()
    return ImageResult(tags, source, None)