def similar_and_recommended(self, tmdb_id, content_type):
    """Merges the results of similar and recommended.

    Args:
        tmdb_id: An Integer or String containing the TMDB ID.
        content_type: String containing "movie" or "tv".
    """
    try:
        thread_list = []

        # Get recommended page one
        recommend_page_one = self.__recommended(tmdb_id, content_type, 1)

        # Gather additional recommended pages
        if recommend_page_one["total_pages"] > 1:
            # TMDB pages are 1-indexed and inclusive of total_pages; cap at the max.
            last_page = min(recommend_page_one["total_pages"], MAX_RECOMMENDED_PAGES)
            for page_number in range(2, last_page + 1):
                thread = ReturnThread(
                    target=self.__recommended,
                    args=[tmdb_id, content_type, page_number],
                )
                thread.start()
                thread_list.append(thread)

        # Get similar page one
        similar_page_one = self.__similar(tmdb_id, content_type, 1)

        # Gather additional similar pages
        if similar_page_one["total_pages"] > 1:
            last_page = min(similar_page_one["total_pages"], MAX_SIMILAR_PAGES)
            for page_number in range(2, last_page + 1):
                thread = ReturnThread(
                    target=self.__similar,
                    args=[tmdb_id, content_type, page_number],
                )
                thread.start()
                thread_list.append(thread)

        # Merge the first pages of similar and recommended
        merged_results = self.__merge_results(recommend_page_one, similar_page_one)

        # Wait for all the threads to complete and merge them in
        for thread in thread_list:
            merged_results = self.__merge_results(merged_results, thread.join())

        self.determine_id_validity(merged_results)

        # Shuffle and return
        return self.__shuffle_results(merged_results)

    except Exception:
        log.handler(
            "Failed to obtain merged Similar and Recommended!",
            log.ERROR,
            self.__logger,
        )
        return {}
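# The methods in this module lean on a ReturnThread helper that is defined
# elsewhere. A minimal sketch of such a helper follows, assuming it is a plain
# threading.Thread subclass whose join() hands back the target's return value;
# the project's actual implementation may differ.
from threading import Thread


class ReturnThread(Thread):
    """Thread that captures the target's return value and yields it on join()."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._return = None

    def run(self):
        # Execute the target and stash its result instead of discarding it.
        if self._target is not None:
            self._return = self._target(*self._args, **self._kwargs)

    def join(self, timeout=None):
        # Wait for completion, then return the captured result.
        super().join(timeout)
        return self._return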
def __multi_page_fetch(self, cache_name, function, page_number, content_type, page_multiplier):
    # Obtain multiple pages of TMDB queries
    total_pages = page_number * page_multiplier
    thread_list = []
    for subtractor in range(0, page_multiplier):
        thread = ReturnThread(
            target=cache.handler,
            args=[
                cache_name,
                function,
                total_pages - subtractor,
                False,
                POPULAR_AND_TOP_CACHE_TIMEOUT,
            ],
            kwargs={
                "page": total_pages - subtractor,
                "language": LANGUAGE,
            },
        )
        thread.start()
        thread_list.append(thread)

    # Merge these pages together
    merged_results = thread_list.pop(0).join()
    for thread in thread_list:
        merged_results = self.__merge_results(merged_results, thread.join())

    # Set the valid ID flag to True, since TMDB is a valid source for Radarr.
    if content_type == "movie":
        for result in merged_results["results"]:
            result["conreq_valid_id"] = True

    # Determine if TV has a TVDB ID (required for Sonarr)
    self.determine_id_validity(merged_results)

    return merged_results
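# Hypothetical sketch of how __multi_page_fetch might be driven from a public
# method: one UI page fans out into page_multiplier TMDB pages. The name
# popular_movies and the self.__movies.popular attribute are illustrative
# assumptions; only the parameter order matches the method above.
def popular_movies(self, page_number):
    return self.__multi_page_fetch(
        "discover popular movies",
        self.__movies.popular,
        page_number,
        "movie",
        3,
    )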
def all(self, query):
    """Search Sonarr and Radarr for a query. Sort the results based on
    their similarity to the query.

    Args:
        query: A string containing a search term.
    """
    try:
        thread_list = []

        # Start the query for TV
        thread = ReturnThread(target=self.television, args=[query])
        thread.start()
        thread_list.append(thread)

        # Start the query for movies
        thread = ReturnThread(target=self.movie, args=[query])
        thread.start()
        thread_list.append(thread)

        # Wait for the queries to complete
        results_list = [thread.join() for thread in thread_list]

        # Sort the results with our conreq ranking algorithm
        return self.__set_conreq_rank(query, results_list[0] + results_list[1])

    except Exception:
        log.handler(
            "Searching for all failed!",
            log.ERROR,
            self.__logger,
        )
        return []
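# Hypothetical usage of the combined search above; the query string is just an
# example. TV and movie results are fetched concurrently, merged, and ranked.
searcher = Search()
ranked_results = searcher.all("breaking bad")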
def more_info(request):
    content_discovery = ContentDiscovery()
    template = loader.get_template("viewport/more_info.html")
    thread_list = []

    # Get the ID from the URL
    tmdb_id = request.GET.get("tmdb_id", None)
    tvdb_id = request.GET.get("tvdb_id", None)

    if tmdb_id:
        content_type = request.GET.get("content_type", None)

        # Get all the basic metadata for a given ID
        content = content_discovery.get_by_tmdb_id(tmdb_id, content_type)

        # Get recommended results
        similar_and_recommended_thread = ReturnThread(
            target=content_discovery.similar_and_recommended,
            args=[tmdb_id, content_type],
        )
        similar_and_recommended_thread.start()

        # Determine the availability of the current TMDB ID
        thread = Thread(target=set_single_availability, args=[content])
        thread.start()
        thread_list.append(thread)

        # Pre-process data attributes within tmdb_result
        thread = Thread(target=preprocess_tmdb_result, args=[content])
        thread.start()
        thread_list.append(thread)

        # Get collection information
        if content.get("belongs_to_collection") is not None:
            tmdb_collection = True
            tmdb_collection_thread = ReturnThread(
                target=content_discovery.collections,
                args=[content["belongs_to_collection"]["id"]],
            )
            tmdb_collection_thread.start()
        else:
            tmdb_collection = None

        # Recommended content
        tmdb_recommended = similar_and_recommended_thread.join()
        if not isinstance(tmdb_recommended, dict) or len(tmdb_recommended) == 0:
            tmdb_recommended = None

        # Determine the availability for all recommended content
        # (guarded, since tmdb_recommended may have been set to None above)
        if tmdb_recommended is not None:
            thread = Thread(
                target=set_many_availability, args=[tmdb_recommended["results"]]
            )
            thread.start()
            thread_list.append(thread)

        # Wait for thread computation to complete
        for thread in thread_list:
            thread.join()
        if tmdb_collection is not None:
            tmdb_collection = tmdb_collection_thread.join()

        # Check if the user has already requested this
        requested = UserRequest.objects.filter(
            content_id=content["id"],
            source="tmdb",
            content_type=content["content_type"],
        ).exists()

        # Generate context for page rendering
        context = generate_context(
            {
                "content": content,
                "recommended": tmdb_recommended,
                "collection": tmdb_collection,
                "content_type": content["content_type"],
                "requested": requested,
            }
        )

    elif tvdb_id:
        searcher = Search()

        # Fallback for TVDB
        content = searcher.television(tvdb_id)[0]
        thread_list = []

        # Preprocess results
        thread = Thread(target=preprocess_arr_result, args=[content])
        thread.start()
        thread_list.append(thread)

        # Determine the availability
        thread = Thread(target=set_single_availability, args=[content])
        thread.start()
        thread_list.append(thread)

        # Wait for thread computation to complete
        for thread in thread_list:
            thread.join()

        # Generate context for page rendering
        context = generate_context(
            {
                "content": content,
                "content_type": content["contentType"],
            }
        )

    # Render the page
    return HttpResponse(template.render(context, request))
def handler(
    cache_name,
    function=None,
    page_key="",
    force_update_cache=False,
    cache_duration=DEFAULT_CACHE_DURATION,
    *args,
    **kwargs,
):
    """Handles caching for results and data.

    Args:
        cache_name: Name of the cache to use.
        function: A function reference that returns some value to be cached.
        page_key: The page name or page number to use as a key value.
        force_update_cache: If True, re-execute the function even if a cached
            value already exists.
        cache_duration: How long to keep the value in cache, in seconds.
        *args: Any positional parameters that need to be passed into "function".
        **kwargs: Any keyword parameters that need to be passed into "function".
    """
    cached_results = None
    # Looks through cache and will perform a search if needed.
    try:
        log.handler(
            cache_name + " - Accessed.",
            log.DEBUG,
            __logger,
        )

        # If "function" is actually a dict, then use get_many and/or set_many.
        # All items must belong to the same cache.
        # { page_key: {
        #       "function": function_value,
        #       "kwargs": {kwargs_values},
        #       "args": [args_values],
        #   },
        # ... }
        if isinstance(function, dict):
            if len(function) == 0:
                # Nothing was passed in
                return None

            # Obtain all the keys from the passed in dictionary
            requested_keys = []
            for key, value in function.items():
                cache_key = generate_cache_key(
                    cache_name, value["args"], value["kwargs"], key
                )
                log.handler(
                    cache_name + " - Multi-execution generated cache key " + cache_key,
                    log.DEBUG,
                    __logger,
                )
                requested_keys.append(cache_key)

            # Search the cache for all keys
            cached_results = cache.get_many(requested_keys)
            log.handler(
                cache_name
                + " - Multi-execution detected "
                + str(len(cached_results))
                + " available keys.",
                log.INFO,
                __logger,
            )

            # For any key not in cache (or expired), run its function()
            thread_list = []
            for cache_key in requested_keys:
                if cache_key not in cached_results:
                    key = obtain_key_from_cache_key(cache_key)
                    thread = ReturnThread(
                        target=function[key]["function"],
                        args=function[key]["args"],
                        kwargs=function[key]["kwargs"],
                    )
                    thread.start()
                    thread_list.append((cache_key, thread))

            missing_keys = {}
            for key, thread in thread_list:
                missing_keys[key] = thread.join()

            # Set values in cache for any newly executed functions
            if missing_keys:
                log.handler(
                    cache_name
                    + " - Multi-execution detected "
                    + str(len(missing_keys))
                    + " missing keys.",
                    log.INFO,
                    __logger,
                )
                cache.set_many(missing_keys, cache_duration)

            # Return all results
            cached_results.update(missing_keys)

            # If no results were found, log it.
            if not cached_results:
                log.handler(
                    cache_name + " - Multi-execution generated no results!",
                    log.WARNING,
                    __logger,
                )

            return cached_results

        # Get the cached value
        cache_key = generate_cache_key(cache_name, args, kwargs, page_key)
        log.handler(
            cache_name + " - Generated cache key " + cache_key,
            log.DEBUG,
            __logger,
        )
        cached_results = cache.get(cache_key)

        # No function was provided, so return the bare cache value
        if function is None:
            log.handler(
                cache_name + " - Requested raw cache values.",
                log.DEBUG,
                __logger,
            )
            return cached_results

        # If the cache was empty or expired, or the caller wants to force an
        # update, run function()
        if cached_results is None or force_update_cache:
            if cached_results is None:
                log.handler(
                    cache_name + " - No cached results found!",
                    log.INFO,
                    __logger,
                )
            function_results = function(*args, **kwargs)
            log.handler(
                cache_name + " - Function " + function.__name__ + " has been executed!",
                log.INFO,
                __logger,
            )
            cache.set(cache_key, function_results, cache_duration)
            return function_results

        # A value was in cache and not expired, so return that value
        return cached_results

    except Exception:
        # If the function threw an exception, return None.
        if isinstance(function, dict):
            log.handler(
                "Function list failed to execute!",
                log.ERROR,
                __logger,
            )
        else:
            log.handler(
                "Function " + function.__name__ + " failed to execute!",
                log.ERROR,
                __logger,
            )
        return None
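# A sketch of the multi-execution form documented in handler()'s docstring.
# "example cache" and fetch_page are hypothetical stand-ins; only the dict
# layout comes from the docstring above.
def fetch_page(page=1, language="en"):
    """Stand-in for any expensive call whose result should be cached."""
    return {"page": page, "language": language}


pages = {
    1: {"function": fetch_page, "args": [], "kwargs": {"page": 1, "language": "en"}},
    2: {"function": fetch_page, "args": [], "kwargs": {"page": 2, "language": "en"}},
}

# Runs fetch_page only for the pages missing from "example cache", stores the
# new values, and returns a {cache_key: value} mapping covering every page.
results = handler("example cache", pages)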