def article_recommendations_for_user(user, count):
    """
    Retrieve :param count articles which are equally distributed
    over all the feeds to which the :param user is registered to.

    Fails if no language is selected.

    :return: list of article-info dicts, newest first; [] when the user
        has no reading languages configured.
    """
    import zeeguu_core

    # Without at least one reading language there is nothing to recommend.
    if not UserLanguage.all_reading_for_user(user):
        return []

    pref_hash = reading_preferences_hash(user)
    recompute_recommender_cache_if_needed(user, zeeguu_core.db.session)

    cached = ArticlesCache.get_articles_for_hash(pref_hash, count)
    # Drop articles flagged as broken or lacking a publication date.
    readable = [a for a in cached if not a.broken and a.published_time]

    # Sort ascending by publication time, then emit newest-first.
    by_date = SortedList(readable, lambda a: a.published_time)
    return [user_article_info(user, a) for a in reversed(by_date)]
def _reading_preferences_hash(user):
    """
    Method to retrieve the hash, as this is done several times.

    :param user:
    :return: articles_hash: ArticlesHash
    """
    # Gather every piece of user content-selection state that feeds
    # into the cache key.
    filters = [sub.topic for sub in TopicFilter.all_for_user(user)]
    topics = [sub.topic for sub in TopicSubscription.all_for_user(user)]
    languages = Language.all_reading_for_user(user)
    search_filters = [sub.search for sub in SearchFilter.all_for_user(user)]
    searches = [sub.search for sub in SearchSubscription.all_for_user(user)]

    return ArticlesCache.calculate_hash(
        user, topics, filters, searches, search_filters, languages
    )
def recompute_recommender_cache(
    reading_preferences_hash_code, session, user, article_limit=42
):
    """
    Cache up to :param article_limit articles for the given preferences hash.

    :param reading_preferences_hash_code: cache key for this content selection
    :param session: db session used to persist the ArticlesCache rows
    :param user: user whose article stream is being cached
    :param article_limit: set to something low ... say 42 when working in
        real time... ti's a bit slow otherwise. however, when caching
        offline you can save
    :return:
    """
    from itertools import islice

    all_articles = find_articles_for_user(user)

    # islice replaces the hand-rolled counter + next() + StopIteration
    # handling of the previous implementation.
    cached = 0
    for art in islice(all_articles, article_limit):
        session.add(ArticlesCache(art, reading_preferences_hash_code))
        cached += 1

    if cached < article_limit:
        # The article generator ran dry before reaching the limit.
        print("could not find as many results as we wanted")

    # One commit for the whole batch instead of one commit per article
    # (the old `finally` clause committed on every loop iteration).
    session.commit()
def article_recommendations_for_user(user, count):
    """
    Retrieve :param count articles which are equally distributed
    over all the feeds to which the :param user is registered to.

    Fails if no language is selected.

    :return:
    """
    # Temporary fix for the experiment of Gabriel
    AIKI_USERS_COHORT_ID = 109
    if user.cohort_id == AIKI_USERS_COHORT_ID:
        return CohortArticleMap.get_articles_info_for_cohort(user.cohort)

    import zeeguu_core

    if not Language.all_reading_for_user(user):
        return [user.learned_language]

    pref_hash = _reading_preferences_hash(user)
    _recompute_recommender_cache_if_needed(user, zeeguu_core.db.session)

    # two fast calls ot /articles/recommended might result in a race condition
    # in _recompute_recommender_cache;
    # race condition in _recompute_recommender_cache might result in
    # duplicates in the db; since this is being sunset for the elastic search
    # it's not worth fixing the race condition; instead we're simply
    # ensuring that duplicate articles are removed at this point
    unique = set(ArticlesCache.get_articles_for_hash(pref_hash, count))

    # Keep only articles that are not broken and carry a publication date.
    readable = [a for a in unique if not a.broken and a.published_time]

    by_date = SortedList(readable, lambda a: a.published_time)
    return [UserArticle.user_article_info(user, a) for a in reversed(by_date)]
def _recompute_recommender_cache(
    reading_preferences_hash_code, session, user, article_limit=42
):
    """
    Cache up to :param article_limit articles for the given preferences hash.

    :param reading_preferences_hash_code: cache key for this content selection
    :param session: db session used to persist the ArticlesCache rows
    :param user: user whose article stream is being cached
    :param article_limit: set to something low ... say 42 when working in
        real time... ti's a bit slow otherwise. however, when caching
        offline you can save
    :return:
    """
    from itertools import islice

    all_articles = _find_articles_for_user(user)

    # Bug fix: article_limit was silently ignored, so every article the
    # generator yielded was cached regardless of the documented cap.
    for art in islice(all_articles, article_limit):
        cache_obj = ArticlesCache(art, reading_preferences_hash_code)
        session.add(cache_obj)

    session.commit()
def _recompute_recommender_cache_if_needed(user, session):
    """
    This method first checks if there is an existing hash for the
    user's content selection, and if so, is done.

    If non-existent, it retrieves all the articles corresponding
    with this configuration and stores them as ArticlesCache objects.

    :param user: To retrieve the subscriptions of the user
    :param session: Needed to store in the db
    """
    reading_pref_hash = _reading_preferences_hash(user)
    logger.info(f"Pref hash: {reading_pref_hash}")

    if ArticlesCache.check_if_hash_exists(reading_pref_hash) is False:
        logger.info("Recomputing recommender cache...")
        _recompute_recommender_cache(reading_pref_hash, session, user)
    else:
        # Bug fix: this message used to be logged unconditionally, even
        # right after a recompute, which made the logs misleading.
        logger.info("No need to recompute recommender cache.")
def article_recommendations_for_user(user, count):
    """
    Retrieve :param count articles which are equally distributed
    over all the feeds to which the :param user is registered to.

    Fails if no language is selected.

    :return:
    """
    # Temporary fix for the experiment of Gabriel
    AIKI_USERS_COHORT_ID = 109
    if user.cohort_id == AIKI_USERS_COHORT_ID:
        return CohortArticleMap.get_articles_info_for_cohort(user.cohort)

    import zeeguu_core

    if not Language.all_reading_for_user(user):
        return [user.learned_language]

    pref_hash = _reading_preferences_hash(user)
    _recompute_recommender_cache_if_needed(user, zeeguu_core.db.session)

    cached = ArticlesCache.get_articles_for_hash(pref_hash, count)

    # Keep only articles that are not broken and carry a publication date.
    readable = [a for a in cached if not a.broken and a.published_time]

    # Sort ascending by publication time, then emit newest-first.
    by_date = SortedList(readable, lambda a: a.published_time)
    return [UserArticle.user_article_info(user, a) for a in reversed(by_date)]