def _feed_articles(self, channel_id, timestamp, feed_count):
    """Fetch recommended articles from the recommendation RPC service.

    :param channel_id: channel id
    :param timestamp: paging timestamp
    :param feed_count: number of articles requested
    :return: ([{article_id, trace_params}, ...], timestamp)
    """
    user = user_reco_pb2.User()
    # Prefer the logged-in user id, fall back to the anonymous id, else empty.
    user.user_id = str(g.user_id or g.anonymous_id or '')
    user.channel_id = channel_id
    user.article_num = feed_count
    user.time_stamp = timestamp

    stub = user_reco_pb2_grpc.UserRecommendStub(current_app.rpc_reco)
    try:
        resp = stub.user_recommend(user, timeout=5)
    except Exception as e:
        # RPC failure degrades to an empty feed instead of a 500.
        current_app.logger.error(e)
        return [], timestamp

    # Exposure tracking parameters for the recommendation system.
    trace_exposure = resp.exposure
    if len(resp.recommends) > 0 and trace_exposure:
        write_trace_log(trace_exposure, channel_id=channel_id)

    return resp.recommends, resp.time_stamp
def _feed_articles(self, channel_id, feed_count):
    """Fetch recommended articles from the recommendation RPC service.

    :param channel_id: channel id
    :param feed_count: number of articles requested
    :return: [{article_id, trace_params}, ...] (empty list on RPC failure)
    """
    req = user_reco_pb2.User()
    if g.user_id:
        req.user_id = str(g.user_id)
    elif g.anonymous_id:
        req.user_id = str(g.anonymous_id)
    else:
        req.user_id = ''
    req.channel_id = channel_id
    req.article_num = feed_count

    stub = user_reco_pb2_grpc.UserRecommendStub(current_app.rpc_reco)
    try:
        # Bound the RPC so a slow recommender cannot hang the request,
        # matching the timestamped variant of this method.
        resp = stub.user_recommend(req, timeout=5)
    except Exception as e:
        # Degrade to an empty feed instead of surfacing a 500.
        current_app.logger.error(e)
        return []

    # Only log exposure tracking when there is something to expose.
    trace_exposure = resp.exposure
    if len(resp.recommends) > 0 and trace_exposure:
        write_trace_log(trace_exposure, channel_id=channel_id)

    return resp.recommends
def post(self):
    """Collect (favorite) an article for the current user."""
    body = RequestParser()
    body.add_argument('target', type=parser.article_id, required=True, location='json')
    body.add_argument('Trace', type=inputs.regex(r'^.+$'), required=False, location='headers')
    args = body.parse_args()
    target = args.target

    # Tracking log for the recommendation system.
    if args.Trace:
        article = cache_article.ArticleInfoCache(target).get()
        write_trace_log(args.Trace, channel_id=article['ch_id'])

    ret = 1
    try:
        db.session.add(Collection(user_id=g.user_id, article_id=target))
        db.session.commit()
    except IntegrityError:
        # Row already exists: revive a soft-deleted collection instead.
        db.session.rollback()
        ret = Collection.query.filter_by(user_id=g.user_id, article_id=target, is_deleted=True) \
            .update({'is_deleted': False})
        db.session.commit()

    if ret > 0:
        # Invalidate the user's collection cache and bump counters.
        cache_user.UserArticleCollectionsCache(g.user_id).clear()
        cache_statistic.ArticleCollectingCountStorage.incr(target)
        cache_statistic.UserArticleCollectingCountStorage.incr(g.user_id)

    return {'target': target}, 201
def get(self, article_id):
    """Get article detail.

    :param article_id: int, article id
    """
    qs_parser = RequestParser()
    qs_parser.add_argument('Trace', type=inputs.regex(r'^.+$'), required=False, location='headers')
    args = qs_parser.parse_args()

    # Read article data from the cache layer; guard clause on missing article.
    article_cache = ArticleInfoCache(article_id)
    if not article_cache.exists():
        # Fixed typo: response key was misspelled 'messsage'.
        return {'message': 'Invalid article'}, 400

    article_dict = article_cache.get()

    # Write the tracking info the recommendation system needs.
    if args.Trace:
        write_trace_log(args.Trace)

    # TODO fetch article content / follow / comment / like state from cache
    # TODO fetch related article recommendations via RPC
    return article_dict
def post(self):
    """Record the user's reading duration of an article for tracking."""
    body = RequestParser()
    body.add_argument('Trace', type=inputs.regex(r'^.+$'), required=True, location='headers')
    body.add_argument('duration', type=inputs.natural, required=True, location='json')
    body.add_argument('art_id', type=parser.article_id, required=True, location='json')
    args = body.parse_args()

    # The channel id comes from the cached article info.
    article = cache_article.ArticleInfoCache(args.art_id).get()
    write_trace_log(args.Trace, args.duration, channel_id=article['ch_id'])

    return {'message': 'OK'}, 201
def get(self, article_id): """ 获取文章详情 :param article_id: int 文章id """ # 写入埋点日志 qs_parser = RequestParser() qs_parser.add_argument('Trace', type=inputs.regex(r'^.+$'), required=False, location='headers') args = qs_parser.parse_args() user_id = g.user_id # 查询文章数据 exist = cache_article.ArticleInfoCache(article_id).exists() if not exist: abort(404, message='The article does not exist.') article = cache_article.ArticleDetailCache(article_id).get() # 推荐系统所需埋点 if args.Trace: write_trace_log(args.Trace, channel_id=article['ch_id']) article['is_followed'] = False article['attitude'] = None # 增加用户是否收藏了文章 article['is_collected'] = False if user_id: # 非匿名用户添加用户的阅读历史 try: cache_user.UserReadingHistoryStorage(user_id).save(article_id) except ConnectionError as e: current_app.logger.error(e) # 查询关注 # article['is_followed'] = cache_user.UserFollowingCache(user_id).determine_follows_target(article['aut_id']) article['is_followed'] = cache_user.UserRelationshipCache(user_id).determine_follows_target(article['aut_id']) # 查询登录用户对文章的态度(点赞or不喜欢) try: # article['attitude'] = cache_article.ArticleUserAttitudeCache(user_id, article_id).get() article['attitude'] = cache_user.UserArticleAttitudeCache(user_id).get_article_attitude(article_id) except SQLAlchemyError as e: current_app.logger.error(e) article['attitude'] = -1 # 增加用户是否收藏了文章 article['is_collected'] = cache_user.UserArticleCollectionsCache(g.user_id).determine_collect_target(article_id) # 获取相关文章推荐 article['recomments'] = [] try: similar_articles = self._feed_similar_articles(article_id) for _article_id in similar_articles: _article = cache_article.ArticleInfoCache(_article_id).get() article['recomments'].append({ 'art_id': _article['art_id'], 'title': _article['title'] }) except Exception as e: current_app.logger.error(e) # 更新阅读数 cache_statistic.ArticleReadingCountStorage.incr(article_id) cache_statistic.UserArticlesReadingCountStorage.incr(article['aut_id']) return article