def get(self):
    """Return the current user's articles, paginated.

    Query args: ``page`` (positive int, default 1) and ``per_page``
    (bounded by the configured min/max, default min).
    """
    req = RequestParser()
    req.add_argument('page', type=inputs.positive, required=False, location='args')
    req.add_argument('per_page',
                     type=inputs.int_range(constants.DEFAULT_ARTICLE_PER_PAGE_MIN,
                                           constants.DEFAULT_ARTICLE_PER_PAGE_MAX,
                                           'per_page'),
                     required=False, location='args')
    args = req.parse_args()

    page = args.page if args.page is not None else 1
    per_page = args.per_page or constants.DEFAULT_ARTICLE_PER_PAGE_MIN

    total_count, page_articles = cache_user.UserArticlesCache(g.user_id).get_page(page, per_page)

    # One attitude-cache handle reused for every article on the page.
    attitude_cache = cache_user.UserArticleAttitudeCache(g.user_id)

    results = []
    for aid in page_articles:
        info = cache_article.ArticleInfoCache(aid).get()
        if not info:
            # Article may have been deleted since the list was cached.
            continue
        info['is_liking'] = attitude_cache.determine_liking_article(aid)
        results.append(info)

    return {'total_count': total_count, 'page': page, 'per_page': per_page, 'results': results}
def post(self):
    """Like an article.

    Body: ``target`` — the article id to like.
    Returns ``{'target': id}`` with HTTP 201.
    """
    json_parser = RequestParser()
    json_parser.add_argument('target', type=parser.article_id, required=True, location='json')
    args = json_parser.parse_args()
    target = args.target

    # Before this operation the user may have no attitude record, a cleared
    # attitude, or a dislike; look up the prior state first so the cached
    # counters are adjusted consistently.
    atti = Attitude.query.filter_by(user_id=g.user_id, article_id=target).first()
    if atti is None:
        # First attitude for this (user, article) pair.
        attitude = Attitude(user_id=g.user_id, article_id=target, attitude=Attitude.ATTITUDE.LIKING)
        db.session.add(attitude)
        db.session.commit()
        cache_statistic.ArticleLikingCountStorage.incr(target)
        # FIX: this branch previously skipped the user-liked counter, while the
        # other like-creating branches increment it and the cancel-like handler
        # always decrements it — a like/unlike cycle starting here would drift
        # the counter negative.
        cache_statistic.UserLikedCountStorage.incr(g.user_id)
    else:
        if atti.attitude == Attitude.ATTITUDE.DISLIKE:
            # Was disliked before: flip to liking and move both counters.
            atti.attitude = Attitude.ATTITUDE.LIKING
            db.session.add(atti)
            db.session.commit()
            cache_statistic.ArticleLikingCountStorage.incr(target)
            cache_statistic.ArticleDislikeCountStorage.incr(target, -1)
            cache_statistic.UserLikedCountStorage.incr(g.user_id)
        elif atti.attitude is None:
            # Record exists but the attitude was cleared earlier.
            atti.attitude = Attitude.ATTITUDE.LIKING
            db.session.add(atti)
            db.session.commit()
            cache_statistic.ArticleLikingCountStorage.incr(target)
            cache_statistic.UserLikedCountStorage.incr(g.user_id)
        # Already LIKING: idempotent, nothing to update.

    # cache_article.ArticleUserAttitudeCache(g.user_id, target).clear()
    cache_user.UserArticleAttitudeCache(g.user_id).clear()

    # Push a real-time "liking" notification to the article's author.
    _user = cache_user.UserProfileCache(g.user_id).get()
    _article = cache_article.ArticleInfoCache(target).get()
    _data = {
        'user_id': g.user_id,
        'user_name': _user['name'],
        'user_photo': _user['photo'],
        'art_id': target,
        'art_title': _article['title'],
        'timestamp': int(time.time())
    }
    current_app.sio.emit('liking notify', data=_data, room=str(_article['aut_id']))
    return {'target': target}, 201
def delete(self, target):
    """Cancel a "dislike" on an article.

    :param target: article id whose dislike is being withdrawn
    """
    # Bulk UPDATE returns the number of matched rows; zero means there was
    # no dislike to cancel.
    changed = Attitude.query.filter_by(
        user_id=g.user_id, article_id=target, attitude=Attitude.ATTITUDE.DISLIKE
    ).update({'attitude': None})
    db.session.commit()

    if changed > 0:
        cache_statistic.ArticleDislikeCountStorage.incr(target, -1)
        cache_user.UserArticleAttitudeCache(g.user_id).clear()

    return {'message': 'OK'}, 204
def delete(self, target):
    """Cancel a like on an article.

    :param target: article id whose like is being withdrawn
    """
    # Clear the attitude in place; the row count tells us whether a like
    # actually existed.
    cancelled = Attitude.query.filter_by(
        user_id=g.user_id, article_id=target, attitude=Attitude.ATTITUDE.LIKING
    ).update({'attitude': None})
    db.session.commit()

    if cancelled > 0:
        cache_statistic.ArticleLikingCountStorage.incr(target, -1)
        cache_statistic.UserLikedCountStorage.incr(g.user_id, -1)
        # cache_article.ArticleUserAttitudeCache(g.user_id, target).clear()
        cache_user.UserArticleAttitudeCache(g.user_id).clear()

    return {'message': 'OK'}, 204
def post(self):
    """Dislike an article.

    Body: ``target`` — the article id to dislike.
    Returns ``{'target': id}`` with HTTP 201.
    """
    body = RequestParser()
    body.add_argument('target', type=parser.article_id, required=True, location='json')
    target = body.parse_args().target

    # The user may previously have had no record, a cleared attitude, or a
    # like — fetch the prior state so each counter moves the right way.
    record = Attitude.query.filter_by(user_id=g.user_id, article_id=target).first()

    if record is None:
        # No prior attitude at all: insert a fresh DISLIKE row.
        db.session.add(Attitude(user_id=g.user_id, article_id=target,
                                attitude=Attitude.ATTITUDE.DISLIKE))
        db.session.commit()
        cache_statistic.ArticleDislikeCountStorage.incr(target)
    elif record.attitude == Attitude.ATTITUDE.LIKING:
        # Flip like -> dislike: adjust both article counters and the
        # per-user liked counter.
        record.attitude = Attitude.ATTITUDE.DISLIKE
        db.session.add(record)
        db.session.commit()
        cache_statistic.ArticleDislikeCountStorage.incr(target)
        cache_statistic.ArticleLikingCountStorage.incr(target, -1)
        cache_statistic.UserLikedCountStorage.incr(g.user_id, -1)
    elif record.attitude is None:
        # Row exists but attitude was cleared earlier.
        record.attitude = Attitude.ATTITUDE.DISLIKE
        db.session.add(record)
        db.session.commit()
        cache_statistic.ArticleDislikeCountStorage.incr(target)
    # Already DISLIKE: idempotent, nothing to update.

    cache_user.UserArticleAttitudeCache(g.user_id).clear()
    return {'target': target}, 201
def get(self, article_id): """ 获取文章详情 :param article_id: int 文章id """ # 写入埋点日志 qs_parser = RequestParser() qs_parser.add_argument('Trace', type=inputs.regex(r'^.+$'), required=False, location='headers') args = qs_parser.parse_args() user_id = g.user_id # 查询文章数据 exist = cache_article.ArticleInfoCache(article_id).exists() if not exist: abort(404, message='The article does not exist.') article = cache_article.ArticleDetailCache(article_id).get() # 推荐系统所需埋点 if args.Trace: write_trace_log(args.Trace, channel_id=article['ch_id']) article['is_followed'] = False article['attitude'] = None # 增加用户是否收藏了文章 article['is_collected'] = False if user_id: # 非匿名用户添加用户的阅读历史 try: cache_user.UserReadingHistoryStorage(user_id).save(article_id) except ConnectionError as e: current_app.logger.error(e) # 查询关注 # article['is_followed'] = cache_user.UserFollowingCache(user_id).determine_follows_target(article['aut_id']) article['is_followed'] = cache_user.UserRelationshipCache(user_id).determine_follows_target(article['aut_id']) # 查询登录用户对文章的态度(点赞or不喜欢) try: # article['attitude'] = cache_article.ArticleUserAttitudeCache(user_id, article_id).get() article['attitude'] = cache_user.UserArticleAttitudeCache(user_id).get_article_attitude(article_id) except SQLAlchemyError as e: current_app.logger.error(e) article['attitude'] = -1 # 增加用户是否收藏了文章 article['is_collected'] = cache_user.UserArticleCollectionsCache(g.user_id).determine_collect_target(article_id) # 获取相关文章推荐 article['recomments'] = [] try: similar_articles = self._feed_similar_articles(article_id) for _article_id in similar_articles: _article = cache_article.ArticleInfoCache(_article_id).get() article['recomments'].append({ 'art_id': _article['art_id'], 'title': _article['title'] }) except Exception as e: current_app.logger.error(e) # 更新阅读数 cache_statistic.ArticleReadingCountStorage.incr(article_id) cache_statistic.UserArticlesReadingCountStorage.incr(article['aut_id']) return article