def get(self):
    """Return the current (logged-in) user's article list, paginated.

    Query args:
        page: positive int, defaults to 1.
        per_page: int bounded by DEFAULT_ARTICLE_PER_PAGE_MIN/MAX,
            defaults to the minimum.

    Each article dict also carries an ``is_liking`` flag taken from the
    user's attitude cache.
    """
    parser = RequestParser()
    parser.add_argument('page', type=inputs.positive, required=False, location='args')
    parser.add_argument(
        'per_page',
        type=inputs.int_range(constants.DEFAULT_ARTICLE_PER_PAGE_MIN,
                              constants.DEFAULT_ARTICLE_PER_PAGE_MAX,
                              'per_page'),
        required=False, location='args')
    args = parser.parse_args()

    page = args.page if args.page is not None else 1
    per_page = args.per_page or constants.DEFAULT_ARTICLE_PER_PAGE_MIN

    total_count, page_articles = cache_user.UserArticlesCache(g.user_id).get_page(page, per_page)
    attitude_cache = cache_user.UserArticleAttitudeCache(g.user_id)

    # Skip ids whose cached info is missing (e.g. deleted articles).
    results = []
    for article_id in page_articles:
        article = cache_article.ArticleInfoCache(article_id).get()
        if not article:
            continue
        article['is_liking'] = attitude_cache.determine_liking_article(article_id)
        results.append(article)

    return {'total_count': total_count, 'page': page, 'per_page': per_page, 'results': results}
def get(self, user_id):
    """Return the article list of the user identified by *user_id*.

    Responds 400 if no such user exists in the profile cache.

    Query args:
        page: positive int, defaults to 1.
        per_page: int bounded by DEFAULT_ARTICLE_PER_PAGE_MIN/MAX,
            defaults to the minimum.
    """
    # Guard clause: reject requests for unknown users.
    if not cache_user.UserProfileCache(user_id).exists():
        return {'message': 'Invalid request.'}, 400

    parser = RequestParser()
    parser.add_argument('page', type=inputs.positive, required=False, location='args')
    parser.add_argument(
        'per_page',
        type=inputs.int_range(constants.DEFAULT_ARTICLE_PER_PAGE_MIN,
                              constants.DEFAULT_ARTICLE_PER_PAGE_MAX,
                              'per_page'),
        required=False, location='args')
    args = parser.parse_args()

    page = args.page if args.page is not None else 1
    per_page = args.per_page or constants.DEFAULT_ARTICLE_PER_PAGE_MIN

    total_count, page_articles = cache_user.UserArticlesCache(user_id).get_page(page, per_page)

    # Skip ids whose cached info is missing (e.g. deleted articles).
    results = []
    for article_id in page_articles:
        article = cache_article.ArticleInfoCache(article_id).get()
        if not article:
            continue
        results.append(article)

    return {'total_count': total_count, 'page': page, 'per_page': per_page, 'results': results}
def put(self, target):
    """Update article *target* owned by the current user.

    Body (json): title (5-30 chars), content (non-empty), cover,
    channel_id. Query arg ``draft`` (boolean) selects DRAFT vs
    UNREVIEWED status.

    Returns {'id': target} with 201 on success; 400 when the article
    does not exist or is not owned by the caller; 507 on commit failure.
    """
    parser = RequestParser()
    parser.add_argument('draft', type=inputs.boolean, required=False, location='args')
    parser.add_argument('title', type=inputs.regex(r'.{5,30}'), required=True, location='json')
    parser.add_argument('content', type=inputs.regex(r'.+'), required=True, location='json')
    parser.add_argument('cover', type=self._cover, required=True, location='json')
    parser.add_argument('channel_id', type=self._channel_id, required=True, location='json')
    args = parser.parse_args()

    content = args['content']
    cover = args['cover']
    draft = args['draft']

    # Ownership check: the article must exist and belong to the caller.
    owned = db.session.query(func.count(Article.id)).filter(
        Article.id == target, Article.user_id == g.user_id).first()
    if owned[0] == 0:
        return {'message': 'Invalid article.'}, 400

    # Auto cover (type == -1): derive a cover from the article content.
    if cover['type'] == -1:
        cover = self._generate_article_cover(content)

    Article.query.filter_by(id=target).update(dict(
        channel_id=args['channel_id'],
        title=args['title'],
        cover=cover,
        status=Article.STATUS.DRAFT if draft else Article.STATUS.UNREVIEWED,
    ))
    ArticleContent.query.filter_by(id=target).update(dict(content=content))

    try:
        db.session.commit()
    except Exception as e:
        current_app.logger.error(e)
        db.session.rollback()
        return {'message': 'Server has something wrong.'}, 507

    # Invalidate caches that now hold stale article data.
    cache_user.UserArticlesCache(g.user_id).clear()
    cache_article.ArticleInfoCache(target).clear()
    cache_article.ArticleDetailCache(target).clear()

    # if not draft:
    # TODO machine review
    # TODO push notification for the new article
    return {'id': target}, 201
def get(self):
    """Return the current (logged-in) user's article list, paginated.

    Query args:
        page: positive int, defaults to 1.
        per_page: int bounded by DEFAULT_ARTICLE_PER_PAGE_MIN/MAX,
            defaults to the minimum.

    Returns:
        dict with total_count, page, per_page and results (list of
        cached article-info dicts; ids whose cached info is missing,
        e.g. deleted articles, are skipped).
    """
    qs_parser = RequestParser()
    qs_parser.add_argument('page', type=inputs.positive, required=False, location='args')
    qs_parser.add_argument('per_page', type=inputs.int_range(
        constants.DEFAULT_ARTICLE_PER_PAGE_MIN,
        constants.DEFAULT_ARTICLE_PER_PAGE_MAX,
        'per_page'), required=False, location='args')
    args = qs_parser.parse_args()
    page = 1 if args.page is None else args.page
    per_page = args.per_page if args.per_page else constants.DEFAULT_ARTICLE_PER_PAGE_MIN

    # Pagination is served entirely from the user-articles cache.
    total_count, page_articles = cache_user.UserArticlesCache(
        g.user_id).get_page(page, per_page)

    results = []
    for article_id in page_articles:
        article = cache_article.ArticleInfoCache(article_id).get()
        if article:
            results.append(article)

    return {
        'total_count': total_count,
        'page': page,
        'per_page': per_page,
        'results': results
    }