def get(request):
    user_id = request.get("id", None)
    if user_id:
        obj = AccountUser.query.get(user_id)
        if not obj:
            return 400, "Content not found", {}
        # Return the single record when an id is supplied
        return 200, "", obj.toDict()
    querypage = request.get('querypage', 1)
    perpage = request.get('perpage', 10)
    keyword = request.get('keyword', None)
    querys = AccountUser.query.filter()
    if keyword:
        # filter_by() does not accept expressions; filter on the model column instead
        querys = querys.filter(AccountUser.id.contains(keyword))
    querys = querys.order_by(AccountUser.create_time.desc())
    total, result, pageCount, totalPages = _Paginate(querys, querypage, perpage)
    return 200, "", {
        "total": total,
        "result": [i.toDict() for i in result],
        "pageCount": pageCount,
        "totalPages": totalPages
    }
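# All handlers in this file call a shared _Paginate() helper that is not shown
# here. The sketch below is an assumption inferred only from the call sites:
# it takes a SQLAlchemy query plus an optional page and per-page size, and
# returns (total, items, current_page, total_pages). It is named
# _paginate_sketch so it does not shadow the real helper; the project's actual
# implementation may differ.
import math

def _paginate_sketch(query, page=1, per_page=10):
    """Illustrative pagination helper matching the call signatures used here."""
    page = max(int(page), 1)
    per_page = max(int(per_page), 1)
    total = query.count()
    items = query.offset((page - 1) * per_page).limit(per_page).all()
    total_pages = math.ceil(total / per_page) if total else 1
    return total, items, page, total_pages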
def comment_list(request):
    masterid = request.get('masterid', None)
    commentid = request.get('commentid', None)
    pages = request.get('pages', 1)
    # At least one of masterid / commentid is required
    if not masterid and not commentid:
        return 400, 'Invalid parameters', {}
    if masterid:
        data = CommentMain.query.filter(
            CommentMain.masterid == masterid,
            CommentMain.comment_type == 1,
            CommentMain.is_delete == False).order_by(
            CommentMain.create_time.desc())
    if commentid:
        data = CommentMain.query.filter(
            CommentMain.mainid == commentid,
            CommentMain.comment_type == 2,
            CommentMain.is_delete == False).order_by(
            CommentMain.create_time.desc())
    count, items, page, pages = _Paginate(data, pages)
    result = []
    for i in items:
        # Look up the sender once per comment instead of three separate queries
        sender = AccountUser.query.filter_by(id=i.senduser_id).first()
        result.append({
            'senduser_name': sender.username,
            'senduser_head': sender.head if sender.head else '',
            'content': i.content,
            'time': i.create_time.strftime("%Y-%m-%d %H:%M:%S")
        })
    return 200, 'ok', {
        'result': result,
        'count': count,
        'page': page,
        'pages': pages
    }
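# comment_list() resolves each comment's sender with a per-row query. A
# possible optimisation, not used by the handlers in this file, is to batch
# the lookups into a single IN query. The helper below is an illustrative
# sketch only; it assumes the AccountUser model used above.
def _users_by_ids(ids):
    """Hypothetical helper: map user id -> AccountUser for the given ids."""
    users = AccountUser.query.filter(AccountUser.id.in_(set(ids))).all()
    return {u.id: u for u in users}

# Sketch of how it could be used inside comment_list, after pagination:
#   senders = _users_by_ids([i.senduser_id for i in items])
#   sender = senders.get(i.senduser_id)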
def album_list(request):
    print(request)
    types = request.get('types', 0)
    pages = request.get('pages', 1)
    sfilter = request.get('sfilter', 0)
    if sfilter == 0:
        # Default: only published albums
        data = AlbumData.query.filter_by(status=1).order_by(
            AlbumData.create_time.desc())
    else:
        data = AlbumData.query.filter().order_by(AlbumData.create_time.desc())
    if types != 0:
        data = data.filter_by(classification=types)
    count, items, page, pages = _Paginate(data, pages)
    result = [{
        'id': i.id,
        'classification': i.classification,
        'identification': i.identification,
        'name': i.name,
        'cover': SERVER_GULAOBURL + '/static/com/album/cover/' + i.cover,
        'introduce': i.introduce,
        'status': i.status,
        'show_index': i.show_index,
        'relation_bangumi_id': i.relation_bangumi_id,
    } for i in items]
    return 200, 'ok', {
        'result': result,
        'count': count,
        'page': page,
        'pages': pages
    }
def list(request):
    category = request.get('category', None)
    sfilter = request.get('sfilter', None)
    userid = request.get('userid', None)
    types = request.get('type', None)
    pages = request.get('pages', 1)
    if pages == 0:
        pages = 1
    if types is not None:
        # Coerce to int so the comparisons below match (mirrors bangumi_list)
        types = int(types)
    # Build the base query set
    if not sfilter:
        querys = PhotoData.query.filter(PhotoData.verify == 1)
    else:
        querys = PhotoData.query.filter()
    # Verification status
    if types == 1:
        querys = querys.filter_by(verify=2)
    if types == 2:
        querys = querys.filter_by(verify=1)
    # Category
    if category:
        querys = querys.filter(PhotoData.category == category)
    # Uploader id
    if userid:
        querys = querys.filter_by(upload_user=int(userid))
    count, items, page, pages = _Paginate(querys, pages)
    result = [{
        'id': i.id,
        'upload_userid': i.upload_user,
        'file': SERVER_GULAOBURL + '/static/com/photo/image/' + i.file,
        'cover': SERVER_GULAOBURL + '/static/com/photo/cover/' + i.file,
        'title': i.title,
        'info': i.info,
        'category': i.category,
        'pixiv_author': i.pixiv_author,
        'verify': i.verify,
        'create_time': i.create_time.strftime("%Y-%m-%d %H:%M:%S")
    } for i in items]
    return 200, 'ok', {
        'result': result,
        'count': count,
        'page': page,
        'pages': pages
    }
def admin_account_list(request):
    querypage = request.get('querypage', 1)
    perpage = request.get('perpage', 10)
    querys = AccountAdmin.query.filter()
    querys = querys.order_by(AccountAdmin.create_time.desc())
    total, result, pageCount, totalPages = _Paginate(querys, querypage, perpage)
    return 200, "", {
        "total": total,
        "result": [i.toDict() for i in result],
        "pageCount": pageCount,
        "totalPages": totalPages
    }
def bangumi_list(request):
    try:
        print(request)
        types = request.get('types', 0)
        if types:
            types = int(types)
        pages = request.get('pages', 1)
        if pages == 0 or pages is None:
            pages = 1
        sfilter = request.get('sfilter', 0)
        if sfilter:
            sfilter = int(sfilter)
        data = BangumiAnime.query.filter().order_by(
            BangumiAnime.create_time.desc(), BangumiAnime.sort.desc())
        if sfilter == 0:
            # Default: only published entries
            data = data.filter_by(status=1)
        if types != 0:
            data = data.filter_by(classification=types)
        count, items, page, pages = _Paginate(data, pages)
        result = [
            {
                'id': i.id,
                'classification': i.classification,
                'identification': i.identification,
                'name': i.name,
                'setscount': i.setscount,
                'introduce': i.introduce,
                'cover': SERVER_GULAOBURL + '/static/com/bangumi/cover/' + i.cover,
                'upstatus': i.upstatus,
                # 'staff': i.staff,
                'status': i.status,
                'station_play': i.station_play,
                'openplay_time': i.openplay_time.strftime("%Y-%m-%d"),
                'sort': i.sort
            } for i in items
        ]
        return 200, 'ok', {
            'result': result,
            'count': count,
            'page': page,
            'pages': pages
        }
    except Exception as e:
        print(e)
        # Return an explicit error tuple instead of implicitly returning None
        return 500, 'server error', {}
def user_list(request):
    querypage = request.get('querypage', 1)
    pagesize = request.get('pagesize', 10)
    querys = AccountUser.query.filter()
    keyword = request.get("keyword", None)
    if keyword:
        # Comma-separated filters are AND-ed, which would require the keyword
        # to appear in both fields; match either one instead
        # (requires `from sqlalchemy import or_`)
        querys = querys.filter(or_(AccountUser.username.contains(keyword),
                                   AccountUser.email.contains(keyword)))
    querys = querys.order_by(AccountUser.create_time.desc())
    total, result, currentPage, pageCount = _Paginate(
        querys, querypage, pagesize)
    return 200, "", {
        "total": total,
        "result": [i.toDict() for i in result],
        "currentPage": currentPage,
        "pageCount": pageCount
    }
def article_list(request):
    print(request)
    types = request.get('types', 0)
    pages = request.get('pages', 1)
    sfilter = request.get('sfilter', 0)
    if sfilter == 0:
        data = ArticleData.query.filter(
            ArticleData.is_delete == False,
            ArticleData.status == 1).order_by(
            ArticleData.create_time.desc(), ArticleData.sort.desc())
    else:
        data = ArticleData.query.filter().order_by(
            ArticleData.create_time.desc(), ArticleData.sort.desc())
    if types != 0:
        data = data.filter_by(classification=types)
    count, items, page, pages = _Paginate(data, pages)
    result = [{
        'id': i.id,
        'classification': i.classification,
        'identification': i.identification,
        'title': i.title,
        'cover': SERVER_GULAOBURL + '/static/com/article/cover/' + i.cover,
        'introduce': i.introduce,
        'sort': i.sort,
        'status': i.status,
        'show_index': i.show_index,
        'is_delete': i.is_delete
    } for i in items]
    return 200, 'ok', {
        'result': result,
        'count': count,
        'page': page,
        'pages': pages
    }
def video_list(request):
    print(request)
    types = request.get('types', 0)
    pages = request.get('pages', 1)
    ctypes = request.get('ctypes', 0)
    sfilter = request.get('sfilter', 0)
    userid = request.get('userid', None)
    # Coerce to int so the comparisons below match (mirrors bangumi_list)
    if types:
        types = int(types)
    if ctypes:
        ctypes = int(ctypes)
    if sfilter:
        sfilter = int(sfilter)
    data = VideoData.query.filter().order_by(VideoData.create_time.desc())
    if userid:
        data = data.filter_by(upload_userid=int(userid))
    # sfilter == 0 means "all verification states"; 1-4 filter on verify_type
    if sfilter in (1, 2, 3, 4):
        data = data.filter_by(verify_type=sfilter)
    if types == 1 and ctypes != 0:
        # Sub-category only applies to classification 1; filter on ctypes,
        # not on types
        data = data.filter_by(content_classification=ctypes)
    if types != 0:
        data = data.filter_by(classification=types)
    count, items, page, pages = _Paginate(data, pages)
    result = []
    for i in items:
        # Fetch the uploader once per video instead of repeated queries
        author = AccountUser.query.filter_by(id=int(i.upload_userid)).first()
        result.append({
            'author_head': SERVER_GULAOBURL + '/static/com/userhead/' +
                           (author.head if author.head else 'default.png'),
            'author_username': author.username,
            'id': i.id,
            'upload_userid': i.upload_userid,
            'verify_type': i.verify_type,
            'verify_falseinfo': i.verify_falseinfo,
            'classification': i.classification,
            'content_classification': i.content_classification,
            'identification': i.identification,
            'title': i.title,
            'cover': SERVER_GULAOBURL + "/static/com/video/cover/" + i.cover,
            'introduce': i.introduce,
            'source_type': i.source_type,
            'original_type': i.original_type,
            'original_url': i.original_url,
            'original_author': i.original_author,
            'videoloadurl': i.videoloadurl,
            'show_index': i.show_index,
            # 'update_time': i.update_time.strftime("%Y-%m-%d"),
            'create_time': i.create_time.strftime("%Y-%m-%d %H:%M:%S")
        })
    return 200, 'ok', {
        'result': result,
        'count': count,
        'page': page,
        'pages': pages
    }
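# Every handler in this file returns a (status_code, message, payload) tuple.
# The sketch below shows one way a route layer could adapt that contract to an
# HTTP response. Flask, jsonify, and the /api/video/list URL are assumptions
# for illustration only; the project's actual framework and routing may differ.
from flask import Flask, request as flask_request, jsonify

app = Flask(__name__)

@app.route('/api/video/list', methods=['GET'])
def video_list_route():
    # The handlers expect a dict-like object with .get(); request.args fits,
    # and its string values match the int coercion done inside the handlers.
    code, msg, payload = video_list(flask_request.args)
    return jsonify({'code': code, 'msg': msg, 'data': payload}), code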