def index(pk=None, tag=None):
    """Render the article index page.

    GET  -> list articles, optionally filtered by *tag*.
    POST -> list articles whose title contains the submitted search word.

    Args:
        pk: current page marker, forwarded to the paginator.
        tag: optional tag name used to filter articles (GET only).

    Returns:
        Rendered index.html with the paginated articles and the tag list.
    """
    if request.method == 'GET':
        if tag:
            article_ret = DATABASE.article.find({"tag": tag})
        else:
            article_ret = DATABASE.article.find({})
        return _render_article_page(pk, article_ret)
    if request.method == 'POST':
        import re  # local import: only needed to sanitize the search term
        wd = request.form.get('wd') or ''
        # re.escape keeps user-controlled regex metacharacters from being
        # interpreted by MongoDB ($regex injection / ReDoS); the search is a
        # literal substring match on the title.
        ret = DATABASE.article.find({"title": {'$regex': re.escape(wd)}})
        return _render_article_page(pk, ret)


def _render_article_page(pk, article_cursor):
    """Paginate *article_cursor* and render index.html (shared GET/POST tail)."""
    pager_obj = pagination.Pagination(pk, article_cursor.count(), request.path)
    article_ret = article_cursor[pager_obj.start:pager_obj.end]
    page = pager_obj.page_html()
    tag_ret = DATABASE.tag.find({})
    data = {'article_ret': article_ret, 'tag_ret': tag_ret}
    return render_template('index.html', data=data, page=page)
def top(page):
    """Render the "top" story listing, ordered by likes, one page at a time.

    Args:
        page: 1-based page number from the route.

    Returns:
        Rendered index.html, or 404.html for an invalid or empty page.
    """
    # Reject non-positive page numbers outright.
    # (A leftover debug print of the page number was removed here.)
    if page <= 0:
        return render_template("404.html")
    # Row offset of the first story on this page.
    row = PER_PAGE * (page - 1)
    # Total story count, needed by the paginator.
    stories_count = db.execute(
        "SELECT COUNT(*) AS length FROM stories")[0]["length"]
    stories = db.execute(
        "SELECT * FROM stories ORDER BY likes DESC LIMIT :row_number, :count",
        row_number=row, count=PER_PAGE)
    # A page past the last one (but not page 1) is a 404.
    if not stories and page != 1:
        return render_template("404.html")
    # Attach the comment count to every story.
    for story in stories:
        get_coms_count(story)
    pag = pagination.Pagination(page, PER_PAGE, stories_count)
    pages_count = get_pages_count(stories_count)
    return render_template("index.html", pagination=pag, stories=stories,
                           pages_count=pages_count, current_page=page,
                           from_where="top")
def get_users(page):
    """Return one page of users as JSON.

    Args:
        page: 1-based page number.

    Returns:
        JSON payload with the page's users plus paging metadata, a plain
        message when the table is empty, or a 404 when the requested page
        is out of range.
    """
    page_content = {}
    query = "SELECT * FROM users"
    cursor.execute(query)
    # Column names, used as JSON keys for each row.
    row_headers = [x[0] for x in cursor.description]
    data = cursor.fetchall()
    json_data = []
    for entry in data:
        json_data.append(dict(zip(row_headers, entry)))
    count = len(data)
    if count == 0:
        # BUG FIX: the original evaluated this string without returning it,
        # so an empty table fell through and the view returned None.
        return 'No Users Found!'
    else:
        pg = pagination.Pagination(count, page)
        offset = (page - 1) * pg.page_size
        # NOTE(review): the +1 skips the first row of every page — looks like
        # an off-by-one, but the same convention is used in get_posts();
        # confirm against pagination.Pagination before changing.
        page_content['data'] = json_data[offset + 1:offset + pg.page_size + 1]
        if not page_content['data']:
            abort(404)
        page_content['offset'] = offset
        page_content['total'] = count
        page_content['page'] = page
        if pg.has_next():
            page_content['next'] = str(
                request.url_root) + 'users/page/' + str(page + 1)
        if pg.has_previous():
            page_content['previous'] = str(
                request.url_root) + 'users/page/' + str(page - 1)
        return jsonify(page_content)
def get(self):
    """Return the paginated list of services."""
    # Pull paging parameters (page / per_page) from the request.
    query_args = pagination.parser.parse_args()
    result = Service.objects().paginate(
        query_args.get('page'), query_args.get('per_page'))
    # Build the pagination response headers from the result set.
    headers = pagination.Pagination(result).headers
    return result.items, 200, headers
def posts_by_tag(tag, page):
    """Render page *page* of the posts carrying the given tag."""
    per_page = int(app.config['PER_PAGE'])
    fetched = postClass.get_posts(per_page, per_page * (page - 1), tag=tag)
    total = postClass.get_total_count(tag=tag)
    # No posts on this page at all -> not found.
    if not fetched['data']:
        abort(404)
    pager = pagination.Pagination(page, app.config['PER_PAGE'], total)
    return render_template('index.html',
                           posts=fetched['data'],
                           pagination=pager,
                           meta_title='Posts by tag: ' + tag)
def index(page):
    """Render one page of the blog front page."""
    per_page = int(app.config['PER_PAGE'])
    fetched = postClass.get_posts(per_page, per_page * (page - 1))
    total = postClass.get_total_count()
    pager = pagination.Pagination(page, app.config['PER_PAGE'], total)
    return render_template('index.html',
                           posts=fetched['data'],
                           pagination=pager,
                           meta_title=app.config['BLOG_TITLE'])
def get(self):
    """Return the paginated list of clusters visible to the caller."""
    query_args = pagination.parser.parse_args()
    # Only clusters sharing a role with the current identity are listed.
    result = Cluster.objects(roles__in=current_identity.roles).paginate(
        query_args.get('page'), query_args.get('per_page'))
    headers = pagination.Pagination(result).headers
    return result.items, 200, headers
def search_results(page, query):
    """Render one page of posts matching *query*.

    Args:
        page: 1-based page number.
        query: search string; falsy means "no search term submitted".

    Returns:
        Rendered index.html with the matching posts (empty list when there
        is no query).
    """
    skip = (page - 1) * int(app.config['PER_PAGE'])
    if query:
        posts = postClass.get_posts(
            int(app.config['PER_PAGE']), skip, search=query)
    else:
        # BUG FIX: the original built a *list* and then assigned
        # posts['data'] = [], which raises TypeError on every empty query.
        posts = {'data': []}
    count = postClass.get_total_count(search=query)
    pag = pagination.Pagination(page, app.config['PER_PAGE'], count)
    return render_template('index.html',
                           posts=posts['data'],
                           pagination=pag,
                           meta_title='Search results')
def posts(page):
    """Render the post listing, one page at a time."""
    # Drop any stale preview left over from the post editor.
    session.pop('post-preview', None)
    per_page = int(app.config['PER_PAGE'])
    fetched = postClass.get_posts(per_page, per_page * (page - 1))
    total = postClass.get_total_count()
    pager = pagination.Pagination(page, app.config['PER_PAGE'], total)
    # Empty page -> not found.
    if not fetched['data']:
        abort(404)
    return render_template('posts.html',
                           posts=fetched['data'],
                           pagination=pager,
                           meta_title='Posts')
def list_folder_emails(folder, page):
    """Render the message list of *folder*, paginated PER_PAGE at a time."""
    maildir_root = '/home/artur/Maildir'
    maildir = MailFolder(maildir_root)
    # INBOX is the root folder; anything else must exist as a subfolder.
    if folder != 'INBOX':
        if folder not in maildir.list_folders():
            flask.abort(
                flask.make_response('No such mail folder: ' + folder, 416))
        else:
            maildir = maildir.get_folder(folder)
    messages = maildir.get_messages_list()
    pager = pagination.Pagination(page, PER_PAGE, len(messages))
    return flask.render_template('mails_list.html',
                                 messages=messages,
                                 maildir_folder=folder,
                                 pagination=pager)
def index(page):
    """Render page *page* of the story index, newest stories first."""
    # Negative or zero page numbers are invalid.
    if page <= 0:
        return render_template("404.html")
    # Offset of the first story on this page.
    first_row = PER_PAGE * (page - 1)
    # Total story count, needed by the paginator.
    total = db.execute(
        "SELECT COUNT(*) AS length FROM stories")[0]["length"]
    stories = db.execute(
        "SELECT * FROM stories ORDER BY id DESC LIMIT :row_number, :count",
        row_number=first_row, count=PER_PAGE)
    # An empty page is only legal when it is the very first one.
    if not stories and page != 1:
        return render_template("404.html")
    # Annotate each story with its comment count.
    for story in stories:
        get_coms_count(story)
    pager = pagination.Pagination(page, PER_PAGE, total)
    return render_template("index.html",
                           pagination=pager,
                           stories=stories,
                           pages_count=get_pages_count(total),
                           current_page=page,
                           from_where="index")
def get_posts(facebookID, page):
    """Return one JSON page of a user's Facebook posts, syncing the local DB.

    Without ?local, the function fetches the user's latest posts from the
    Graph API, reconciles them into the posts table (inserting new posts and
    deleting outdated ones), then serves the requested page from the
    database. With ?local, it serves straight from the database.

    Args:
        facebookID: Facebook user id (string from the route).
        page: 1-based page number.

    Returns:
        JSON payload with the page's posts plus paging metadata, a plain
        message when the user has no posts, or a 404 for an out-of-range
        page.
    """
    # NOTE(review): every query below builds SQL by concatenating the route
    # parameter into the string — switch to parameterized queries
    # (cursor.execute(sql, params)) to rule out SQL injection; left as-is
    # here because the driver's paramstyle isn't visible in this file.
    page_content = {}
    # get query param ?local
    local = request.args.get('local')
    # ?local: serve from the database only, no Graph API round-trip.
    if local:
        query = "SELECT * FROM posts WHERE user_id = " + str(facebookID)
        cursor.execute(query)
        data = cursor.fetchall()
        # BUG FIX: the original tested `len(data) is 0` — an identity
        # comparison against an int literal that only works via CPython's
        # small-int caching (and warns on 3.8+). Same fix applied below.
        if not data:
            return "You don't have any posts yet!"
        else:
            return jsonify(data)
    # request user's posts
    user_posts = graph.request('/' + str(facebookID) + '/posts?limit=25')
    posts = user_posts['data']
    query = "SELECT * FROM posts WHERE user_id = " + str(
        facebookID) + " ORDER BY created_time DESC"
    cursor.execute(query)
    data = cursor.fetchall()
    if not data:
        # Empty table: store every fetched post.
        for post in posts:
            cursor.callproc('sp_createPost',
                            (post['id'], facebookID, post['message'],
                             post['created_time']))
            data = cursor.fetchall()
            if not data:
                conn.commit()
    else:
        i = 0
        for i in range(0, len(posts)):
            # compare last entry with each post in fetched posts
            if data[0][0] == posts[i]['id']:
                break
            # if last post in database is not reached yet, add post[i]
            cursor.callproc('sp_createPost',
                            (posts[i]['id'], facebookID, posts[i]['message'],
                             posts[i]['created_time']))
            d = cursor.fetchall()
            if not d:
                conn.commit()
        # delete outdated posts
        if i > 0:
            count = len(data) + i
            for i in range(0, count - 25):
                query = "DELETE FROM posts WHERE post_id = '" + str(
                    data[len(data) - (i + 1)][0]) + "'"
                cursor.execute(query)
                conn.commit()
    query = "SELECT * FROM posts WHERE user_id = '" + facebookID + \
        "' ORDER BY created_time DESC"
    cursor.execute(query)
    # Column names, used as JSON keys for each row.
    row_headers = [x[0] for x in cursor.description]
    data = cursor.fetchall()
    json_data = []
    for entry in data:
        json_data.append(dict(zip(row_headers, entry)))
    count = len(data)
    pg = pagination.Pagination(count, page)
    offset = (page - 1) * pg.page_size
    # NOTE(review): the +1 skips the first row of every page — looks like an
    # off-by-one, but it matches get_users(); confirm against
    # pagination.Pagination before changing.
    page_content['data'] = json_data[offset + 1:offset + pg.page_size + 1]
    if not page_content['data']:
        abort(404)
    page_content['offset'] = offset
    page_content['total'] = count
    page_content['page'] = page
    if pg.has_next():
        page_content['next'] = str(
            request.url_root) + 'users/' + facebookID + '/posts/page/' + str(
            page + 1)
    if pg.has_previous():
        page_content['previous'] = str(
            request.url_root) + 'users/' + facebookID + '/posts/page/' + str(
            page - 1)
    return jsonify(page_content)