def test_with_limit(self):
    """With an explicit limit, handle_paging echoes it and computes start."""
    cases = [
        (10, 0, (10, 0, 0)),
        (10, 2, (10, 2, 20)),
    ]
    for lim, pg, expected in cases:
        self.assertEqual(g.handle_paging(lim, pg), expected)
    # handle_paging must not mess up user preferences
    self.assertEqual(c.user.get_pref('results_per_page'), None)
    # an absurd limit is clamped to the enforced maximum of 500
    self.assertEqual(g.handle_paging(99999999, 0), (500, 0, 0))
def test_without_limit(self):
    """With no limit, fall back to the default of 25, then to the user pref."""
    for pg, expected in [(0, (25, 0, 0)), (2, (25, 2, 50))]:
        self.assertEqual(g.handle_paging(None, pg), expected)
    # paging must not write a preference as a side effect
    self.assertEqual(c.user.get_pref('results_per_page'), None)
    # a stored page-size preference overrides the built-in default
    c.user.set_pref('results_per_page', 100)
    for pg, expected in [(0, (100, 0, 0)), (2, (100, 2, 200))]:
        self.assertEqual(g.handle_paging(None, pg), expected)
    # ...and the preference itself is left untouched
    self.assertEqual(c.user.get_pref('results_per_page'), 100)
def test_without_limit_with_default(self):
    """An explicit default overrides 25, but never the user's own preference."""
    # the passed-in default (30) is used instead of the built-in 25
    for pg, expected in [(0, (30, 0, 0)), (2, (30, 2, 60))]:
        self.assertEqual(g.handle_paging(None, pg, 30), expected)
    # paging must not write a preference as a side effect
    self.assertEqual(c.user.get_pref('results_per_page'), None)
    # a stored preference beats the caller-supplied default
    c.user.set_pref('results_per_page', 25)
    for pg, expected in [(0, (25, 0, 0)), (2, (25, 2, 50))]:
        self.assertEqual(g.handle_paging(None, pg, 30), expected)
    # ...and the preference itself is left untouched
    self.assertEqual(c.user.get_pref('results_per_page'), 25)
def index(self, version=None, page=0, limit=None, **kw):
    """Display a wiki page (optionally a specific historical version).

    :param version: version number to show; None means current
    :param page: comment-thread page number
    :param limit: comments per page; None falls back to handle_paging default
    """
    # a missing page sends the user straight to the edit form to create it
    if not self.page:
        redirect(c.app.url + h.urlquote(self.title) + '/edit')
    c.confirmation = W.confirmation
    c.thread = W.thread
    c.attachment_list = W.attachment_list
    c.subscribe_form = W.page_subscribe_form
    post_count = self.page.discussion_thread.post_count
    limit, pagenum, _ = g.handle_paging(limit, page)
    # clamp limit/pagenum to the actual number of comments
    limit, pagenum = h.paging_sanitizer(limit, pagenum, post_count)
    # NOTE: `page` is rebound from page-number to a page *version* object here
    page = self.get_version(version)
    if page is None:
        if version:
            # requested version doesn't exist; step back one version
            redirect('.?version=%d' % (version - 1))
        else:
            redirect('.')
    elif 'all' not in page.viewable_by and c.user.username not in page.viewable_by:
        raise exc.HTTPForbidden(detail="You may not view this page.")
    cur = page.version
    if cur > 1:
        prev = cur - 1
    else:
        prev = None
    next = cur + 1
    hide_left_bar = not (c.app.show_left_bar)
    subscribed_to_page = M.Mailbox.subscribed(artifact=self.page)
    c.subscribe_form.tool_subscribed = M.Mailbox.subscribed()
    return dict(
        page=page, cur=cur, prev=prev, next=next,
        page_subscribed=subscribed_to_page,
        hide_left_bar=hide_left_bar, show_meta=c.app.show_right_bar,
        pagenum=pagenum, limit=limit, count=post_count)
def index(self, page=0, limit=DEFAULT_PAGE_LIMIT, **kw):
    """Render a commit page: metadata plus a sorted, paged list of changed files.

    :param page: zero-based page of the file listing
    :param limit: files per page (validated by handle_paging)
    """
    c.revision_widget = self.revision_widget
    c.page_list = self.page_list
    result = dict(commit=self._commit)
    if self._commit:
        result.update(self._commit.context())
    tree = self._commit.tree
    limit, page, start = g.handle_paging(limit, page,
                                         default=self.DEFAULT_PAGE_LIMIT)
    diffs = self._commit.paged_diffs(start=start, end=start + limit,
                                     onlyChangedFiles=True)
    result['artifacts'] = []
    for t in ('added', 'removed', 'changed', 'copied', 'renamed'):
        for f in diffs[t]:
            if t in ('copied', 'renamed'):
                # copied/renamed entries are dicts with 'old'/'new' paths
                filepath = f['new']
            else:
                filepath = f
            is_text = filepath and tree.get_blob_by_path(filepath) and tree.get_blob_by_path(filepath).has_html_view
            # NOTE(review): passes `f` (a dict for copied/renamed) rather than
            # `filepath` to get_blob_by_path here — confirm that is intended
            result['artifacts'].append(
                (t, f, 'blob' if tree.get_blob_by_path(f) else 'tree', is_text)
            )
    count = diffs['total']
    result.update(dict(page=page, limit=limit, count=count))
    # Sort result['artifacts'], which looks like:
    # [('added', u'aaa.txt', 'blob', True),
    #  ('removed', u'bbb.txt', 'tree', None),
    #  ('changed', u'ccc.txt', 'blob', True)]
    # copied/renamed entries carry a dict in slot 1, so sort on its 'old' path.
    result['artifacts'].sort(key=lambda x: x[1]['old'] if(type(x[1]) == dict) else x[1])
    return result
def paged_query(cls, app_config, user, query, limit=None, page=0, sort=None, deleted=False, **kw):
    """
    Query tickets, filtering for 'read' permission, sorting and paginating the result.

    See also paged_search which does a solr search

    :param sort: "<field> <asc|desc>"; fields starting with '_' are custom fields
    :returns: dict with tickets, count, q (JSON of the query), limit, page, sort
    """
    limit, page, start = g.handle_paging(limit, page, default=25)
    q = cls.query.find(dict(query, app_config_id=app_config._id, deleted=deleted))
    # default ordering: newest ticket numbers first
    q = q.sort('ticket_num', pymongo.DESCENDING)
    if sort:
        field, direction = sort.split()
        if field.startswith('_'):
            field = 'custom_fields.' + field
        direction = dict(
            asc=pymongo.ASCENDING,
            desc=pymongo.DESCENDING)[direction]
        q = q.sort(field, direction)
    q = q.skip(start)
    q = q.limit(limit)
    tickets = []
    count = q.count()
    for t in q:
        if security.has_access(t, 'read', user, app_config.project.root_project):
            tickets.append(t)
        else:
            # NOTE(review): only unreadable tickets on *this* page are
            # subtracted, so count may still include hidden tickets on
            # other pages — confirm this is acceptable to callers
            count = count -1
    return dict(
        tickets=tickets,
        count=count,
        q=json.dumps(query),
        limit=limit,
        page=page,
        sort=sort,
        **kw)
def browse_tags(self, sort='alpha', page=0, limit=None, **kw):
    'list of all labels in the wiki'
    c.page_list = W.page_list
    c.page_size = W.page_size
    limit, pagenum, start = g.handle_paging(limit, page, default=25)
    count = 0
    # label -> list of pages carrying that label
    page_tags = {}
    q = WM.Page.query.find(
        dict(app_config_id=c.app.config._id, deleted=False,
             labels={'$ne': []}))
    # NOTE: loop variable shadows the `page` parameter (already consumed above)
    for page in q:
        if page.labels:
            for label in page.labels:
                if label not in page_tags:
                    page_tags[label] = []
                page_tags[label].append(page)
    count = len(page_tags)
    name_labels = list(page_tags)
    name_labels.sort()
    # pagination applies to the sorted label names, not to the pages
    return dict(labels=page_tags, limit=limit, count=count, page=pagenum,
                name_labels=name_labels[start:start + limit])
def index(self, version=None, page=0, limit=None, **kw):
    """Display a wiki page (optionally a specific historical version).

    :param version: version number to show; None means current
    :param page: comment-thread page number
    :param limit: comments per page; None falls back to handle_paging default
    """
    # a missing page sends the user straight to the edit form to create it
    if not self.page:
        redirect(c.app.url + h.urlquote(self.title) + '/edit')
    c.confirmation = W.confirmation
    c.thread = W.thread
    c.attachment_list = W.attachment_list
    c.subscribe_form = W.page_subscribe_form
    post_count = self.page.discussion_thread.post_count
    limit, pagenum, _ = g.handle_paging(limit, page)
    # clamp limit/pagenum to the actual number of comments
    limit, pagenum = h.paging_sanitizer(limit, pagenum, post_count)
    # NOTE: `page` is rebound from page-number to a page *version* object here
    page = self.get_version(version)
    if page is None:
        if version:
            # requested version doesn't exist; step back one version
            redirect('.?version=%d' % (version - 1))
        else:
            redirect('.')
    elif 'all' not in page.viewable_by and c.user.username not in page.viewable_by:
        raise exc.HTTPForbidden(detail="You may not view this page.")
    cur = page.version
    if cur > 1:
        prev = cur - 1
    else:
        prev = None
    next = cur + 1
    hide_left_bar = not (c.app.show_left_bar)
    subscribed_to_page = M.Mailbox.subscribed(artifact=self.page)
    return dict(
        page=page, cur=cur, prev=prev, next=next,
        page_subscribed=subscribed_to_page,
        hide_left_bar=hide_left_bar, show_meta=c.app.show_right_bar,
        pagenum=pagenum, limit=limit, count=post_count)
def adminlist(self, sort='alpha', limit=25, page=0, **kw):
    """Paged list of (project, admin user) pairs for this neighborhood.

    :param sort: 'alpha' for name order, anything else for last-updated desc
    """
    limit, page, start = g.handle_paging(limit, page)
    pq = M.Project.query.find(
        dict(neighborhood_id=self.neighborhood._id, deleted=False))
    if sort == 'alpha':
        pq.sort('name')
    else:
        pq.sort('last_updated', pymongo.DESCENDING)
    count = pq.count()
    projects = pq.skip(start).limit(int(limit)).all()
    entries = []
    for proj in projects:
        admin_role = M.ProjectRole.query.get(
            project_id=proj.root_project._id, name='Admin')
        if admin_role is None:
            continue
        # name=None rows are per-user roles; match those holding the Admin role
        user_role_list = M.ProjectRole.query.find(
            dict(project_id=proj.root_project._id, name=None)).all()
        for ur in user_role_list:
            if ur.user is not None and admin_role._id in ur.roles:
                entries.append({'project': proj, 'user': ur.user})
    set_nav(self.neighborhood)
    return dict(entries=entries, sort=sort, limit=limit, page=page,
                count=count, page_list=W.page_list,
                neighborhood=self.neighborhood,
                )
def index(self, limit=None, page=0, **kw):
    """REST view of a forum: its metadata plus a paged list of 'ok' topics."""
    limit, page, start = g.handle_paging(limit, int(page))
    topics = model.Forum.thread_class().query.find(
        dict(discussion_id=self.forum._id))
    # pinned/flagged threads first, then most recently active
    topics = topics.sort([('flags', pymongo.DESCENDING),
                          ('last_post_date', pymongo.DESCENDING)])
    topics = topics.skip(start).limit(limit)
    count = topics.count()
    # NOTE: local `json` shadows the stdlib json module within this function
    json = {}
    json['forum'] = self.forum.__json__(
        limit=1
    )  # small limit since we're going to "del" the threads anyway
    # topics replace threads here
    del json['forum']['threads']
    json['forum']['topics'] = [
        dict(_id=t._id, subject=t.subject, num_replies=t.num_replies,
             num_views=t.num_views, url=h.absurl('/rest' + t.url()),
             last_post=t.last_post)
        for t in topics if t.status == 'ok'
    ]
    json['count'] = count
    json['page'] = page
    json['limit'] = limit
    return json
def index(self, **kw):
    """Moderation queue: filter posts by status/flag count/author, paged.

    All parameters arrive via **kw and are validated by the post_filter widget.
    """
    kw = WidgetConfig.post_filter.validate(kw, None)
    page = kw.pop('page', 0)
    limit = kw.pop('limit', 50)
    status = kw.pop('status', 'pending')
    username = kw.pop('username', None)
    flag = kw.pop('flag', None)
    c.post_filter = WidgetConfig.post_filter
    c.moderate_posts = WidgetConfig.moderate_posts
    c.page_list = WidgetConfig.page_list
    query = dict(
        discussion_id=self.discussion._id,
        deleted=False)
    # '-' means "any status"
    if status != '-':
        query['status'] = status
    if flag:
        query['flags'] = {'$gte': int(flag)}
    if username:
        filtered_user = User.by_username(username)
        # unknown username matches nothing (author_id=None)
        query['author_id'] = filtered_user._id if filtered_user else None
    q = self.PostModel.query.find(query).sort('timestamp', -1)
    count = q.count()
    limit, page, start = g.handle_paging(limit, page or 0, default=50)
    q = q.skip(start)
    q = q.limit(limit)
    pgnum = (page // limit) + 1
    pages = (count // limit) + 1
    return dict(discussion=self.discussion, posts=q, page=page, limit=limit,
                status=status, flag=flag, username=username, pgnum=pgnum,
                pages=pages, count=count)
def index(self, **kw):
    """Moderation queue: filter posts by status/flag count/author, paged.

    All parameters arrive via **kw and are validated by the post_filter widget.
    Returns the paged posts plus paging metadata (including the total count,
    which the sibling implementation also exposes and which was computed here
    but previously dropped from the result).
    """
    kw = WidgetConfig.post_filter.validate(kw, None)
    page = kw.pop('page', 0)
    limit = kw.pop('limit', 50)
    status = kw.pop('status', 'pending')
    username = kw.pop('username', None)
    flag = kw.pop('flag', None)
    c.post_filter = WidgetConfig.post_filter
    c.moderate_posts = WidgetConfig.moderate_posts
    query = dict(discussion_id=self.discussion._id, deleted=False)
    # '-' means "any status"
    if status != '-':
        query['status'] = status
    if flag:
        query['flags'] = {'$gte': int(flag)}
    if username:
        filtered_user = User.by_username(username)
        # unknown username matches nothing (author_id=None)
        query['author_id'] = filtered_user._id if filtered_user else None
    q = self.PostModel.query.find(query)
    count = q.count()
    limit, page, start = g.handle_paging(limit, page or 0, default=50)
    q = q.skip(start)
    q = q.limit(limit)
    pgnum = (page // limit) + 1
    pages = (count // limit) + 1
    # fix: include count in the result, consistent with the other moderation
    # index() implementation; it was computed above but never returned
    return dict(discussion=self.discussion, posts=q, page=page, limit=limit,
                status=status, flag=flag, username=username, pgnum=pgnum,
                pages=pages, count=count)
def adminlist(self, sort='alpha', limit=25, page=0, **kw):
    """Paged list of (project, admin user) pairs for this neighborhood.

    :param sort: 'alpha' for name order, anything else for last-updated desc
    """
    limit, page, start = g.handle_paging(limit, page)
    pq = M.Project.query.find(
        dict(neighborhood_id=self.neighborhood._id, deleted=False))
    if sort == 'alpha':
        pq.sort('name')
    else:
        pq.sort('last_updated', pymongo.DESCENDING)
    count = pq.count()
    projects = pq.skip(start).limit(int(limit)).all()
    entries = []
    for proj in projects:
        admin_role = M.ProjectRole.query.get(
            project_id=proj.root_project._id, name='Admin')
        if admin_role is None:
            continue
        # name=None rows are per-user roles; match those holding the Admin role
        user_role_list = M.ProjectRole.query.find(
            dict(project_id=proj.root_project._id, name=None)).all()
        for ur in user_role_list:
            if ur.user is not None and admin_role._id in ur.roles:
                entries.append({'project': proj, 'user': ur.user})
    set_nav(self.neighborhood)
    return dict(
        entries=entries, sort=sort, limit=limit, page=page, count=count,
        page_list=W.page_list, neighborhood=self.neighborhood,
    )
def _search(self, model, fields, add_fields, q=None, f=None, page=0, limit=None, **kw):
    """Site-admin solr search over `model`, joining hits to mongo objects.

    :param fields: base (label, solr-field) pairs for the search form
    :param add_fields: extra field names appended to the form
    :param q: solr query string; empty means no search performed
    :param f: field to search on
    """
    all_fields = fields + [(fld, fld) for fld in add_fields]
    c.search_form = W.admin_search_form(all_fields)
    c.page_list = W.page_list
    c.page_size = W.page_size
    count = 0
    objects = []
    limit, page, start = g.handle_paging(limit, page, default=25)
    if q:
        match = search.site_admin_search(model, q, f, rows=limit, start=start)
        if match:
            count = match.hits
            objects = match.docs
            # solr ids look like "<type>#<mongo_id>"
            ids = [obj['id'] for obj in objects]
            mongo_objects = search.mapped_artifacts_from_index_ids(
                ids, model)
            for i in range(len(objects)):
                obj = objects[i]
                _id = obj['id'].split('#')[1]
                obj['object'] = mongo_objects.get(_id)
            # Some objects can be deleted, but still have index in solr, should skip those
            objects = [o for o in objects if o.get('object')]

    def convert_fields(obj):
        # throw the type away (e.g. '_s' from 'url_s')
        result = {}
        # NOTE(review): iteritems/map are Python 2 idioms; revisit on a
        # Python 3 migration (.items() and list(map(...)))
        for k, val in obj.iteritems():
            name = k.rsplit('_', 1)
            if len(name) == 2:
                name = name[0]
            else:
                name = k
            result[name] = val
        return result

    return {
        'q': q,
        'f': f,
        'objects': map(convert_fields, objects),
        'count': count,
        'page': page,
        'limit': limit,
        'fields': fields,
        'additional_fields': add_fields,
        'type_s': model.type_s,
    }
def index(self, limit=None, page=0, **kw):
    """JSON summary of this topic with paging info and its 'ok'-post count."""
    limit, page, start = g.handle_paging(limit, int(page))
    return {
        'topic': self.topic.__json__(limit=limit, page=page),
        'count': self.topic.query_posts(status='ok').count(),
        'page': page,
        'limit': limit,
    }
def index(self, limit=100, page=0, **kw):
    """JSON summary of this topic with paging info and its 'ok'-post count."""
    limit, page, start = g.handle_paging(int(limit), int(page))
    return {
        'topic': self.topic.__json__(limit=limit, page=page),
        'count': self.topic.query_posts(status='ok').count(),
        'page': page,
        'limit': limit,
    }
def index(self, threads=None, limit=25, page=0, count=0, **kw):
    """Forum landing page: delegate to the parent index with paged threads.

    `threads` and `count` parameters are ignored; they are recomputed here.
    """
    if self.discussion.deleted:
        redirect(self.discussion.url()+'deleted')
    limit, page, start = g.handle_paging(limit, page)
    c.subscribed=M.Mailbox.subscribed(artifact=self.discussion)
    # only threads with replies; pinned/flagged first, then most recent
    threads = DM.ForumThread.query.find(dict(discussion_id=self.discussion._id, num_replies={'$gt': 0})) \
        .sort([('flags', pymongo.DESCENDING), ('last_post_date', pymongo.DESCENDING)])
    return super(ForumController, self).index(
        threads=threads.skip(start).limit(int(limit)).all(),
        limit=limit, page=page, count=threads.count(), **kw)
def index(self, limit=100, page=0, **kw):
    """JSON view of this topic including the full posts for one page."""
    limit, page, start = g.handle_paging(int(limit), int(page))
    # style='' returns a flat (unthreaded) post query
    posts = self.topic.query_posts(page=page, limit=limit, style='')
    # NOTE: local `json` shadows the stdlib json module within this function
    json = {}
    json['topic'] = self.topic.__json__()
    json['count'] = posts.count()
    json['page'] = page
    json['limit'] = limit
    json['topic']['posts'] = posts.all()
    return json
def index(self, sort='alpha', limit=25, page=0, **kw):
    """Neighborhood home page: homepage text plus a paged project list.

    Redirects away when the neighborhood has a redirect configured or lacks
    a home tool (in which case the first mounted tool is shown instead).
    """
    if self.neighborhood.redirect:
        redirect(self.neighborhood.redirect)
    if not self.neighborhood.has_home_tool:
        mount = c.project.ordered_mounts()[0]
        if mount is not None:
            if 'ac' in mount:
                redirect(mount['ac'].options.mount_point + '/')
            elif 'sub' in mount:
                redirect(mount['sub'].url())
        else:
            redirect(c.project.app_configs[0].options.mount_point + '/')
    c.project_summary = W.project_summary
    c.page_list = W.page_list
    limit, page, start = g.handle_paging(limit, page)
    pq = M.Project.query.find(
        dict(
            neighborhood_id=self.neighborhood._id,
            deleted=False,
            is_nbhd_project=False,
        ))
    if sort == 'alpha':
        pq.sort('name')
    else:
        pq.sort('last_updated', pymongo.DESCENDING)
    count = pq.count()
    nb_max_projects = self.neighborhood.get_max_projects()
    projects = pq.skip(start).limit(int(limit)).all()
    categories = M.ProjectCategory.query.find({
        'parent_id': None
    }).sort('name').all()
    c.custom_sidebar_menu = []
    # offer "Add a Project" only if the user may register and quota allows
    if h.has_access(self.neighborhood, 'register')() and (nb_max_projects is None or count < nb_max_projects):
        c.custom_sidebar_menu += [
            SitemapEntry('Add a Project', self.neighborhood.url() + 'add_project', ui_icon=g.icons['add']),
            SitemapEntry('')
        ]
    c.custom_sidebar_menu = c.custom_sidebar_menu + [
        SitemapEntry(cat.label, self.neighborhood.url() + 'browse/' + cat.name)
        for cat in categories
    ]
    return dict(neighborhood=self.neighborhood,
                title="Welcome to " + self.neighborhood.name,
                text=g.markdown.cached_convert(self.neighborhood, 'homepage'),
                projects=projects,
                sort=sort,
                limit=limit, page=page, count=count)
def index(self, sort='alpha', limit=25, page=0, **kw):
    """Neighborhood home page: intro text plus a paged project list.

    The intro text comes from (in priority order) the configured root wiki
    page, a redirect, the first mounted tool (redirect), or the rendered
    neighborhood homepage markdown.
    """
    text = None
    if self.neighborhood.use_wiki_page_as_root:
        default_wiki_page = get_default_wiki_page()
        if default_wiki_page:
            text = default_wiki_page.html_text
    elif self.neighborhood.redirect:
        redirect(self.neighborhood.redirect)
    elif not self.neighborhood.has_home_tool:
        mount = c.project.ordered_mounts()[0]
        if mount is not None:
            if 'ac' in mount:
                redirect(mount['ac'].options.mount_point + '/')
            elif 'sub' in mount:
                redirect(mount['sub'].url())
        else:
            redirect(c.project.app_configs[0].options.mount_point + '/')
    else:
        # fix: the original had a trailing comma here, which made `text`
        # a 1-tuple containing the HTML instead of the HTML string itself
        text = g.markdown.cached_convert(self.neighborhood, 'homepage')
    c.project_summary = W.project_summary
    c.page_list = W.page_list
    limit, page, start = g.handle_paging(limit, page)
    pq = M.Project.query.find(dict(
        neighborhood_id=self.neighborhood._id,
        deleted=False,
        is_nbhd_project=False,
    ))
    if sort == 'alpha':
        pq.sort('name')
    else:
        pq.sort('last_updated', pymongo.DESCENDING)
    count = pq.count()
    nb_max_projects = self.neighborhood.get_max_projects()
    projects = pq.skip(start).limit(int(limit)).all()
    categories = M.ProjectCategory.query.find(
        {'parent_id': None}).sort('name').all()
    c.custom_sidebar_menu = []
    # offer "Add a Project" only if the user may register and quota allows
    if h.has_access(self.neighborhood, 'register')() and (nb_max_projects is None or count < nb_max_projects):
        c.custom_sidebar_menu += [
            SitemapEntry('Add a Project', self.neighborhood.url() + 'add_project', ui_icon=g.icons['add']),
            SitemapEntry('')
        ]
    c.custom_sidebar_menu = c.custom_sidebar_menu + [
        SitemapEntry(cat.label, self.neighborhood.url() + 'browse/' + cat.name)
        for cat in categories
    ]
    return dict(neighborhood=self.neighborhood,
                title="Welcome to " + self.neighborhood.name,
                text=text,
                projects=projects,
                sort=sort,
                limit=limit, page=page, count=count)
def history(self, page=0, limit=None, **kw):
    """Show the edit history of this wiki page, paginated (default 25/page)."""
    if not self.page:
        raise exc.HTTPNotFound
    c.page_list = W.page_list
    c.page_size = W.page_size
    limit, pagenum, start = g.handle_paging(limit, page, default=25)
    count = 0
    history_q = self.page.history()
    count = history_q.count()
    visible = history_q.skip(start).limit(int(limit))
    return dict(
        title=self.title,
        pages=visible,
        limit=limit,
        count=count,
        page=pagenum,
    )
def index(self, page=0, limit=None, **kw):
    """List blog posts newest-first; drafts are hidden from non-writers."""
    filters = dict(app_config_id=c.app.config._id)
    if not has_access(c.app, "write")():
        filters["state"] = "published"
    posts_q = BM.BlogPost.query.find(filters)
    total = posts_q.count()
    limit, page, _ = g.handle_paging(limit, page)
    limit, page = h.paging_sanitizer(limit, page, total)
    visible = posts_q.sort("timestamp", pymongo.DESCENDING) \
                     .skip(page * limit).limit(limit)
    c.form = W.preview_post_form
    c.pager = W.pager
    return dict(posts=visible, page=page, limit=limit, count=total)
def index(self, sort='alpha', limit=25, page=0, **kw):
    """Paged, sorted listing of this neighborhood's projects."""
    c.project_summary = W.project_summary
    c.page_list = W.page_list
    limit, page, start = g.handle_paging(limit, page)
    projects, count = self._find_projects(sort=sort, limit=limit, start=start)
    title = self._build_title()
    c.custom_sidebar_menu = self._build_nav()
    result = dict(projects=projects, title=title, text=None)
    result.update(neighborhood=self.neighborhood, sort=sort)
    result.update(limit=limit, page=page, count=count)
    return result
def index(self, limit=None, page=0, count=0, **kw):
    """Show one discussion thread, paged; bumps the view counter.

    The `count` parameter is ignored and recomputed from the thread.
    """
    c.thread = self.W.thread
    c.thread_header = self.W.thread_header
    limit, page, start = g.handle_paging(limit, page)
    self.thread.num_views += 1
    # the update to num_views shouldn't affect the artifact's mod_date
    M.session.artifact_orm_session._get().skip_mod_date = True
    count = self.thread.query_posts(page=page, limit=int(limit)).count()
    return dict(discussion=self.thread.discussion, thread=self.thread,
                page=int(page), count=int(count), limit=int(limit),
                show_moderate=kw.get('show_moderate'))
def index(self, page=0, limit=None, **kw):
    """List blog posts newest-first; drafts are hidden from non-writers."""
    query_filter = dict(app_config_id=c.app.config._id)
    # without write access, only published posts are visible
    if not has_access(c.app, 'write')():
        query_filter['state'] = 'published'
    q = BM.BlogPost.query.find(query_filter)
    post_count = q.count()
    limit, page, _ = g.handle_paging(limit, page)
    # clamp limit/page to the actual number of posts
    limit, page = h.paging_sanitizer(limit, page, post_count)
    posts = q.sort('timestamp', pymongo.DESCENDING) \
        .skip(page * limit).limit(limit)
    c.form = W.preview_post_form
    c.pager = W.pager
    return dict(posts=posts, page=page, limit=limit, count=post_count)
def history(self, page=0, limit=None, **kw):
    """Show the edit history of this wiki page, paginated (default 25/page)."""
    if not self.page:
        raise exc.HTTPNotFound
    c.page_list = W.page_list
    c.page_size = W.page_size
    c.confirmation = W.confirmation
    limit, pagenum, start = g.handle_paging(limit, page, default=25)
    count = 0
    pages = self.page.history()
    count = pages.count()
    pages = pages.skip(start).limit(int(limit))
    return dict(title=self.title, pages=pages, limit=limit,
                count=count, page=pagenum)
def index(self, sort='alpha', limit=25, page=0, **kw):
    """Paged, sorted listing of this neighborhood's projects."""
    c.project_summary = W.project_summary
    c.page_list = W.page_list
    limit, page, start = g.handle_paging(limit, page)
    projects, count = self._find_projects(
        sort=sort, limit=limit, start=start)
    title = self._build_title()
    c.custom_sidebar_menu = self._build_nav()
    return dict(projects=projects, title=title, text=None,
                neighborhood=self.neighborhood, sort=sort,
                limit=limit, page=page, count=count)
def _search(self, model, fields, add_fields, q=None, f=None, page=0, limit=None, **kw):
    """Site-admin solr search over `model`, joining hits to mongo objects.

    :param fields: base (label, solr-field) pairs for the search form
    :param add_fields: extra field names appended to the form
    :param q: solr query string; empty means no search performed
    :param f: field to search on
    """
    all_fields = fields + [(fld, fld) for fld in add_fields]
    c.search_form = W.admin_search_form(all_fields)
    c.page_list = W.page_list
    c.page_size = W.page_size
    count = 0
    objects = []
    limit, page, start = g.handle_paging(limit, page, default=25)
    if q:
        match = search.site_admin_search(model, q, f, rows=limit, start=start)
        if match:
            count = match.hits
            objects = match.docs
            # solr ids look like "<type>#<mongo_id>"; 'None' means no mongo id
            ids = [obj['id'].split('#')[1] for obj in objects]
            ids = [bson.ObjectId(_id) for _id in ids if _id != 'None']
            mongo_objects = {}
            for obj in model.query.find({'_id': {'$in': ids}}):
                mongo_objects[str(obj._id)] = obj
            for i in range(len(objects)):
                obj = objects[i]
                _id = obj['id'].split('#')[1]
                obj['object'] = mongo_objects.get(_id)
            # Some objects can be deleted, but still have index in solr, should skip those
            objects = [o for o in objects if o.get('object')]

    def convert_fields(obj):
        # throw the type away (e.g. '_s' from 'url_s')
        result = {}
        # NOTE(review): iteritems/map are Python 2 idioms; revisit on a
        # Python 3 migration (.items() and list(map(...)))
        for k,val in obj.iteritems():
            name = k.rsplit('_', 1)
            if len(name) == 2:
                name = name[0]
            else:
                name = k
            result[name] = val
        return result

    return {
        'q': q,
        'f': f,
        'objects': map(convert_fields, objects),
        'count': count,
        'page': page,
        'limit': limit,
        'fields': fields,
        'additional_fields': add_fields,
        'type_s': model.type_s,
    }
def index(self, page=0, limit=None, **kw):
    """Show a blog post (optionally a specific version) with its comments.

    Drafts require write access. `version` is read from **kw.
    """
    if self.post.state == "draft":
        require_access(self.post, "write")
    c.form = W.view_post_form
    c.subscribe_form = W.subscribe_form
    c.thread = W.thread
    post_count = self.post.discussion_thread.post_count
    limit, page, _ = g.handle_paging(limit, page)
    # clamp limit/page to the actual number of comments
    limit, page = h.paging_sanitizer(limit, page, post_count)
    version = kw.pop("version", None)
    post = self._get_version(version)
    base_post = self.post
    subscribed = M.Mailbox.subscribed(artifact=self.post)
    c.subscribe_form.tool_subscribed = M.Mailbox.subscribed()
    return dict(post=post, base_post=base_post,
                page=page, limit=limit, count=post_count,
                subscribed=subscribed)
def index(self, threads=None, limit=None, page=0, count=0, **kw):
    """Discussion landing page with a paged list of active threads.

    `threads` and `count` parameters are ignored; they are recomputed here.
    """
    if self.discussion.deleted:
        redirect(self.discussion.url() + 'deleted')
    limit, page, start = g.handle_paging(limit, page)
    c.subscribed = M.Mailbox.subscribed(artifact=self.discussion)
    # only threads with replies; pinned/flagged first, then most recent
    threads = DM.ForumThread.query.find(dict(discussion_id=self.discussion._id, num_replies={'$gt': 0})) \
        .sort([('flags', pymongo.DESCENDING), ('last_post_date', pymongo.DESCENDING)])
    c.discussion = self.W.discussion
    c.discussion_header = self.W.discussion_header
    c.whole_forum_subscription_form = self.W.subscribe_form
    return dict(discussion=self.discussion,
                count=threads.count(),
                threads=threads.skip(start).limit(int(limit)).all(),
                limit=limit, page=page)
def new_projects(self, page=0, limit=100, **kwargs):
    """Most recently created projects (excluding the Users neighborhood)."""
    c.page_list = W.page_list
    c.page_size = W.page_size
    limit, pagenum, start = g.handle_paging(limit, page, default=100)
    count = 0
    users_nbhd = M.Neighborhood.query.get(name='Users')
    q = (M.Project.query
         .find({'neighborhood_id': {'$ne': users_nbhd._id}})
         .sort('_id', -1))
    count = q.count()
    return {
        'projects': q.skip(start).limit(limit),
        'limit': limit,
        'pagenum': pagenum,
        'count': count,
    }
def index(self, threads=None, limit=None, page=0, count=0, **kw):
    """Discussion landing page with a paged list of active threads.

    `threads` and `count` parameters are ignored; they are recomputed here.
    """
    if self.discussion.deleted:
        redirect(self.discussion.url() + 'deleted')
    limit, page, start = g.handle_paging(limit, page)
    c.subscribed = M.Mailbox.subscribed(artifact=self.discussion)
    # only threads with replies; pinned/flagged first, then most recent
    threads = DM.ForumThread.query.find(dict(discussion_id=self.discussion._id, num_replies={'$gt': 0})) \
        .sort([('flags', pymongo.DESCENDING), ('last_post_date', pymongo.DESCENDING)])
    c.discussion = self.W.discussion
    c.discussion_header = self.W.discussion_header
    c.whole_forum_subscription_form = self.W.subscribe_form
    return dict(
        discussion=self.discussion,
        count=threads.count(),
        threads=threads.skip(start).limit(int(limit)).all(),
        limit=limit, page=page)
def index(self, page=0, limit=DEFAULT_PAGE_LIMIT, **kw):
    """Commit view: metadata plus a paged listing of changed paths.

    Accepts **kw so unexpected query-string parameters do not raise
    TypeError, matching the sibling commit-browse index() implementations.
    """
    c.revision_widget = self.revision_widget
    c.page_list = self.page_list
    result = dict(commit=self._commit)
    if self._commit:
        result.update(self._commit.context())
    tree = self._commit.tree
    limit, page, start = g.handle_paging(limit, page,
                                         default=self.DEFAULT_PAGE_LIMIT)
    diffs = self._commit.paged_diffs(start=start, end=start + limit)
    # keep only entries that still resolve in the tree; removed paths can't
    result['artifacts'] = [(t, f)
                           for t in ('added', 'removed', 'changed', 'copied')
                           for f in diffs[t]
                           if t == 'removed' or tree.get_blob_by_path(f)]
    count = diffs['total']
    result.update(dict(page=page, limit=limit, count=count))
    return result
def index(self, page=0, limit=None, **kw):
    """Show a blog post (optionally a specific version) with its comments.

    Drafts require write access. `version` is read from **kw.
    """
    if self.post.state == 'draft':
        require_access(self.post, 'write')
    c.form = W.view_post_form
    c.subscribe_form = W.subscribe_form
    c.thread = W.thread
    post_count = self.post.discussion_thread.post_count
    limit, page, _ = g.handle_paging(limit, page)
    # clamp limit/page to the actual number of comments
    limit, page = h.paging_sanitizer(limit, page, post_count)
    version = kw.pop('version', None)
    post = self._get_version(version)
    base_post = self.post
    subscribed = M.Mailbox.subscribed(artifact=self.post)
    c.subscribe_form.tool_subscribed = M.Mailbox.subscribed()
    return dict(post=post, base_post=base_post,
                page=page, limit=limit, count=post_count,
                subscribed=subscribed)
def index(self, page=0, limit=DEFAULT_PAGE_LIMIT, **kw):
    """Commit view: metadata plus a paged listing of changed paths."""
    c.revision_widget = self.revision_widget
    c.page_list = self.page_list
    result = dict(commit=self._commit)
    if self._commit:
        result.update(self._commit.context())
    tree = self._commit.tree
    limit, page, start = g.handle_paging(limit, page,
                                         default=self.DEFAULT_PAGE_LIMIT)
    diffs = self._commit.paged_diffs(start=start, end=start + limit)
    # keep only entries that still resolve in the tree; removed paths can't
    result['artifacts'] = [
        (t, f)
        for t in ('added', 'removed', 'changed', 'copied')
        for f in diffs[t]
        if t == 'removed' or tree.get_blob_by_path(f)]
    count = diffs['total']
    result.update(dict(page=page, limit=limit, count=count))
    return result
def browse_pages(self, sort='alpha', show_deleted=False, page=0, limit=None, **kw):
    'list of all pages in the wiki'
    c.page_list = W.page_list
    c.page_size = W.page_size
    limit, pagenum, start = g.handle_paging(limit, page, default=25)
    count = 0
    pages = []
    # pages with no recorded edit history ("unversioned"), appended last
    uv_pages = []
    criteria = dict(app_config_id=c.app.config._id)
    can_delete = has_access(c.app, 'delete')()
    # only users who can delete may see deleted pages
    show_deleted = show_deleted and can_delete
    if not can_delete:
        criteria['deleted'] = False
    q = WM.Page.query.find(criteria)
    if sort == 'alpha':
        q = q.sort('title')
    count = q.count()
    q = q.skip(start).limit(int(limit))
    # NOTE: loop variable shadows the `page` parameter (already consumed above)
    for page in q:
        recent_edit = page.history().first()
        p = dict(title=page.title, url=page.url(), deleted=page.deleted)
        if recent_edit:
            p['updated'] = recent_edit.timestamp
            p['user_label'] = recent_edit.author.display_name
            p['user_name'] = recent_edit.author.username
            pages.append(p)
        else:
            if sort == 'recent':
                # no 'updated' key -> cannot sort by recency; defer to the end
                uv_pages.append(p)
            else:
                pages.append(p)
    if sort == 'recent':
        pages.sort(reverse=True, key=lambda x: (x['updated']))
    pages = pages + uv_pages
    return dict(pages=pages, can_delete=can_delete, show_deleted=show_deleted,
                limit=limit, count=count, page=pagenum)
def index(self, page=0, limit=100, **kw):
    """List short URLs visible to the current user, paginated."""
    c.page_list = W.page_list
    c.page_size = W.page_size
    limit, pagenum, start = g.handle_paging(limit, page, default=100)
    criteria = {'app_config_id': c.app.config._id}
    if not has_access(c.app, 'view_private'):
        criteria['private'] = False
    q = ShortUrl.query.find(criteria)
    total = q.count()
    visible = q.skip(start).limit(limit)
    return {
        'short_urls': visible,
        'limit': limit,
        'pagenum': pagenum,
        'count': total,
    }
def index(self, page=0, limit=None, **kw):
    """Show a post (optionally a specific version) with its comment thread.

    `version` is read from **kw.
    """
    c.form = W.view_post_form
    c.attachment_list = W.attachment_list
    c.subscribe_form = W.subscribe_form
    c.thread = W.thread
    post_count = self.post.discussion_thread.post_count
    limit, page, _ = g.handle_paging(limit, page)
    # clamp limit/page to the actual number of comments
    limit, page = h.paging_sanitizer(limit, page, post_count)
    version = kw.pop('version', None)
    post = self._get_version(version)
    base_post = self.post
    subscribed = M.Mailbox.subscribed(artifact=self.post)
    return dict(post=post, base_post=base_post,
                page=page, limit=limit, count=post_count,
                subscribed=subscribed)
def paged_query(cls, app_config, user, query, limit=None, page=0, sort=None, deleted=False, **kw):
    """
    Query tickets, filtering for 'read' permission, sorting and paginating the result.

    See also paged_search which does a solr search

    :param sort: "<field> <asc|desc>"; fields starting with '_' are custom fields
    :returns: dict with tickets, count, q (JSON of the query), limit, page, sort
    """
    limit, page, start = g.handle_paging(limit, page, default=25)
    q = cls.query.find(
        dict(query, app_config_id=app_config._id, deleted=deleted))
    # default ordering: newest ticket numbers first
    q = q.sort('ticket_num', pymongo.DESCENDING)
    if sort:
        field, direction = sort.split()
        if field.startswith('_'):
            field = 'custom_fields.' + field
        direction = dict(asc=pymongo.ASCENDING,
                         desc=pymongo.DESCENDING)[direction]
        q = q.sort(field, direction)
    q = q.skip(start)
    q = q.limit(limit)
    tickets = []
    count = q.count()
    for t in q:
        if security.has_access(t, 'read', user, app_config.project.root_project):
            tickets.append(t)
        else:
            # NOTE(review): only unreadable tickets on *this* page are
            # subtracted, so count may still include hidden tickets on
            # other pages — confirm this is acceptable to callers
            count = count - 1
    return dict(tickets=tickets,
                count=count,
                q=json.dumps(query),
                limit=limit,
                page=page,
                sort=sort,
                **kw)
def index(self, page=0, limit=100, **kw):
    """List short URLs visible to the current user, paginated.

    Also returns url_len (the length of the tool's base short-URL prefix)
    so the template can size the display column.
    """
    c.page_list = W.page_list
    c.page_size = W.page_size
    limit, pagenum, start = g.handle_paging(limit, page, default=100)
    p = {"app_config_id": c.app.config._id}
    # private short URLs are hidden unless the user may view them
    if not has_access(c.app, "view_private"):
        p["private"] = False
    short_urls = ShortUrl.query.find(p)
    count = short_urls.count()
    short_urls = short_urls.skip(start).limit(limit)
    return {
        "short_urls": short_urls,
        "limit": limit,
        "pagenum": pagenum,
        "count": count,
        "url_len": len(ShortUrl.build_short_url(c.app, short_name="")),
    }
def index(self, limit=100, page=0, **kw):
    """REST listing of top-level (non-deleted) forums the user may read."""
    limit, page, start = g.handle_paging(int(limit), int(page))
    forums = model.Forum.query.find(dict(
        app_config_id=c.app.config._id,
        parent_id=None, deleted=False)
    ).sort([('shortname', pymongo.ASCENDING)]).skip(start).limit(limit)
    count = forums.count()
    # NOTE: local `json` shadows the stdlib json module within this function
    json = dict(forums=[dict(_id=f._id,
                             name=f.name,
                             shortname=f.shortname,
                             description=f.description,
                             num_topics=f.num_topics,
                             last_post=f.last_post,
                             url=h.absurl('/rest' + f.url()))
                        for f in forums if has_access(f, 'read')])
    json['limit'] = limit
    json['page'] = page
    json['count'] = count
    return json
def new_projects(self, page=0, limit=100, **kwargs):
    """Most recently created projects (excluding the Users neighborhood)."""
    c.page_list = W.page_list
    c.page_size = W.page_size
    limit, pagenum, start = g.handle_paging(limit, page, default=100)
    count = 0
    nb = M.Neighborhood.query.get(name='Users')
    # sort by _id desc: ObjectIds embed creation time, so newest first
    projects = (M.Project.query.find({
        'neighborhood_id': {
            '$ne': nb._id
        }
    }).sort('_id', -1))
    count = projects.count()
    projects = projects.skip(start).limit(limit)
    return {
        'projects': projects,
        'limit': limit,
        'pagenum': pagenum,
        'count': count
    }
def index(self, limit=None, page=0, **kw):
    """REST listing of top-level (non-deleted) forums the user may read."""
    limit, page, start = g.handle_paging(limit, int(page))
    forums = model.Forum.query.find(dict(
        app_config_id=c.app.config._id,
        parent_id=None, deleted=False)
    ).sort([('shortname', pymongo.ASCENDING)]).skip(start).limit(limit)
    count = forums.count()
    # NOTE: local `json` shadows the stdlib json module within this function
    json = dict(forums=[dict(_id=f._id,
                             name=f.name,
                             shortname=f.shortname,
                             description=f.description,
                             num_topics=f.num_topics,
                             last_post=f.last_post,
                             url=h.absurl('/rest' + f.url()))
                        for f in forums if has_access(f, 'read')])
    json['limit'] = limit
    json['page'] = page
    json['count'] = count
    return json
def index(self, limit=100, page=0, **kw):
    """REST view of a forum: its metadata plus a paged list of topics."""
    limit, page, start = g.handle_paging(int(limit), int(page))
    topics = model.Forum.thread_class().query.find(
        dict(discussion_id=self.forum._id))
    # pinned/flagged threads first, then most recently active
    topics = topics.sort([('flags', pymongo.DESCENDING),
                          ('last_post_date', pymongo.DESCENDING)])
    topics = topics.skip(start).limit(limit)
    count = topics.count()
    # NOTE: local `json` shadows the stdlib json module within this function
    json = {}
    json['forum'] = self.forum.__json__()
    json['forum']['topics'] = [dict(_id=t._id,
                                    subject=t.subject,
                                    num_replies=t.num_replies,
                                    num_views=t.num_views,
                                    url=h.absurl('/rest' + t.url()),
                                    last_post=t.last_post)
                               for t in topics]
    json['count'] = count
    json['page'] = page
    json['limit'] = limit
    return json
def url_paginated(self):
    '''Return link to the thread with a #target that points to this comment.

    Also handle pagination properly.
    '''
    if not self.thread:  # pragma no cover
        return None
    limit, p, s = g.handle_paging(None, 0)  # get paging limit
    if self.query.find(dict(thread_id=self.thread._id)).count() <= limit:
        # all posts in a single page
        page = 0
    else:
        posts = self.thread.find_posts()
        posts = self.thread.create_post_threads(posts)

        def find_i(posts):
            '''Find the index number of this post in the display order'''
            q = []

            def traverse(posts):
                for p in posts:
                    if p['post']._id == self._id:
                        return True  # found
                    q.append(p)
                    if traverse(p['children']):
                        return True
            traverse(posts)
            return len(q)

        # BUG FIX: floor division.  Plain `/` yields a float under
        # Python 3, producing a URL like `page=1.0`; `//` is identical
        # on Python 2 ints and correct on Python 3.
        page = find_i(posts) // limit

    slug = h.urlquote(self.slug)
    aref = ArtifactReference.query.get(_id=self.thread.ref_id)
    # Prefer the owning artifact's URL; fall back to the thread's own.
    if aref and aref.artifact:
        url = aref.artifact.url()
    else:
        url = self.thread.url()
    if page == 0:
        return '%s?limit=%s#%s' % (url, limit, slug)
    return '%s?limit=%s&page=%s#%s' % (url, limit, page, slug)
def index(self, limit=None, page=0, **kw):
    """REST listing of this forum plus its topics, paginated.

    Only topics whose status is 'ok' are exposed.  The forum's own
    ``threads`` key is replaced by a ``topics`` list.
    """
    limit, page, start = g.handle_paging(limit, int(page))
    cursor = (model.Forum.thread_class().query
              .find(dict(discussion_id=self.forum._id))
              .sort([('flags', pymongo.DESCENDING),
                     ('last_post_date', pymongo.DESCENDING)])
              .skip(start)
              .limit(limit))
    total = cursor.count()
    # small limit since we're going to "del" the threads anyway
    forum_json = self.forum.__json__(limit=1)
    # topics replace threads here
    del forum_json['threads']
    forum_json['topics'] = [
        dict(_id=t._id,
             subject=t.subject,
             num_replies=t.num_replies,
             num_views=t.num_views,
             url=h.absurl('/rest' + t.url()),
             last_post=t.last_post)
        for t in cursor
        if t.status == 'ok']
    return {
        'forum': forum_json,
        'count': total,
        'page': page,
        'limit': limit,
    }
def browse_pages(self, sort='alpha', show_deleted=False, page=0, limit=None, **kw):
    """List of all pages in the wiki.

    :param sort: 'alpha' for title order, 'recent' for newest-edit-first
    :param show_deleted: include deleted pages; honored only for users
        with 'delete' access on the app
    :param page: zero-based page number
    :param limit: page size; defaults via g.handle_paging (25)
    """
    c.page_list = W.page_list
    c.page_size = W.page_size
    limit, pagenum, start = g.handle_paging(limit, page, default=25)
    count = 0
    pages = []
    uv_pages = []  # pages with no edit history (can't be time-sorted)
    criteria = dict(app_config_id=c.app.config._id)
    can_delete = has_access(c.app, 'delete')()
    show_deleted = show_deleted and can_delete
    if not can_delete:
        # non-privileged users never see deleted pages at all
        criteria['deleted'] = False
    q = WM.Page.query.find(criteria)
    if sort == 'alpha':
        q = q.sort('title')
    count = q.count()
    q = q.skip(start).limit(int(limit))
    # NOTE: the loop variable shadows the `page` parameter, which was
    # already consumed by handle_paging above (pagenum holds the page no.)
    for page in q:
        recent_edit = page.history().first()
        p = dict(title=page.title, url=page.url(), deleted=page.deleted)
        if recent_edit:
            p['updated'] = recent_edit.timestamp
            p['user_label'] = recent_edit.author.display_name
            p['user_name'] = recent_edit.author.username
            pages.append(p)
        else:
            if sort == 'recent':
                # no timestamp to sort on; appended after the sorted list
                uv_pages.append(p)
            else:
                pages.append(p)
    if sort == 'recent':
        pages.sort(reverse=True, key=lambda x: (x['updated']))
        pages = pages + uv_pages
    return dict(
        pages=pages, can_delete=can_delete, show_deleted=show_deleted,
        limit=limit, count=count, page=pagenum)
def browse_tags(self, sort='alpha', page=0, limit=None, **kw):
    """List of all labels in the wiki, with the pages carrying each label.

    Only the sorted label *names* are paginated; the full label->pages
    mapping is always returned.
    """
    c.page_list = W.page_list
    c.page_size = W.page_size
    limit, pagenum, start = g.handle_paging(limit, page, default=25)
    # label -> list of (non-deleted) pages bearing that label
    page_tags = {}
    cursor = WM.Page.query.find(dict(app_config_id=c.app.config._id,
                                     deleted=False,
                                     labels={'$ne': []}))
    for wiki_page in cursor:
        for label in (wiki_page.labels or []):
            page_tags.setdefault(label, []).append(wiki_page)
    name_labels = sorted(page_tags)
    return dict(labels=page_tags,
                limit=limit,
                count=len(page_tags),
                page=pagenum,
                name_labels=name_labels[start:start + limit])
def index(self, page=0, limit=DEFAULT_PAGE_LIMIT, **kw):
    """Render a commit: its context plus a paginated list of changed files.

    Returns a dict with ``commit``, ``artifacts`` (tuples of
    (change-type, file-entry, 'blob'|'tree', is_text)), and paging keys.
    """
    c.revision_widget = self.revision_widget
    c.page_list = self.page_list
    result = dict(commit=self._commit)
    if self._commit:
        result.update(self._commit.context())
    tree = self._commit.tree
    limit, page, start = g.handle_paging(limit, page,
                                         default=self.DEFAULT_PAGE_LIMIT)
    diffs = self._commit.paged_diffs(start=start, end=start + limit)
    result['artifacts'] = []
    for t in ('added', 'removed', 'changed', 'copied', 'renamed'):
        for f in diffs[t]:
            # copied/renamed entries are dicts; use the new path
            if t in ('copied', 'renamed'):
                filepath = f['new']
            else:
                filepath = f
            # PERF: the original looked up get_blob_by_path(filepath)
            # twice per file; do the tree lookup once.
            blob = tree.get_blob_by_path(filepath) if filepath else None
            is_text = filepath and blob and blob.has_html_view
            # NOTE(review): `f` (not `filepath`) is passed here, so for
            # copied/renamed entries a dict is looked up and this likely
            # always yields 'tree' — preserved as-is, but looks suspect.
            result['artifacts'].append(
                (t, f, 'blob' if tree.get_blob_by_path(f) else 'tree',
                 is_text)
            )
    count = diffs['total']
    result.update(dict(page=page, limit=limit, count=count))
    return result
def search_app(q='', fq=None, app=True, **kw):
    """Helper for app/project search.

    Uses dismax query parser. Matches on `title` and `text`. Handles paging, sorting, etc

    :param q: query string (empty means no search is performed)
    :param fq: extra Solr filter-query clauses
    :param app: True to scope the search to the current tool
    :returns: dict for templates with results, count, paging and sort URLs
    """
    history = kw.pop('history', None)
    if app and kw.pop('project', False):
        # Used from app's search controller. If `project` is True, redirect to
        # 'entire project search' page
        redirect(c.project.url() + 'search/?' +
                 urlencode(dict(q=q, history=history)))
    search_comments = kw.pop('search_comments', None)
    limit = kw.pop('limit', None)
    page = kw.pop('page', 0)
    default = kw.pop('default', 25)
    allowed_types = kw.pop('allowed_types', [])
    parser = kw.pop('parser', None)
    sort = kw.pop('sort', 'score desc')
    fq = fq if fq else []
    search_error = None
    results = []
    count = 0
    matches = {}
    limit, page, start = g.handle_paging(limit, page, default=default)
    if not q:
        q = ''
    else:
        # Match on both `title` and `text` by default, using 'dismax' parser.
        # Score on `title` matches is boosted, so title match is better than body match.
        # It's 'fuzzier' than standard parser, which matches only on `text`.
        if search_comments:
            allowed_types += ['Post']
        if app:
            # scope to the current tool in the current project; drop
            # soft-deleted artifacts
            fq = [
                'project_id_s:%s' % c.project._id,
                'mount_point_s:%s' % c.app.config.options.mount_point,
                '-deleted_b:true',
                'type_s:(%s)' % ' OR '.join(['"%s"' % t for t in allowed_types])
            ] + fq
        search_params = {
            'qt': 'dismax',
            'qf': 'title^2 text',
            'pf': 'title^2 text',
            'fq': fq,
            'hl': 'true',
            'hl.simple.pre': '#ALLURA-HIGHLIGHT-START#',
            'hl.simple.post': '#ALLURA-HIGHLIGHT-END#',
            'sort': sort,
        }
        if not history:
            # exclude artifact history snapshots unless explicitly requested
            search_params['fq'].append('is_history_b:False')
        if parser == 'standard':
            # caller wants the plain Solr parser: strip the dismax knobs
            search_params.pop('qt', None)
            search_params.pop('qf', None)
            search_params.pop('pf', None)
        try:
            results = search(q, short_timeout=True, ignore_errors=False,
                             rows=limit, start=start, **search_params)
        except SearchError as e:
            # surface the error to the view rather than raising
            search_error = e
        if results:
            count = results.hits
            matches = results.highlighting

            def historize_urls(doc):
                # point snapshot hits at the specific version they index
                if doc.get('type_s', '').endswith(' Snapshot'):
                    if doc.get('url_s'):
                        doc['url_s'] = doc['url_s'] + \
                            '?version=%s' % doc.get('version_i')
                return doc

            def add_matches(doc):
                # attach highlighted title/text snippets; escape user content
                # before swapping the sentinel markers for real <strong> tags
                m = matches.get(doc['id'], {})
                title = h.get_first(m, 'title')
                text = h.get_first(m, 'text')
                if title:
                    title = (jinja2.escape(title).replace(
                        '#ALLURA-HIGHLIGHT-START#', jinja2.Markup('<strong>')).replace(
                        '#ALLURA-HIGHLIGHT-END#', jinja2.Markup('</strong>')))
                if text:
                    text = (jinja2.escape(text).replace(
                        '#ALLURA-HIGHLIGHT-START#', jinja2.Markup('<strong>')).replace(
                        '#ALLURA-HIGHLIGHT-END#', jinja2.Markup('</strong>')))
                doc['title_match'] = title
                doc['text_match'] = text or h.get_first(doc, 'text')
                return doc

            def paginate_comment_urls(doc):
                # comment (Post) hits should link to the right thread page
                from allura.model import ArtifactReference
                if doc.get('type_s', '') == 'Post':
                    aref = ArtifactReference.query.get(_id=doc.get('id'))
                    if aref and aref.artifact:
                        doc['url_paginated'] = aref.artifact.url_paginated()
                return doc

            # lazy transforms; materialized by list(results) in the return
            results = imap(historize_urls, results)
            results = imap(add_matches, results)
            results = imap(paginate_comment_urls, results)
    # Provide sort urls to the view
    score_url = 'score desc'
    date_url = 'mod_date_dt desc'
    try:
        field, order = sort.split(' ')
    except ValueError:
        field, order = 'score', 'desc'
    # each sort link toggles the direction of the currently-sorted field
    sort = ' '.join([field, 'asc' if order == 'desc' else 'desc'])
    if field == 'score':
        score_url = sort
    elif field == 'mod_date_dt':
        date_url = sort
    params = request.GET.copy()
    params.update({'sort': score_url})
    score_url = url(request.path, params=params)
    params.update({'sort': date_url})
    date_url = url(request.path, params=params)
    return dict(q=q, history=history, results=list(results) or [],
                count=count, limit=limit, page=page, search_error=search_error,
                sort_score_url=score_url, sort_date_url=date_url,
                sort_field=field)
def paged_search(cls, app_config, user, q, limit=None, page=0, sort=None,
                 show_deleted=False, **kw):
    """Query tickets from Solr, filtering for 'read' permission, sorting and
    paginating the result.

    See also paged_query which does a mongo search.

    We do the sorting and skipping right in SOLR, before we ever ask
    Mongo for the actual tickets.  Other keywords for
    search_artifact (e.g., history) or for SOLR are accepted through
    kw.  The output is intended to be used directly in templates,
    e.g., exposed controller methods can just:

        return paged_search(q, ...)

    If you want all the results at once instead of paged you have
    these options:
      - don't call this routine, search directly in mongo
      - call this routine with a very high limit and TEST that
        count<=limit in the result
    limit=-1 is NOT recognized as 'all'.  500 is a reasonable limit.
    """
    limit, page, start = g.handle_paging(limit, page, default=25)
    count = 0
    tickets = []
    refined_sort = sort if sort else 'ticket_num_i desc'
    # always add a deterministic tiebreaker so paging is stable
    if 'ticket_num_i' not in refined_sort:
        refined_sort += ',ticket_num_i asc'
    try:
        if q:
            matches = search_artifact(
                cls, q, short_timeout=True, rows=limit, sort=refined_sort,
                start=start, fl='ticket_num_i', **kw)
        else:
            matches = None
        solr_error = None
    except SearchError as e:
        solr_error = e
        matches = []
    if matches:
        count = matches.hits
        # ticket_numbers is in sorted order
        ticket_numbers = [match['ticket_num_i'] for match in matches.docs]
        # but query, unfortunately, returns results in arbitrary order
        query = cls.query.find(
            dict(app_config_id=app_config._id,
                 ticket_num={'$in': ticket_numbers}))
        # so stick all the results in a dictionary...
        ticket_for_num = {}
        for t in query:
            ticket_for_num[t.ticket_num] = t
        # and pull them out in the order given by ticket_numbers
        tickets = []
        for tn in ticket_numbers:
            if tn in ticket_for_num:
                ticket = ticket_for_num[tn]
                # BUG FIX: previously `show_deleted` itself was reassigned
                # here, so a single ticket lacking 'delete' access turned
                # deleted-ticket display off for every subsequent ticket.
                # Use a per-ticket local instead.
                can_show_deleted = show_deleted and security.has_access(
                    ticket, 'delete', user, app_config.project.root_project)
                # `deleted == False` (not `not deleted`) is deliberate:
                # legacy records may have deleted=None and were excluded.
                if (security.has_access(ticket, 'read', user,
                                        app_config.project.root_project) and
                        (can_show_deleted or ticket.deleted == False)):
                    tickets.append(ticket)
                else:
                    # hidden result: keep the reported count honest
                    count = count - 1
    return dict(tickets=tickets,
                count=count, q=q, limit=limit, page=page, sort=sort,
                solr_error=solr_error, **kw)