def log(self, limit=0, path=None, **kw):
    """Show the commit history leading up to this commit.

    If *path* is given, the log is restricted to that file or directory.
    """
    if not limit:
        limit = int(tg.config.get('scm.view.log.limit', 25))
    is_file = c.app.repo.is_file(path, self._commit._id) if path else False
    limit, _ = h.paging_sanitizer(limit, 0)
    # Ask for one extra entry so we can tell whether a "next" page exists.
    history = list(c.app.repo.log(
        revs=self._commit._id, path=path, id_only=False, limit=limit + 1))
    next_commit = history.pop() if len(history) > limit else None
    c.log_widget = self.log_widget
    return dict(
        username=c.user._id and c.user.username,
        branch=None,
        log=history,
        next_commit=next_commit,
        limit=limit,
        path=path,
        is_file=is_file,
        **kw)
def index(self, page=0, limit=250, **kw):
    """Render the merge-request view with its discussion and commit list."""
    c.thread = self.thread_widget
    c.log_widget = self.log_widget
    c.mr_dispose_form = self.mr_dispose_form
    limit, page = h.paging_sanitizer(limit, page)
    with self.req.push_downstream_context():
        downstream_app = c.app
    result = {
        'downstream_app': downstream_app,
        'req': self.req,
        'can_merge': self.req.can_merge(),
        'can_merge_status': self.req.can_merge_task_status(),
        'merge_status': self.req.merge_task_status(),
        'page': page,
        'limit': limit,
        'count': self.req.discussion_thread.post_count,
    }
    try:
        result['commits'] = self.req.commits
    except Exception:
        # Commit lookup can fail; log it and render the page without commits.
        log.info("Can't get commits for merge request %s",
                 self.req.url(), exc_info=True)
        result['commits'] = []
        result['error'] = True
    return result
def log(self, limit=0, path=None, **kw):
    """Display the revision log for this commit, optionally for one path."""
    limit = limit or int(tg.config.get('scm.view.log.limit', 25))
    is_file = False
    if path:
        is_file = c.app.repo.is_file(path, self._commit._id)
    limit, _ = h.paging_sanitizer(limit, 0)
    # Fetch limit + 1 entries; the surplus one signals that more pages exist.
    revisions = list(c.app.repo.log(revs=self._commit._id,
                                    path=path,
                                    id_only=False,
                                    limit=limit + 1))
    if len(revisions) > limit:
        next_commit = revisions.pop()
    else:
        next_commit = None
    c.log_widget = self.log_widget
    return dict(username=c.user._id and c.user.username, branch=None,
                log=revisions, next_commit=next_commit, limit=limit,
                path=path, is_file=is_file, **kw)
def feed(cls, q, feed_type, title, link, description, since=None,
         until=None, page=None, limit=None):
    """Produce a webhelpers.feedgenerator Feed from query *q*.

    :param q: a mongo query dict, or a callable
              ``q(since, until, page, limit)`` returning one.
    :param feed_type: ``'atom'`` or ``'rss'``.
    :param since, until: optional bounds applied to ``pubdate``.
    :param page, limit: pagination, clamped via ``h.paging_sanitizer``.
    :raises ValueError: if *feed_type* is not a supported format.
    """
    d = dict(title=title, link=h.absurl(link), description=description,
             language=u'en', feed_url=request.url)
    if feed_type == 'atom':
        feed = FG.Atom1Feed(**d)
    elif feed_type == 'rss':
        feed = RssFeed(**d)
    else:
        # Previously an unknown type crashed later with NameError on `feed`;
        # fail early with a clear message instead.
        raise ValueError('Unsupported feed type: %r' % feed_type)
    limit, page = h.paging_sanitizer(limit or 10, page)
    query = defaultdict(dict)
    if callable(q):
        q = q(since, until, page, limit)
    query.update(q)
    if since is not None:
        query['pubdate']['$gte'] = since
    if until is not None:
        query['pubdate']['$lte'] = until
    cur = cls.query.find(query)
    cur = cur.sort('pubdate', pymongo.DESCENDING)
    cur = cur.limit(limit)
    cur = cur.skip(limit * page)
    for r in cur:
        feed.add_item(title=r.title,
                      # No .encode('utf-8') here: on Python 3 that produces
                      # bytes, which breaks URL handling in absurl.
                      link=h.absurl(r.link),
                      pubdate=r.pubdate,
                      description=r.description,
                      unique_id=h.absurl(r.unique_id),
                      author_name=r.author_name,
                      author_link=h.absurl(r.author_link))
    return feed
def index(self, version=None, page=0, limit=25, **kw):
    """Display a wiki page, optionally a specific historical *version*."""
    if not self.page:
        # No such page yet: send the user to the edit/create form.
        redirect(c.app.url + h.urlquote(self.title) + '/edit')
    c.thread = W.thread
    c.attachment_list = W.attachment_list
    c.subscribe_form = W.page_subscribe_form
    post_count = self.page.discussion_thread.post_count
    limit, pagenum = h.paging_sanitizer(limit, page, post_count)
    page = self.get_version(version)
    if page is None:
        # Requested version doesn't exist; fall back to the previous one,
        # or to the current page when no version was requested.
        if version:
            redirect('.?version=%d' % (version - 1))
        else:
            redirect('.')
    elif 'all' not in page.viewable_by and c.user.username not in page.viewable_by:
        raise exc.HTTPForbidden(detail="You may not view this page.")
    cur = page.version
    # Avoid shadowing the builtin `next`; the returned dict still uses the
    # same `prev`/`next` keys the template expects.
    prev_version = cur - 1 if cur > 1 else None
    next_version = cur + 1
    hide_left_bar = not c.app.show_left_bar
    return dict(
        page=page,
        cur=cur,
        prev=prev_version,
        next=next_version,
        subscribed=M.Mailbox.subscribed(artifact=self.page),
        hide_left_bar=hide_left_bar,
        show_meta=c.app.show_right_bar,
        pagenum=pagenum,
        limit=limit,
        count=post_count)
def commit_browser_data(self, start=None, limit=None, **kw):
    """Build the JSON-friendly structure backing the commit-browser widget."""
    limit, _ = h.paging_sanitizer(limit or 100, 0, 0)
    commit_ids = []
    built_tree = {}
    next_commit = None
    # Walk one entry past `limit` so we know whether another page exists.
    log_iter = c.app.repo.log(revs=start, id_only=False, limit=limit + 1)
    for row, entry in enumerate(log_iter):
        if row >= limit:
            next_commit = str(entry['id'])
            break
        oid = str(entry['id'])
        commit_ids.append(oid)
        built_tree[entry['id']] = {
            'column': 0,
            'parents': list(map(str, entry['parents'])),
            'short_id': '[r%s]' % entry['id'],
            'message': entry['message'],
            'oid': oid,
            'row': row,
            'url': c.app.repo.url_for_commit(entry['id']),
        }
    return {
        'commits': commit_ids,
        'next_column': 1,
        'max_row': len(commit_ids) - 1,
        'built_tree': built_tree,
        'next_commit': next_commit,
    }
def commit_browser_data(self, start=None, limit=None, **kw):
    """Assemble commit-graph data for the commit browser.

    Returns a dict with the visible commit ids, per-commit display info
    (``built_tree``) and, when more history exists, the id of the next
    commit to continue paging from.
    """
    data = {
        'commits': [],
        'next_column': 1,
        'max_row': 0,
        'built_tree': {},
        'next_commit': None,
    }
    limit, _ = h.paging_sanitizer(limit or 100, 0, 0)
    # NOTE(review): the sibling implementation passes this as `limit=`;
    # confirm which keyword this repo.log() signature actually accepts.
    for i, commit in enumerate(c.app.repo.log(revs=start, id_only=False,
                                              page_size=limit + 1)):
        if i >= limit:
            # The extra entry only signals that another page exists.
            data['next_commit'] = str(commit['id'])
            break
        data['commits'].append(str(commit['id']))
        data['built_tree'][commit['id']] = {
            'column': 0,
            # list(...) so the value is JSON-serializable on Python 3,
            # where bare map() returns a lazy iterator.
            'parents': list(map(str, commit['parents'])),
            'short_id': '[r%s]' % commit['id'],
            'message': commit['message'],
            'oid': str(commit['id']),
            'row': i,
            'url': c.app.repo.url_for_commit(commit['id']),
        }
    data['max_row'] = len(data['commits']) - 1
    return data
def index(self, page=0, limit=10, **kw):
    """List blog posts, newest first; unpublished posts require write access."""
    criteria = {'app_config_id': c.app.config._id}
    if not has_access(c.app, 'write')():
        # Readers only ever see published posts.
        criteria['state'] = 'published'
    found = BM.BlogPost.query.find(criteria)
    total = found.count()
    limit, page = h.paging_sanitizer(limit, page, total)
    posts = (found.sort('timestamp', pymongo.DESCENDING)
                  .skip(page * limit)
                  .limit(limit))
    c.form = W.preview_post_form
    c.pager = W.pager
    return dict(posts=posts, page=page, limit=limit, count=total)
def index(self, page=0, limit=None, **kw):
    """List blog posts newest first; drafts are hidden from non-writers."""
    criteria = {"app_config_id": c.app.config._id}
    if not has_access(c.app, "write")():
        criteria["state"] = "published"
    found = BM.BlogPost.query.find(criteria)
    total = found.count()
    # Resolve effective paging first, then clamp against the post count.
    limit, page, _ = g.handle_paging(limit, page)
    limit, page = h.paging_sanitizer(limit, page, total)
    posts = (found.sort("timestamp", pymongo.DESCENDING)
                  .skip(page * limit)
                  .limit(limit))
    c.form = W.preview_post_form
    c.pager = W.pager
    return dict(posts=posts, page=page, limit=limit, count=total)
def index(self, page=0, limit=25, **kw):
    """Show a blog post (optionally an older version) with its discussion."""
    if self.post.state == 'draft':
        # Drafts are only visible to users who can edit them.
        require_access(self.post, 'write')
    c.form = W.view_post_form
    c.subscribe_form = W.subscribe_form
    c.thread = W.thread
    count = self.post.discussion_thread.post_count
    limit, page = h.paging_sanitizer(limit, page, count)
    requested_version = kw.pop('version', None)
    return dict(post=self._get_version(requested_version),
                base_post=self.post,
                page=page,
                limit=limit,
                count=count)
def test_paging_sanitizer():
    """paging_sanitizer clamps limit/page against a total, 0- or 1-based."""
    # input (limit, page, total[, zero_based]): output (limit, page)
    test_data = {
        (0, 0, 0): (1, 0),
        ('1', '1', 1): (1, 0),
        (5, '10', 25): (5, 4),
        ('5', 10, 25, False): (5, 5),
        (5, '-1', 25): (5, 0),
        ('5', -1, 25, False): (5, 1),
        (5, '3', 25): (5, 3),
        ('5', 3, 25, False): (5, 3)
    }
    # dict.items() works on Python 3; .iteritems() does not exist there.
    # Also avoid `input` as a name: it shadows the builtin.
    for args, expected in test_data.items():
        assert h.paging_sanitizer(*args) == expected
def test_paging_sanitizer():
    """Verify paging_sanitizer output for 0- and 1-based paging inputs."""
    # input (limit, page, total[, zero_based]): output (limit, page)
    test_data = {
        (0, 0, 0): (1, 0),
        ("1", "1", 1): (1, 0),
        (5, "10", 25): (5, 4),
        ("5", 10, 25, False): (5, 5),
        (5, "-1", 25): (5, 0),
        ("5", -1, 25, False): (5, 1),
        (5, "3", 25): (5, 3),
        ("5", 3, 25, False): (5, 3),
    }
    # .iteritems() is Python 2 only and raises AttributeError on Python 3;
    # plain .items() iterates identically on both.
    for args, expected in test_data.items():
        assert h.paging_sanitizer(*args) == expected
def _default(self, tool_name, page=0, limit=200, **kw):
    """Paginated listing of all sitemap entries for one tool type."""
    c.page_list = W.page_list
    tool_name = tool_name.lower()
    entries = c.project.sitemap(included_tools=[tool_name],
                                tools_only=True,
                                per_tool_limit=None)
    total = len(entries)
    limit, page = h.paging_sanitizer(limit, page, total)
    begin = page * limit
    end = begin + limit
    if entries:
        label = g.entry_points['tool'][tool_name].tool_label
    else:
        label = None
    return dict(page=page,
                limit=limit,
                total_entries=total,
                entries=entries[begin:end],
                type=label)
def index(self, page=0, limit=None, **kw):
    """Render a post (or one of its versions) plus its comment thread."""
    c.form = W.view_post_form
    c.attachment_list = W.attachment_list
    c.subscribe_form = W.subscribe_form
    c.thread = W.thread
    count = self.post.discussion_thread.post_count
    limit, page, _ = g.handle_paging(limit, page)
    limit, page = h.paging_sanitizer(limit, page, count)
    shown = self._get_version(kw.pop('version', None))
    return dict(post=shown,
                base_post=self.post,
                page=page,
                limit=limit,
                count=count,
                subscribed=M.Mailbox.subscribed(artifact=self.post))
def index(self, page=0, limit=None, **kw):
    """Show a post version together with its paginated discussion."""
    if self.post.state == "draft":
        # Unpublished drafts require write permission to view.
        require_access(self.post, "write")
    c.form = W.view_post_form
    c.subscribe_form = W.subscribe_form
    c.thread = W.thread
    count = self.post.discussion_thread.post_count
    limit, page, _ = g.handle_paging(limit, page)
    limit, page = h.paging_sanitizer(limit, page, count)
    displayed = self._get_version(kw.pop("version", None))
    subscribed = M.Mailbox.subscribed(artifact=self.post)
    c.subscribe_form.tool_subscribed = M.Mailbox.subscribed()
    return dict(post=displayed, base_post=self.post, page=page,
                limit=limit, count=count, subscribed=subscribed)
def index(self, page=0, limit=25):
    """List site notifications, newest first, one page at a time."""
    c.page_list = W.page_list
    c.page_size = W.page_size
    limit, page = h.paging_sanitizer(limit, page)
    cursor = M.notification.SiteNotification.query.find().sort('_id', -1)
    total = cursor.count()
    # paginate.Page is 1-based while our page numbers are 0-based.
    current = paginate.Page(cursor.all(), page + 1, limit)
    return dict(notifications=current,
                count=total,
                page_url=page,
                limit=limit)
def index(self, page=0, limit=25):
    """Paginated view of all site notifications (most recent first)."""
    c.page_list = W.page_list
    c.page_size = W.page_size
    limit, page = h.paging_sanitizer(limit, page)
    notif_query = M.notification.SiteNotification.query.find().sort('_id', -1)
    notif_count = notif_query.count()
    # paginate.Page numbers pages from 1, hence the + 1.
    page_of_notifs = paginate.Page(notif_query.all(), page + 1, limit)
    result = {
        'notifications': page_of_notifs,
        'count': notif_count,
        'page_url': page,
        'limit': limit,
    }
    return result
def index(self, page=0, limit=None, **kw):
    """Display a (possibly historical) post and its discussion thread."""
    if self.post.state == 'draft':
        require_access(self.post, 'write')
    c.form = W.view_post_form
    c.subscribe_form = W.subscribe_form
    c.thread = W.thread
    thread_count = self.post.discussion_thread.post_count
    limit, page, _ = g.handle_paging(limit, page)
    limit, page = h.paging_sanitizer(limit, page, thread_count)
    wanted_version = kw.pop('version', None)
    shown_post = self._get_version(wanted_version)
    is_subscribed = M.Mailbox.subscribed(artifact=self.post)
    c.subscribe_form.tool_subscribed = M.Mailbox.subscribed()
    return dict(post=shown_post,
                base_post=self.post,
                page=page,
                limit=limit,
                count=thread_count,
                subscribed=is_subscribed)
def test_paging_sanitizer():
    """paging_sanitizer clamps limit/page, coerces strings, survives junk."""
    # input (limit, page[, total[, zero_based]]): output (limit, page)
    test_data = {
        (0, 0, 0): (1, 0),
        ('1', '1', 1): (1, 0),
        (5, '10', 25): (5, 4),
        ('5', 10, 25, False): (5, 5),
        (5, '-1', 25): (5, 0),
        ('5', -1, 25, False): (5, 1),
        (5, '3', 25): (5, 3),
        ('5', 3, 25, False): (5, 3),
        (9999999, 0, 0): (500, 0),
        (10, None, 0): (10, 0),
        (10, 0): (10, 0),
        ('junk', 'more junk'): (25, 0),
    }
    # Plain .items() iterates the same on Python 2 and 3 — the six shim is
    # unneeded here.  Also avoid `input` as a name (shadows the builtin).
    for args, expected in test_data.items():
        assert h.paging_sanitizer(*args) == expected
def index(self, page=0, limit=250, **kw):
    """Render the merge-request page: discussion, status, and commit list.

    Commits are shown directly when already determined
    (``self.req.new_commits is not None``); otherwise a background task is
    started (at most once) to compute them and the page renders with an
    empty list plus ``commits_task_started=True`` so the client can poll.
    """
    c.thread = self.thread_widget
    c.log_widget = self.log_widget
    c.mr_dispose_form = self.mr_dispose_form
    c.subscribe_form = self.subscribe_form
    limit, page = h.paging_sanitizer(limit, page)
    with self.req.push_downstream_context():
        downstream_app = c.app
    # When subscribed at the tool level, per-artifact subscription state is
    # reported as False (the tool subscription supersedes it).
    tool_subscribed = M.Mailbox.subscribed()
    if tool_subscribed:
        subscribed = False
    else:
        subscribed = M.Mailbox.subscribed(artifact=self.req)
    result = dict(
        downstream_app=downstream_app,
        req=self.req,
        can_merge=self.req.can_merge(),
        can_merge_status=self.req.can_merge_task_status(),
        merge_status=self.req.merge_task_status(),
        page=page,
        limit=limit,
        count=self.req.discussion_thread.post_count,
        subscribed=subscribed,
        commits_task_started=False,
    )
    if self.req.new_commits is not None:
        try:
            result['commits'] = self.req.commits
        except Exception:
            # Commit lookup can fail; log and degrade to an empty list.
            log.info(
                "Can't get commits for merge request %s",
                self.req.url(),
                exc_info=True)
            result['commits'] = []
            result['error'] = True
    else:
        # Commits not yet determined: kick off the task unless one is
        # already running or finished.
        if self.req.commits_task_status() not in ('busy', 'ready'):
            allura.tasks.repo_tasks.determine_mr_commits.post(self.req._id)
        result['commits'] = []
        result['commits_task_started'] = True
    return result
def tools(self, page=None, limit=200, **kw):
    """Admin view of the project's installed tools, paginated.

    When *page* is not given it defaults to ``total_mounts // limit``,
    i.e. the final page of mounts.
    """
    c.markdown_editor = W.markdown_editor
    c.label_edit = W.label_edit
    c.mount_delete = W.mount_delete
    c.admin_modal = W.admin_modal
    c.install_modal = W.install_modal
    c.page_list = W.page_list
    mounts = c.project.ordered_mounts()
    total_mounts = len(mounts)
    # Floor division: on Python 3 `/` yields a float, which would make the
    # default page (and the slice bounds below) a float.
    limit, page = h.paging_sanitizer(
        limit, page or total_mounts // int(limit), total_mounts)
    start = page * limit
    return dict(
        page=page,
        limit=limit,
        total_mounts=total_mounts,
        mounts=mounts[start:start + limit],
        installable_tools=AdminApp.installable_tools_for(c.project),
        roles=M.ProjectRole.query.find(
            dict(project_id=c.project.root_project._id)).sort('_id').all(),
        categories=M.ProjectCategory.query.find(
            dict(parent_id=None)).sort('label').all())
def _get_activities_data(self, **kw):
    """Collect the activity-stream timeline data for this project/user."""
    if not asbool(config.get('activitystream.enabled', False)):
        raise exc.HTTPNotFound()
    c.follow_toggle = W.follow_toggle
    c.page_list = W.page_list
    if c.project.is_user_project:
        followee = c.project.user_project_of
        actor_only = followee != c.user
    else:
        followee = c.project
        actor_only = False
    following = g.director.is_connected(c.user, followee)
    limit, page = h.paging_sanitizer(kw.get('limit', 100), kw.get('page', 0))
    # Over-fetch on a small first page, since the permission check below
    # may filter entries out.
    over_fetch = page == 0 and limit <= 10
    extra_limit = limit * 20 if over_fetch else limit
    timeline = g.director.get_timeline(followee, page,
                                       limit=extra_limit,
                                       actor_only=actor_only)
    visible = list(islice(filter(perm_check(c.user), timeline), 0, limit))
    if over_fetch:
        # We asked for extras, so judge "has more" by the filtered result.
        has_more = len(visible) == limit
    else:
        # We fetched exactly `limit`; a full fetch implies more may exist.
        has_more = len(timeline) == limit
    return dict(followee=followee,
                following=following,
                timeline=visible,
                page=page,
                limit=limit,
                has_more=has_more,
                actor_only=actor_only)
def _get_activities_data(self, **kw):
    """Gather timeline/follow data for the activity-stream page.

    Raises HTTPNotFound when the activitystream feature is disabled.
    """
    activity_enabled = asbool(config.get('activitystream.enabled', False))
    if not activity_enabled:
        raise exc.HTTPNotFound()
    c.follow_toggle = W.follow_toggle
    c.page_list = W.page_list
    if c.project.is_user_project:
        followee = c.project.user_project_of
        actor_only = followee != c.user
    else:
        followee = c.project
        actor_only = False
    following = g.director.is_connected(c.user, followee)
    limit, page = h.paging_sanitizer(kw.get('limit', 100), kw.get('page', 0))
    extra_limit = limit
    # get more in case perm check filters some out
    if page == 0 and limit <= 10:
        extra_limit = limit * 20
    timeline = g.director.get_timeline(followee, page, limit=extra_limit,
                                       actor_only=actor_only)
    # builtin filter() is lazy on Python 3; itertools.ifilter no longer
    # exists there and would raise NameError.
    filtered_timeline = list(
        islice(filter(perm_check(c.user), timeline), 0, limit))
    if extra_limit == limit:
        # if we didn't ask for extra, then we expect there's more if we got
        # all we asked for
        has_more = len(timeline) == limit
    else:
        # if we did ask for extra, check filtered result
        has_more = len(filtered_timeline) == limit
    return dict(
        followee=followee,
        following=following,
        timeline=filtered_timeline,
        page=page,
        limit=limit,
        has_more=has_more,
        actor_only=actor_only)
def log(self, limit=25, path=None, **kw):
    """Commit log view for this commit, optionally limited to one path."""
    is_file = c.app.repo.is_file(path, self._commit._id) if path else False
    limit, _ = h.paging_sanitizer(limit, 0)
    # Take limit + 1 entries; a surplus entry means another page follows.
    log_iter = c.app.repo.log(revs=self._commit._id, path=path,
                              id_only=False, page_size=limit + 1)
    entries = list(islice(log_iter, limit + 1))
    next_commit = entries.pop() if len(entries) > limit else None
    c.log_widget = self.log_widget
    return dict(username=c.user._id and c.user.username,
                branch=None,
                log=entries,
                next_commit=next_commit,
                limit=limit,
                path=path,
                is_file=is_file,
                **kw)
def index(self, limit=25, page=None, **kw):
    """Return the discussion thread as JSON-ready data, paginated."""
    limit, page = h.paging_sanitizer(limit, page)
    thread_json = self.thread.__json__(limit=limit, page=page)
    return dict(thread=thread_json)