def index(self):
    """Render the personal journal page, or just its data fragment for XHR."""
    c.user = User.get(c.rhodecode_user.user_id)
    requested_page = safe_int(request.GET.get('page', 1), 1)

    user_following = self.sa.query(UserFollowing)\
        .filter(UserFollowing.user_id == c.rhodecode_user.user_id)\
        .options(joinedload(UserFollowing.follows_repository))\
        .all()
    journal_entries = self._get_journal_data(user_following)

    def _page_url(**kw):
        # keep the active filter query param on pagination links
        return url.current(filter=c.search_term, **kw)

    c.journal_pager = Page(journal_entries, page=requested_page,
                           items_per_page=20, url=_page_url)
    c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager)
    c.journal_data = render('journal/journal_data.html')

    if request.is_xhr:
        return c.journal_data
    return render('journal/journal.html')
def index(self):
    """GET /_admin/notifications: All items in the collection"""
    # url('notifications')
    c.user = c.rhodecode_user
    # read the requested type filter once; it is reused both for the
    # notification query and for computing the active UI filter below
    requested_types = request.GET.getall('type')
    notif = NotificationModel().get_for_user(
        c.rhodecode_user.user_id, filter_=requested_types)

    p = safe_int(request.GET.get('page', 1), 1)
    notifications_url = webhelpers.paginate.PageURL(
        url('notifications'), request.GET)
    c.notifications = Page(notif, page=p, items_per_page=10,
                           url=notifications_url)

    c.pull_request_type = Notification.TYPE_PULL_REQUEST
    c.comment_type = [
        Notification.TYPE_CHANGESET_COMMENT,
        Notification.TYPE_PULL_REQUEST_COMMENT
    ]

    # translate the raw filter into the name the template highlights
    c.current_filter = 'all'
    if requested_types == [c.pull_request_type]:
        c.current_filter = 'pull_request'
    elif requested_types == c.comment_type:
        c.current_filter = 'comment'

    if request.is_xhr:
        return render('admin/notifications/notifications_data.html')
    return render('admin/notifications/notifications.html')
def index(self):
    """Render the admin journal/log page with optional filtering."""
    users_log = UserLog.query()\
        .options(joinedload(UserLog.user))\
        .options(joinedload(UserLog.repository))

    # FILTERING
    c.search_term = request.GET.get('filter')
    # the original wrapped this in `try: ... except Exception: raise`,
    # a no-op; a bad filter should crash loudly, so just let it propagate
    users_log = _journal_filter(users_log, c.search_term)

    users_log = users_log.order_by(UserLog.action_date.desc())

    p = safe_int(request.GET.get('page', 1), 1)

    def url_generator(**kw):
        # preserve the active filter across pagination links
        return url.current(filter=c.search_term, **kw)

    c.users_log = Page(users_log, page=p, items_per_page=10,
                       url=url_generator)
    c.log_data = render('admin/admin_log.html')

    if request.environ.get('HTTP_X_PARTIAL_XHR'):
        return c.log_data
    return render('admin/admin.html')
def mark_all_read(self):
    """Mark every notification of the current user as read (partial-XHR only)."""
    if not request.environ.get('HTTP_X_PARTIAL_XHR'):
        # non-partial requests produce no response body
        return

    model = NotificationModel()
    # mark all read
    model.mark_all_read_for_user(self.rhodecode_user.user_id,
                                 filter_=request.GET.getall('type'))
    Session().commit()

    c.user = self.rhodecode_user
    remaining = model.get_for_user(self.rhodecode_user.user_id,
                                   filter_=request.GET.getall('type'))
    c.notifications = Page(remaining, page=1, items_per_page=10)
    return render('admin/notifications/notifications_data.html')
def followers(self, repo_name):
    """Render the followers listing of a repository (full page or PJAX data)."""
    requested_page = safe_int(request.GET.get('page', 1), 1)
    repo_id = c.rhodecode_db_repo.repo_id

    follower_rows = UserFollowing.get_repo_followers(repo_id)\
        .order_by(UserFollowing.follows_from)
    c.followers_pager = Page(follower_rows, page=requested_page,
                             items_per_page=20)
    c.followers_data = render('/followers/followers_data.html')

    if request.environ.get('HTTP_X_PJAX'):
        return c.followers_data
    return render('/followers/followers.html')
def show_all(self, repo_name):
    """Render all pull requests of a repository (full page or XHR data)."""
    c.repo_name = repo_name
    c.pull_requests = PullRequestModel().get_all(repo_name)

    requested_page = safe_int(request.GET.get('page', 1), 1)
    c.pullrequests_pager = Page(c.pull_requests, page=requested_page,
                                items_per_page=10)
    c.pullrequest_data = render('/pullrequests/pullrequest_data.html')

    if request.environ.get('HTTP_X_PARTIAL_XHR'):
        return c.pullrequest_data
    return render('/pullrequests/pullrequest_show_all.html')
def mark_all_read(self):
    """Mark all of the current user's notifications as read (XHR only)."""
    if not request.is_xhr:
        # only answer asynchronous requests
        return

    model = NotificationModel()
    # mark all read
    model.mark_all_read_for_user(c.rhodecode_user.user_id,
                                 filter_=request.GET.getall('type'))
    Session().commit()

    c.user = c.rhodecode_user
    remaining = model.get_for_user(c.rhodecode_user.user_id,
                                   filter_=request.GET.getall('type'))
    pager_url = webhelpers.paginate.PageURL(
        url('notifications'), request.GET)
    c.notifications = Page(remaining, page=1, items_per_page=10,
                           url=pager_url)
    return render('admin/notifications/notifications_data.html')
def forks(self, repo_name):
    """Render the forks listing of a repository (full page or XHR data),
    showing only forks the current user has read access to."""
    p = safe_int(request.GET.get('page', 1), 1)
    repo_id = c.rhodecode_db_repo.repo_id

    # build the permission checker once instead of once per fork
    can_read = HasRepoPermissionAny('repository.read', 'repository.write',
                                    'repository.admin')
    d = [r for r in Repository.get_repo_forks(repo_id)
         if can_read(r.repo_name, 'get forks check')]

    c.forks_pager = Page(d, page=p, items_per_page=20)
    c.forks_data = render('/forks/forks_data.html')

    if request.environ.get('HTTP_X_PARTIAL_XHR'):
        return c.forks_data
    return render('/forks/forks.html')
def public_journal(self):
    """Render the public journal page, or just its data fragment for XHR."""
    requested_page = safe_int(request.GET.get('page', 1), 1)

    c.following = self.sa.query(UserFollowing)\
        .filter(UserFollowing.user_id == self.rhodecode_user.user_id)\
        .options(joinedload(UserFollowing.follows_repository))\
        .all()
    journal_entries = self._get_journal_data(c.following)

    c.journal_pager = Page(journal_entries, page=requested_page,
                           items_per_page=20)
    c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager)
    c.journal_data = render('journal/journal_data.html')

    if request.environ.get('HTTP_X_PARTIAL_XHR'):
        return c.journal_data
    return render('journal/public_journal.html')
def index(self, format='html'):
    """GET /_admin/notifications: All items in the collection"""
    # url('notifications')
    c.user = self.rhodecode_user
    # read the type filter once; reused for the query and the UI state
    requested_types = request.GET.getall('type')
    notif = NotificationModel().get_for_user(self.rhodecode_user.user_id,
                                             filter_=requested_types)

    p = safe_int(request.GET.get('page', 1), 1)
    c.notifications = Page(notif, page=p, items_per_page=10)

    c.pull_request_type = Notification.TYPE_PULL_REQUEST
    c.comment_type = [Notification.TYPE_CHANGESET_COMMENT,
                      Notification.TYPE_PULL_REQUEST_COMMENT]

    # translate the raw filter into the name the template highlights
    c.current_filter = 'all'
    if requested_types == [c.pull_request_type]:
        c.current_filter = 'pull_request'
    elif requested_types == c.comment_type:
        c.current_filter = 'comment'

    return render('admin/notifications/notifications.html')
def index(self, format='html'):
    """GET /admin/gists: All items in the collection"""
    # url('gists')
    not_default_user = c.rhodecode_user.username != 'default'
    c.show_private = request.GET.get('private') and not_default_user
    c.show_public = request.GET.get('public') and not_default_user

    # base query: drop expired gists, newest first
    gists = Gist().query()\
        .filter(or_(Gist.gist_expires == -1,
                    Gist.gist_expires >= time.time()))\
        .order_by(Gist.created_on.desc())

    # private/public listings are scoped to the logged-in user's own
    # gists; the default view shows everyone's public gists
    if c.show_private:
        c.gists = gists.filter(Gist.gist_type == Gist.GIST_PRIVATE)\
            .filter(Gist.gist_owner == c.rhodecode_user.user_id)
    elif c.show_public:
        c.gists = gists.filter(Gist.gist_type == Gist.GIST_PUBLIC)\
            .filter(Gist.gist_owner == c.rhodecode_user.user_id)
    else:
        c.gists = gists.filter(Gist.gist_type == Gist.GIST_PUBLIC)

    requested_page = safe_int(request.GET.get('page', 1), 1)
    c.gists_pager = Page(c.gists, page=requested_page, items_per_page=10)
    return render('admin/gists/index.html')
def index(self, repo_name=None):
    """Render the full-text search page; when a query is present, run it
    against the whoosh index and paginate the results.

    :param repo_name: optional repository name to scope the search to
    """
    c.repo_name = repo_name
    c.formated_results = []
    c.runtime = ''
    c.cur_query = request.GET.get('q', None)
    c.cur_type = request.GET.get('type', 'content')
    # map the requested search type onto the schema field to query
    c.cur_search = search_type = {'content': 'content',
                                  'commit': 'message',
                                  'path': 'path',
                                  'repository': 'repository'
                                  }.get(c.cur_type, 'content')

    # pick the whoosh index and schema matching the search type
    index_name = {'content': IDX_NAME,
                  'commit': CHGSET_IDX_NAME,
                  'path': IDX_NAME
                  }.get(c.cur_type, IDX_NAME)

    schema_defn = {'content': SCHEMA,
                   'commit': CHGSETS_SCHEMA,
                   'path': SCHEMA
                   }.get(c.cur_type, SCHEMA)

    log.debug('IDX: %s' % index_name)
    log.debug('SCHEMA: %s' % schema_defn)

    if c.cur_query:
        # queries are matched case-insensitively
        cur_query = c.cur_query.lower()
        log.debug(cur_query)

    if c.cur_query:
        p = safe_int(request.GET.get('page', 1), 1)
        highlight_items = set()
        try:
            idx = open_dir(config['app_conf']['index_dir'],
                           indexname=index_name)
            searcher = idx.searcher()

            qp = QueryParser(search_type, schema=schema_defn)
            if c.repo_name:
                # scope the query to a single repository
                cur_query = u'repository:%s %s' % (c.repo_name, cur_query)
            try:
                query = qp.parse(unicode(cur_query))
                # extract words for highlight
                if isinstance(query, Phrase):
                    highlight_items.update(query.words)
                elif isinstance(query, Prefix):
                    highlight_items.add(query.text)
                else:
                    for i in query.all_terms():
                        if i[0] in ['content', 'message']:
                            highlight_items.add(i[1])

                matcher = query.matcher(searcher)

                log.debug('query: %s' % query)
                log.debug('hl terms: %s' % highlight_items)
                results = searcher.search(query)
                res_ln = len(results)
                c.runtime = '%s results (%.3f seconds)' % (
                    res_ln, results.runtime)

                def url_generator(**kw):
                    # keep the query and type params on pagination links
                    q = urllib.quote(safe_str(c.cur_query))
                    return update_params("?q=%s&type=%s" \
                    % (q, safe_str(c.cur_type)), **kw)
                repo_location = RepoModel().repos_path
                c.formated_results = Page(
                    WhooshResultWrapper(search_type, searcher, matcher,
                                        highlight_items, repo_location),
                    page=p, item_count=res_ln,
                    items_per_page=10, url=url_generator)
            except QueryParserError:
                c.runtime = _('Invalid search query. Try quoting it.')
            # NOTE(review): close only runs when open_dir/searcher setup
            # succeeded; backend errors below leave it unclosed
            searcher.close()
        except (EmptyIndexError, IOError):
            log.error(traceback.format_exc())
            log.error('Empty Index data')
            c.runtime = _('There is no index to search in. '
                          'Please run whoosh indexer')
        except (Exception):
            log.error(traceback.format_exc())
            c.runtime = _('An error occurred during this search operation')

    # Return a rendered template
    return render('/search/search.html')
def index(self, repo_name=None):
    """Validate the search params, run the configured search backend, and
    render the results page.

    :param repo_name: optional repository name to scope the search to
    """
    searcher = searcher_from_config(config)
    formatted_results = []
    execution_time = ''

    schema = validation_schema.SearchParamsSchema()

    search_params = {}
    errors = []
    try:
        # validate/normalize all incoming GET params in one pass
        search_params = schema.deserialize(
            dict(search_query=request.GET.get('q'),
                 search_type=request.GET.get('type'),
                 search_sort=request.GET.get('sort'),
                 page_limit=request.GET.get('page_limit'),
                 requested_page=request.GET.get('page'))
        )
    except validation_schema.Invalid as e:
        errors = e.children

    def url_generator(**kw):
        # NOTE: closes over search_query/search_type assigned just below;
        # safe because the closure is only invoked later by the paginator
        q = urllib.quote(safe_str(search_query))
        return update_params(
            "?q=%s&type=%s" % (q, safe_str(search_type)), **kw)

    search_query = search_params.get('search_query')
    search_type = search_params.get('search_type')
    search_sort = search_params.get('search_sort')

    if search_params.get('search_query'):
        page_limit = search_params['page_limit']
        requested_page = search_params['requested_page']

        c.perm_user = AuthUser(user_id=c.rhodecode_user.user_id,
                               ip_addr=self.ip_addr)

        try:
            search_result = searcher.search(
                search_query, search_type, c.perm_user, repo_name,
                requested_page, page_limit, search_sort)

            formatted_results = Page(
                search_result['results'], page=requested_page,
                item_count=search_result['count'],
                items_per_page=page_limit, url=url_generator)
        finally:
            # always release backend resources, even if search raised
            searcher.cleanup()

        # only reached when searcher.search() did not raise
        if not search_result['error']:
            execution_time = '%s results (%.3f seconds)' % (
                search_result['count'], search_result['runtime'])
        elif not errors:
            # surface backend-reported errors through the same channel
            # as schema validation errors
            node = schema['search_query']
            errors = [
                validation_schema.Invalid(node, search_result['error'])]

    c.sort = search_sort
    c.url_generator = url_generator
    c.errors = errors
    c.formatted_results = formatted_results
    c.runtime = execution_time
    c.cur_query = search_query
    c.search_type = search_type
    # Return a rendered template
    return render('/search/search.html')
def index(self):
    """Render the logged-in user's journal/dashboard page.

    Builds the paginated journal feed, the JSON data for the user's own
    repositories grid and the watched-repositories grid, then renders
    either the XHR data fragment or the full page.
    """
    # Return a rendered template
    p = safe_int(request.GET.get('page', 1), 1)
    c.user = User.get(self.rhodecode_user.user_id)
    c.following = self.sa.query(UserFollowing)\
        .filter(UserFollowing.user_id == self.rhodecode_user.user_id)\
        .options(joinedload(UserFollowing.follows_repository))\
        .all()

    journal = self._get_journal_data(c.following)

    def url_generator(**kw):
        # keep the current filter param on pagination links
        return url.current(filter=c.search_term, **kw)

    c.journal_pager = Page(journal, page=p, items_per_page=20,
                           url=url_generator)
    c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager)

    c.journal_data = render('journal/journal_data.html')
    if request.environ.get('HTTP_X_PARTIAL_XHR'):
        return c.journal_data

    repos_list = Session().query(Repository)\
        .filter(Repository.user_id == self.rhodecode_user.user_id)\
        .order_by(func.lower(Repository.repo_name)).all()

    repos_data = RepoModel().get_repos_as_dict(repos_list=repos_list,
                                               admin=True)
    #json used to render the grid
    c.data = json.dumps(repos_data)

    watched_repos_data = []

    ## watched repos
    _render = RepoModel._render_datatable

    def quick_menu(repo_name):
        # quick-actions dropdown cell
        return _render('quick_menu', repo_name)

    def repo_lnk(name, rtype, private, fork_of):
        # repo-name link cell
        return _render('repo_name', name, rtype, private, fork_of,
                       short_name=False, admin=False)

    def last_rev(repo_name, cs_cache):
        # last-changeset cell, fed from the cached changeset data
        return _render('revision', repo_name, cs_cache.get('revision'),
                       cs_cache.get('raw_id'), cs_cache.get('author'),
                       cs_cache.get('message'))

    def toogle_follow(repo_id):
        # follow/unfollow toggle cell
        return _render('toggle_follow', repo_id)

    # NOTE(review): unused desc()/repo_actions()/owner_actions() helpers
    # that were defined here but never called have been removed.
    for entry in c.following:
        repo = entry.follows_repository
        cs_cache = repo.changeset_cache
        row = {
            "menu": quick_menu(repo.repo_name),
            "raw_name": repo.repo_name.lower(),
            "name": repo_lnk(repo.repo_name, repo.repo_type,
                             repo.private, repo.fork),
            "last_changeset": last_rev(repo.repo_name, cs_cache),
            "raw_tip": cs_cache.get('revision'),
            "action": toogle_follow(repo.repo_id)
        }
        watched_repos_data.append(row)

    c.watched_data = json.dumps({
        "totalRecords": len(c.following),
        "startIndex": 0,
        "sort": "name",
        "dir": "asc",
        "records": watched_repos_data
    })
    return render('journal/journal.html')