def __get_desc(self, cs):
    desc_msg = [(_('%s committed on %s') %
                 (h.person(cs.author), h.fmt_date(cs.date))) + '<br/>']
    #branches, tags, bookmarks
    if cs.branch:
        desc_msg.append('branch: %s<br/>' % cs.branch)
    if h.is_hg(c.rhodecode_repo):
        for book in cs.bookmarks:
            desc_msg.append('bookmark: %s<br/>' % book)
    for tag in cs.tags:
        desc_msg.append('tag: %s<br/>' % tag)
    diff_processor, changes = self.__changes(cs)
    # rev link
    _url = url('changeset_home', repo_name=cs.repository.name,
               revision=cs.raw_id, qualified=True)
    desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8]))

    desc_msg.append('<pre>')
    desc_msg.append(cs.message)
    desc_msg.append('\n')
    desc_msg.extend(changes)
    if self.include_diff:
        desc_msg.append('\n\n')
        desc_msg.append(diff_processor.as_raw())
    desc_msg.append('</pre>')
    return map(safe_unicode, desc_msg)

def get_users(self, name_contains=None, limit=20, only_active=True):
    # TODO: mikhail: move this method to the UserModel.
    query = self.sa.query(User)
    if only_active:
        query = query.filter(User.active == true())

    if name_contains:
        ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
        query = query.filter(
            or_(User.name.ilike(ilike_expression),
                User.lastname.ilike(ilike_expression),
                User.username.ilike(ilike_expression)))
        query = query.limit(limit)
    users = query.all()

    _users = [{
        'id': user.user_id,
        'first_name': user.name,
        'last_name': user.lastname,
        'username': user.username,
        'icon_link': h.gravatar_url(user.email, 14),
        'value_display': h.person(user.email),
        'value': user.username,
        'value_type': 'user',
        'active': user.active,
    } for user in users]
    return _users

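# --- Illustrative usage sketch (not part of the original code) ---
# A minimal, self-contained example of the filter pattern get_users() relies on:
# an or_() of case-insensitive ilike() matches across several columns, capped by
# limit(). The AppUser model, the in-memory SQLite engine and the search term are
# stand-ins, and the sketch assumes SQLAlchemy 1.4+ rather than the much older
# version the surrounding code was written against.
from sqlalchemy import Column, Integer, String, create_engine, or_
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()


class AppUser(Base):
    __tablename__ = 'app_users'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    lastname = Column(String)
    username = Column(String)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add_all([AppUser(name='Marcin', lastname='K', username='marcink'),
                 AppUser(name='John', lastname='Doe', username='jdoe')])
session.commit()

term = u'%{}%'.format('mar')  # same "substring anywhere" expression as above
matches = (session.query(AppUser)
           .filter(or_(AppUser.name.ilike(term),
                       AppUser.lastname.ilike(term),
                       AppUser.username.ilike(term)))
           .limit(20)
           .all())
print([u.username for u in matches])  # -> ['marcink']
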
def __get_desc(self, cs):
    desc_msg = []
    desc_msg.append((_('%s committed on %s') %
                     (h.person(cs.author), h.fmt_date(cs.date))) + '<br/>')
    #branches, tags, bookmarks
    if cs.branch:
        desc_msg.append('branch: %s<br/>' % cs.branch)
    if h.is_hg(c.rhodecode_repo):
        for book in cs.bookmarks:
            desc_msg.append('bookmark: %s<br/>' % book)
    for tag in cs.tags:
        desc_msg.append('tag: %s<br/>' % tag)
    diff_processor, changes = self.__changes(cs)
    # rev link
    _url = url('changeset_home', repo_name=cs.repository.name,
               revision=cs.raw_id, qualified=True)
    desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8]))

    desc_msg.append('<pre>')
    desc_msg.append(cs.message)
    desc_msg.append('\n')
    desc_msg.extend(changes)
    if self.include_diff:
        desc_msg.append('\n\n')
        desc_msg.append(diff_processor.as_raw())
    desc_msg.append('</pre>')
    return map(safe_unicode, desc_msg)

def index(self):
    c.groups = self.scm_model.get_repos_groups()
    c.group = None

    if c.visual.lightweight_dashboard is False:
        c.repos_list = self.scm_model.get_repos()
    ## lightweight version of dashboard
    else:
        c.repos_list = Repository.query()\
            .filter(Repository.group_id == None)\
            .order_by(func.lower(Repository.repo_name))\
            .all()

        repos_data = []
        total_records = len(c.repos_list)

        _tmpl_lookup = rhodecode.CONFIG['pylons.app_globals'].mako_lookup
        template = _tmpl_lookup.get_template('data_table/_dt_elements.html')

        quick_menu = lambda repo_name: (template.get_def("quick_menu")
                                        .render(repo_name, _=_, h=h, c=c))
        repo_lnk = lambda name, rtype, private, fork_of: (
            template.get_def("repo_name")
            .render(name, rtype, private, fork_of, short_name=False,
                    admin=False, _=_, h=h, c=c))
        last_change = lambda last_change: (template.get_def("last_change")
                                           .render(last_change, _=_, h=h, c=c))
        rss_lnk = lambda repo_name: (template.get_def("rss")
                                     .render(repo_name, _=_, h=h, c=c))
        atom_lnk = lambda repo_name: (template.get_def("atom")
                                      .render(repo_name, _=_, h=h, c=c))

        def desc(desc):
            if c.visual.stylify_metatags:
                return h.urlify_text(h.desc_stylize(h.truncate(desc, 60)))
            else:
                return h.urlify_text(h.truncate(desc, 60))

        for repo in c.repos_list:
            repos_data.append({
                "menu": quick_menu(repo.repo_name),
                "raw_name": repo.repo_name.lower(),
                "name": repo_lnk(repo.repo_name, repo.repo_type,
                                 repo.private, repo.fork),
                "last_change": last_change(repo.last_db_change),
                "desc": desc(repo.description),
                "owner": h.person(repo.user.username),
                "rss": rss_lnk(repo.repo_name),
                "atom": atom_lnk(repo.repo_name),
            })

        c.data = json.dumps({
            "totalRecords": total_records,
            "startIndex": 0,
            "sort": "name",
            "dir": "asc",
            "records": repos_data
        })

    return render('/index.html')

def index(self, repo_name, revision, f_path, annotate=False):
    # redirect to given revision from form if given
    post_revision = request.POST.get('at_rev', None)
    if post_revision:
        cs = self.__get_cs_or_redirect(post_revision, repo_name)

    c.changeset = self.__get_cs_or_redirect(revision, repo_name)
    c.branch = request.GET.get('branch', None)
    c.f_path = f_path
    c.annotate = annotate
    c.changeset = self.__get_cs_or_redirect(revision, repo_name)
    cur_rev = c.changeset.revision

    # prev link
    try:
        prev_rev = c.rhodecode_repo.get_changeset(cur_rev).prev(c.branch)
        c.url_prev = url('files_home', repo_name=c.repo_name,
                         revision=prev_rev.raw_id, f_path=f_path)
        if c.branch:
            c.url_prev += '?branch=%s' % c.branch
    except (ChangesetDoesNotExistError, VCSError):
        c.url_prev = '#'

    # next link
    try:
        next_rev = c.rhodecode_repo.get_changeset(cur_rev).next(c.branch)
        c.url_next = url('files_home', repo_name=c.repo_name,
                         revision=next_rev.raw_id, f_path=f_path)
        if c.branch:
            c.url_next += '?branch=%s' % c.branch
    except (ChangesetDoesNotExistError, VCSError):
        c.url_next = '#'

    # files or dirs
    try:
        c.file = c.changeset.get_node(f_path)

        if c.file.is_file():
            c.load_full_history = False
            file_last_cs = c.file.last_changeset
            c.file_changeset = (c.changeset
                                if c.changeset.revision < file_last_cs.revision
                                else file_last_cs)
            #determine if we're on branch head
            _branches = c.rhodecode_repo.branches
            c.on_branch_head = revision in _branches.keys() + _branches.values()
            _hist = []
            c.file_history = []
            if c.load_full_history:
                c.file_history, _hist = self._get_node_history(c.changeset, f_path)

            c.authors = []
            for a in set([x.author for x in _hist]):
                c.authors.append((h.email(a), h.person(a)))
        else:
            c.authors = c.file_history = []
    except RepositoryError, e:
        h.flash(safe_str(e), category='error')
        raise HTTPNotFound()

def index(self, repo_name, revision, f_path, annotate=False):
    # redirect to given revision from form if given
    post_revision = request.POST.get('at_rev', None)
    if post_revision:
        cs = self.__get_cs_or_redirect(post_revision, repo_name)

    c.changeset = self.__get_cs_or_redirect(revision, repo_name)
    c.branch = request.GET.get('branch', None)
    c.f_path = f_path
    c.annotate = annotate
    c.changeset = self.__get_cs_or_redirect(revision, repo_name)
    cur_rev = c.changeset.revision

    # prev link
    try:
        prev_rev = c.rhodecode_repo.get_changeset(cur_rev).prev(c.branch)
        c.url_prev = url('files_home', repo_name=c.repo_name,
                         revision=prev_rev.raw_id, f_path=f_path)
        if c.branch:
            c.url_prev += '?branch=%s' % c.branch
    except (ChangesetDoesNotExistError, VCSError):
        c.url_prev = '#'

    # next link
    try:
        next_rev = c.rhodecode_repo.get_changeset(cur_rev).next(c.branch)
        c.url_next = url('files_home', repo_name=c.repo_name,
                         revision=next_rev.raw_id, f_path=f_path)
        if c.branch:
            c.url_next += '?branch=%s' % c.branch
    except (ChangesetDoesNotExistError, VCSError):
        c.url_next = '#'

    # files or dirs
    try:
        c.file = c.changeset.get_node(f_path)

        if c.file.is_file():
            c.load_full_history = False
            file_last_cs = c.file.last_changeset
            c.file_changeset = (c.changeset
                                if c.changeset.revision < file_last_cs.revision
                                else file_last_cs)
            #determine if we're on branch head
            _branches = c.rhodecode_repo.branches
            c.on_branch_head = revision in _branches.keys() + _branches.values()
            _hist = []
            c.file_history = []
            if c.load_full_history:
                c.file_history, _hist = self._get_node_history(c.changeset, f_path)

            c.authors = []
            for a in set([x.author for x in _hist]):
                c.authors.append((h.email(a), h.person(a)))
        else:
            c.authors = c.file_history = []
    except RepositoryError, e:
        h.flash(str(e), category='error')
        raise HTTPNotFound()

def create(self, created_by, org_repo, org_ref, other_repo, other_ref,
           revisions, reviewers, title, description=None):
    created_by_user = self._get_user(created_by)
    org_repo = self._get_repo(org_repo)
    other_repo = self._get_repo(other_repo)

    new = PullRequest()
    new.org_repo = org_repo
    new.org_ref = org_ref
    new.other_repo = other_repo
    new.other_ref = other_ref
    new.revisions = revisions
    new.title = title
    new.description = description
    new.author = created_by_user
    self.sa.add(new)
    Session().flush()

    #members
    for member in reviewers:
        _usr = self._get_user(member)
        reviewer = PullRequestReviewers(_usr, new)
        self.sa.add(reviewer)

    #notification to reviewers
    notif = NotificationModel()

    pr_url = h.url('pullrequest_show', repo_name=other_repo.repo_name,
                   pull_request_id=new.pull_request_id,
                   qualified=True,
                   )
    subject = safe_unicode(
        h.link_to(
            _('%(user)s wants you to review pull request #%(pr_id)s') % \
                {'user': created_by_user.username,
                 'pr_id': new.pull_request_id},
            pr_url
        )
    )
    body = description
    kwargs = {
        'pr_title': title,
        'pr_user_created': h.person(created_by_user.email),
        'pr_repo_url': h.url('summary_home',
                             repo_name=other_repo.repo_name,
                             qualified=True,),
        'pr_url': pr_url,
        'pr_revisions': revisions
    }
    notif.create(created_by=created_by_user, subject=subject, body=body,
                 recipients=reviewers,
                 type_=Notification.TYPE_PULL_REQUEST,
                 email_kwargs=kwargs)
    return new

def index(self, repo_name, revision, f_path, annotate=False):
    # redirect to given revision from form if given
    post_revision = request.POST.get('at_rev', None)
    if post_revision:
        cs = self.__get_cs_or_redirect(post_revision, repo_name)
        redirect(url('files_home', repo_name=c.repo_name,
                     revision=cs.raw_id, f_path=f_path))

    c.changeset = self.__get_cs_or_redirect(revision, repo_name)
    c.branch = request.GET.get('branch', None)
    c.f_path = f_path
    c.annotate = annotate
    cur_rev = c.changeset.revision

    # prev link
    try:
        prev_rev = c.rhodecode_repo.get_changeset(cur_rev).prev(c.branch)
        c.url_prev = url('files_home', repo_name=c.repo_name,
                         revision=prev_rev.raw_id, f_path=f_path)
        if c.branch:
            c.url_prev += '?branch=%s' % c.branch
    except (ChangesetDoesNotExistError, VCSError):
        c.url_prev = '#'

    # next link
    try:
        next_rev = c.rhodecode_repo.get_changeset(cur_rev).next(c.branch)
        c.url_next = url('files_home', repo_name=c.repo_name,
                         revision=next_rev.raw_id, f_path=f_path)
        if c.branch:
            c.url_next += '?branch=%s' % c.branch
    except (ChangesetDoesNotExistError, VCSError):
        c.url_next = '#'

    # files or dirs
    try:
        c.file = c.changeset.get_node(f_path)

        if c.file.is_file():
            _hist = c.changeset.get_file_history(f_path)
            c.file_history = self._get_node_history(c.changeset, f_path, _hist)

            c.authors = []
            for a in set([x.author for x in _hist]):
                c.authors.append((h.email(a), h.person(a)))
        else:
            c.authors = c.file_history = []
    except RepositoryError, e:
        h.flash(str(e), category='warning')
        redirect(h.url('files_home', repo_name=repo_name, revision='tip'))

def history(self, repo_name, revision, f_path, annotate=False):
    if request.environ.get("HTTP_X_PARTIAL_XHR"):
        c.changeset = self.__get_cs_or_redirect(revision, repo_name)
        c.f_path = f_path
        c.annotate = annotate
        c.file = c.changeset.get_node(f_path)
        if c.file.is_file():
            file_last_cs = c.file.last_changeset
            c.file_changeset = c.changeset \
                if c.changeset.revision < file_last_cs.revision \
                else file_last_cs
            c.file_history, _hist = self._get_node_history(c.changeset, f_path)

            c.authors = []
            for a in set([x.author for x in _hist]):
                c.authors.append((h.email(a), h.person(a)))

            return render("files/files_history_box.html")

def history(self, repo_name, revision, f_path, annotate=False):
    if request.environ.get('HTTP_X_PARTIAL_XHR'):
        c.changeset = self.__get_cs_or_redirect(revision, repo_name)
        c.f_path = f_path
        c.annotate = annotate
        c.file = c.changeset.get_node(f_path)
        if c.file.is_file():
            file_last_cs = c.file.last_changeset
            c.file_changeset = (c.changeset
                                if c.changeset.revision < file_last_cs.revision
                                else file_last_cs)
            c.file_history, _hist = self._get_node_history(c.changeset, f_path)

            c.authors = []
            for a in set([x.author for x in _hist]):
                c.authors.append((h.email(a), h.person(a)))

            return render('files/files_history_box.html')

def authors(self, repo_name, revision, f_path):
    commit = self.__get_commit_or_redirect(revision, repo_name)
    file_node = commit.get_node(f_path)
    if file_node.is_file():
        c.file_last_commit = file_node.last_commit
        if request.GET.get('annotate') == '1':
            # use _hist from annotation if annotation mode is on
            commit_ids = set(x[1] for x in file_node.annotate)
            _hist = (
                c.rhodecode_repo.get_commit(commit_id)
                for commit_id in commit_ids)
        else:
            _f_history, _hist = self._get_node_history(commit, f_path)
        c.file_author = False
        c.authors = []
        for author in set(commit.author for commit in _hist):
            c.authors.append((
                h.email(author),
                h.person(author, 'username_or_name_or_email')))

    return render('files/file_authors_box.html')

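# --- Illustrative sketch (not part of the original code) ---
# The controllers above feed raw VCS author strings such as
# "Jane Doe <jane@example.com>" through h.email() and h.person() to build
# (email, display name) pairs. email.utils.parseaddr gives a framework-free
# approximation of that split; author_pair() is a hypothetical helper, not a
# RhodeCode API.
from email.utils import parseaddr


def author_pair(author):
    """Return (email, display name) for a 'Name <email>' author string."""
    name, addr = parseaddr(author)
    return addr, (name or addr or author)


history_authors = set(['Jane Doe <jane@example.com>', 'bot <bot@example.com>'])
authors = [author_pair(a) for a in history_authors]
# e.g. [('jane@example.com', 'Jane Doe'), ('bot@example.com', 'bot')]
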
def get_repos_as_dict(self, repos_list=None, admin=False, perm_check=True,
                      super_user_actions=False):
    _render = self._render_datatable
    from pylons import tmpl_context as c

    def quick_menu(repo_name):
        return _render('quick_menu', repo_name)

    def repo_lnk(name, rtype, private, fork_of):
        return _render('repo_name', name, rtype, private, fork_of,
                       short_name=not admin, admin=False)

    def last_change(last_change):
        return _render("last_change", last_change)

    def rss_lnk(repo_name):
        return _render("rss", repo_name)

    def atom_lnk(repo_name):
        return _render("atom", repo_name)

    def last_rev(repo_name, cs_cache):
        return _render('revision', repo_name,
                       cs_cache.get('revision'), cs_cache.get('raw_id'),
                       cs_cache.get('author'), cs_cache.get('message'))

    def desc(desc):
        if c.visual.stylify_metatags:
            return h.urlify_text(h.desc_stylize(h.truncate(desc, 60)))
        else:
            return h.urlify_text(h.truncate(desc, 60))

    def repo_actions(repo_name):
        return _render('repo_actions', repo_name, super_user_actions)

    def owner_actions(user_id, username):
        return _render('user_name', user_id, username)

    repos_data = []

    for repo in repos_list:
        if perm_check:
            # check permission at this level
            if not HasRepoPermissionAny(
                    'repository.read', 'repository.write',
                    'repository.admin')(repo.repo_name,
                                        'get_repos_as_dict check'):
                continue
        cs_cache = repo.changeset_cache
        row = {
            "menu": quick_menu(repo.repo_name),
            "raw_name": repo.repo_name.lower(),
            "name": repo_lnk(repo.repo_name, repo.repo_type,
                             repo.private, repo.fork),
            "last_change": last_change(repo.last_db_change),
            "last_changeset": last_rev(repo.repo_name, cs_cache),
            "raw_tip": cs_cache.get('revision'),
            "desc": desc(repo.description),
            "owner": h.person(repo.user.username),
            "rss": rss_lnk(repo.repo_name),
            "atom": atom_lnk(repo.repo_name),
        }
        if admin:
            row.update({
                "action": repo_actions(repo.repo_name),
                "owner": owner_actions(repo.user.user_id,
                                       h.person(repo.user.username))
            })
        repos_data.append(row)

    return {
        "totalRecords": len(repos_list),
        "startIndex": 0,
        "sort": "name",
        "dir": "asc",
        "records": repos_data
    }

def get_commits_stats(repo_name, ts_min_y, ts_max_y):
    log = get_logger(get_commits_stats)
    DBS = get_session()
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
                            ts_max_y)
    lockkey_path = config['here']

    log.info('running task with lockkey %s' % lockkey)

    try:
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))

        # for js data compatibility cleans the key for person from '
        akc = lambda k: person(k).replace('"', "")

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name)
        if repo is None:
            return True

        repo = repo.scm_instance
        repo_size = repo.count()
        # return if repo have no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
        last_rev = None
        last_cs = None
        timegetter = itemgetter('time')

        dbrepo = DBS.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = DBS.query(Statistics)\
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without any work if we're not on first revision or
            # current state of parsing revision(from db marker) is the
            # last revision
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(json.loads(
                cur_stats.commit_activity_combined))
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s' % parse_limit)
        lmktime = mktime

        last_rev = last_rev + 1 if last_rev >= 0 else 0
        log.debug('Getting revisions from %s to %s' % (
            last_rev, last_rev + parse_limit)
        )
        for cs in repo[last_rev:last_rev + parse_limit]:
            log.debug('parsing %s' % cs)
            last_cs = cs  # remember last parsed changeset
            k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
                         cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    l = [timegetter(x) for x in
                         co_day_auth_aggr[akc(cs.author)]['data']]
                    time_pos = l.index(k)
                except ValueError:
                    time_pos = False

                if time_pos >= 0 and time_pos is not False:
                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)
                else:
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                        datadict = {"time": k,
                                    "commits": 1,
                                    "added": len(cs.added),
                                    "changed": len(cs.changed),
                                    "removed": len(cs.removed),
                                    }
                        co_day_auth_aggr[akc(cs.author)]['data']\
                            .append(datadict)
            else:
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label": akc(cs.author),
                        "data": [{"time": k,
                                  "commits": 1,
                                  "added": len(cs.added),
                                  "changed": len(cs.changed),
                                  "removed": len(cs.removed),
                                  }],
                        "schema": ["commits"],
                    }

            #gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = json.dumps(co_day_auth_aggr)
        stats.commit_activity_combined = json.dumps(overview_data)

        log.debug('last revison %s' % last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s' % leftovers)

        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = json.dumps(__get_codes_stats(repo_name))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            DBS.add(stats)
            DBS.commit()
        except:
            log.error(traceback.format_exc())
            DBS.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
        if len(repo.revisions) > 1 and CELERY_ON:
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
        return True
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey

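# --- Illustrative sketch (not part of the original code) ---
# A framework-free approximation of the aggregation get_commits_stats builds:
# commits are bucketed per author and per day (midnight timestamp key, as in the
# lmktime([...]) call above), mirroring the co_day_auth_aggr and
# commits_by_day_aggregate structures. The Commit namedtuple is a stand-in for a
# real vcs changeset object.
from collections import namedtuple
from datetime import datetime
from time import mktime

Commit = namedtuple('Commit', 'author date added changed removed')


def day_key(dt):
    # midnight timestamp used as the per-day bucket key
    return mktime((dt.year, dt.month, dt.day, 0, 0, 0, 0, 0, 0))


def aggregate(commits):
    by_author = {}  # author -> {'label': ..., 'data': [{'time', 'commits', ...}]}
    by_day = {}     # day timestamp -> total number of commits
    for cs in commits:
        k = day_key(cs.date)
        series = by_author.setdefault(
            cs.author, {'label': cs.author, 'data': [], 'schema': ['commits']})
        bucket = next((d for d in series['data'] if d['time'] == k), None)
        if bucket is None:
            bucket = {'time': k, 'commits': 0, 'added': 0,
                      'changed': 0, 'removed': 0}
            series['data'].append(bucket)
        bucket['commits'] += 1
        bucket['added'] += len(cs.added)
        bucket['changed'] += len(cs.changed)
        bucket['removed'] += len(cs.removed)
        by_day[k] = by_day.get(k, 0) + 1
    return by_author, sorted(by_day.items())


sample = [Commit('jane', datetime(2013, 1, 2, 10), ['a.py'], [], []),
          Commit('jane', datetime(2013, 1, 2, 18), [], ['a.py'], [])]
print(aggregate(sample))
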
def _get_notification_data(self, repo, comment, user, comment_text,
                           line_no=None, revision=None, pull_request=None,
                           status_change=None, closing_pr=False):
    """
    Get notification data

    :param comment_text:
    :param line:
    :returns: tuple (subj,body,recipients,notification_type,email_kwargs)
    """
    # make notification
    body = comment_text  # text of the comment
    line = ''
    if line_no:
        line = _('on line %s') % line_no

    #changeset
    if revision:
        notification_type = Notification.TYPE_CHANGESET_COMMENT
        cs = repo.scm_instance.get_changeset(revision)
        desc = "%s" % (cs.short_id)

        _url = h.url('changeset_home',
                     repo_name=repo.repo_name,
                     revision=revision,
                     anchor='comment-%s' % comment.comment_id,
                     qualified=True,
                     )
        subj = safe_unicode(
            h.link_to('Re changeset: %(desc)s %(line)s' % \
                          {'desc': desc, 'line': line},
                      _url)
        )
        email_subject = '%s commented on changeset %s' % \
            (user.username, h.short_id(revision))
        # get the current participants of this changeset
        recipients = ChangesetComment.get_users(revision=revision)
        # add changeset author if it's in rhodecode system
        cs_author = User.get_from_cs_author(cs.author)
        if not cs_author:
            #use repo owner if we cannot extract the author correctly
            cs_author = repo.user
        recipients += [cs_author]
        email_kwargs = {
            'status_change': status_change,
            'cs_comment_user': h.person(user.email),
            'cs_target_repo': h.url('summary_home',
                                    repo_name=repo.repo_name,
                                    qualified=True),
            'cs_comment_url': _url,
            'raw_id': revision,
            'message': cs.message
        }
    #pull request
    elif pull_request:
        notification_type = Notification.TYPE_PULL_REQUEST_COMMENT
        desc = comment.pull_request.title
        _url = h.url('pullrequest_show',
                     repo_name=pull_request.other_repo.repo_name,
                     pull_request_id=pull_request.pull_request_id,
                     anchor='comment-%s' % comment.comment_id,
                     qualified=True,
                     )
        subj = safe_unicode(
            h.link_to('Re pull request #%(pr_id)s: %(desc)s %(line)s' % \
                          {'desc': desc,
                           'pr_id': comment.pull_request.pull_request_id,
                           'line': line},
                      _url)
        )
        email_subject = '%s commented on pull request #%s' % \
            (user.username, comment.pull_request.pull_request_id)
        # get the current participants of this pull request
        recipients = ChangesetComment.get_users(
            pull_request_id=pull_request.pull_request_id)
        # add pull request author
        recipients += [pull_request.author]
        # add the reviewers to notification
        recipients += [x.user for x in pull_request.reviewers]
        #set some variables for email notification
        email_kwargs = {
            'pr_title': pull_request.title,
            'pr_id': pull_request.pull_request_id,
            'status_change': status_change,
            'closing_pr': closing_pr,
            'pr_comment_url': _url,
            'pr_comment_user': h.person(user.email),
            'pr_target_repo': h.url('summary_home',
                                    repo_name=pull_request.other_repo.repo_name,
                                    qualified=True)
        }

    return subj, body, recipients, notification_type, email_kwargs, email_subject

def _get_notification_data(self, repo, comment, user, comment_text,
                           line_no=None, revision=None, pull_request=None,
                           status_change=None, closing_pr=False):
    """
    Get notification data

    :param comment_text:
    :param line:
    :returns: tuple (subj,body,recipients,notification_type,email_kwargs)
    """
    # make notification
    body = comment_text  # text of the comment
    line = ''
    if line_no:
        line = _('on line %s') % line_no

    #changeset
    if revision:
        notification_type = Notification.TYPE_CHANGESET_COMMENT
        cs = repo.scm_instance.get_changeset(revision)
        desc = "%s" % (cs.short_id)

        _url = h.url(
            'changeset_home',
            repo_name=repo.repo_name,
            revision=revision,
            anchor='comment-%s' % comment.comment_id,
            qualified=True,
        )
        subj = safe_unicode(
            h.link_to('Re changeset: %(desc)s %(line)s' % \
                          {'desc': desc, 'line': line},
                      _url)
        )
        email_subject = 'User %s commented on changeset %s' % \
            (user.username, h.short_id(revision))
        # get the current participants of this changeset
        recipients = ChangesetComment.get_users(revision=revision)
        # add changeset author if it's in rhodecode system
        cs_author = User.get_from_cs_author(cs.author)
        if not cs_author:
            #use repo owner if we cannot extract the author correctly
            cs_author = repo.user
        recipients += [cs_author]
        email_kwargs = {
            'status_change': status_change,
            'cs_comment_user': h.person(user.email),
            'cs_target_repo': h.url('summary_home',
                                    repo_name=repo.repo_name,
                                    qualified=True),
            'cs_comment_url': _url,
            'raw_id': revision,
            'message': cs.message
        }
    #pull request
    elif pull_request:
        notification_type = Notification.TYPE_PULL_REQUEST_COMMENT
        desc = comment.pull_request.title
        _url = h.url(
            'pullrequest_show',
            repo_name=pull_request.other_repo.repo_name,
            pull_request_id=pull_request.pull_request_id,
            anchor='comment-%s' % comment.comment_id,
            qualified=True,
        )
        subj = safe_unicode(
            h.link_to('Re pull request #%(pr_id)s: %(desc)s %(line)s' % \
                          {'desc': desc,
                           'pr_id': comment.pull_request.pull_request_id,
                           'line': line},
                      _url)
        )
        email_subject = 'User %s commented on pull request #%s' % \
            (user.username, comment.pull_request.pull_request_id)
        # get the current participants of this pull request
        recipients = ChangesetComment.get_users(
            pull_request_id=pull_request.pull_request_id)
        # add pull request author
        recipients += [pull_request.author]
        # add the reviewers to notification
        recipients += [x.user for x in pull_request.reviewers]
        #set some variables for email notification
        email_kwargs = {
            'pr_id': pull_request.pull_request_id,
            'status_change': status_change,
            'closing_pr': closing_pr,
            'pr_comment_url': _url,
            'pr_comment_user': h.person(user.email),
            'pr_target_repo': h.url('summary_home',
                                    repo_name=pull_request.other_repo.repo_name,
                                    qualified=True)
        }

    return subj, body, recipients, notification_type, email_kwargs, email_subject

def show(self, id, format='html'):
    """GET /repos_groups/id: Show a specific item"""
    # url('repos_group', id=ID)

    c.group = RepoGroup.get_or_404(id)
    c.group_repos = c.group.repositories.all()

    #overwrite our cached list with current filter
    gr_filter = c.group_repos
    c.repo_cnt = 0

    groups = RepoGroup.query().order_by(RepoGroup.group_name)\
        .filter(RepoGroup.group_parent_id == id).all()
    c.groups = self.scm_model.get_repos_groups(groups)

    if c.visual.lightweight_dashboard is False:
        c.cached_repo_list = self.scm_model.get_repos(all_repos=gr_filter)
        c.repos_list = c.cached_repo_list
    ## lightweight version of dashboard
    else:
        c.repos_list = Repository.query()\
            .filter(Repository.group_id == id)\
            .order_by(func.lower(Repository.repo_name))\
            .all()

        repos_data = []
        total_records = len(c.repos_list)

        _tmpl_lookup = rhodecode.CONFIG['pylons.app_globals'].mako_lookup
        template = _tmpl_lookup.get_template('data_table/_dt_elements.html')

        quick_menu = lambda repo_name: (template.get_def("quick_menu")
                                        .render(repo_name, _=_, h=h, c=c))
        repo_lnk = lambda name, rtype, private, fork_of: (
            template.get_def("repo_name")
            .render(name, rtype, private, fork_of, short_name=False,
                    admin=False, _=_, h=h, c=c))
        last_change = lambda last_change: (template.get_def("last_change")
                                           .render(last_change, _=_, h=h, c=c))
        rss_lnk = lambda repo_name: (template.get_def("rss")
                                     .render(repo_name, _=_, h=h, c=c))
        atom_lnk = lambda repo_name: (template.get_def("atom")
                                      .render(repo_name, _=_, h=h, c=c))

        for repo in c.repos_list:
            repos_data.append({
                "menu": quick_menu(repo.repo_name),
                "raw_name": repo.repo_name.lower(),
                "name": repo_lnk(repo.repo_name, repo.repo_type,
                                 repo.private, repo.fork),
                "last_change": last_change(repo.last_db_change),
                "desc": repo.description,
                "owner": h.person(repo.user.username),
                "rss": rss_lnk(repo.repo_name),
                "atom": atom_lnk(repo.repo_name),
            })

        c.data = json.dumps({
            "totalRecords": total_records,
            "startIndex": 0,
            "sort": "name",
            "dir": "asc",
            "records": repos_data
        })

    return render('admin/repos_groups/repos_groups.html')

def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
    log = get_logger(get_commits_stats)
    DBS = get_session()
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
                            ts_max_y)
    lockkey_path = config['app_conf']['cache_dir']

    log.info('running task with lockkey %s' % lockkey)

    try:
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))

        # for js data compatibility cleans the key for person from '
        akc = lambda k: person(k).replace('"', "")

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name)
        if repo is None:
            return True

        repo = repo.scm_instance
        repo_size = repo.count()
        # return if repo have no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
        last_rev = None
        last_cs = None
        timegetter = itemgetter('time')

        dbrepo = DBS.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = DBS.query(Statistics)\
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without any work if we're not on first revision or
            # current state of parsing revision(from db marker) is the
            # last revision
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(
                json.loads(cur_stats.commit_activity_combined))
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s' % parse_limit)
        lmktime = mktime

        last_rev = last_rev + 1 if last_rev >= 0 else 0
        log.debug('Getting revisions from %s to %s' %
                  (last_rev, last_rev + parse_limit))
        for cs in repo[last_rev:last_rev + parse_limit]:
            log.debug('parsing %s' % cs)
            last_cs = cs  # remember last parsed changeset
            k = lmktime([
                cs.date.timetuple()[0], cs.date.timetuple()[1],
                cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0
            ])

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    l = [
                        timegetter(x)
                        for x in co_day_auth_aggr[akc(cs.author)]['data']
                    ]
                    time_pos = l.index(k)
                except ValueError:
                    time_pos = None

                if time_pos >= 0 and time_pos is not None:
                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)
                else:
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                        datadict = {
                            "time": k,
                            "commits": 1,
                            "added": len(cs.added),
                            "changed": len(cs.changed),
                            "removed": len(cs.removed),
                        }
                        co_day_auth_aggr[akc(cs.author)]['data']\
                            .append(datadict)
            else:
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label": akc(cs.author),
                        "data": [{
                            "time": k,
                            "commits": 1,
                            "added": len(cs.added),
                            "changed": len(cs.changed),
                            "removed": len(cs.removed),
                        }],
                        "schema": ["commits"],
                    }

            #gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = json.dumps(co_day_auth_aggr)
        stats.commit_activity_combined = json.dumps(overview_data)

        log.debug('last revison %s' % last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s' % leftovers)

        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = json.dumps(__get_codes_stats(repo_name))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            DBS.add(stats)
            DBS.commit()
        except:
            log.error(traceback.format_exc())
            DBS.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
        if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0:
            recurse_limit -= 1
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y,
                     recurse_limit)
        if recurse_limit <= 0:
            log.debug('Breaking recursive mode due to reach of recurse limit')
        return True
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey

def get_repos_as_dict(self, repos_list=None, admin=False, perm_check=True,
                      super_user_actions=False):
    _render = self._render_datatable
    from pylons import tmpl_context as c

    def quick_menu(repo_name):
        return _render('quick_menu', repo_name)

    def repo_lnk(name, rtype, private, fork_of):
        return _render('repo_name', name, rtype, private, fork_of,
                       short_name=not admin, admin=False)

    def last_change(last_change):
        return _render("last_change", last_change)

    def rss_lnk(repo_name):
        return _render("rss", repo_name)

    def atom_lnk(repo_name):
        return _render("atom", repo_name)

    def last_rev(repo_name, cs_cache):
        return _render('revision', repo_name,
                       cs_cache.get('revision'), cs_cache.get('raw_id'),
                       cs_cache.get('author'), cs_cache.get('message'))

    def desc(desc):
        if c.visual.stylify_metatags:
            return h.urlify_text(h.desc_stylize(h.truncate(desc, 60)))
        else:
            return h.urlify_text(h.truncate(desc, 60))

    def repo_actions(repo_name):
        return _render('repo_actions', repo_name, super_user_actions)

    def owner_actions(user_id, username):
        return _render('user_name', user_id, username)

    repos_data = []

    for repo in repos_list:
        if perm_check:
            # check permission at this level
            if not HasRepoPermissionAny(
                    'repository.read', 'repository.write',
                    'repository.admin')(repo.repo_name,
                                        'get_repos_as_dict check'):
                continue
        cs_cache = repo.changeset_cache
        row = {
            "menu": quick_menu(repo.repo_name),
            "raw_name": repo.repo_name.lower(),
            "name": repo_lnk(repo.repo_name, repo.repo_type,
                             repo.private, repo.fork),
            "last_change": last_change(repo.last_db_change),
            "last_changeset": last_rev(repo.repo_name, cs_cache),
            "raw_tip": cs_cache.get('revision'),
            "desc": desc(repo.description),
            "owner": h.person(repo.user.username),
            "rss": rss_lnk(repo.repo_name),
            "atom": atom_lnk(repo.repo_name),
        }
        if admin:
            row.update({
                "action": repo_actions(repo.repo_name),
                "owner": owner_actions(repo.user.user_id,
                                       h.person(repo.user.username))
            })
        repos_data.append(row)

    return {
        "totalRecords": len(repos_list),
        "startIndex": 0,
        "sort": "name",
        "dir": "asc",
        "records": repos_data
    }

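# --- Illustrative sketch (not part of the original code) ---
# get_repos_as_dict emits a row only when a permission check over the repo name
# passes. PermCheck below is a hypothetical stand-in for the HasRepoPermissionAny
# callable used above: it is constructed with the accepted permission levels and
# then called per repository.
class PermCheck(object):
    def __init__(self, *accepted):
        self.accepted = set(accepted)

    def __call__(self, repo_name, granted):
        # granted: mapping of repo_name -> permission of the current user
        return granted.get(repo_name) in self.accepted


granted = {'docs': 'repository.read', 'secrets': 'repository.none'}
check = PermCheck('repository.read', 'repository.write', 'repository.admin')

rows = [{'raw_name': name.lower(), 'name': name}
        for name in ('docs', 'secrets') if check(name, granted)]
print(rows)  # only 'docs' survives the permission gate
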
def create(self, text, repo, user, revision=None, pull_request=None,
           f_path=None, line_no=None, status_change=None):
    """
    Creates new comment for changeset or pull request.
    IF status_change is not none this comment is associated with a
    status change of changeset or changesets associated with pull request

    :param text:
    :param repo:
    :param user:
    :param revision:
    :param pull_request:
    :param f_path:
    :param line_no:
    :param status_change:
    """
    if not text:
        return

    repo = self._get_repo(repo)
    user = self._get_user(user)
    comment = ChangesetComment()
    comment.repo = repo
    comment.author = user
    comment.text = text
    comment.f_path = f_path
    comment.line_no = line_no

    if revision:
        cs = repo.scm_instance.get_changeset(revision)
        desc = "%s - %s" % (cs.short_id, h.shorter(cs.message, 256))
        author_email = cs.author_email
        comment.revision = revision
    elif pull_request:
        pull_request = self.__get_pull_request(pull_request)
        comment.pull_request = pull_request
        desc = pull_request.pull_request_id
    else:
        raise Exception('Please specify revision or pull_request_id')

    self.sa.add(comment)
    self.sa.flush()

    # make notification
    line = ''
    body = text

    #changeset
    if revision:
        if line_no:
            line = _('on line %s') % line_no
        subj = safe_unicode(
            h.link_to('Re commit: %(desc)s %(line)s' % \
                          {'desc': desc, 'line': line},
                      h.url('changeset_home', repo_name=repo.repo_name,
                            revision=revision,
                            anchor='comment-%s' % comment.comment_id,
                            qualified=True,
                            )
                      )
        )
        notification_type = Notification.TYPE_CHANGESET_COMMENT
        # get the current participants of this changeset
        recipients = ChangesetComment.get_users(revision=revision)
        # add changeset author if it's in rhodecode system
        recipients += [User.get_by_email(author_email)]
        email_kwargs = {
            'status_change': status_change,
        }
    #pull request
    elif pull_request:
        _url = h.url('pullrequest_show',
                     repo_name=pull_request.other_repo.repo_name,
                     pull_request_id=pull_request.pull_request_id,
                     anchor='comment-%s' % comment.comment_id,
                     qualified=True,
                     )
        subj = safe_unicode(
            h.link_to('Re pull request: %(desc)s %(line)s' % \
                          {'desc': desc, 'line': line},
                      _url)
        )
        notification_type = Notification.TYPE_PULL_REQUEST_COMMENT
        # get the current participants of this pull request
        recipients = ChangesetComment.get_users(
            pull_request_id=pull_request.pull_request_id)
        # add pull request author
        recipients += [pull_request.author]
        # add the reviewers to notification
        recipients += [x.user for x in pull_request.reviewers]
        #set some variables for email notification
        email_kwargs = {
            'pr_id': pull_request.pull_request_id,
            'status_change': status_change,
            'pr_comment_url': _url,
            'pr_comment_user': h.person(user.email),
            'pr_target_repo': h.url('summary_home',
                                    repo_name=pull_request.other_repo.repo_name,
                                    qualified=True)
        }

    # create notification objects, and emails
    NotificationModel().create(
        created_by=user, subject=subj, body=body,
        recipients=recipients, type_=notification_type,
        email_kwargs=email_kwargs
    )

    mention_recipients = set(self._extract_mentions(body))\
        .difference(recipients)
    if mention_recipients:
        email_kwargs.update({'pr_mention': True})
        subj = _('[Mention]') + ' ' + subj
        NotificationModel().create(
            created_by=user, subject=subj, body=body,
            recipients=mention_recipients,
            type_=notification_type,
            email_kwargs=email_kwargs
        )

    return comment

def create(self, created_by, org_repo, org_ref, other_repo, other_ref,
           revisions, reviewers, title, description=None):
    from rhodecode.model.changeset_status import ChangesetStatusModel

    created_by_user = self._get_user(created_by)
    org_repo = self._get_repo(org_repo)
    other_repo = self._get_repo(other_repo)

    new = PullRequest()
    new.org_repo = org_repo
    new.org_ref = org_ref
    new.other_repo = other_repo
    new.other_ref = other_ref
    new.revisions = revisions
    new.title = title
    new.description = description
    new.author = created_by_user
    Session().add(new)
    Session().flush()

    #members
    for member in set(reviewers):
        _usr = self._get_user(member)
        reviewer = PullRequestReviewers(_usr, new)
        Session().add(reviewer)

    #reset state to under-review
    ChangesetStatusModel().set_status(
        repo=org_repo,
        status=ChangesetStatus.STATUS_UNDER_REVIEW,
        user=created_by_user,
        pull_request=new
    )

    revision_data = [(x.raw_id, x.message)
                     for x in map(org_repo.get_changeset, revisions)]

    #notification to reviewers
    notif = NotificationModel()

    pr_url = h.url('pullrequest_show', repo_name=other_repo.repo_name,
                   pull_request_id=new.pull_request_id,
                   qualified=True,
                   )
    subject = safe_unicode(
        h.link_to(
            _('%(user)s wants you to review pull request #%(pr_id)s: %(pr_title)s') % \
                {'user': created_by_user.username,
                 'pr_title': new.title,
                 'pr_id': new.pull_request_id},
            pr_url
        )
    )
    body = description
    kwargs = {
        'pr_title': title,
        'pr_user_created': h.person(created_by_user.email),
        'pr_repo_url': h.url('summary_home',
                             repo_name=other_repo.repo_name,
                             qualified=True,),
        'pr_url': pr_url,
        'pr_revisions': revision_data
    }
    notif.create(created_by=created_by_user, subject=subject, body=body,
                 recipients=reviewers,
                 type_=Notification.TYPE_PULL_REQUEST,
                 email_kwargs=kwargs)
    return new

def create(self, created_by, org_repo, org_ref, other_repo, other_ref,
           revisions, reviewers, title, description=None):
    from rhodecode.model.changeset_status import ChangesetStatusModel

    created_by_user = self._get_user(created_by)
    org_repo = self._get_repo(org_repo)
    other_repo = self._get_repo(other_repo)

    new = PullRequest()
    new.org_repo = org_repo
    new.org_ref = org_ref
    new.other_repo = other_repo
    new.other_ref = other_ref
    new.revisions = revisions
    new.title = title
    new.description = description
    new.author = created_by_user
    Session().add(new)
    Session().flush()

    #members
    for member in set(reviewers):
        _usr = self._get_user(member)
        reviewer = PullRequestReviewers(_usr, new)
        Session().add(reviewer)

    #reset state to under-review
    ChangesetStatusModel().set_status(
        repo=org_repo,
        status=ChangesetStatus.STATUS_UNDER_REVIEW,
        user=created_by_user,
        pull_request=new
    )

    revision_data = [(x.raw_id, x.message)
                     for x in map(org_repo.get_changeset, revisions)]

    #notification to reviewers
    pr_url = h.url('pullrequest_show', repo_name=other_repo.repo_name,
                   pull_request_id=new.pull_request_id,
                   qualified=True,
                   )
    subject = safe_unicode(
        h.link_to(
            _('%(user)s wants you to review pull request #%(pr_id)s: %(pr_title)s') % \
                {'user': created_by_user.username,
                 'pr_title': new.title,
                 'pr_id': new.pull_request_id},
            pr_url
        )
    )
    body = description
    kwargs = {
        'pr_title': title,
        'pr_user_created': h.person(created_by_user.email),
        'pr_repo_url': h.url('summary_home',
                             repo_name=other_repo.repo_name,
                             qualified=True,),
        'pr_url': pr_url,
        'pr_revisions': revision_data
    }
    NotificationModel().create(created_by=created_by_user, subject=subject,
                               body=body, recipients=reviewers,
                               type_=Notification.TYPE_PULL_REQUEST,
                               email_kwargs=kwargs)
    return new

def index(self, repo_name, revision, f_path, annotate=False, rendered=False):
    commit_id = revision

    # redirect to given commit_id from form if given
    get_commit_id = request.GET.get('at_rev', None)
    if get_commit_id:
        self.__get_commit_or_redirect(get_commit_id, repo_name)

    c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
    c.branch = request.GET.get('branch', None)
    c.f_path = f_path
    c.annotate = annotate
    # default is false, but .rst/.md files later are autorendered, we can
    # overwrite autorendering by setting this GET flag
    c.renderer = rendered or not request.GET.get('no-render', False)

    # prev link
    try:
        prev_commit = c.commit.prev(c.branch)
        c.prev_commit = prev_commit
        c.url_prev = url('files_home', repo_name=c.repo_name,
                         revision=prev_commit.raw_id, f_path=f_path)
        if c.branch:
            c.url_prev += '?branch=%s' % c.branch
    except (CommitDoesNotExistError, VCSError):
        c.url_prev = '#'
        c.prev_commit = EmptyCommit()

    # next link
    try:
        next_commit = c.commit.next(c.branch)
        c.next_commit = next_commit
        c.url_next = url('files_home', repo_name=c.repo_name,
                         revision=next_commit.raw_id, f_path=f_path)
        if c.branch:
            c.url_next += '?branch=%s' % c.branch
    except (CommitDoesNotExistError, VCSError):
        c.url_next = '#'
        c.next_commit = EmptyCommit()

    # files or dirs
    try:
        c.file = c.commit.get_node(f_path)
        c.file_author = True
        c.file_tree = ''
        if c.file.is_file():
            c.renderer = (
                c.renderer and h.renderer_from_filename(c.file.path))
            c.file_last_commit = c.file.last_commit

            c.on_branch_head = self._is_valid_head(commit_id,
                                                   c.rhodecode_repo)
            c.branch_or_raw_id = c.commit.branch or c.commit.raw_id

            author = c.file_last_commit.author
            c.authors = [(h.email(author),
                          h.person(author, 'username_or_name_or_email'))]
        else:
            c.authors = []
            c.file_tree = self._get_tree_at_commit(repo_name,
                                                   c.commit.raw_id, f_path)
    except RepositoryError as e:
        h.flash(safe_str(e), category='error')
        raise HTTPNotFound()

    if request.environ.get('HTTP_X_PJAX'):
        return render('files/files_pjax.html')

    return render('files/files.html')