def test_delete_closed_pr(self):
    """A closed pull request must refuse deletion and stay closed."""
    self.log_user()
    pr_id = self._create_pr()

    comment_url = base.url(controller='pullrequests', action='comment',
                           repo_name=base.HG_REPO, pull_request_id=pr_id)
    xhr_environ = {'HTTP_X_PARTIAL_XHR': '1'}
    text = 'general comment on pullrequest'

    # first close
    response = self.app.post(
        comment_url,
        params={
            'text': text,
            'save_close': 'close',
            '_session_csrf_secret_token': self.session_csrf_secret_token(),
        },
        extra_environ=xhr_environ)
    assert response.status == '200 OK'

    # attempt delete, should fail
    self.app.post(
        comment_url,
        params={
            'text': text,
            'save_delete': 'delete',
            '_session_csrf_secret_token': self.session_csrf_secret_token(),
        },
        extra_environ=xhr_environ,
        status=403)

    # verify that PR still exists, in closed state
    assert PullRequest.get(pr_id).status == PullRequest.STATUS_CLOSED
def test_delete_pr(self):
    """Deleting an open pull request removes it; its page then 404s."""
    self.log_user()
    pr_id = self._create_pr()
    text = 'general comment on pullrequest'

    response = self.app.post(
        base.url(controller='pullrequests', action='comment',
                 repo_name=base.HG_REPO, pull_request_id=pr_id),
        params={
            'text': text,
            'save_delete': 'delete',
            '_session_csrf_secret_token': self.session_csrf_secret_token(),
        },
        extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
    # Test response...
    assert response.status == '200 OK'

    # the show page must no longer resolve
    self.app.get(
        base.url(controller='pullrequests', action='show',
                 repo_name=base.HG_REPO, pull_request_id=pr_id, extra=''),
        status=404)

    # test DB
    assert PullRequest.get(pr_id) is None
def test_close_pr(self):
    """Closing a pull request marks it closed in UI and database."""
    self.log_user()
    pr_id = self._create_pr()
    text = 'general comment on pullrequest'

    response = self.app.post(
        base.url(controller='pullrequests', action='comment',
                 repo_name=base.HG_REPO, pull_request_id=pr_id),
        params={
            'text': text,
            'save_close': 'close',
            '_session_csrf_secret_token': self.session_csrf_secret_token(),
        },
        extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
    # Test response...
    assert response.status == '200 OK'

    # the show page must advertise the closed state and show the comment
    response = self.app.get(
        base.url(controller='pullrequests', action='show',
                 repo_name=base.HG_REPO, pull_request_id=pr_id, extra=''))
    response.mustcontain('''title (Closed)''')
    response.mustcontain(text)

    # test DB
    assert PullRequest.get(pr_id).status == PullRequest.STATUS_CLOSED
def test_iteration_refs(self):
    """Verify org/other refs across successive pull request iterations.

    Repo graph excerpt:
      o fb95b340e0d0 webvcs
     /:
    o : 41d2568309a0 default
    : :
    : o 5ec21f21aafe webvcs
    : :
    : o 9e6119747791 webvcs
    : :
    o : 3d1091ee5a53 default
    :/
    o 948da46b29c1 default
    """
    self.log_user()

    # Compile once; the pattern must be a raw string so '\d' is not an
    # invalid escape sequence (DeprecationWarning on modern Python).
    pr_id_re = re.compile(r'/pull-request/(\d+)/')

    # create initial PR
    response = self.app.post(
        url(controller='pullrequests', action='create', repo_name=HG_REPO),
        {
            'org_repo': HG_REPO,
            'org_ref': 'rev:9e6119747791:9e6119747791ff886a5abe1193a730b6bf874e1c',
            'other_repo': HG_REPO,
            'other_ref': 'branch:default:3d1091ee5a533b1f4577ec7d8a226bb315fb1336',
            'pullrequest_title': 'title',
            'pullrequest_desc': 'description',
            '_authentication_token': self.authentication_token(),
        },
        status=302)
    pr1_id = int(pr_id_re.search(response.location).group(1))
    pr1 = PullRequest.get(pr1_id)
    # org_ref is normalized to the branch containing the rev; other_ref
    # is anchored at the ancestor revision.
    assert pr1.org_ref == 'branch:webvcs:9e6119747791ff886a5abe1193a730b6bf874e1c'
    assert pr1.other_ref == 'branch:default:948da46b29c125838a717f6a8496eb409717078d'

    Session().rollback()  # invalidate loaded PR objects before issuing next request.

    # create PR 2 (new iteration with same ancestor)
    response = self.app.post(
        url(controller='pullrequests', action='post',
            repo_name=HG_REPO, pull_request_id=pr1_id),
        {
            'updaterev': '5ec21f21aafe95220f1fc4843a4a57c378498b71',
            'pullrequest_title': 'title',
            'pullrequest_desc': 'description',
            'owner': TEST_USER_REGULAR_LOGIN,
            '_authentication_token': self.authentication_token(),
        },
        status=302)
    pr2_id = int(pr_id_re.search(response.location).group(1))
    pr1 = PullRequest.get(pr1_id)
    pr2 = PullRequest.get(pr2_id)
    assert pr2_id != pr1_id
    # the superseded iteration must have been closed automatically
    assert pr1.status == PullRequest.STATUS_CLOSED
    assert pr2.org_ref == 'branch:webvcs:5ec21f21aafe95220f1fc4843a4a57c378498b71'
    assert pr2.other_ref == pr1.other_ref

    Session().rollback()  # invalidate loaded PR objects before issuing next request.

    # create PR 3 (new iteration with new ancestor)
    response = self.app.post(
        url(controller='pullrequests', action='post',
            repo_name=HG_REPO, pull_request_id=pr2_id),
        {
            'updaterev': 'fb95b340e0d03fa51f33c56c991c08077c99303e',
            'pullrequest_title': 'title',
            'pullrequest_desc': 'description',
            'owner': TEST_USER_REGULAR_LOGIN,
            '_authentication_token': self.authentication_token(),
        },
        status=302)
    pr3_id = int(pr_id_re.search(response.location).group(1))
    pr2 = PullRequest.get(pr2_id)
    pr3 = PullRequest.get(pr3_id)
    assert pr3_id != pr2_id
    assert pr2.status == PullRequest.STATUS_CLOSED
    assert pr3.org_ref == 'branch:webvcs:fb95b340e0d03fa51f33c56c991c08077c99303e'
    assert pr3.other_ref == 'branch:default:41d2568309a05f422cffb8008e599d385f8af439'
def export(self, repo_name, pull_request_id, fname, **kwargs):
    """Export all comments of a pull request as an xlsx workbook.

    Builds a 'comments' sheet with one row per top-level comment (inline
    comments grouped per file, then general comments), threading replies
    into the 'Opinion' column, and returns a chunked download generator.

    :param repo_name: repository the PR belongs to (must match the PR's
        other_repo, otherwise an error message string is returned)
    :param pull_request_id: id of the pull request to export
    :param fname: requested file name; only its extension is used
    """
    ext = fname.split('.')[1]
    export_name = '{repo}-{pr_id}.{ext}'.format(
        repo=safe_str(repo_name.replace('/', '_')),
        pr_id=safe_str(pull_request_id),
        ext=safe_str(ext))

    # Validate the PR before allocating any resources, so the error path
    # below does not leak a temp file descriptor (the original code called
    # mkstemp() first and never closed fd / removed the file on error).
    try:
        pr = PullRequest.get(pull_request_id)
        if repo_name != pr.other_repo.repo_name:
            raise RepositoryError
    except Exception as e:
        log.error(e)
        return _('Pull request #{id} not found').format(id=pull_request_id)

    fd, export_path = mkstemp()
    log.debug(
        'Creating new temp export in {path}'.format(path=export_path))

    def _group_comments(comments):
        # Split comments into top-level ones (keyed by comment id) and
        # replies (keyed by the id they reply to, detected via the
        # '`replyto comment-<id>' marker at the start of the text).
        top_comments = {}
        reply_comments = {}
        for co in comments:
            m = re.match(r'`replyto comment-(?P<replyto>[\d]+)', co.text)
            if m:
                replyto = int(m.group('replyto'))
                reply_comments.setdefault(replyto, []).append(co)
            else:
                top_comments[co.comment_id] = co
        return top_comments, reply_comments

    def _make_threaded_message(reply_comments, comment_id, stops=0):
        # Recursively render the reply thread below comment_id, indenting
        # two spaces per nesting level and stripping the replyto marker.
        message = '\n' if stops > 0 else ''
        for reply in reply_comments.get(comment_id, []):
            text = re.sub(
                r'`replyto comment-(?:[\d]+) <#comment-(?:[\d]+)>`_ :',
                '', reply.text)
            indent = ' ' * 2 * stops
            message += '{indent}commented by {author}:\n'.format(
                indent=indent, author=reply.author.username)
            message += '\n'.join(indent + line
                                 for line in text.splitlines()
                                 if len(line) > 0)
            message += '\n{indent}----'.format(indent=indent)
            message += _make_threaded_message(reply_comments,
                                              reply.comment_id, stops + 1)
        return message

    cc_model = ChangesetCommentsModel()
    inline_comments = cc_model.get_inline_comments(
        pr.org_repo_id, pull_request=pull_request_id)
    file_comments = {}
    for f_path, lines in inline_comments:
        file_comments[f_path] = lines
    sorted_file_comments_by_name = sorted(file_comments.items(),
                                          key=lambda x: x[0],
                                          reverse=False)
    general_comments = cc_model.get_comments(pr.org_repo_id,
                                             pull_request=pull_request_id)

    wb = Workbook()
    ws = wb.create_sheet(_('comments'), 0)
    ws['A1'].value = _('File path')
    ws.column_dimensions['A'].width = 3.0
    ws['B1'].value = _('Comment ID')
    ws['C1'].value = _('Line no (old)')
    ws['D1'].value = _('Line no (new)')
    ws['E1'].value = _('Author')
    ws['F1'].value = _('Status')
    ws['G1'].value = _('Comment')
    ws.column_dimensions['G'].width = 60.0
    ws['H1'].value = _('Opinion')
    ws.column_dimensions['H'].width = 60.0
    ws['I1'].value = _('Retouch')
    ws['J1'].value = _('Priority')
    ws['K1'].value = _('Deadline')
    align_rot_90 = Alignment(text_rotation=90)
    align_wrap = Alignment(wrap_text=True, vertical='top')

    def _write_comment_cells(co, row, thread_text):
        # Fill one comment row: id (hyperlinked), author, optional status
        # change, comment text and its reply thread ('@' masked to avoid
        # accidental mail-address recognition).
        link = pr.url(canonical=True,
                      anchor='comment-{id}'.format(id=co.comment_id))
        ws['B{row}'.format(row=row)].value = co.comment_id
        ws['B{row}'.format(row=row)].hyperlink = link
        ws['E{row}'.format(row=row)].value = co.author.username
        if co.status_change:
            ws['F{row}'.format(row=row)].value = str(
                h.changeset_status_lbl(co.status_change[0].status))
        ws['G{row}'.format(row=row)].value = co.text.replace('@', '(at)')
        ws['G{row}'.format(row=row)].alignment = align_wrap
        ws['H{row}'.format(row=row)].value = thread_text.replace('@', '(at)')
        ws['H{row}'.format(row=row)].alignment = align_wrap

    rows = 2
    for f_path, lines in sorted_file_comments_by_name:
        # Line keys look like 'o<n>' (old side) / 'n<n>' (new side);
        # sort numerically on the part after the marker character.
        # items() + a plain lambda replaces the Py2-only iteritems() and
        # tuple-parameter lambda, keeping Py2 behavior while remaining
        # valid Python 3 syntax.
        sorted_inline_comments_by_lineno = sorted(
            lines.items(), key=lambda item: int(item[0][1:]),
            reverse=False)
        base_rows = rows
        for line_no, comments in sorted_inline_comments_by_lineno:
            top_comments, reply_comments = _group_comments(comments)
            for comment_id, co in top_comments.items():
                if co.line_no.startswith('o'):
                    ws['C{row}'.format(row=rows)].value = co.line_no[1:]
                else:
                    ws['D{row}'.format(row=rows)].value = co.line_no[1:]
                _write_comment_cells(
                    co, rows,
                    _make_threaded_message(reply_comments, comment_id))
                rows += 1
        # one merged, rotated cell spanning all rows of this file
        ws.merge_cells('A{start}:A{end}'.format(start=base_rows,
                                                end=rows - 1))
        for i in range(rows - base_rows):
            ws['A{row}'.format(row=base_rows + i)].value = f_path
        ws['A{start}'.format(start=base_rows)].alignment = align_rot_90

    # general (non-inline) comments get a 'General' group label
    ws['A{row}'.format(row=rows)].value = 'General'
    base_rows = rows
    top_comments, reply_comments = _group_comments(general_comments)
    for comment_id, co in top_comments.items():
        _write_comment_cells(
            co, rows, _make_threaded_message(reply_comments, comment_id))
        rows += 1
    ws.merge_cells('A{start}:A{end}'.format(start=base_rows, end=rows - 1))
    for i in range(rows - base_rows):
        ws['A{row}'.format(row=base_rows + i)].value = 'General'
    ws['A{start}'.format(start=base_rows)].alignment = align_rot_90

    with os.fdopen(fd, 'wb') as s:
        s.write(save_virtual_workbook(wb))

    def get_chunked_export(export_path):
        # Stream the workbook in 16 kB chunks, then remove the temp file.
        stream = open(export_path, 'rb')
        while True:
            data = stream.read(16 * 1024)
            if not data:
                break
            yield data
        stream.close()
        log.debug('Destroying temp export %s', export_path)
        os.remove(export_path)

    response.content_disposition = str(
        'attachment; filename=%s' % (export_name))
    response.content_type = \
        'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
    return get_chunked_export(export_path)