def setup_method(self, method):

    self.get_file_contents_mock = patch(
        'baldrick.github.github_api.PullRequestHandler.get_file_contents')
    self.get_base_branch_mock = patch(
        'baldrick.github.github_api.PullRequestHandler.base_branch')
    a = self.get_base_branch_mock.start()
    a.return_value = "master"

    self.milestone_mock = patch(
        'baldrick.github.github_api.PullRequestHandler.milestone',
        new_callable=PropertyMock)

    self.repo_handler = RepoHandler("nota/repo", "1234")
    self.pr_handler = PullRequestHandler("nota/repo", "1234")

    self.milestone = self.milestone_mock.start()
    self.milestone.return_value = None

    self.get_file_contents = self.get_file_contents_mock.start()

    FILE_CACHE.clear()
def setup_method(self, method):

    self.get_file_contents_mock = patch(
        'baldrick.github.github_api.PullRequestHandler.get_file_contents')
    self.get_base_branch_mock = patch(
        'baldrick.github.github_api.PullRequestHandler.base_branch')
    a = self.get_base_branch_mock.start()
    a.return_value = "master"

    self.modified_files_mock = patch(
        'baldrick.github.github_api.PullRequestHandler.get_modified_files')

    self.repo_handler = RepoHandler("nota/repo", "1234")
    self.pr_handler = PullRequestHandler("nota/repo", "1234")

    self.get_file_contents = self.get_file_contents_mock.start()
    self.modified_files = self.modified_files_mock.start()

    FILE_CACHE.clear()
def process_pull_requests(repository, installation, warn_seconds=None,
                          close_seconds=None):

    now = time.time()

    # Find app name
    bot_name = get_app_name()

    # Get open pull requests
    repo = RepoHandler(repository, 'master', installation)
    pull_requests = repo.open_pull_requests()

    # User config
    enable_autoclose = repo.get_config_value('autoclose_stale_pull_request', True)

    for n in pull_requests:

        print(f'Checking {n}')

        pr = PullRequestHandler(repository, n, installation)
        if 'keep-open' in pr.labels:
            print('-> PROTECTED by label, skipping')
            continue

        commit_time = pr.last_commit_date
        time_since_last_commit = now - commit_time

        # Note: if warning time is before commit time, it's as if the warning
        # didn't exist since it's no longer relevant.
        warning_time = pr.last_comment_date(f'{bot_name}[bot]',
                                            filter_keep=is_close_warning)
        if warning_time is None or warning_time < commit_time:
            time_since_last_warning = -1.
        else:
            # We use max() here to make sure that the value is positive
            time_since_last_warning = max(0, now - warning_time)

        # We only close pull requests if there has been a warning before, and
        # the time since the warning exceeds the threshold specified by
        # stale_pull_requests_close_seconds.

        if time_since_last_warning > close_seconds:
            comment_ids = pr.find_comments(f'{bot_name}[bot]',
                                           filter_keep=is_close_epilogue)
            if not enable_autoclose:
                print(f'-> Skipping pull request {n} (auto-close disabled)')
            elif len(comment_ids) == 0:
                print(f'-> CLOSING pull request {n}')
                pr.set_labels(['closed-by-bot'])
                pr.submit_comment(PULL_REQUESTS_CLOSE_EPILOGUE)
                pr.close()
            else:
                print(f'-> Skipping pull request {n} (already closed)')
        elif time_since_last_commit > warn_seconds:
            # A negative time_since_last_warning means no warning since last commit.
            if time_since_last_warning < 0.:
                print(f'-> WARNING pull request {n}')
                pr.submit_comment(
                    PULL_REQUESTS_CLOSE_WARNING.format(
                        pasttime=naturaldelta(time_since_last_commit),
                        futuretime=naturaldelta(close_seconds)))
            else:
                print(f'-> Skipping pull request {n} (already warned)')
        else:
            print(f'-> OK pull request {n}')

    print('Finished checking for stale pull requests')
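# Hedged usage sketch (not from the original source): how the stale-PR sweep
# above might be invoked, e.g. from a scheduled job. The repository name,
# installation id, and thresholds below are illustrative values only.
if __name__ == '__main__':
    process_pull_requests('example-org/example-repo',    # hypothetical repository
                          12345,                         # hypothetical installation id
                          warn_seconds=14 * 24 * 3600,   # warn after ~2 weeks without commits
                          close_seconds=14 * 24 * 3600)  # close ~2 weeks after the warning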
def process_pull_request(repository, number, installation, is_new=False):

    # TODO: cache handlers and invalidate the internal cache of the handlers on
    # certain events.
    pr_handler = PullRequestHandler(repository, number, installation)

    pr_config = pr_handler.get_config_value("pull_requests", {})
    if not pr_config.get("enabled", False):
        return "Skipping PR checks, disabled in config."

    # Disable if the config is not present
    if pr_config is None:
        return

    # Don't comment on closed PR
    if pr_handler.is_closed:
        return "Pull request already closed, no need to check"

    repo_handler = RepoHandler(pr_handler.head_repo_name,
                               pr_handler.head_branch, installation)

    # First check whether there are labels that indicate the checks should be
    # skipped

    skip_labels = pr_config.get("skip_labels", [])
    skip_fails = pr_config.get("skip_fails", True)

    for label in pr_handler.labels:
        if label in skip_labels:
            if skip_fails:
                pr_handler.set_status(
                    'failure',
                    "Skipping checks due to {0} label".format(label),
                    current_app.bot_username)
            return

    results = {}
    for function in PULL_REQUEST_CHECKS:
        result = function(pr_handler, repo_handler)
        # Ignore skipped checks
        if result is not None:
            results.update(result)

    # Special message for a special day
    not_boring = pr_handler.get_config_value('not_boring', cfg_default=True)
    if not_boring:  # pragma: no cover
        special_msg = ''
        if is_new:  # Always be snarky for new PR
            special_msg = insert_special_message('')
        else:
            import random
            tensided_dice_roll = random.randrange(10)
            if tensided_dice_roll == 9:  # 1 out of 10 for subsequent remarks
                special_msg = insert_special_message('')
        if special_msg:
            pr_handler.submit_comment(special_msg)

    # Post each failure as a status

    existing_statuses = pr_handler.list_statuses()

    for context, details in sorted(results.items()):

        full_context = current_app.bot_username + ':' + context

        # NOTE: we could in principle check if the status has been posted
        # before, and if so not post it again, but we had this in the past
        # and there were some strange caching issues where GitHub would
        # return old status messages, so we avoid doing that.

        pr_handler.set_status(details['state'],
                              details['description'],
                              full_context,
                              target_url=details.get('target_url'))

    # For statuses that have been skipped this time but existed before, set
    # status to pass and set message to say skipped

    for full_context in existing_statuses:

        if full_context.startswith(current_app.bot_username + ':'):
            context = full_context[len(current_app.bot_username) + 1:]
            if context not in results:
                pr_handler.set_status('success',
                                      'This check has been skipped',
                                      current_app.bot_username + ':' + context)

        # Also set the general 'single' status check as a skipped check if it
        # is present

        if full_context == current_app.bot_username:
            pr_handler.set_status('success',
                                  'This check has been skipped',
                                  current_app.bot_username)

    return 'Finished pull requests checks'
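# Hedged sketch (not from the original source) of the contract the status-based
# handler above expects from each entry in PULL_REQUEST_CHECKS: a callable taking
# (pr_handler, repo_handler) and returning either None (to skip) or a mapping of
# status context -> details with the 'state', 'description', and optional
# 'target_url' keys read above. The check name and messages are made up.
def example_pull_request_check(pr_handler, repo_handler):
    return {
        'example-check': {
            'state': 'success',                        # 'success', 'failure' or 'error'
            'description': 'All example checks pass',
            'target_url': None,                        # optional link attached to the status
        }
    }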
def process_pull_request(repository, number, installation, action,
                         is_new=False):

    # TODO: cache handlers and invalidate the internal cache of the handlers on
    # certain events.
    pr_handler = PullRequestHandler(repository, number, installation)

    pr_config = pr_handler.get_config_value("pull_requests", {})
    if not pr_config.get("enabled", False):
        msg = "Skipping PR checks, disabled in config."
        logger.debug(msg)
        return msg

    # Don't comment on closed PR
    if pr_handler.is_closed:
        return "Pull request already closed, no need to check"

    repo_handler = RepoHandler(pr_handler.head_repo_name,
                               pr_handler.head_branch, installation)

    # First check whether there are labels that indicate the checks should be
    # skipped

    skip_labels = pr_config.get("skip_labels", [])
    skip_fails = pr_config.get("skip_fails", True)

    for label in pr_handler.labels:
        if label in skip_labels:
            if skip_fails:
                pr_handler.set_check(
                    current_app.bot_username,
                    title="Skipping checks due to {0} label".format(label),
                    name=current_app.bot_username,
                    status='completed',
                    conclusion='failure')
            return

    results = {}
    for function, actions in PULL_REQUEST_CHECKS.items():
        if actions is None or action in actions:
            result = function(pr_handler, repo_handler)
            # Ignore skipped checks
            if result is not None:
                # Map old plugin keys to new checks names.
                # It's possible that the hook returns {}
                for context, check in result.items():
                    if check is not None:
                        title = check.pop('description', None)
                        if title:
                            logger.warning(
                                f"'description' is deprecated as a key in the return value from {function},"
                                " it will be interpreted as 'title'")
                            check['title'] = title
                        check['title'] = check.pop('title', title)

                        conclusion = check.pop('state', None)
                        if conclusion:
                            logger.warning(
                                f"'state' is deprecated as a key in the return value from {function},"
                                " it will be interpreted as 'conclusion'.")
                            check['conclusion'] = conclusion
                        check['conclusion'] = check.pop('conclusion', conclusion)

                        result[context] = check

                results.update(result)

    # Get existing checks from our app, for the 'head' commit
    existing_checks = pr_handler.list_checks(only_ours=True)

    # For each existing check, see if it needs updating or skipping
    new_results = copy.copy(results)
    for external_id, check in existing_checks.items():
        if external_id in results.keys():
            details = new_results.pop(external_id)
            # Remove skip key.
            details.pop("skip_if_missing", False)
            # Update the previous check with the new check (this includes the
            # check_id to update)
            check.update(details)
            # Send the check to be updated
            pr_handler.set_check(**check)
        else:
            # If check is in existing_checks but not results mark it as skipped.
            check.update({
                'title': 'This check has been skipped.',
                'status': 'completed',
                'conclusion': 'neutral'
            })
            pr_handler.set_check(**check)

    # Any keys left in results are new checks we haven't sent on this commit yet.
    for external_id, details in sorted(new_results.items()):
        skip = details.pop("skip_if_missing", False)
        logger.trace(f"{details} skip is {skip}")
        if not skip:
            pr_handler.set_check(external_id, status="completed", **details)

    # Also set the general 'single' status check as a skipped check if it
    # is present
    if current_app.bot_username in new_results.keys():
        check = new_results[current_app.bot_username]
        check.update({
            'title': 'This check has been skipped.',
            'commit_hash': 'head',
            'status': 'completed',
            'conclusion': 'neutral'
        })
        pr_handler.set_check(**check)

    # Special message for a special day
    not_boring = pr_handler.get_config_value('not_boring', cfg_default=True)
    if not_boring:  # pragma: no cover
        special_msg = ''
        if is_new:  # Always be snarky for new PR
            special_msg = insert_special_message('')
        else:
            import random
            tensided_dice_roll = random.randrange(10)
            if tensided_dice_roll == 9:  # 1 out of 10 for subsequent remarks
                special_msg = insert_special_message('')
        if special_msg:
            pr_handler.submit_comment(special_msg)

    return 'Finished pull requests checks'
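# Hedged illustration (not from the original source) of the key remapping the
# loop above performs on plugin results: legacy 'description'/'state' keys are
# translated into the 'title'/'conclusion' keys passed on to set_check. The
# check context and messages are made up.
legacy_result = {'towncrier': {'description': 'Changelog entry found',
                               'state': 'success'}}
# After remapping, the equivalent payload handed to set_check would be:
remapped_result = {'towncrier': {'title': 'Changelog entry found',
                                 'conclusion': 'success'}}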
def process_pull_request(repository, number, installation):

    # TODO: cache handlers and invalidate the internal cache of the handlers on
    # certain events.
    pr_handler = PullRequestHandler(repository, number, installation)

    pr_config = pr_handler.get_config_value("pull_requests", {})
    post_comment = pr_config.get("post_pr_comment", False)
    pull_request_substring = pr_config.get('pull_request_substring', '')

    # Disable if the config is not present
    if pr_config is None:
        return

    # Don't comment on closed PR
    if pr_handler.is_closed:
        return "Pull request already closed, no need to check"

    repo_handler = RepoHandler(pr_handler.head_repo_name,
                               pr_handler.head_branch, installation)

    def is_previous_comment(message):
        if len(pull_request_substring) > 0:
            return pull_request_substring in message
        else:
            return True

    # Find previous comments by this app
    comment_ids = pr_handler.find_comments(f'{current_app.bot_username}[bot]',
                                           filter_keep=is_previous_comment)

    if len(comment_ids) == 0:
        comment_id = None
    else:
        comment_id = comment_ids[-1]

    # First check whether there are labels that indicate the checks should be
    # skipped

    skip_labels = pr_config.get("skip_labels", [])
    skip_fails = pr_config.get("skip_fails", True)

    for label in pr_handler.labels:
        if label in skip_labels:
            skip_message = pr_config.get(
                "skip_message", "Pull request checks have "
                "been skipped as this pull request has been "
                f"labelled as **{label}**")
            skip_message = skip_message.format(pr_handler=pr_handler,
                                               repo_handler=repo_handler)
            pr_handler.submit_comment(skip_message, comment_id=comment_id)
            if skip_fails:
                pr_handler.set_status(
                    'failure',
                    "Skipping checks due to {0} label".format(label),
                    current_app.bot_username)
            return

    results = {}
    for function in PULL_REQUEST_CHECKS:
        result = function(pr_handler, repo_handler)
        results.update(result)

    failures = [details['description'] for details in results.values()
                if details['state'] in ('error', 'failure')]

    if post_comment:

        # Post all failures in a comment, and have a single status check

        if failures:

            pull_request_prologue = pr_config.get('fail_prologue', '')
            pull_request_epilogue = pr_config.get('fail_epilogue', '')
            fail_status = pr_config.get('fail_status', 'Failed some checks')

            message = pull_request_prologue.format(pr_handler=pr_handler,
                                                   repo_handler=repo_handler)
            for failure in failures:
                message += f'* {failure}\n'
            message += pull_request_epilogue.format(pr_handler=pr_handler,
                                                    repo_handler=repo_handler)
            comment_url = pr_handler.submit_comment(message,
                                                    comment_id=comment_id,
                                                    return_url=True)

            pr_handler.set_status('failure', fail_status,
                                  current_app.bot_username,
                                  target_url=comment_url)

        else:

            pass_status = pr_config.get('pass_status', 'Passed all checks')
            all_passed_message = pr_config.get('all_passed_message', '')
            all_passed_message = all_passed_message.format(
                pr_handler=pr_handler, repo_handler=repo_handler)

            if all_passed_message:
                pr_handler.submit_comment(all_passed_message,
                                          comment_id=comment_id)

            pr_handler.set_status('success', pass_status,
                                  current_app.bot_username)

    else:

        # Post each failure as a status

        existing_statuses = pr_handler.list_statuses()

        for context, details in sorted(results.items()):

            full_context = current_app.bot_username + ':' + context

            # Don't post again if status hasn't changed
            if full_context in existing_statuses:
                existing_details = existing_statuses[full_context]
                if (details['state'] == existing_details['state'] and
                        details['description'] == existing_details['description'] and
                        details.get('target_url') == existing_details.get('target_url')):
                    continue

            pr_handler.set_status(details['state'],
                                  details['description'],
                                  full_context,
                                  target_url=details.get('target_url'))

        # For statuses that have been skipped this time but existed before, set
        # status to pass and set message to say skipped
        for full_context in existing_statuses:
            if full_context.startswith(current_app.bot_username + ':'):
                context = full_context[len(current_app.bot_username) + 1:]
                if context not in results:
                    pr_handler.set_status('success',
                                          'This check has been skipped',
                                          current_app.bot_username + ':' + context)

    return 'Finished pull requests checks'
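# Hedged sketch (not from the original source) of the "pull_requests"
# configuration dict the comment-posting handler above reads via
# pr_handler.get_config_value("pull_requests", {}). The keys mirror the .get()
# calls in that function; the values shown are illustrative only.
example_pr_config = {
    'post_pr_comment': True,
    'pull_request_substring': 'Thanks for your pull request',
    'skip_labels': ['skip-checks'],
    'skip_fails': True,
    'skip_message': 'Pull request checks have been skipped for this pull request',
    'fail_prologue': 'The following checks failed:\n\n',
    'fail_epilogue': '',
    'fail_status': 'Failed some checks',
    'pass_status': 'Passed all checks',
    'all_passed_message': '',
}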
class TestPullRequestHandler:

    def setup_class(self):
        self.pr = PullRequestHandler('fakerepo/doesnotexist', 1234)

    def test_urls(self):
        assert self.pr._url_pull_request == 'https://api.github.com/repos/fakerepo/doesnotexist/pulls/1234'
        assert self.pr._url_review_comment == 'https://api.github.com/repos/fakerepo/doesnotexist/pulls/1234/reviews'
        assert self.pr._url_commits == 'https://api.github.com/repos/fakerepo/doesnotexist/pulls/1234/commits'
        assert self.pr._url_files == 'https://api.github.com/repos/fakerepo/doesnotexist/pulls/1234/files'

    def test_has_modified(self):
        mock = MagicMock(return_value=[
            {"sha": "bbcd538c8e72b8c175046e27cc8f907076331401",
             "filename": "file1.txt",
             "status": "added",
             "additions": 103,
             "deletions": 21,
             "changes": 124,
             "blob_url": "https://github.com/blah/blah/blob/hash/file1.txt",
             "raw_url": "https://github.com/blaht/blah/raw/hash/file1.txt",
             "contents_url": "https://api.github.com/repos/blah/blah/contents/file1.txt?ref=hash",
             "patch": "@@ -132,7 +132,7 @@ module Test @@ -1000,7 +1000,7 @@ module Test"}])
        with patch('baldrick.github.github_api.paged_github_json_request', mock):  # noqa
            assert self.pr.has_modified(['file1.txt'])
            assert self.pr.has_modified(['file1.txt', 'notthis.txt'])
            assert not self.pr.has_modified(['notthis.txt'])

    def test_set_check(self, app):
        with patch("baldrick.github.github_api.PullRequestHandler.json",
                   new_callable=PropertyMock) as json:
            json.return_value = {'head': {'sha': 987654321},
                                 'base': {'sha': 123456789}}

            with patch('requests.post') as post:

                self.pr.set_check("baldrick-1", "hello", name="test")
                expected_json = {
                    'external_id': 'baldrick-1',
                    'name': 'test',
                    'head_sha': 987654321,
                    'status': 'completed',
                    'output': {'title': 'hello', 'summary': ''},
                    'conclusion': 'neutral'}
                post.assert_called_once_with(
                    'https://api.github.com/repos/fakerepo/doesnotexist/check-runs',
                    headers={'Accept': 'application/vnd.github.antiope-preview+json'},
                    json=expected_json)

                post.reset_mock()

                self.pr.set_check("baldrick-1", "hello", name="test",
                                  commit_hash='base', text="hello world",
                                  summary="why hello")
                expected_json = {
                    'external_id': 'baldrick-1',
                    'name': 'test',
                    'head_sha': 123456789,
                    'status': 'completed',
                    'output': {'title': 'hello', 'summary': 'why hello',
                               'text': 'hello world'},
                    'conclusion': 'neutral'}
                post.assert_called_once_with(
                    'https://api.github.com/repos/fakerepo/doesnotexist/check-runs',
                    headers={'Accept': 'application/vnd.github.antiope-preview+json'},
                    json=expected_json)

                post.reset_mock()

                self.pr.set_check("baldrick-1", "hello", name="test",
                                  commit_hash='hello',
                                  details_url="this_is_a_url")
                expected_json = {
                    'external_id': 'baldrick-1',
                    'name': 'test',
                    'head_sha': 'hello',
                    'details_url': 'this_is_a_url',
                    'status': 'completed',
                    'output': {'title': 'hello', 'summary': ''},
                    'conclusion': 'neutral'}
                post.assert_called_once_with(
                    'https://api.github.com/repos/fakerepo/doesnotexist/check-runs',
                    headers={'Accept': 'application/vnd.github.antiope-preview+json'},
                    json=expected_json)

                post.reset_mock()

                self.pr.set_check("baldrick-1", "hello", name="test",
                                  status="completed", conclusion=None)
                expected_json = {
                    'external_id': 'baldrick-1',
                    'name': 'test',
                    'head_sha': 987654321,
                    'status': 'completed',
                    'output': {'title': 'hello', 'summary': ''},
                    'conclusion': 'neutral'}
                post.assert_called_once_with(
                    'https://api.github.com/repos/fakerepo/doesnotexist/check-runs',
                    headers={'Accept': 'application/vnd.github.antiope-preview+json'},
                    json=expected_json)