def test_fetch_pr_data(self): """Test fetch_pr_data function.""" if self.skip_github_tests: print("Skipping test_fetch_pr_data, no GitHub token available?") return pr_data, pr_url = gh.fetch_pr_data(1, GITHUB_USER, GITHUB_REPO, GITHUB_TEST_ACCOUNT) self.assertEqual(pr_data['number'], 1) self.assertEqual(pr_data['title'], "a pr") self.assertFalse( any(key in pr_data for key in ['issue_comments', 'review', 'status_last_commit'])) pr_data, pr_url = gh.fetch_pr_data(2, GITHUB_USER, GITHUB_REPO, GITHUB_TEST_ACCOUNT, full=True) self.assertEqual(pr_data['number'], 2) self.assertEqual(pr_data['title'], "an open pr (do not close this please)") self.assertTrue(pr_data['issue_comments']) self.assertEqual(pr_data['issue_comments'][0]['body'], "this is a test") self.assertTrue(pr_data['reviews']) self.assertEqual(pr_data['reviews'][0]['state'], "APPROVED") self.assertEqual(pr_data['reviews'][0]['user']['login'], 'boegel') self.assertEqual(pr_data['status_last_commit'], 'pending')
def test_fetch_pr_data(self): """Test fetch_pr_data function.""" if self.skip_github_tests: print "Skipping test_fetch_pr_data, no GitHub token available?" return pr_data, pr_url = gh.fetch_pr_data(1, GITHUB_USER, GITHUB_REPO, GITHUB_TEST_ACCOUNT) self.assertEqual(pr_data['number'], 1) self.assertEqual(pr_data['title'], "a pr") self.assertFalse(any(key in pr_data for key in ['issue_comments', 'review', 'status_last_commit'])) pr_data, pr_url = gh.fetch_pr_data(2, GITHUB_USER, GITHUB_REPO, GITHUB_TEST_ACCOUNT, full=True) self.assertEqual(pr_data['number'], 2) self.assertEqual(pr_data['title'], "an open pr (do not close this please)") self.assertTrue(pr_data['issue_comments']) self.assertEqual(pr_data['issue_comments'][0]['body'], "this is a test") self.assertTrue(pr_data['reviews']) self.assertEqual(pr_data['reviews'][0]['state'], "APPROVED") self.assertEqual(pr_data['reviews'][0]['user']['login'], 'boegel') self.assertEqual(pr_data['status_last_commit'], 'pending')
def test_reasons_for_closing(self): """Test reasons_for_closing function.""" if self.skip_github_tests: print( "Skipping test_reasons_for_closing, no GitHub token available?" ) return repo_owner = gh.GITHUB_EB_MAIN repo_name = gh.GITHUB_EASYCONFIGS_REPO build_options = { 'dry_run': True, 'github_user': GITHUB_TEST_ACCOUNT, 'pr_target_account': repo_owner, 'pr_target_repo': repo_name, 'robot_path': [], } init_config(build_options=build_options) pr_data, _ = gh.fetch_pr_data(1844, repo_owner, repo_name, GITHUB_TEST_ACCOUNT, full=True) self.mock_stdout(True) self.mock_stderr(True) # can't easily check return value, since auto-detected reasons may change over time if PR is touched res = gh.reasons_for_closing(pr_data) stdout = self.get_stdout() stderr = self.get_stderr() self.mock_stdout(False) self.mock_stderr(False) self.assertTrue(isinstance(res, list)) self.assertEqual(stderr.strip(), "WARNING: Using easyconfigs from closed PR #1844") patterns = [ "Status of last commit is SUCCESS", "Last comment on", "No activity since", "* QEMU-2.4.0", ] for pattern in patterns: self.assertTrue(pattern in stdout, "Pattern '%s' found in: %s" % (pattern, stdout))
def test_reasons_for_closing(self): """Test reasons_for_closing function.""" if self.skip_github_tests: print "Skipping test_reasons_for_closing, no GitHub token available?" return repo_owner = gh.GITHUB_EB_MAIN repo_name = gh.GITHUB_EASYCONFIGS_REPO build_options = { 'dry_run': True, 'github_user': GITHUB_TEST_ACCOUNT, 'pr_target_account': repo_owner, 'pr_target_repo': repo_name, 'robot_path': [], } init_config(build_options=build_options) pr_data, _ = gh.fetch_pr_data(1844, repo_owner, repo_name, GITHUB_TEST_ACCOUNT, full=True) self.mock_stdout(True) self.mock_stderr(True) # can't easily check return value, since auto-detected reasons may change over time if PR is touched res = gh.reasons_for_closing(pr_data) stdout = self.get_stdout() stderr = self.get_stderr() self.mock_stdout(False) self.mock_stderr(False) self.assertTrue(isinstance(res, list)) self.assertEqual(stderr.strip(), "WARNING: Using easyconfigs from closed PR #1844") patterns = [ "Status of last commit is SUCCESS", "Last comment on", "No activity since", "* QEMU-2.4.0", ] for pattern in patterns: self.assertTrue(pattern in stdout, "Pattern '%s' found in: %s" % (pattern, stdout))
def review_pr(paths=None, pr=None, colored=True, branch='develop', testing=False):
    """
    Print multi-diff overview between specified easyconfigs or PR and specified branch.
    :param pr: pull request number in easybuild-easyconfigs repo to review
    :param paths: path tuples (path, generated) of easyconfigs to review
    :param colored: boolean indicating whether a colored multi-diff should be generated
    :param branch: easybuild-easyconfigs branch to compare with
    :param testing: whether to ignore PR labels (used in test_review_pr)
    """
    # only the easyconfigs repo is supported (build option may override the default)
    pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO
    if pr_target_repo != GITHUB_EASYCONFIGS_REPO:
        raise EasyBuildError("Reviewing PRs for repositories other than easyconfigs hasn't been implemented yet")

    # download copy of target branch to compare against
    tmpdir = tempfile.mkdtemp()
    download_repo_path = download_repo(branch=branch, path=tmpdir)
    repo_path = os.path.join(download_repo_path, 'easybuild', 'easyconfigs')

    # determine set of easyconfig files to review: from PR (only *.eb files) or from specified paths
    if pr:
        pr_files = [path for path in fetch_easyconfigs_from_pr(pr) if path.endswith('.eb')]
    elif paths:
        pr_files = paths
    else:
        raise EasyBuildError("No PR # or easyconfig path specified")

    lines = []
    # parse without validation, since files under review may not be complete/valid yet
    ecs, _ = parse_easyconfigs([(fp, False) for fp in pr_files], validate=False)
    for ec in ecs:
        files = find_related_easyconfigs(repo_path, ec['ec'])
        if pr:
            pr_msg = "PR#%s" % pr
        else:
            pr_msg = "new PR"
        _log.debug("File in %s %s has these related easyconfigs: %s" % (pr_msg, ec['spec'], files))
        if files:
            # multi-diff against the related easyconfigs found in the target branch
            lines.append(multidiff(ec['spec'], files, colored=colored))
        else:
            lines.extend(['', "(no related easyconfigs found for %s)\n" % os.path.basename(ec['spec'])])

    # when reviewing an actual PR, also check labels and milestone
    if pr:
        file_info = det_file_info(pr_files, download_repo_path)
        pr_target_account = build_option('pr_target_account')
        github_user = build_option('github_user')
        pr_data, _ = fetch_pr_data(pr, pr_target_account, pr_target_repo, github_user)
        # in testing mode, PR labels are deliberately ignored
        pr_labels = [label['name'] for label in pr_data['labels']] if not testing else []
        expected_labels = det_pr_labels(file_info, pr_target_repo)
        missing_labels = [label for label in expected_labels if label not in pr_labels]
        if missing_labels:
            lines.extend(['', "This PR should be labelled with %s" % ', '.join(["'%s'" % ml for ml in missing_labels])])
        if not pr_data['milestone']:
            lines.extend(['', "This PR should be associated with a milestone"])
        elif '.x' in pr_data['milestone']['title']:
            lines.extend(['', "This PR is associated with a generic '.x' milestone, "
                          "it should be associated to the next release milestone once merged"])
    return '\n'.join(lines)
def main():
    """Parse command line options and run the requested mode (check CI results or test PRs)."""
    opts = {
        'core-cnt': ("Default core count to use for jobs", None, 'store', None),
        'github-account': ("GitHub account where repository is located", None, 'store', 'easybuilders', 'a'),
        'github-user': ("GitHub user to use (for authenticated access)", None, 'store', 'boegel', 'u'),
        'mode': ("Mode to run in", 'choice', 'store', MODE_CHECK_TRAVIS,
                 [MODE_CHECK_GITHUB_ACTIONS, MODE_CHECK_TRAVIS, MODE_TEST_PR]),
        'owner': ("Owner of the bot account that is used", None, 'store', 'boegel'),
        'repository': ("Repository to use", None, 'store', 'easybuild-easyconfigs', 'r'),
        'host': ("Label for current host (used to filter comments asking to test a PR)", None, 'store', ''),
        'pr-test-cmd': ("Command to use for testing easyconfig pull requests (should include '%(pr)s' template value)",
                        None, 'store', ''),
    }
    go = simple_option(go_dict=opts)
    init_build_options()

    github_account = go.options.github_account
    github_user = go.options.github_user
    mode = go.options.mode
    # fix: duplicated 'owner = go.options.owner' assignment removed
    owner = go.options.owner
    repository = go.options.repository
    host = go.options.host
    pr_test_cmd = go.options.pr_test_cmd
    core_cnt = go.options.core_cnt

    github_token = fetch_github_token(github_user)

    # prepare using GitHub API
    github = RestClient(GITHUB_API_URL, username=github_user, token=github_token, user_agent='eb-pr-check')

    if mode in [MODE_CHECK_GITHUB_ACTIONS, MODE_CHECK_TRAVIS]:
        if mode == MODE_CHECK_TRAVIS:
            res = fetch_travis_failed_builds(github_account, repository, owner, github_token)
        elif mode == MODE_CHECK_GITHUB_ACTIONS:
            res = fetch_github_failed_workflows(github, github_account, repository, github_user, owner)
        else:
            error("Unknown mode: %s" % mode)
        # post a comment in each open PR with a failed CI run (unless already posted)
        for pr, pr_comment, check_msg in res:
            params = {'per_page': GITHUB_MAX_PER_PAGE}
            pr_data, _ = fetch_pr_data(pr, github_account, repository, github_user, full=True, **params)
            if pr_data['state'] == GITHUB_PR_STATE_OPEN:
                comment(github, github_user, repository, pr_data, pr_comment, check_msg=check_msg, verbose=DRY_RUN)
            else:
                print("Not posting comment in already closed %s PR #%s" % (repository, pr))
    elif mode == MODE_TEST_PR:
        # validate options that are required for PR testing mode
        if not host:
            error("--host is required when using '--mode %s' !" % MODE_TEST_PR)
        if '%(pr)s' not in pr_test_cmd or '%(eb_args)s' not in pr_test_cmd:
            error("--pr-test-cmd should include '%%(pr)s' and '%%(eb_args)s', found '%s'" % (pr_test_cmd))
        if core_cnt is None:
            error("--core-cnt must be used to specify the default number of cores to request per submitted job!")
        notifications = check_notifications(github, github_user, github_account, repository)
        process_notifications(notifications, github, github_user, github_account, repository, host, pr_test_cmd,
                              core_cnt)
    else:
        error("Unknown mode: %s" % mode)
def process_notifications(notifications, github, github_user, github_account, repository, host, pr_test_cmd, core_cnt):
    """
    Process provided notifications.

    For each notification: find the comment that mentions the bot account (github_user),
    and if it was posted by an allowed account and mentions this host, run the PR test
    command and post a reply with the outcome.

    :param notifications: list of notification dicts (with 'subject', 'thread_id', 'timestamp' keys)
    :param github: GitHub API client used to post comments
    :param github_user: GitHub account name of the bot itself
    :param github_account: GitHub account that hosts the repository
    :param repository: repository name
    :param host: label for current host, used to filter comments asking to test a PR
    :param pr_test_cmd: command template for testing a PR (uses %(pr)s/%(eb_args)s/%(core_cnt)s values)
    :param core_cnt: default core count to use for submitted jobs
    :return: list of results
    """
    # NOTE(review): 'res' is never appended to in this function, so an empty list is
    # always returned — confirm whether callers rely on the return value
    res = []
    cnt = len(notifications)
    for idx, notification in enumerate(notifications):
        pr_title = notification['subject']['title']
        # PR id is the last component of the subject URL
        pr_id = notification['subject']['url'].split('/')[-1]
        msg = "[%d/%d] Processing notification for %s PR #%s \"%s\"... " % (idx + 1, cnt, repository, pr_id, pr_title)
        msg += "(thread id: %s, timestamp: %s)" % (notification['thread_id'], notification['timestamp'])
        print(msg)
        # check comments (latest first)
        pr_data, _ = fetch_pr_data(pr_id, github_account, repository, github_user, full=True,
                                   per_page=GITHUB_MAX_PER_PAGE)
        comments_data = pr_data['issue_comments']
        # determine comment that triggered the notification:
        # most recent comment that mentions the bot account
        trigger_comment_id = None
        mention_regex = re.compile(r'\s*@%s:?\s*' % github_user, re.M)
        for comment_data in comments_data[::-1]:
            comment_id, comment_txt = comment_data['id'], comment_data['body']
            if mention_regex.search(comment_txt):
                trigger_comment_id = comment_id
                break
        # marker string included in bot replies, used to detect already-processed notifications
        check_str = "notification for comment with ID %s processed" % trigger_comment_id
        processed = False
        for comment_data in comments_data[::-1]:
            comment_by, comment_txt = comment_data['user']['login'], comment_data['body']
            if comment_by == github_user and check_str in comment_txt:
                print("check_str '%s' found in: %s" % (check_str, comment_txt))
                processed = True
                break
        if processed:
            msg = "Notification %s already processed, so skipping it... " % notification['thread_id']
            msg += "(timestamp: %s)" % notification['timestamp']
            print(msg)
            continue
        host_regex = re.compile(r'@.*%s' % host, re.M)
        mention_found = False
        # walk comments latest-first, act on the first one that mentions the bot
        for comment_data in comments_data[::-1]:
            comment_id, comment_by = comment_data['id'], comment_data['user']['login']
            comment_txt = comment_data['body']
            if mention_regex.search(comment_txt):
                print("Found comment including '%s': %s" % (mention_regex.pattern, comment_txt))
                # strip the bot mention from the comment text before further matching
                msg = mention_regex.sub(' ', comment_txt)
                # require that @<host> is included in comment before taking any action
                if host_regex.search(msg):
                    print("Comment includes '%s', so processing it..." % host_regex.pattern)
                    # only these accounts are allowed to trigger test runs
                    maintainers = ['akesandgren', 'bartoldeman', 'bedroge', 'boegel', 'branfosj', 'casparvl',
                                   'jfgrimm', 'lexming', 'Micket', 'migueldiascosta', 'ocaisa', 'SebastianAchilles',
                                   'smoors', 'verdurin']
                    contributors = ['robert-mijakovic']
                    allowed_accounts = maintainers + contributors
                    please_regex = re.compile(r'[Pp]lease test', re.M)
                    if comment_by not in allowed_accounts:
                        allowed_accounts_str = ' or '.join('@%s' % x for x in allowed_accounts)
                        reply_msg = "@%s: I noticed your comment, " % comment_by
                        reply_msg += "but I only dance when %s tells me (for now), I'm sorry..." % allowed_accounts_str
                    elif "PLEASE " in msg:
                        reply_msg = "Don't scream, it's rude and I don't like people who do..."
                    elif please_regex.search(msg):
                        system_info = get_system_info()
                        hostname = system_info.get('hostname', '(hostname not known)')
                        reply_msg = "@%s: Request for testing this PR well received on %s\n" % (comment_by, hostname)
                        tmpl_dict = {'pr': pr_id}
                        # check whether custom arguments for 'eb' or submit command are specified
                        tmpl_dict.update({
                            'core_cnt': core_cnt,
                            'eb_args': '',
                        })
                        # pick up KEY=value overrides (CORE_CNT=..., EB_ARGS=...) from the comment;
                        # NOTE(review): values are wrapped in double quotes for both keys — confirm
                        # the pr_test_cmd template expects a quoted core count as well
                        for item in shlex.split(msg):
                            for key in ['CORE_CNT', 'EB_ARGS']:
                                if item.startswith(key + '='):
                                    _, value = item.split('=', 1)
                                    tmpl_dict[key.lower()] = '"%s"' % value
                                    break
                        # run pr test command, check exit code and capture output
                        cmd = pr_test_cmd % tmpl_dict
                        (out, ec) = run_cmd(cmd, simple=False)
                        reply_msg += '\n'.join([
                            '',
                            "PR test command '`%s`' executed!" % cmd,
                            "* exit code: %s" % ec,
                            "* output:",
                            "```",
                            out.strip(),
                            "```",
                            '',
                            "Test results coming soon (I hope)...",
                        ])
                    else:
                        reply_msg = "Got message \"%s\", but I don't know what to do with it, sorry..." % msg
                    # always include 'details' part that includes a check string
                    # which includes the ID of the comment we're reacting to,
                    # so we can avoid re-processing the same comment again...
                    reply_msg += '\n'.join([
                        '',
                        '',
                        "<details>",
                        '',
                        "*- %s*" % check_str,
                        '',
                        "*Message to humans: this is just bookkeeping information for me,",
                        "it is of no use to you (unless you think I have a bug, which I don't).*",
                        "</details>",
                    ])
                    comment(github, github_user, repository, pr_data, reply_msg, verbose=DRY_RUN)
                else:
                    print("Pattern '%s' not found in comment for PR #%s, so ignoring it" % (host_regex.pattern, pr_id))
                mention_found = True
                break
            else:
                # skip irrelevant comments (no mention found)
                print("Pattern '%s' not found in comment for PR #%s, so ignoring it" % (mention_regex.pattern, pr_id))
                continue
        if not mention_found:
            print_warning("Relevant comment for notification #%d for PR %s not found?!" % (idx, pr_id))
            sys.stderr.write("Notification data:\n" + pformat(notification))
    return res
def fetch_github_failed_workflows(github, github_account, repository, github_user, owner):
    """
    Scan GitHub Actions for failed workflow runs.

    For each failed workflow run of an open PR (at its latest commit, and only when the
    commit status isn't success/pending), download the log of the first failing job,
    extract the failing test suite output and compose a PR comment with it.
    Flukes are restarted via the owner's account instead of reported.

    :param github: GitHub API client
    :param github_account: GitHub account that hosts the repository
    :param repository: repository name
    :param github_user: GitHub account name used for fetching PR data
    :param owner: bot owner account, used to restart fluke workflow runs
    :return: list of (pr_id, pr_comment, check_msg) tuples to report back on
    """
    res = []
    # only consider failed workflows triggered by pull requests
    params = {
        'event': 'pull_request',
        # filtering based on status='failure' no longer works correctly?!
        # also with status='completed' some workflow runs are not included in result...
        # 'status': 'failure',
        'per_page': GITHUB_MAX_PER_PAGE,
    }
    try:
        status, run_data = github.repos[github_account][repository].actions.runs.get(**params)
    except socket.gaierror as err:
        error("Failed to download GitHub Actions workflow runs data: %s" % err)
    if status == 200:
        run_data = list(run_data['workflow_runs'])
        print("Found %s failed workflow runs for %s/%s" % (len(run_data), github_account, repository))
    else:
        error("Status for downloading GitHub Actions workflow runs data should be 200, got %s" % status)
    # PR ids for which a failing workflow was already seen (report each PR only once)
    failing_prs = set()
    for idx, entry in enumerate(run_data):
        if entry['status'] != 'completed':
            print("Ignoring incomplete workflow run %s" % entry['html_url'])
            continue
        if entry['conclusion'] == 'success':
            print("Ignoring successful workflow run %s" % entry['html_url'])
            continue
        head_user = entry['head_repository']['owner']['login']
        head = '%s:%s' % (head_user, entry['head_branch'])
        head_sha = entry['head_sha']
        # determine corresponding PR (if any)
        status, pr_data = github.repos[github_account][repository].pulls.get(head=head)
        if status != 200:
            error("Status for downloading data for PR with head %s should be 200, got %s" % (head, status))
        if len(pr_data) == 1:
            pr_data = pr_data[0]
            print("Failed workflow run %s found (PR: %s)" % (entry['html_url'], pr_data['html_url']))
            pr_id = pr_data['number']
            # skip PRs for which a failing workflow was already encountered
            if pr_id in failing_prs:
                print("PR #%s already encountered, so skipping workflow %s" % (pr_id, entry['html_url']))
                continue
            pr_data, _ = fetch_pr_data(pr_id, github_account, repository, github_user, full=True,
                                       per_page=GITHUB_MAX_PER_PAGE)
            if pr_data['state'] == 'open':
                pr_head_sha = pr_data['head']['sha']
                # make sure workflow was run for latest commit in this PR
                if head_sha != pr_head_sha:
                    msg = "Workflow %s was for commit %s, " % (entry['html_url'], head_sha)
                    msg += "not latest commit in PR #%s (%s), so skipping" % (pr_id, pr_head_sha)
                    print(msg)
                    continue
                # check status of most recent commit in this PR,
                # ignore this PR if status is "success" or "pending"
                pr_status = pr_data['status_last_commit']
                print("Status of last commit (%s) in PR #%s: %s" % (pr_head_sha, pr_id, pr_status))
                if pr_status in ['action_required', STATUS_PENDING, STATUS_SUCCESS]:
                    print("Status of last commit in PR #%s is '%s', so ignoring it for now..." % (pr_id, pr_status))
                    continue
                # download list of jobs in workflow
                run_id = entry['id']
                status, jobs_data = github.repos[github_account][repository].actions.runs[run_id].jobs.get()
                if status != 200:
                    error("Failed to download list of jobs for workflow run %s" % entry['html_url'])
                # determine ID of first failing job
                job_id = None
                for job in jobs_data['jobs']:
                    if job['conclusion'] == 'failure':
                        job_id = job['id']
                        print("Found failing job for workflow %s: %s" % (entry['html_url'], job_id))
                        break
                if job_id is None:
                    error("ID of failing job not found for workflow %s" % entry['html_url'])
                try:
                    status, log_txt = github.repos[github_account][repository].actions.jobs[job_id].logs.get()
                except HTTPError as err:
                    # keep the HTTP status code so a placeholder log message can be composed below
                    status = err.code
                if status == 200:
                    print("Downloaded log for job %s" % job_id)
                else:
                    warning("Failed to download log for job %s" % job_id)
                    log_txt = '(failed to fetch log contents due to HTTP status code %s)' % status
                # strip off timestamp prefixes
                # example timestamp: 2020-07-13T09:54:36.5004935Z
                timestamp_regex = re.compile(r'^[0-9-]{10}T[0-9:]{8}\.[0-9]+Z ')
                log_lines = [timestamp_regex.sub('', x) for x in log_txt.splitlines()]
                # determine line that marks end of output for failing test suite:
                # "ERROR: Not all tests were successful"
                # NOTE(review): this loop rebinds 'idx' from the outer enumerate(run_data) loop;
                # harmless as long as the outer 'idx' is not used afterwards — confirm
                error_line_idx = None
                for idx, line in enumerate(log_lines):
                    if line.startswith("ERROR: Not all tests were successful"):
                        error_line_idx = idx
                        print("Found error line @ index %s" % error_line_idx)
                        break
                if error_line_idx is None:
                    # no test suite failure marker found: either a fluke (restart workflow), or just warn and skip
                    log_txt_clean = '\n'.join(log_lines)
                    warning("Log line that marks end of test suite output not found for job %s!\n%s" % (job_id, log_txt_clean))
                    if is_fluke(log_txt):
                        owner_gh_token = fetch_github_token(owner)
                        if owner_gh_token:
                            github_owner = RestClient(GITHUB_API_URL, username=owner, token=owner_gh_token,
                                                      user_agent='eb-pr-check')
                            print("Fluke found, restarting this workflow using @%s's GitHub account..." % owner)
                            repo_api = github_owner.repos[github_account][repository]
                            status, jobs_data = repo_api.actions.runs[run_id].rerun.post()
                            if status == 201:
                                print("Workflow %s restarted" % entry['html_url'])
                            else:
                                print("Failed to restart workflow %s: status %s" % (entry['html_url'], status))
                        else:
                            warning("Fluke found but can't restart workflow, no token found for @%s" % owner)
                    continue
                # find line that marks start of test output: only dots and 'E'/'F' characters
                start_test_regex = re.compile(r'^[\.EF]+$')
                start_line_idx = error_line_idx
                start_log_line = log_lines[start_line_idx]
                # scan backwards from the error line until the test progress line is found
                while (start_line_idx >= 0 and not (start_log_line and start_test_regex.match(start_log_line))):
                    start_line_idx -= 1
                    start_log_line = log_lines[start_line_idx]
                log_lines = log_lines[start_line_idx + 1:error_line_idx + 1]
                # compose comment
                pr_comment = "@%s: Tests failed in GitHub Actions" % pr_data['user']['login']
                pr_comment += ", see %s" % entry['html_url']
                # use first part of comment to check whether comment was already posted
                check_msg = pr_comment
                if len(log_lines) > 100:
                    log_lines = log_lines[-100:]
                    pr_comment += "\nLast 100 lines of output from first failing test suite run:\n\n```"
                else:
                    pr_comment += "\nOutput from first failing test suite run:\n\n```"
                for line in log_lines:
                    pr_comment += line + '\n'
                pr_comment += "```\n"
                pr_comment += "\n*bleep, bloop, I'm just a bot (boegelbot v%s)*\n" % VERSION
                pr_comment += "Please talk to my owner `@%s` if you notice you me acting stupid),\n" % owner
                pr_comment += "or submit a pull request to https://github.com/boegel/boegelbot fix the problem."
                res.append((pr_id, pr_comment, check_msg))
                failing_prs.add(pr_id)
            else:
                print("Ignoring failed workflow run for closed PR %s" % pr_data['html_url'])
        else:
            warning("Expected exactly one PR with head %s, found %s: %s" % (head, len(pr_data), pr_data))
    print("Processed %d failed workflow runs, found %d PRs to report back on" % (len(run_data), len(res)))
    return res
def process_notifications(notifications, github, github_user, github_account, repository, host, pr_test_cmd):
    """
    Process provided notifications.

    For each notification: find the comment that mentions the bot account (github_user),
    and if it was posted by an allowed account and mentions this host, run the PR test
    command and post a reply with the outcome.

    :param notifications: list of notification dicts (with 'subject', 'thread_id', 'timestamp' keys)
    :param github: GitHub API client used to post comments
    :param github_user: GitHub account name of the bot itself
    :param github_account: GitHub account that hosts the repository
    :param repository: repository name
    :param host: label for current host, used to filter comments asking to test a PR
    :param pr_test_cmd: command template for testing a PR (uses %(pr)s and %(eb_args)s template values)
    :return: list of results
    """
    res = []
    cnt = len(notifications)
    for idx, notification in enumerate(notifications):
        pr_title = notification['subject']['title']
        # PR id is the last component of the subject URL
        pr_id = notification['subject']['url'].split('/')[-1]
        msg = "[%d/%d] Processing notification for %s PR #%s \"%s\"... " % (idx + 1, cnt, repository, pr_id, pr_title)
        msg += "(thread id: %s, timestamp: %s)" % (notification['thread_id'], notification['timestamp'])
        print(msg)

        # check comments (latest first)
        pr_data, _ = fetch_pr_data(pr_id, github_account, repository, github_user, full=True)
        comments_data = pr_data['issue_comments']

        # determine comment that triggered the notification:
        # most recent comment that mentions the bot account
        trigger_comment_id = None
        mention_regex = re.compile(r'\s*@%s:?\s*' % github_user, re.M)
        for comment_data in comments_data[::-1]:
            comment_id, comment_txt = comment_data['id'], comment_data['body']
            if mention_regex.search(comment_txt):
                trigger_comment_id = comment_id
                break

        # marker string included in bot replies, used to detect already-processed notifications
        check_str = "notification for comment with ID %s processed" % trigger_comment_id

        processed = False
        for comment_data in comments_data[::-1]:
            comment_by, comment_txt = comment_data['user']['login'], comment_data['body']
            if comment_by == github_user and check_str in comment_txt:
                processed = True
                break

        if processed:
            msg = "Notification %s already processed, so skipping it... " % notification['thread_id']
            msg += "(timestamp: %s)" % notification['timestamp']
            print(msg)
            continue

        host_regex = re.compile(r'@.*%s' % host, re.M)

        mention_found = False
        # walk comments latest-first, act on the first one that mentions the bot
        for comment_data in comments_data[::-1]:
            comment_id, comment_by = comment_data['id'], comment_data['user']['login']
            comment_txt = comment_data['body']
            if mention_regex.search(comment_txt):
                print("Found comment including '%s': %s" % (mention_regex.pattern, comment_txt))
                # strip the bot mention from the comment text before further matching
                msg = mention_regex.sub('', comment_txt)

                # require that @<host> is included in comment before taking any action
                if host_regex.search(msg):
                    print("Comment includes '%s', so processing it..." % host_regex.pattern)

                    # only these accounts are allowed to trigger test runs
                    allowed_accounts = ['boegel', 'casparvl', 'smoors', 'verdurin']
                    if comment_by not in allowed_accounts:
                        allowed_accounts_str = ' or '.join('@%s' % x for x in allowed_accounts)
                        reply_msg = "@%s: I noticed your comment, " % comment_by
                        reply_msg += "but I only dance when %s tells me (for now), I'm sorry..." % allowed_accounts_str
                    elif "please test" in msg:
                        system_info = get_system_info()
                        hostname = system_info.get('hostname', '(hostname not known)')
                        reply_msg = "@%s: Request for testing this PR well received on %s\n" % (comment_by, hostname)
                        tmpl_dict = {'pr': pr_id}
                        # check whether custom arguments for 'eb' command are specified;
                        # bug fix: bind the regex match to a dedicated variable instead of
                        # 'res', which clobbered the result list that is returned below
                        eb_args_regex = re.compile(r'EB_ARGS=(?P<eb_args>.*)$', re.M)
                        eb_args_match = eb_args_regex.search(msg)
                        if eb_args_match:
                            eb_args = eb_args_match.group('eb_args').replace('"', '\\"')
                            tmpl_dict.update({'eb_args': '"%s"' % eb_args})
                        else:
                            tmpl_dict.update({'eb_args': ''})
                        # run pr test command, check exit code and capture output
                        cmd = pr_test_cmd % tmpl_dict
                        (out, ec) = run_cmd(cmd, simple=False)
                        reply_msg += '\n'.join([
                            '',
                            "PR test command '`%s`' executed!" % cmd,
                            "* exit code: %s" % ec,
                            "* output:",
                            "```",
                            out.strip(),
                            "```",
                            '',
                            "Test results coming soon (I hope)...",
                        ])
                    else:
                        reply_msg = "Got message \"%s\", but I don't know what to do with it, sorry..." % msg

                    # always include 'details' part that includes a check string
                    # which includes the ID of the comment we're reacting to,
                    # so we can avoid re-processing the same comment again...
                    reply_msg += '\n'.join([
                        '',
                        '',
                        "<details>",
                        '',
                        "*- %s*" % check_str,
                        '',
                        "*Message to humans: this is just bookkeeping information for me,",
                        "it is of no use to you (unless you think I have a bug, which I don't).*",
                        "</details>",
                    ])
                    comment(github, github_user, repository, pr_data, reply_msg, verbose=DRY_RUN)
                else:
                    print("Pattern '%s' not found in comment for PR #%s, so ignoring it" % (host_regex.pattern, pr_id))

                mention_found = True
                break
            else:
                # skip irrelevant comments (no mention found)
                print("Pattern '%s' not found in comment for PR #%s, so ignoring it" % (mention_regex.pattern, pr_id))
                continue

        if not mention_found:
            print_warning("Relevant comment for notification #%d for PR %s not found?!" % (idx, pr_id))
            sys.stderr.write("Notification data:\n" + pformat(notification))

    return res