def _get_pr_numbers(last_deploy, current_deploy):
    """Return the pull-request numbers merged between two deploys.

    Resolves both refs on dimagi/commcare-hq, compares them, and pulls
    the PR number out of every merge-commit message in the range.
    """
    repo = Github().get_organization('dimagi').get_repo('commcare-hq')
    comparison = repo.compare(
        repo.get_commit(last_deploy).sha,
        repo.get_commit(current_deploy).sha,
    )

    pr_numbers = []
    for repo_commit in comparison.commits:
        message = repo_commit.commit.message
        if message.startswith('Merge pull request'):
            match = re.search(r'Merge pull request #(\d+)', message)
            pr_numbers.append(int(match.group(1)))
    return pr_numbers
def _get_pr_numbers(last_deploy, current_deploy):
    """Return the pull-request numbers merged between two deploys.

    The PR number is parsed from the 'Merge pull request #N' message of
    each merge commit between the two refs on dimagi/commcare-hq.
    """
    repo = Github().get_organization('dimagi').get_repo('commcare-hq')
    last_sha = repo.get_commit(last_deploy).sha
    current_sha = repo.get_commit(current_deploy).sha

    # Only merge commits carry a PR number in their message.
    merge_messages = (
        rc.commit.message
        for rc in repo.compare(last_sha, current_sha).commits
        if rc.commit.message.startswith('Merge pull request')
    )
    return [
        int(re.search(r'Merge pull request #(\d+)', msg).group(1))
        for msg in merge_messages
    ]
def init_github(args):
    """Initialize the module-level GitHub connection state.

    Requires ``--repo`` and ``--sha`` arguments plus the GH_TOKEN and
    BUILD_NUMBER environment variables; ``err`` is invoked for any
    missing piece (presumably it aborts — confirm against its definition).
    Sets the globals: commit_sha, github_repo, github_commit, github_pr
    and build_number.
    """
    global commit_sha, github_repo, github_pr, github_commit, build_number

    if args.repo is None:
        err("--repo <name> must be passed when connecting to GitHub")
    if args.sha is None:
        err("--sha <SHA> must be passed when connecting to GitHub")
    commit_sha = args.sha

    if 'GH_TOKEN' not in os.environ:
        err("the GH_TOKEN environment variable must be set when connecting "
            "to GitHub")
    github_repo = Github(os.environ['GH_TOKEN']).get_repo(args.repo)
    github_commit = github_repo.get_commit(commit_sha)
    github_pr = (github_repo.get_pull(args.pull_request)
                 if args.pull_request else None)

    # Get the shippable build number, useful to find logs
    build_number = os.environ.get("BUILD_NUMBER")
    if build_number is None:
        err("the BUILD_NUMBER environment variable must be set when "
            "connecting to GitHub")
def handle_push():
    """Webhook handler for GitHub push events.

    Verifies the hook signature, and when the push targets a watched
    branch, closes every open pull request whose commits were contained
    in the push (commenting on each).
    """
    payload = request.json
    try:
        repo_slug = payload['repository']['full_name']
    except KeyError:
        abort(422, 'Invalid JSON payload: repository.full_name is missing')
    try:
        conf = repo_config(repo_slug)
    except KeyError:
        abort(400, "Unknown repository: %s" % repo_slug)

    LOG.info("Handling push from repository: %s", repo_slug)
    verify_signature(conf.get('hook_secret', ''),
                     request.get_header('X-Hub-Signature'),
                     request.body)

    branch = ref_head_name(payload.get('ref', ''))
    branch_pattern = r"^%s$" % conf.get('branch_regex', 'master')
    if not branch or not re.match(branch_pattern, branch):
        return ok("Skipping push into branch: %s" % (branch or '<unknown>'))

    closed_pullreqs = []
    try:
        gh = Github(conf.get('github_token'), base_url=GH_BASE_URL)
        repo = gh.get_repo(repo_slug)
        pushed_commits = (repo.get_commit(c['id'])
                          for c in payload.get('commits', []))
        for pullreq, merged_commits in find_matching_pulls(repo,
                                                           pushed_commits):
            pullreq_id = "%s#%s" % (repo_slug, pullreq.number)
            LOG.debug("Closing pull request %s", pullreq_id)
            close_pullreq_with_comment(pullreq,
                                       gen_comment(repo_slug, merged_commits))
            closed_pullreqs.append(pullreq_id)
    except (BadCredentialsException, TwoFactorException) as e:
        abort(500, "Authentication error, GitHub returned: %s" % e)
    except GithubException as e:
        abort(503, str(e))

    if closed_pullreqs:
        return ok("Closed pull requests: %s" % ', '.join(closed_pullreqs))
    return ok('No pull request has been closed')
def handle_push():
    """Handle a GitHub push webhook.

    After signature verification, any open pull request whose commits
    were just pushed directly into a watched branch is closed with an
    explanatory comment.
    """
    payload = request.json

    try:
        repo_slug = payload['repository']['full_name']
    except KeyError:
        abort(422, 'Invalid JSON payload: repository.full_name is missing')

    try:
        conf = repo_config(repo_slug)
    except KeyError:
        abort(400, "Unknown repository: %s" % repo_slug)

    LOG.info("Handling push from repository: %s", repo_slug)
    verify_signature(
        conf.get('hook_secret', ''),
        request.get_header('X-Hub-Signature'),
        request.body,
    )

    branch = ref_head_name(payload.get('ref', ''))
    pattern = r"^%s$" % conf.get('branch_regex', 'master')
    if not (branch and re.match(pattern, branch)):
        return ok("Skipping push into branch: %s" % (branch or '<unknown>'))

    closed_pullreqs = []
    try:
        repo = Github(conf.get('github_token'),
                      base_url=GH_BASE_URL).get_repo(repo_slug)
        pushed = (repo.get_commit(entry['id'])
                  for entry in payload.get('commits', []))
        for pullreq, merged_commits in find_matching_pulls(repo, pushed):
            pullreq_id = "%s#%s" % (repo_slug, pullreq.number)
            LOG.debug("Closing pull request %s", pullreq_id)
            close_pullreq_with_comment(
                pullreq, gen_comment(repo_slug, merged_commits))
            closed_pullreqs.append(pullreq_id)
    except (BadCredentialsException, TwoFactorException) as e:
        abort(500, "Authentication error, GitHub returned: %s" % e)
    except GithubException as e:
        abort(503, str(e))

    if not closed_pullreqs:
        return ok('No pull request has been closed')
    return ok("Closed pull requests: %s" % ', '.join(closed_pullreqs))
class StyleCheck: """ 运行test进行测试 """ def __init__(self, repo_name, local_repo_home, git_user, git_pwd): """ 初始化工作,没有考虑异常 如果event.payload['size']为0,直接返回 """ self.repo = Github(git_user, git_pwd).get_user().get_repo(repo_name) self.event = self.repo.get_events()[0] if self.event.payload['size'] == 0: return tmp = self.event.payload['commits'][0]['sha'] self.before_sha = self.repo.get_commit(tmp).parents[0].sha self.git_url = u'[email protected]:'+self.repo.full_name+u'.git' self.local_repo_home = local_repo_home self.repo_dir = local_repo_home + '/'+repo_name+'/' self.logger = init_log() def run_command(self, args, cwd=os.environ['WORKSPACE']): """ 运行命令 """ try: proc = subprocess.Popen(args, stdout=subprocess.PIPE, \ stderr=subprocess.PIPE, cwd=cwd) except OSError: self.logger.error(str(sys.exc_info())) if DEBUG: print ' '.join(args) self.logger.info(' '.join(args)) return proc.communicate() def git_clone(self): """ git clone """ (results, code) = self.run_command(('git', 'clone', self.git_url), \ cwd=self.local_repo_home) if DEBUG: print code print results def git_pull(self): """ pull the newest data """ (results, code) = self.run_command(('git', 'pull'), self.repo_dir) if DEBUG: print code print results def git_reset(self): """ hard-reset """ (results, code) = self.run_command(('git', 'reset', '--hard', \ self.before_sha), self.repo_dir) if DEBUG: print code print results (results, code) = self.run_command(('git', 'push', '--force'), \ self.repo_dir) if DEBUG: print code print results def single_style_check(self, code_path): """ 检查代码风格,返回检查结果和分数 """ try: (results, code) = self.run_command(('pylint', '-f', 'parseable', code_path), self.repo_dir) except Exception: print 'run pylint error' return ("", 0) if DEBUG: print "error info", code try: score = float([r for r in results.split('\n') if r.startswith('Your code')][0].split(' ')[6].split('/')[0]) except Exception: score = None return (results, score) def test(self): """ 进行代码检测 
如果event里面commit数量为0,或者event不是PushEent类型,直接返回 """ if self.event.payload['size'] == 0 or self.event.type != 'PushEvent': #没有commit,直接返回,不用进行代码检测 return 2 if not os.path.exists(self.repo_dir): self.git_clone() else: self.git_pull() pylint_log = codecs.open('pylint.log', 'w', 'utf-8') style_pass = True file_list = self.get_file_list() for file_path in file_list: (detail, score) = self.single_style_check(file_path) if score is not None: if score < PYLINT_MIN_SCORE: style_pass = False print detail pylint_log.writelines(detail) else: print 'score is None' pylint_log.close() if style_pass is False: self.git_reset() tmp_f = open('delete_build', 'w') tmp_f.write(os.environ['BUILD_NUMBER']) tmp_f.close() return 1 return 0 def get_ignore_list(self): """ 从配置文件中读取不进行代码检测的文件列表 配置文件规则: 每行一个相对路径 以.py结尾,则认为是文件 ------> 通过file_name in ignore_list 判断 否则认为是文件夹-----> startswith 进行判断 return: (文件夹名列表, 文件名列表) """ pass def get_file_list(self): """ 从payload字典中获取commits的sha,在通过sha获取commit, 然后从commit里面得到修改的文件 只检查.py文件 """ file_list = [] #ignore_list = [] for i in self.event.payload['commits']: commit = self.repo.get_commit(i['sha']) for j in commit.files: if DEBUG: print i['sha'], j.filename, j.status if j.filename.endswith('.py'): if j.status == "added" or j.status == 'modified': file_list.append(j.filename) file_list = list(set(file_list)) if j.status == 'removed' and j.filename in file_list: file_list.remove(j.filename) return file_list
class PR:
    """Thin wrapper around a GitHub pull request.

    Fix: the original assigned ``self.repo = repo`` (the repo *name*)
    and immediately overwrote it with the PyGithub Repository object;
    the dead store is removed so ``self.repo`` is unambiguous.
    """

    def __init__(self, owner, repo, pr_number, token):
        self.owner = owner
        self.pr_number = pr_number
        self.token = token
        # self.repo is the PyGithub Repository object, not the name.
        self.repo = Github(token).get_user(owner).get_repo(repo)
        self.pr = self.repo.get_pull(pr_number)
        # Reviews are posted through the raw REST endpoint (preview API).
        self.merge_url = (
            "https://api.github.com/repos/{}/{}/pulls/{}/reviews".format(
                owner, repo, pr_number))
        self.merge_headers = {
            'Authorization': 'token {}'.format(token),
            'Accept': 'application/vnd.github.black-cat-preview+json'
        }

    def content(self, fpath, ref=None):
        """Return the decoded bytes of *fpath* at *ref* (default branch
        when ref is None)."""
        if ref is None:
            ref = GithubObject.NotSet
        content = self.repo.get_contents(fpath, ref)
        assert (content.encoding == "base64")
        return base64.b64decode(content.content)

    def create_status(self, commit_sha, state, target_url=None,
                      description=None, context=None):
        """Create a commit status; None arguments are omitted."""
        if target_url is None:
            target_url = GithubObject.NotSet
        if description is None:
            description = GithubObject.NotSet
        if context is None:
            context = GithubObject.NotSet
        self.repo.get_commit(commit_sha).create_status(
            state, target_url, description, context)

    def base_sha(self):
        """SHA of the PR's base commit."""
        return self.pr.base.sha

    def head_sha(self):
        """SHA of the PR's head commit."""
        return self.pr.head.sha

    def files(self):
        """List of filenames changed by the PR."""
        return [f.filename for f in self.pr.get_files()]

    def get_patches(self):
        """Mapping of changed filename -> unified-diff patch text."""
        return {f.filename: f.patch for f in self.pr.get_files()}

    def merge(self, commit_message=None):
        """Merge the PR (optionally with a commit message); returns 0."""
        if commit_message is None:
            commit_message = GithubObject.NotSet
        self.pr.merge(commit_message)
        return 0

    def review(self, event, body=None):
        """Post a review (APPROVE / REQUEST_CHANGES / COMMENT).

        Returns 0 on HTTP 200, 1 otherwise.
        """
        if body is None:
            body = GithubObject.NotSet
        data = {'event': event, 'body': body}
        r = requests.post(self.merge_url, json=data,
                          headers=self.merge_headers)
        return 0 if r.status_code == requests.codes.ok else 1
def get_cve_commits(self, commits):
    """Map CVE ids to CveData aggregated from the mined commit list.

    Scans *commits* (sorted via self.commit_sort) for CVE ids in the
    commit messages.  The first commit mentioning a CVE is treated as the
    "found" commit, a later mention as the "fix" commit; stats of all
    commits in between are then accumulated.
    NOTE(review): assumes each commit dict has 'message', 'author',
    'isMerge', 'commit_hash', 'insertions', 'deletions', 'files_changed'
    keys -- confirm against the miner that produces them.
    """
    def setup_cve_data(cve):
        # Seed the aggregate from the "found" commit's own stats.
        cve_commits[cve].contributors.append(commit['author'])
        cve_commits[cve].insertions = cve_commits[cve].foundCommit[
            'insertions']
        cve_commits[cve].deletions = cve_commits[cve].foundCommit[
            'deletions']
        cve_commits[cve].files_changed = cve_commits[cve].foundCommit[
            'files_changed']

    cve_commits = dict()
    cve_manager = CveManager(self.password)
    commits.sort(key=self.commit_sort)
    # setup the API connection to the repo
    repo = Github(os.environ['GITHUBTOKEN']).get_repo(
        os.environ['REPO_URL'])
    # look through mined commits and collect every cve
    print("Parsing commits")
    for commit in progressbar.progressbar(commits):
        if commit['message'] and self.cve_id_pattern.search(
                commit['message']):
            cve_group = re.findall(self.cve_id_pattern, commit['message'])
            # get data stored about the cve in the db
            for cve in cve_group:
                cve = cve[0].upper()
                cve_db_data = cve_manager.get_cve(cve)
                if not cve_db_data:
                    continue
                elif commit['isMerge']:
                    # merge commit stats are not parsed, so it is necessary to get them here
                    try:
                        response = repo.get_commit(
                            sha=commit['commit_hash'])
                        commit['insertions'] = response.stats.additions
                        commit['deletions'] = response.stats.deletions
                        commit['files_changed'] = len(response.files)
                    except Exception as ex:
                        print(ex)
                        print('Merge commit unavailable')
                    # a sleep is needed to not overwhelm the API
                    time.sleep(0.73)
                cwe_group = cve_manager.get_cwe(cve)
                # if a cve only appears once it is assumed to be a fix
                # if a cve is not in the database it is considered to be invalid and therefore not considered
                if cve not in cve_commits.keys():
                    # First sighting: record found == fix for now.
                    cve_commits[cve] = CveData(commit, commit,
                                               contributors=list())
                    cve_commits[cve].base_score = cve_db_data[0][5]
                    if cwe_group and cwe_group[0][1]:
                        cve_commits[cve].cwe = cwe_group[0][1]
                    if cve_db_data[0][6]:
                        # DB severity wins when present.
                        cve_commits[cve].severity = cve_db_data[0][
                            6].rstrip()
                    elif cve_commits[cve].base_score:
                        # Derive severity from the CVSS base score.
                        if 0 <= cve_commits[cve].base_score < 4:
                            cve_commits[cve].severity = "LOW"
                        elif 4 <= cve_commits[cve].base_score < 7:
                            cve_commits[cve].severity = "MEDIUM"
                        else:
                            cve_commits[cve].severity = "HIGH"
                    else:
                        cve_commits[cve].severity = None
                    cve_commits[cve].published_date = cve_db_data[0][8]
                else:
                    # Later sighting: this commit is the fix.
                    cve_commits[cve].fixCommit = commit
    # a for cycle is needed, since I have to recheck every inbetween commit
    # for their data, since at this point I know when the cve has been fixed
    for cve in cve_commits:
        started = False
        if cve_commits[cve].foundCommit == cve_commits[cve].fixCommit:
            # CVE mentioned only once -- just use the found commit's stats.
            setup_cve_data(cve)
            continue
        else:
            for commit in commits:
                if started:
                    # Accumulate everything between found and fix.
                    if commit['author'] not in cve_commits[
                            cve].contributors:
                        cve_commits[cve].contributors.append(
                            commit['author'])
                    cve_commits[cve].between = cve_commits[cve].between + 1
                    cve_commits[cve].insertions = cve_commits[
                        cve].insertions + commit['insertions']
                    cve_commits[cve].deletions = cve_commits[
                        cve].deletions + commit['deletions']
                    cve_commits[cve].files_changed = cve_commits[
                        cve].files_changed + commit['files_changed']
                    if cve_commits[cve].fixCommit == commit:
                        break
                elif cve_commits[cve].foundCommit == commit:
                    started = True
                    setup_cve_data(cve)
    return cve_commits
import os
import sys

import boto3
from github import Github

SSM_CLIENT = boto3.client("ssm")

# Configuration comes from the environment; the commit SHA is argv[1]
# and argv[2] is the failure flag ("0"/"1") from the test run.
GITHUB_REPO_NAME = os.environ.get("GITHUB_REPO_NAME", "")
PR_NUMBER = os.environ.get("PR_NUMBER", "")
FAILED = bool(int(sys.argv[2]))
GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN", "")

if __name__ == "__main__":
    # Post a PR review reflecting the end-to-end test outcome.
    repo = Github(GITHUB_TOKEN).get_repo(GITHUB_REPO_NAME)
    pr = repo.get_pull(int(PR_NUMBER))
    message, event = ("end to end tests failed", "REQUEST_CHANGES")
    if not FAILED:
        message, event = ("end to end tests passed\n", "APPROVE")
        # NOTE(review): the original file was whitespace-mangled; the
        # coverage report is attached here on the success path -- confirm
        # whether it should also accompany the failure message.
        with open("../../cov_report", "r") as fh:
            # Strip absolute repo paths so the report is readable.
            cov = fh.read().replace(f"/{GITHUB_REPO_NAME}/", "")
        message += f"```{cov}```"
    pr.create_review(body=message, event=event,
                     commit=repo.get_commit(sys.argv[1]))
# Check if there was at least one valid label
# Note: In both cases we exit without an error code and let the check to succeed. This is because GitHub
# workflow will create different checks for different trigger conditions. So, adding a missing label won't
# clear the initial failed check during the PR creation, for example.
# Instead, we will create a pull request review, marked with 'REQUEST_CHANGES' when no valid label was found.
# This will prevent merging the pull request until a valid label is added, which will trigger this check again
# and will create a new pull request review, but in this case marked as 'APPROVE'
if pr_valid_labels:  # truthiness instead of len() -- same result for a list/set
    # If there were valid labels, then create a pull request review, approving it
    print(
        f'Success! This pull request contains the following valid labels: {pr_valid_labels}'
    )
    repo.get_commit(sha=github_sha).create_status(
        state="success",
        target_url="https://amazon.com",
        description="This pull request contains the following valid labels",
        context="ci/FooCI")
    # NOTE(review): post_review appears to be a workflow string input,
    # hence the comparison with "true" -- confirm upstream.
    if post_review == "true":
        pr.create_review(event='APPROVE')
else:
    # If there were not valid labels, then create a pull request review, requesting changes
    print(
        f'Error! This pull request does not contain any of the valid labels: {valid_labels}'
    )
    repo.get_commit(sha=github_sha).create_status(
        state="failure",
        target_url="https://amazon.com",
        description=
        "This pull request does not contain any of the valid labels",
        context="ci/FooCI")
def main():
    """Find the merged PR matching a commit SHA and verify its CI statuses.

    Exit semantics: always returns 0; the PR's html_url is printed to
    stdout only when the PR is found and all required status contexts
    are successful, diagnostics otherwise go to stderr.
    """
    parser = argparse.ArgumentParser(
        description='GitHub PR matcher',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('owner', metavar='owner',
                        help='Github repository owner')
    parser.add_argument('repo', metavar='repo', help='Github repository')
    parser.add_argument('sha', metavar='sha', help='Current commit SHA')
    parser.add_argument(
        'token', metavar='token',
        help='Github token with permission to read/write repos')
    parser.add_argument('--context', metavar='context', action='append',
                        help="Required context with successful status. "
                        "Could be used multiple times. If not defined all "
                        "contexts (there should be at least one) are checked.")
    parser.add_argument('--branch', metavar='branch', default=None,
                        help='Filter pull requests by base branch name')
    parser.add_argument(
        '--updated', metavar='days', type=int, default=7,
        help="Filter pull requests updated within specified number "
        "of days from the current date")
    parser.add_argument("--verbose", action="store_true",
                        help="Output more info")
    args = parser.parse_args()

    repo = Github(args.token).get_user(args.owner).get_repo(args.repo)
    # Closed PRs, most recently updated first; base branch filter optional.
    prs = repo.get_pulls(
        state='closed',
        base=(GithubObject.NotSet if args.branch is None else args.branch),
        sort='updated', direction='desc')
    pr = None
    update_deadline = datetime.now() - timedelta(days=args.updated)
    for _pr in prs:
        if _pr.updated_at < update_deadline:
            # skip older PRs
            break
        elif _pr.merge_commit_sha == args.sha:
            pr = _pr
            break
    if pr is None:
        print("PR not found in '{}'. Criterias: state='closed', branch='{}', "
              "merge_commit_sha='{}', updated: within {} days.".format(
                  repo.full_name, args.branch, args.sha, args.updated),
              file=sys.stderr)
        return 0
    if args.verbose:
        print(
            "Found closed PR #{} with merged_commit_sha='{}' and head_sha='{}'. "
            "title: '{}', url: '{}'"
            "".format(pr.number, args.sha, pr.head.sha, pr.title, pr.html_url),
            file=sys.stderr)
    # NOTE(review): assert is stripped under -O; consider raising instead.
    assert pr.is_merged()
    ci = repo.get_commit(pr.head.sha)
    # details:
    # - https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
    cst = ci.get_combined_status()
    ctxs = {}
    for st in cst.statuses:
        if args.verbose:
            print("Found status for '{}': {}".format(pr.head.sha, st),
                  file=sys.stderr)
        if (args.context is None or st.context in args.context):
            if (st.state != 'success'):
                # Any required context that is not 'success' fails the check.
                print("Context '{}' for commit '{}' has not passed status. "
                      "id: '{}', status source: '{}', url: '{}'".format(
                          st.context, ci.sha, st.id, st.target_url, cst.url),
                      file=sys.stderr)
                return 0
            else:
                ctxs[st.context] = True
    if args.context is not None:
        # Every explicitly required context must have been seen.
        not_passed_ctx = set(args.context) - set(ctxs.keys())
        if len(not_passed_ctx) > 0:
            print("The following required contexts for commit '{}' are not "
                  "passed: {}. url: '{}'".format(ci.sha, list(not_passed_ctx),
                                                 cst.url),
                  file=sys.stderr)
            return 0
    elif len(ctxs) == 0:
        # No --context given: there must be at least one status at all.
        print("Not found statuses for required contexts for commit '{}'. "
              "url: '{}'".format(ci.sha, cst.url),
              file=sys.stderr)
        return 0
    print("{}".format(pr.html_url))
    return 0