def from_pull_request(cls, pr: PullRequest) \
        -> Union['UpdateRequest', IgnoredRequest]:
    """
    Construct from a Github pull request.

    :param pr: The pull request.
    :return: The constructed update request, or an ``IgnoredRequest``
        member when the PR is already merged, changes more than one
        file, or contains no detectable system changes.
    :raises ReconstructionError: If the original or the patched file
        cannot be downloaded.
    """
    if pr.is_merged():
        return IgnoredRequest.merged
    if pr.changed_files != 1:
        return IgnoredRequest.invalid

    # system file after merge
    file = pr.get_files()[0]

    # construct download url of the original file
    orig_file = cls.FILE_URL.format(repo=pr.base.repo.full_name,
                                    commit=pr.base.sha,
                                    file=file.filename)

    # retrieve the original and patched files
    resp = requests.get(orig_file)
    if not resp.ok:
        raise ReconstructionError('Unable to retrieve the original file: '
                                  '%d %s' % (resp.status_code, resp.reason))
    original = resp.text

    resp = requests.get(file.raw_url)
    if not resp.ok:
        # BUG FIX: this branch fetches the *patched* file; the message
        # previously said "original file" (copy-paste error).
        raise ReconstructionError('Unable to retrieve the patched file: '
                                  '%d %s' % (resp.status_code, resp.reason))
    patched = resp.text

    # read xml and reconstruct system update object
    update = None
    try:
        adapter = oec.Adapter()
        update = data_compare(adapter.read_system(io.StringIO(original)),
                              adapter.read_system(io.StringIO(patched)))
        if update is None:
            # BUG FIX: pr.number is an int; the old
            # ``'No changes detected on ' + pr.number`` raised TypeError.
            logging.debug('No changes detected on %s', pr.number)
            return IgnoredRequest.invalid
    except Exception as e:
        # Malformed/unparseable system files are treated as invalid
        # requests rather than crashing the caller.
        logging.debug(e)
        return IgnoredRequest.invalid

    message, reference = UpdateRequest._parse_description(pr.body)
    return UpdateRequest(update,
                         title=pr.title,
                         message=message,
                         reference=reference,
                         pullreq_num=pr.number,
                         pullreq_url=pr.html_url,
                         branch=pr.head.label,
                         rejected=pr.state == "closed")
def check_for_changelogs(pr: PullRequest) -> None:
    """Post or refresh the changelog comment on a pull request.

    For every change to ``requirements.txt`` in *pr*, build the changelog
    text and either edit the existing "# Changelogs" issue comment in
    place or create a new one.
    """
    for changed_file in pr.get_files():
        if changed_file.filename != 'requirements.txt':
            continue
        body = '\n'.join(build_changelog(changed_file))
        for existing in pr.get_issue_comments():
            if existing.body.startswith('# Changelogs'):
                # Changelog comment already exists — update it in place.
                existing.edit(body=body)
                break
        else:
            pr.create_issue_comment(body)
def _fetch_repo(self, repo, results):
    """Collect searchable details for every issue and PR in *repo*.

    :param repo: A PyGithub repository object.
    :param results: Mapping of repo full name -> list of detail dicts;
        mutated in place (one dict appended per issue).
    """
    for issue in repo.get_issues():
        details = {
            'repo': repo.full_name,
            'number': issue.number,
            'url': issue.html_url,
            'title': issue.title,
            'assignee': (issue.assignee.login if issue.assignee else None),
            'user': issue.user.login,
            # free-text fields to search through
            'blobs': [issue.title, issue.body],
            'files': [],
            'type': 'issue',
        }
        # for comment in issue.get_comments():
        #     details['blobs'].append(comment.body)
        if issue.pull_request:
            details['type'] = 'pull request'
            # NOTE(review): builds a lazy PullRequest from its URL using
            # PyGithub internals (_requester / _url / _ValuedAttribute)
            # instead of fetching the full PR object up front.
            pull = PullRequest(repo._requester, {}, {}, completed=True)
            pull._url = _ValuedAttribute('%s/%s/%s' % (repo.url, 'pulls',
                                                       issue.number))
            for pull_file in pull.get_files():
                details['files'].append(pull_file.filename)
            # for comment in pull.get_comments():
            #     details['blobs'].append(comment.body)
            # I'm not totally sure this is useful, so disabling as it
            # get's us back a lot of API requests
            # for commit in pull.get_commits():
            #     details['blobs'].append(commit.commit.message)
        results[repo.full_name].append(details)
def get_files_in_pr(pr: PullRequest) -> List[str]: prs = get_prs_since(repo_url, since) return [_f.filename for pr in prs for _f in pr.get_files()]
def get_added_files(pr: PullRequest.PullRequest): print(pr, pr.number) for file in pr.get_files(): if file.status == "added": yield file.filename
def store(self, pull_request: GithubPullRequest):
    """Override :func:`~Entity.store`.

    Extracts metadata from *pull_request* and records it in
    ``self.stored_entities`` keyed by the PR number (as a string),
    skipping PRs already present in ``self.previous_knowledge``.
    """
    _LOGGER.info("Extracting PR #%d", pull_request.number)

    if pull_request.number in self.previous_knowledge.index:
        # BUG FIX: the %s placeholder previously had no argument.
        _LOGGER.debug("PullRequest %s already analysed, skipping",
                      pull_request.number)
        return

    # Timestamps are stored as epoch seconds; closed/merged may be absent.
    created_at = int(pull_request.created_at.timestamp())
    closed_at = (int(pull_request.closed_at.timestamp())
                 if pull_request.closed_at is not None else None)
    merged_at = (int(pull_request.merged_at.timestamp())
                 if pull_request.merged_at is not None else None)

    # Fetch the issue view once (the original called as_issue() twice,
    # costing an extra API request).
    issue_closer = pull_request.as_issue().closed_by
    closed_by = issue_closer.login if issue_closer is not None else None
    merged_by = (pull_request.merged_by.login
                 if pull_request.merged_by is not None else None)

    labels = [label.name for label in pull_request.get_labels()]

    # Evaluate size of PR: prefer an explicit size label, fall back to
    # classifying by total changed lines.
    pull_request_size = None
    if labels:
        pull_request_size = GitHubKnowledge.get_labeled_size(labels)
    if not pull_request_size:
        lines_changes = pull_request.additions + pull_request.deletions
        pull_request_size = GitHubKnowledge.assign_pull_request_size(
            lines_changes=lines_changes)

    reviews = self.extract_pull_request_reviews(pull_request)

    self.stored_entities[str(pull_request.number)] = {
        "title": pull_request.title,
        "body": pull_request.body,
        "size": pull_request_size,
        "created_by": pull_request.user.login,
        "created_at": created_at,
        "closed_at": closed_at,
        "closed_by": closed_by,
        "merged_at": merged_at,
        "merged_by": merged_by,
        "commits_number": pull_request.commits,
        "changed_files_number": pull_request.changed_files,
        "interactions": GitHubKnowledge.get_interactions(
            pull_request.get_issue_comments()),
        "reviews": reviews,
        "labels": labels,
        "commits": [c.sha for c in pull_request.get_commits()],
        "changed_files": [f.filename for f in pull_request.get_files()],
        "first_review_at": get_first_review_time(reviews),
        "first_approve_at": get_approve_time(reviews),
    }