def create_pr(
    repo: Repository,
    pr_branch_name: str,
    head: str,
    file: ContentFile,
    updated_content: str,
    pr_title: str,
    pr_body: str,
):
    """Create a branch, commit an updated file to it, and open a pull request.

    Does nothing if a branch named *pr_branch_name* already exists.

    :param repo: Repository in which the branch and PR are created.
    :param pr_branch_name: Short branch name (without the ``refs/heads/`` prefix).
    :param head: Commit sha the new branch should point at.
    :param file: The file whose content is being replaced.
    :param updated_content: New content for *file*.
    :param pr_title: Title used for both the commit and the pull request.
    :param pr_body: Body used for both the commit and the pull request.
    """
    try:
        repo.get_branch(pr_branch_name)
        print(f"Branch '{pr_branch_name}' already exists. Skipping update.")
        return
    except GithubException as ex:
        # 404 means the branch does not exist yet — that is the expected path.
        if ex.status != 404:
            raise
    # Git refs must be fully qualified: the GitHub API rejects a bare branch
    # name here (create_branch elsewhere in this file uses the same prefix).
    repo.create_git_ref(ref=f"refs/heads/{pr_branch_name}", sha=head)
    repo.update_file(
        file.path,
        f"{pr_title}\n\n{pr_body}",
        updated_content,
        file.sha,
        branch=pr_branch_name,
    )
    # create_pull expects a branch name for `head`, not a full ref.
    repo.create_pull(title=pr_title, body=pr_body, head=pr_branch_name, base=BASE_BRANCH)
def create_branch(repository: Repository, source_branch_name: str, target_branch_name: str) -> Branch:
    """
    Create branch in the forked repository.

    :param repository: Forked repository.
    :param source_branch_name: Name of the base branch from which target branch is created.
    :param target_branch_name: Target name of the new branch.
    :return: Created branch.
    """
    base = repository.get_branch(source_branch_name)
    new_ref = f"refs/heads/{target_branch_name}"
    # Point the new ref at the tip commit of the source branch.
    repository.create_git_ref(ref=new_ref, sha=base.commit.sha)
    return repository.get_branch(target_branch_name)
def get_master_commit_sha(repo: Repository, branch_name: str = 'master') -> str:  # noqa: E999
    '''Return the sha of the HEAD commit of a branch ('master' by default)

    Args:
        repo (Repository): The repository whose branch will be queried
        branch_name (str): Name of the branch to query; defaults to 'master'
            so existing callers keep their behavior

    Returns:
        (str): The commit sha of the branch's HEAD
    '''
    branch_data = repo.get_branch(branch_name)
    return branch_data.commit.sha
def dequeue_and_build(base_repo: Repository):
    """Pop all queued build targets and build each one, reporting status.

    :param base_repo: The *base* repo holding the canonical deploy config;
        used to decide which commit to take the config from and whether a
        build's logs are private.
    """
    grouped_targets = dequeue_builds()
    for packed_ref, targets in grouped_targets.items():
        repo_clone_url, sha = unpack(packed_ref)
        repo_name = repo_name_from_packed_ref(packed_ref)
        repo = get_github().get_repo(repo_name)
        # If the commit is made on the base repo, take the config from the current commit.
        # Otherwise, retrieve it from master
        clone_commit(
            base_repo.clone_url,
            sha
            if repo_clone_url == base_repo.clone_url
            else base_repo.get_branch(base_repo.default_branch).commit.sha,
        )
        for app_name, pr_number in targets:
            app = App(app_name)
            # Capture everything the build writes to stdout/stderr so it can
            # be attached to the reported build status.
            with tempfile.TemporaryFile("w+") as logs:
                try:
                    with redirect_descriptor(stdout, logs), redirect_descriptor(
                            stderr, logs):
                        land_app(app, pr_number, sha, repo)
                        if app.config is not None:
                            update_service_routes([app], pr_number)
                # Catch Exception rather than a bare `except:` so that
                # KeyboardInterrupt / SystemExit still propagate instead of
                # being swallowed and reported as build failures.
                except Exception:
                    traceback.print_exc(file=logs)
                    logs.seek(0)
                    report_build_status(
                        app.name,
                        pr_number,
                        pack(repo.clone_url, sha),
                        BuildStatus.failure,
                        None,
                        logs.read(),
                        private=repo.full_name == base_repo.full_name,
                    )
                else:
                    logs.seek(0)
                    report_build_status(
                        app.name,
                        pr_number,
                        pack(repo.clone_url, sha),
                        BuildStatus.success,
                        None
                        if app.config is None
                        else ",".join(
                            hostname.to_str()
                            for hostname in get_pr_subdomains(app, pr_number)
                        ),
                        logs.read(),
                        private=repo.full_name == base_repo.full_name,
                    )
    if grouped_targets:
        # because we ran a build, we need to clear the queue of anyone we blocked
        # we run this in a new worker to avoid timing out
        clear_queue(noreply=True)
def push_commit(repo_root: Path, gh_admin_repo: Repository) -> None:
    """Push commit to Github.

    Temporarily disables required status checks on ``main`` so the push is
    accepted, then restores them.

    :param repo_root: Local checkout in which to run ``git push``.
    :param gh_admin_repo: Repository handle with admin rights on ``main``.
    """
    print("Disabling branch protection...")
    branch = gh_admin_repo.get_branch("main")
    required_checks = branch.get_required_status_checks()
    branch.edit_required_status_checks(required_checks.strict, [])
    try:
        print("Pushing new commit...")
        subprocess.run(["git", "push"], cwd=repo_root, check=True)
    finally:
        # Restore protection even when the push fails — otherwise a failed
        # push would leave the branch unprotected.
        print("Re-enabling branch protection...")
        branch.edit_required_status_checks(required_checks.strict, required_checks.contexts)
def update_version(repo: Repository, version: str) -> None:
    """Update manifest.json with the new version

    Commits a single-file change bumping ``version`` in the integration's
    manifest, temporarily lifting admin enforcement on ``master``.

    :param repo: Repository containing the manifest.
    :param version: New version string to write into the manifest.
    """
    print("Updating manifest.json...")
    manifest = repo.get_contents("custom_components/google_home/manifest.json")
    assert isinstance(manifest, ContentFile)
    manifest_json = json.loads(manifest.decoded_content)
    manifest_json["version"] = version
    updated_manifest = json.dumps(manifest_json, indent=2) + "\n"
    branch = repo.get_branch("master")
    # Disable branch protection before commit
    branch.remove_admin_enforcement()
    try:
        repo.update_file(
            path=manifest.path,
            message=f"Release v{version}",
            content=updated_manifest,
            sha=manifest.sha,
        )
    finally:
        # Re-enable branch protection even if the commit above raised, so the
        # branch is never left unprotected.
        branch.set_admin_enforcement()
def create_branch_if_not_exist(self, repository: Repository, source_branch_name, target_branch_name) -> Branch:
    """
    If branch does not exist in the forked repository then create it.

    :return: Created/Fetched branch.
    """
    LOGGER.info(
        "Checking if the branch <%s> exists in the repository <%s>",
        target_branch_name, repository.name)
    already_there = self.check_if_branch_exists(repository, target_branch_name)
    if already_there:
        existing = repository.get_branch(target_branch_name)
        LOGGER.info("Branch exists. Returning existing one <%s>.", existing.name)
        return existing
    LOGGER.info("Branch does not exist. Creating new one...")
    return self.create_branch(repository, source_branch_name, target_branch_name)
def _repo_settings_sync(repo: Repository):
    """
    Sync repository settings to settings in config file.

    :param repo: Github repository object to sync settings
    :returns: None
    """
    config = _load_config(repo=repo.name)
    default_branch = config.get('default_branch', {})
    default_branch_name = default_branch.get('name')
    repo.edit(
        has_issues=config.get('has_issues'),
        has_projects=config.get('has_projects'),
        has_wiki=config.get('has_wiki'),
        default_branch=default_branch_name,
        allow_squash_merge=config.get('allow_squash_merge'),
        allow_merge_commit=config.get('allow_merge_commit'),
        allow_rebase_merge=config.get('allow_rebase_merge'),
        delete_branch_on_merge=config.get('delete_branch_on_merge'),
    )
    #: Vulnerability alerting and remediation
    if config.get('enable_vulnerability_alert'):
        repo.enable_vulnerability_alert()
    else:
        repo.disable_vulnerability_alert()
    if config.get('enable_automated_security_fixes'):
        repo.enable_automated_security_fixes()
    else:
        repo.disable_automated_security_fixes()
    #: Set branch protection on default branch
    # Guard: without a configured default-branch name, get_branch(None) would
    # raise — skip branch protection in that case.
    if default_branch_name:
        branch = repo.get_branch(branch=default_branch_name)
        branch.edit_protection(
            require_code_owner_reviews=default_branch.get('require_code_owner_reviews'),
            required_approving_review_count=default_branch.get('required_approving_review_count'),
        )
def land_commit(
    sha: str,
    repo: Repository,
    base_repo: Repository,
    pr: Optional[PullRequest],
    files: Iterable[Union[File, str]],
    *,
    target_app: Optional[str] = None,
    dequeue_only=False,
):
    """
    :param sha: The hash of the commit we are building
    :param repo: The repo containing the above commit
    :param base_repo: The *base* cs61a-apps repo containing the deploy.yaml config
    :param pr: The PR made to trigger the build, if any
    :param files: Files changed in the commit, used for target determination
    :param target_app: App to rebuild, if not all
    :param dequeue_only: Only pop targets off the queue, do not build any new targets
    """
    if dequeue_only:
        targets = []
    elif target_app:
        targets = [target_app]
    else:
        targets = determine_targets(
            repo, files if repo.full_name == base_repo.full_name else [])
    pr_number = pr.number if pr else 0
    grouped_targets = enqueue_builds(targets, pr_number, pack(repo.clone_url, sha))
    for packed_ref, targets in grouped_targets.items():
        repo_clone_url, sha = unpack(packed_ref)
        # If the commit is made on the base repo, take the config from the current commit.
        # Otherwise, retrieve it from master
        clone_commit(
            base_repo.clone_url,
            sha
            if repo_clone_url == base_repo.clone_url
            else base_repo.get_branch(base_repo.default_branch).commit.sha,
        )
        apps = [App(target) for target in targets]
        for app in apps:
            # Capture everything the build writes to stdout/stderr so it can
            # be attached to the reported build status.
            with tempfile.TemporaryFile("w+") as logs:
                try:
                    with redirect_descriptor(stdout, logs), redirect_descriptor(
                            stderr, logs):
                        land_app(app, pr_number, sha, repo)
                        if app.config is not None:
                            update_service_routes([app], pr_number)
                # Catch Exception rather than a bare `except:` so that
                # KeyboardInterrupt / SystemExit still propagate instead of
                # being swallowed and reported as build failures.
                except Exception:
                    traceback.print_exc(file=logs)
                    logs.seek(0)
                    report_build_status(
                        app.name,
                        pr_number,
                        pack(repo.clone_url, sha),
                        BuildStatus.failure,
                        None,
                        logs.read(),
                        private=repo.full_name == base_repo.full_name,
                    )
                else:
                    logs.seek(0)
                    report_build_status(
                        app.name,
                        pr_number,
                        pack(repo.clone_url, sha),
                        BuildStatus.success,
                        None
                        if app.config is None
                        else ",".join(
                            hostname.to_str()
                            for hostname in get_pr_subdomains(app, pr_number)
                        ),
                        logs.read(),
                        private=repo.full_name == base_repo.full_name,
                    )
    if grouped_targets:
        # because we ran a build, we need to clear the queue of anyone we blocked
        # we run this in a new worker to avoid timing out
        clear_queue(repo=repo.full_name, pr_number=pr_number, noreply=True)