def crawl_repo(repo_dir, key, tag, url, output_dir, jira_only):
    """Clone (or update) a git repo, check out `tag`, and process every
    commit's log message into per-repo issue files.

    repo_dir: local checkout path (normalized / ~-expanded here).
    key, jira_only: passed through to process_log.
    tag: git ref to check out before walking the log.
    url: remote to clone when repo_dir does not exist yet.
    output_dir: parent directory for the "<repo>-issues" output folder.
    """
    # Prepare the repo dir.
    repo_dir = os.path.normpath(repo_dir)
    repo_dir = os.path.expanduser(repo_dir)
    if not os.path.isdir(repo_dir):
        try:
            git.clone(url, repo_dir)
        except Exception as e:
            # Fix: original used Python 2 `print`/`print >>` statements,
            # which are SyntaxErrors under Python 3.
            print("[Error] crawl_repo: clone: ", type(e))
            print(repo_dir, file=sys.stderr)
            print(e, file=sys.stderr)
            return
    # Bake -C so every subsequent git command runs against repo_dir.
    git_repo = git.bake("-C", repo_dir)
    try:
        git_repo.fetch()
    except Exception as e:
        print("[Error] crawl_repo: fetch ", type(e))
        print(e, file=sys.stderr)
    try:
        git_repo.checkout(tag)
    except Exception as e:
        print("[Error] crawl_repo: checkout ", type(e))
        print(e, file=sys.stderr)
    # Prepare the dir to store issues.
    repo = os.path.basename(repo_dir)
    path = os.path.join(output_dir, repo + "-issues")
    if not os.path.isdir(path):
        os.makedirs(path)
    # Walk the abbreviated log; first token of each line is the commit id.
    for log_line in git_repo.log("--oneline"):
        commit_id = log_line.split()[0]
        log_message = git_repo.log("-1", commit_id)
        process_log(log_message, key, repo, url, path, jira_only)
def sync_branches_list(snap):
    """ Syncs the downstream snap branches to a yaml file for parsing in jobs """
    snap_url = f"git+ssh://[email protected]/snap-{snap}"
    click.echo(f"Checking: {snap_url}")
    # Oldest-first ordering for the jobs include file.
    snap_releases = remote_branches(snap_url)
    snap_releases.reverse()

    env = os.environ.copy()
    jenkins_repo = (
        f"https://{env['CDKBOT_GH_USR']}:{env['CDKBOT_GH_PSW']}"
        "@github.com/charmed-kubernetes/jenkins"
    )
    with tempfile.TemporaryDirectory() as tmpdir:
        git.clone(jenkins_repo, tmpdir)
        git.config("user.email", "*****@*****.**", _env=env, _cwd=tmpdir)
        git.config("user.name", "cdkbot", _env=env, _cwd=tmpdir)
        git.config("--global", "push.default", "simple", _cwd=tmpdir)

        output = Path(tmpdir) / "jobs/includes/k8s-snap-branches-list.inc"
        click.echo(f"Saving to {str(output)}")
        output.write_text(yaml.dump(snap_releases, default_flow_style=False, indent=2))

        cmd_ok(f"git add {str(output)}", cwd=tmpdir)
        ret = cmd_ok(
            ["git", "commit", "-m", "Updating k8s snap branches list"], cwd=tmpdir
        )
        if not ret.ok:
            # Nothing changed, nothing to push.
            return
        click.echo(f"Committing to {jenkins_repo}.")
        ret = cmd_ok(["git", "push", jenkins_repo, "master"], cwd=tmpdir)
        if not ret.ok:
            raise SystemExit("Failed to commit latest snap branches.")
def clone_commit(cirpy_dir, commit):
    """
    Clones the `circuitpython` repository, fetches the commit, then
    checks out the repo at that ref.

    Raises RuntimeError (with git's stderr) if any git step fails.
    """
    # Fix: `pathlib.Path()` is the *relative* path ".", so chdir-ing back to
    # it in `finally` was a no-op and the caller's cwd was never restored.
    working_dir = pathlib.Path.cwd()
    rosiepi_logger.info("Cloning repository at reference: %s", commit)
    try:
        # Shallow, no-checkout clone; the exact ref is fetched explicitly.
        git.clone("--depth", "1", "-n",
                  "https://github.com/sommersoft/circuitpython.git",
                  cirpy_dir)
        os.chdir(cirpy_dir)
        git.fetch("origin", commit)
        git.checkout(commit)
        git.submodule("sync")
        git.submodule("update", "--init")
    except sh.ErrorReturnCode as git_err:
        git_stderr = str(git_err.stderr, encoding="utf-8").strip("\n")
        err_msg = [
            f"Failed to retrieve repository at {commit}:",
            f" - {git_stderr}",
        ]
        rosiepi_logger.warning("%s", "\n".join(err_msg))
        raise RuntimeError(git_stderr) from None
    finally:
        # Always restore the original working directory.
        os.chdir(working_dir)
def validate_passes_linting(self, repo):
    """ Clones the repo and runs pylint on the Python files.

    Returns [] on success, [ERROR_PYLINT_FAILED_LINTING] when pylint
    reports findings, or [ERROR_OUTPUT_HANDLER] on infrastructure errors.
    """
    if not repo["name"].startswith("Adafruit_CircuitPython"):
        return []
    ignored_py_files = ["setup.py", "conf.py"]

    destination_type = TemporaryDirectory
    if self.keep_repos:
        # Bound method: calling it yields the absolute "repos" Path, which
        # is (ab)used as a context manager the same way the tempdir is.
        destination_type = pathlib.Path("repos").absolute

    with destination_type() as tempdir:
        repo_dir = pathlib.Path(tempdir) / repo["name"]
        try:
            if not repo_dir.exists():
                git.clone("--depth=1", repo["git_url"], repo_dir)
        except sh.ErrorReturnCode as err:
            self.output_file_data.append(
                f"Failed to clone repo for linting: {repo['full_name']}\n {err.stderr}"
            )
            return [ERROR_OUTPUT_HANDLER]

        # Marker from a previous clean run lets us skip the whole repo.
        if self.keep_repos and (repo_dir / '.pylint-ok').exists():
            return []

        for file in repo_dir.rglob("*.py"):
            if file.name in ignored_py_files or str(
                    file.parent).endswith("examples"):
                continue

            py_run_args = f"{file} --output-format=json"
            if (repo_dir / '.pylintrc').exists():
                py_run_args += (f" --rcfile={str(repo_dir / '.pylintrc')}")
            logging.debug("Running pylint on %s", file)

            pylint_stdout, pylint_stderr = linter.py_run(py_run_args,
                                                         return_std=True)

            if pylint_stderr.getvalue():
                self.output_file_data.append(
                    f"PyLint error ({repo['name']}): '{pylint_stderr.getvalue()}'"
                )
                return [ERROR_OUTPUT_HANDLER]

            try:
                pylint_result = json.loads(pylint_stdout.getvalue())
            except json.JSONDecodeError as json_err:
                self.output_file_data.append(
                    f"PyLint output JSONDecodeError: {json_err.msg}")
                return [ERROR_OUTPUT_HANDLER]

            if pylint_result:
                return [ERROR_PYLINT_FAILED_LINTING]

        if self.keep_repos:
            # Fix: original wrote `pylint_result` (a list) to the marker
            # file, which raises TypeError (file.write() needs str) and
            # NameErrors when no .py file was linted. The marker's content
            # is irrelevant; only its existence is checked above.
            with open(repo_dir / '.pylint-ok', 'w') as f:
                f.write("")
    return []
def check_local_clone():
    """
    Checks if there is a local clone of the circuitpython repository. If
    not, it will clone it to the `circuitpython` directory.
    """
    if ".git" not in os.listdir(cirpy_dir()):
        previous_dir = os.getcwd()
        os.chdir(cirpy_dir())
        git.clone("https://github.com/adafruit/circuitpython.git",
                  "--depth", "1")
        os.chdir(previous_dir)
def fetch_bundle(bundle, bundle_path):
    """Clone (on first use) and update an Adafruit bundle repository.

    bundle: repository name under the adafruit GitHub organization.
    bundle_path: local directory the bundle is cloned into / updated in.
    """
    if not os.path.isdir(bundle_path):
        os.makedirs(bundle_path, exist_ok=True)
        git.clone("-o", "adafruit",
                  "https://github.com/adafruit/" + bundle + ".git",
                  bundle_path)
    working_directory = os.getcwd()
    os.chdir(bundle_path)
    try:
        git.pull()
        git.submodule("init")
        git.submodule("update")
    finally:
        # Fix: restore the caller's cwd even if a git step raises;
        # previously a failure left the process stranded in bundle_path.
        os.chdir(working_directory)
def update_repo(data):
    """Create or refresh a bare mirror of the repo described by a webhook payload."""
    repository = data['repository']
    repo_path = get_repo_path(repository['owner']['name'], repository['name'])
    if repo_path.exists():
        # Existing mirror: just fetch the pushed ref.
        with pushd(repo_path):
            git.fetch('origin', data['ref'])
    else:
        # First push for this repo: bare-clone it in place.
        repo_path.mkdir(parents=True)
        with pushd(repo_path):
            git.clone(repository['clone_url'], '.', bare=True)
def fetch_bundle(bundle, bundle_path):
    """Clone (on first use) and update an Adafruit bundle repository.

    In CI (GITHUB_WORKSPACE set) the clone URL embeds
    ADABOT_GITHUB_ACCESS_TOKEN for authenticated access.

    bundle: repository name under the adafruit GitHub organization.
    bundle_path: local directory the bundle is cloned into / updated in.
    """
    if not os.path.isdir(bundle_path):
        os.makedirs(bundle_path, exist_ok=True)
        if "GITHUB_WORKSPACE" in os.environ:
            git_url = ("https://" + os.environ["ADABOT_GITHUB_ACCESS_TOKEN"]
                       + "@github.com/adafruit/")
            git.clone("-o", "adafruit", git_url + bundle + ".git", bundle_path)
        else:
            git.clone("-o", "adafruit",
                      "https://github.com/adafruit/" + bundle + ".git",
                      bundle_path)
    working_directory = os.getcwd()
    os.chdir(bundle_path)
    try:
        git.pull()
        git.submodule("init")
        git.submodule("update")
    finally:
        # Fix: restore the caller's cwd even if a git step raises;
        # previously a failure left the process stranded in bundle_path.
        os.chdir(working_directory)
def build(snap, build_path, version, arch, dry_run):
    """ Build snaps

    Usage:

    build-eks-snaps.py build \
        --snap kubectl \
        --snap kube-proxy \
        --snap kubelet \
        --snap kubernetes-test \
        --version 1.14.8
    """
    # Normalize to the "vX.Y.Z" form the build scripts expect.
    version = version if version.startswith("v") else f"v{version}"
    env = os.environ.copy()
    env["KUBE_VERSION"] = version
    env["KUBE_ARCH"] = arch
    git.clone(
        "https://github.com/juju-solutions/release.git",
        build_path,
        branch="rye/snaps",
        depth="1",
    )
    build_path = Path(build_path) / "snap"
    snap_alias = None
    for _snap in snap:
        snap_alias = f"{_snap}-eks"
        if snap_alias:
            # Rewrite the snapcraft yaml to use the EKS alias.
            _set_snap_alias(build_path / f"{_snap}.yaml", snap_alias)

        if dry_run:
            click.echo("dry-run only:")
            click.echo(
                f" > cd release/snap && bash build-scripts/docker-build {_snap}"
            )
            continue

        for line in sh.bash(
            "build-scripts/docker-build",
            _snap,
            _env=env,
            _cwd=str(build_path),
            _bg_exc=False,
            _iter=True,
        ):
            click.echo(line.strip())
def main():
    """Regenerate the gerrit listing pages in the openafs wiki and push them."""
    with tempfile.TemporaryDirectory() as tmpdir:
        info('Created tmp directory ' + tmpdir)
        os.chdir(tmpdir)
        git.clone(WIKI_DIR, 'openafs-wiki', _fg=True)
        os.chdir('openafs-wiki')
        # Track gerrit as the authoritative remote and hard-reset to it.
        git.remote('add', 'gerrit', 'ssh://gerrit.openafs.org/openafs-wiki.git')
        git.fetch('gerrit', _fg=True)
        git.reset('gerrit/master', '--hard', _fg=True)

        pages = (
            ('devel/GerritsForMaster.mdwn', 'master'),
            ('devel/GerritsForStable.mdwn', 'openafs-stable-1_8_x'),
            ('devel/GerritsForOldStable.mdwn', 'openafs-stable-1_6_x'),
        )
        for page, branch in pages:
            update_page(page, branch)

        try:
            git.commit('-m', 'update gerrit list', _fg=True)
        except ErrorReturnCode_1:
            # git commit exits 1 when there is nothing to commit.
            print('No changes')
        else:
            git.push('gerrit', 'HEAD:refs/heads/master', _fg=True)
def _cut_stable_release(layer_list, charm_list, ancillary_list, filter_by_tag,
                        dry_run):
    """This will merge each layers master onto the stable branches.

    PLEASE NOTE: This step should come after each stable branch has been tagged
    and references a current stable bundle revision.

    layer_list: YAML spec containing git repos and their upstream/downstream properties
    charm_list: YAML spec containing git repos and their upstream/downstream properties
    """
    layer_list = yaml.safe_load(Path(layer_list).read_text(encoding="utf8"))
    charm_list = yaml.safe_load(Path(charm_list).read_text(encoding="utf8"))
    ancillary_list = yaml.safe_load(
        Path(ancillary_list).read_text(encoding="utf8"))
    new_env = os.environ.copy()

    for layer_map in layer_list + charm_list + ancillary_list:
        for layer_name, repos in layer_map.items():
            downstream = repos["downstream"]
            if not repos.get("needs_stable", True):
                continue
            tags = repos.get("tags", None)
            if tags and not any(match in filter_by_tag for match in tags):
                continue

            log.info(
                f"Releasing :: {layer_name:^35} :: from: master to: stable")
            if dry_run:
                continue

            downstream = f"https://{new_env['CDKBOT_GH_USR']}:{new_env['CDKBOT_GH_PSW']}@github.com/{downstream}"
            # Unique scratch dir per repo so clones never collide.
            identifier = str(uuid.uuid4())
            os.makedirs(identifier)
            for line in git.clone(downstream, identifier, _iter=True):
                log.info(line)

            git_rev_master = git("rev-parse", "origin/master",
                                 _cwd=identifier).stdout.decode()
            git_rev_stable = git("rev-parse", "origin/stable",
                                 _cwd=identifier).stdout.decode()
            if git_rev_master == git_rev_stable:
                log.info(
                    f"Skipping :: {layer_name:^35} :: master == stable")
                continue

            git.config("user.email", "*****@*****.**", _cwd=identifier)
            git.config("user.name", "cdkbot", _cwd=identifier)
            git.config("--global", "push.default", "simple")
            git.checkout("-f", "stable", _cwd=identifier)
            git.merge("master", "--no-ff", _cwd=identifier)
            for line in git.push("origin", "stable", _cwd=identifier,
                                 _iter=True):
                log.info(line)
def build_bundles(bundle_list, bundle_branch, filter_by_tag, bundle_repo,
                  to_channel):
    """Clone the bundle repo at `bundle_branch`, build each bundle whose
    tags intersect `filter_by_tag`, then push and promote it to
    `to_channel`.
    """
    import subprocess  # Fix: was re-imported inside the per-bundle loop.

    build_env = BuildEnv(build_type=BuildType.BUNDLE)
    build_env.db["build_args"] = {
        "artifact_list": bundle_list,
        "bundle_branch": bundle_branch,
        "filter_by_tag": list(filter_by_tag),
        "to_channel": to_channel,
    }

    bundle_repo_dir = build_env.tmp_dir / "bundles-kubernetes"
    for line in git.clone(
        "--branch",
        bundle_branch,
        bundle_repo,
        str(bundle_repo_dir),
        _iter=True,
        _bg_exc=False,
    ):
        click.echo(line)

    for bundle_map in build_env.artifacts:
        for bundle_name, bundle_opts in bundle_map.items():
            if not any(match in filter_by_tag for match in bundle_opts["tags"]):
                click.echo(f"Skipping {bundle_name}")
                continue
            click.echo(f"Processing {bundle_name}")
            cmd = [
                str(bundle_repo_dir / "bundle"),
                "-o",
                bundle_name,
                "-c",
                to_channel,
                bundle_opts["fragments"],
            ]
            click.echo(f"Running {' '.join(cmd)}")
            # NOTE(review): shell=True on a joined string — `fragments`
            # appears to rely on shell word-splitting; keep in mind this
            # is unsafe if fragments ever carries untrusted input.
            subprocess.run(" ".join(cmd), shell=True)

            bundle_entity = f"cs:~{bundle_opts['namespace']}/{bundle_name}"
            build_entity = BundleBuildEntity(build_env, bundle_name,
                                             bundle_opts, bundle_entity)
            build_entity.push()
            build_entity.promote(to_channel=to_channel)
    build_env.save()
def update_json_file(working_directory, cp_org_dir, output_filename,
                     json_string):
    """ Clone the circuitpython-org repo, update libraries.json, and
        push the updates in a commit.

    Only runs inside CI (TRAVIS env var set); a no-op otherwise.
    """
    # Fix: the guard was misspelled "TRAIVS", so this CI-only branch
    # could never execute.
    if "TRAVIS" in os.environ:
        if not os.path.isdir(cp_org_dir):
            os.makedirs(cp_org_dir, exist_ok=True)
            git_url = "https://" + os.environ[
                "ADABOT_GITHUB_ACCESS_TOKEN"] + "@github.com/adafruit/circuitpython-org.git"
            git.clone("-o", "adafruit", git_url, cp_org_dir)
        os.chdir(cp_org_dir)
        git.pull()
        git.submodule("update", "--init", "--recursive")

        with open(output_filename, "w") as json_file:
            json.dump(json_string, json_file, indent=2)

        commit_day = date.date.strftime(datetime.datetime.today(), "%Y-%m-%d")
        commit_msg = "adabot: auto-update of libraries.json ({})".format(
            commit_day)
        git.commit("-a", "-m", commit_msg)
        git_push = git.push("adafruit", "master")
        print(git_push)
def setup(self):
    """ Setup directory for charm build """
    downstream = f"https://github.com/{self.opts['downstream']}"
    click.echo(f"Cloning repo from {downstream}")
    os.makedirs(self.src_path)
    charm_branch = self.build.db["build_args"]["charm_branch"]
    clone = git.clone(
        "--branch",
        charm_branch,
        downstream,
        self.src_path,
        _iter=True,
        _bg_exc=False,
    )
    for line in clone:
        click.echo(line)
def _cut_stable_release(layer_list, charm_list, ancillary_list, filter_by_tag,
                        dry_run):
    """This will merge each layers master onto the stable branches.

    PLEASE NOTE: This step should come after each stable branch has been tagged
    and references a current stable bundle revision.

    layer_list: YAML spec containing git repos and their upstream/downstream properties
    charm_list: YAML spec containing git repos and their upstream/downstream properties
    """
    layer_list = yaml.safe_load(Path(layer_list).read_text(encoding="utf8"))
    charm_list = yaml.safe_load(Path(charm_list).read_text(encoding="utf8"))
    ancillary_list = yaml.safe_load(
        Path(ancillary_list).read_text(encoding="utf8"))
    new_env = os.environ.copy()
    failed_to_release = []

    for layer_map in layer_list + charm_list + ancillary_list:
        for layer_name, repos in layer_map.items():
            downstream = repos["downstream"]
            if not repos.get("needs_stable", True):
                continue
            tags = repos.get("tags", None)
            if tags and not any(match in filter_by_tag for match in tags):
                continue

            auth = (new_env.get("CDKBOT_GH_USR"),
                    new_env.get("CDKBOT_GH_PSW"))
            # Honor an explicit branch override; otherwise ask GitHub.
            default_branch = repos.get("branch") or default_gh_branch(
                downstream, auth=auth)
            log.info(
                f"Releasing :: {layer_name:^35} :: from: {default_branch} to: stable"
            )
            downstream = f"https://{':'.join(auth)}@github.com/{downstream}"
            # Unique scratch dir per repo so clones never collide.
            identifier = str(uuid.uuid4())
            os.makedirs(identifier)
            for line in git.clone(downstream, identifier, _iter=True):
                log.info(line)

            git_rev_default = (git("rev-parse", f"origin/{default_branch}",
                                   _cwd=identifier).stdout.decode().strip())
            git_rev_stable = (git("rev-parse", "origin/stable",
                                  _cwd=identifier).stdout.decode().strip())
            if git_rev_default == git_rev_stable:
                log.info(
                    f"Skipping :: {layer_name:^35} :: {default_branch} == stable"
                )
                continue

            log.info(
                f"Commits :: {layer_name:^35} :: {default_branch} != stable")
            log.info(f" {default_branch:10}= {git_rev_default:32}")
            log.info(f" {'stable':10}= {git_rev_stable:32}")
            # Show every commit that stable is missing, one-liner each.
            for line in git("rev-list",
                            f"origin/stable..origin/{default_branch}",
                            _cwd=identifier):
                for line in git.show(
                    "--format=%h %an '%s' %cr",
                    "--no-patch",
                    line.strip(),
                    _cwd=identifier,
                ):
                    log.info(" " + line.strip())

            if not dry_run:
                git.config("user.email", "*****@*****.**", _cwd=identifier)
                git.config("user.name", "cdkbot", _cwd=identifier)
                git.config("--global", "push.default", "simple")
                # Fast-forward stable to the default branch via reset+push.
                git.checkout("-f", "stable", _cwd=identifier)
                git.reset(default_branch, _cwd=identifier)
                for line in git.push("origin", "stable", "-f",
                                     _cwd=identifier, _iter=True):
                    log.info(line)
def sync_repos(self, push_to_org, repositories, bitbucket_account_id,
               bitbucket_access_token, github_account_id,
               github_access_token):
    """Clone each BitBucket repo into ./syncDirectory and sync its tags and
    branches to the corresponding GitHub repo, creating the GitHub repo
    when it does not exist yet."""
    # Make a folder to clone repos from BitBucket
    cur_dir_path = os.getcwd()
    os.chdir(cur_dir_path)
    if not os.path.isdir("syncDirectory"):
        self.log.debug("Created directory syncDirectory")
        os.mkdir("syncDirectory")
    os.chdir("syncDirectory")

    for repo in repositories:
        repo_name = repo['name']
        prefixed_repo_name = self.prefix + repo_name
        teams_to_assign = repo["teams"] if ("teams" in repo) else []

        if ('github_link' in repo):
            # Already migrated once before.
            github_link = repo['github_link']
            repo['new_migration'] = False
        else:
            github_link = self.make_new_repo(push_to_org, repo,
                                             github_account_id,
                                             github_access_token)
            if (github_link is None):
                self.log.error("Failed to make new repository",
                               result="FAILED",
                               repo_name=prefixed_repo_name)
                continue
            repo['github_link'] = github_link
            repo['new_migration'] = True

        # Use this instead of setting the authenticated link as a new remote.
        # Remote links get stored in git config
        bitbucket_link = repo['bitbucket_link']
        bitbucket_link_domain = bitbucket_link.split("//")[1]
        authenticated_bitbucket_link = (
            f"https://{bitbucket_account_id}:{bitbucket_access_token}"
            f"@{bitbucket_link_domain}")

        self.log.info("Syncing repository", repo_name=repo_name)

        # Clone the repository from BitBucket
        if (not os.path.isdir(repo_name)):
            self.log.info("Cloning repository", repo_name=repo_name)
            try:
                git.clone(authenticated_bitbucket_link)
                self.log.debug("Cloned repository",
                               result="SUCCESS",
                               repo_name=repo_name)
            except ErrorReturnCode as e:
                self.log.error("Failed to clone repository",
                               result="FAILED",
                               repo_name=repo_name,
                               exit_code=e.exit_code)
                continue

        os.chdir(repo_name)  # IMPORTANT DO NOT DELETE

        # Sync all tags individually
        tags_sync_success, all_tags, failed_tags = self.sync_tags(
            repo, bitbucket_account_id, bitbucket_access_token,
            github_account_id, github_access_token)
        if (not tags_sync_success):
            self.log.warning("Failed to sync tags for repository",
                             result="FAILED",
                             repo_name=repo_name,
                             failed_tags=failed_tags)

        # Sync all branches individually
        branches_sync_success, all_branches, failed_branches = \
            self.sync_branches(repo, bitbucket_account_id,
                               bitbucket_access_token, github_account_id,
                               github_access_token)
        if (not branches_sync_success):
            self.log.warning("Failed to sync branches for repository",
                             result="FAILED",
                             repo_name=repo_name,
                             failed_branches=failed_branches)

        if (tags_sync_success and branches_sync_success):
            self.log.debug(
                "Successfully synced all tags and branches for repository",
                result="SUCCESS",
                repo_name=repo_name)

        # Assign repo to teams, reuse existing function assign_repos_to_teams()
        if (push_to_org and github_link and teams_to_assign):
            repo_assignment = {}
            for team_name in teams_to_assign:
                repo_assignment[team_name] = [prefixed_repo_name]
            self.assign_repos_to_teams(repo_assignment, github_access_token)

        os.chdir("..")  # IMPORTANT DO NOT DELETE
help='re-git the source code', action='store_true') parser.add_argument('--configure', help='re-configure the build', action='store_true') args = parser.parse_args() # --- install the source code if args.gitclone: print "git clone the source code" # use 1.1.4, not latest and not 1.2, until told otherwise rm('-fr', 'libtorrent') git.clone('https://github.com/arvidn/libtorrent.git') cd('libtorrent') git.checkout('RC_1_1') # gets 1.1.4 <<----- DO THIS else: # ensure cd to libtorrent cd('libtorrent') # --- configure if args.configure: print "configure" cmd = sh.Command('./autotool.sh') cmd() configure = sh.Command("./configure") configure('--libdir=/usr/lib/i386-linux-gnu/', 'LDFLAGS=-L/usr/local/lib/',
def check_patches(repo, patches, flags, use_apply, dry_run):
    """
    Gather a list of patches from the `adabot/patches` directory on the adabot
    repo. Clone the `repo` and run git apply --check to test wether it
    requires any of the gathered patches.

    When `use_apply` is true, any flags except `--apply` are passed through
    to the check call. This ensures that the check call is representative
    of the actual apply call.
    """
    applied = 0
    skipped = 0
    failed = 0

    repo_directory = lib_directory + repo["name"]

    for patch in patches:
        try:
            os.chdir(lib_directory)
        except FileNotFoundError:
            os.mkdir(lib_directory)
            os.chdir(lib_directory)

        try:
            git.clone(repo["url"])
        except sh.ErrorReturnCode_128 as clone_err:
            # A pre-existing clone is fine; anything else is fatal.
            if b"already exists" not in clone_err.stderr:
                raise RuntimeError(clone_err.stderr)
        os.chdir(repo_directory)

        patch_filepath = patch_directory + patch

        try:
            check_flags = ["--check"]
            if use_apply:
                # Mirror the real apply call, minus the flags that would
                # actually modify the tree.
                check_flags.extend(flag for flag in flags
                                   if flag not in ("--apply", "--signoff"))
            git.apply(check_flags, patch_filepath)
            run_apply = True
        except sh.ErrorReturnCode_1 as check_err:
            run_apply = False
            if (b"error" not in check_err.stderr
                    or b"patch does not apply" in check_err.stderr):
                parse_err = check_err.stderr.decode()
                parse_err = parse_err[parse_err.rfind(":") + 1:-1]
                print(" . Skipping {}:{}".format(repo["name"], parse_err))
                skipped += 1
            else:
                failed += 1
                error_str = str(check_err.stderr,
                                encoding="utf-8").replace("\n", " ")
                error_start = error_str.rfind("error:") + 7
                check_errors.append(
                    dict(repo_name=repo["name"],
                         patch_name=patch,
                         error=error_str[error_start:]))
        except sh.ErrorReturnCode as check_err:
            run_apply = False
            failed += 1
            error_str = str(check_err.stderr,
                            encoding="utf-8").replace("\n", " ")
            error_start = error_str.rfind("error:") + 7
            check_errors.append(
                dict(repo_name=repo["name"],
                     patch_name=patch,
                     error=error_str[error_start:]))

        if run_apply and not dry_run:
            result = apply_patch(repo_directory, patch_filepath,
                                 repo["name"], patch, flags, use_apply)
            if result:
                applied += 1
            else:
                failed += 1
        elif run_apply and dry_run:
            applied += 1

    return [applied, skipped, failed]
def _tag_stable_forks(layer_list, charm_list, k8s_version, bundle_rev,
                      filter_by_tag, bugfix, dry_run):
    """Tags stable forks to a certain bundle revision for a k8s version

    layer_list: YAML spec containing git repos and their upstream/downstream properties
    bundle_rev: bundle revision to tag for a particular version of k8s

    git tag (ie. ck-{bundle_rev}), this would mean we tagged current
    stable branches for 1.14 with the latest charmed kubernetes(ck) bundle rev
    of {bundle_rev}

    TODO: Switch to different merge strategy
    git checkout master
    git checkout -b staging
    git merge stable -s ours
    git checkout stable
    git reset staging
    """
    layer_list = yaml.safe_load(Path(layer_list).read_text(encoding="utf8"))
    charm_list = yaml.safe_load(Path(charm_list).read_text(encoding="utf8"))
    new_env = os.environ.copy()

    for layer_map in layer_list + charm_list:
        for layer_name, repos in layer_map.items():
            tags = repos.get("tags", None)
            if tags and not any(match in filter_by_tag for match in tags):
                continue

            downstream = repos["downstream"]
            # Bugfix releases use "<version>+<rev>", regular releases
            # use "ck-<version>-<rev>".
            if bugfix:
                tag = f"{k8s_version}+{bundle_rev}"
            else:
                tag = f"ck-{k8s_version}-{bundle_rev}"

            if not repos.get("needs_tagging", True):
                log.info(f"Skipping {layer_name} :: does not require tagging")
                continue

            log.info(f"Tagging {layer_name} ({tag}) :: {repos['downstream']}")
            if dry_run:
                continue

            downstream = f"https://{new_env['CDKBOT_GH_USR']}:{new_env['CDKBOT_GH_PSW']}@github.com/{downstream}"
            # Unique scratch dir per repo so clones never collide.
            identifier = str(uuid.uuid4())
            os.makedirs(identifier)
            for line in git.clone(downstream, identifier, _iter=True):
                log.info(line)
            git.config("user.email", "*****@*****.**", _cwd=identifier)
            git.config("user.name", "cdkbot", _cwd=identifier)
            git.config("--global", "push.default", "simple")
            git.checkout("stable", _cwd=identifier)
            try:
                for line in git.tag("--force", tag, _cwd=identifier,
                                    _iter=True, _bg_exc=False):
                    log.info(line)
                for line in git.push(
                    "--force",
                    "origin",
                    tag,
                    _cwd=identifier,
                    _bg_exc=False,
                    _iter=True,
                ):
                    log.info(line)
            except sh.ErrorReturnCode as error:
                log.info(
                    f"Problem tagging: {error.stderr.decode().strip()}, will skip for now.."
                )
def check_patches(repo, patches, flags, use_apply):
    """
    Gather a list of patches from the `adabot/patches` directory on the adabot
    repo. Clone the `repo` and run git apply --check to test wether it
    requires any of the gathered patches.

    When `use_apply` is true, any flags except `--apply` are passed through
    to the check call. This ensures that the check call is representative
    of the actual apply call.
    """
    applied = 0
    skipped = 0
    failed = 0

    repo_directory = lib_directory + repo["name"]

    for patch in patches:
        try:
            os.chdir(lib_directory)
        except FileNotFoundError:
            os.mkdir(lib_directory)
            os.chdir(lib_directory)

        try:
            git.clone(repo["url"])
        except sh.ErrorReturnCode_128 as clone_err:
            # A pre-existing clone is fine; anything else is fatal.
            if b"already exists" not in clone_err.stderr:
                raise RuntimeError(clone_err.stderr)
        os.chdir(repo_directory)

        patch_filepath = patch_directory + patch

        try:
            check_flags = ["--check"]
            if use_apply:
                # Mirror the real apply call, minus the flags that would
                # actually modify the tree.
                check_flags.extend(flag for flag in flags
                                   if flag not in ("--apply", "--signoff"))
            git.apply(check_flags, patch_filepath)
            run_apply = True
        except sh.ErrorReturnCode_1 as check_err:
            run_apply = False
            if not b"error" in check_err.stderr:
                skipped += 1
            else:
                failed += 1
                check_errors.append(
                    dict(repo_name=repo["name"],
                         patch_name=patch,
                         error=check_err.stderr))
        except sh.ErrorReturnCode as check_err:
            run_apply = False
            failed += 1
            check_errors.append(
                dict(repo_name=repo["name"],
                     patch_name=patch,
                     error=check_err.stderr))

        if run_apply:
            result = apply_patch(repo_directory, patch_filepath,
                                 repo["name"], patch, flags, use_apply)
            if result:
                applied += 1
            else:
                failed += 1

    return [applied, skipped, failed]