def obtain_additional_swift_sources(pool_args):
    (args, repo_name, repo_info, repo_branch, remote, with_ssh, scheme_name,
     skip_history, skip_repository_list) = pool_args

    with shell.pushd(SWIFT_SOURCE_ROOT, dry_run=False, echo=False):
        print("Cloning '" + repo_name + "'")

        if skip_history:
            shell.run(['git', 'clone', '--recursive', '--depth', '1',
                       remote, repo_name], echo=True)
        else:
            shell.run(['git', 'clone', '--recursive', remote, repo_name],
                      echo=True)

        if scheme_name:
            src_path = os.path.join(SWIFT_SOURCE_ROOT, repo_name, ".git")
            shell.run(['git', '--git-dir', src_path, '--work-tree',
                       os.path.join(SWIFT_SOURCE_ROOT, repo_name),
                       'checkout', repo_branch], echo=False)

        with shell.pushd(os.path.join(SWIFT_SOURCE_ROOT, repo_name),
                         dry_run=False, echo=False):
            shell.run(["git", "submodule", "update", "--recursive"],
                      echo=False)
def obtain_additional_swift_sources(pool_args):
    (args, repo_name, repo_info, repo_branch, remote, with_ssh, scheme_name,
     skip_history, skip_repository_list) = pool_args

    with shell.pushd(SWIFT_SOURCE_ROOT, dry_run=False, echo=False):
        print("Cloning '" + repo_name + "'")

        if skip_history:
            shell.run(['env', 'GIT_TERMINAL_PROMPT=0', 'git', 'clone',
                       '--recursive', '--depth', '1', '--branch',
                       repo_branch, remote, repo_name], echo=True)
        else:
            shell.run(['env', 'GIT_TERMINAL_PROMPT=0', 'git', 'clone',
                       '--recursive', remote, repo_name], echo=True)

        if scheme_name:
            src_path = os.path.join(SWIFT_SOURCE_ROOT, repo_name, ".git")
            shell.run(['env', 'GIT_TERMINAL_PROMPT=0', 'git',
                       '--git-dir', src_path, '--work-tree',
                       os.path.join(SWIFT_SOURCE_ROOT, repo_name),
                       'checkout', repo_branch], echo=False)

        with shell.pushd(os.path.join(SWIFT_SOURCE_ROOT, repo_name),
                         dry_run=False, echo=False):
            shell.run(['env', 'GIT_TERMINAL_PROMPT=0', "git", "submodule",
                       "update", "--recursive"], echo=False)
def get_timestamp_to_match(args):
    if not args.match_timestamp:
        return None
    with shell.pushd(os.path.join(SWIFT_SOURCE_ROOT, "swift"),
                     dry_run=False, echo=False):
        return shell.capture(["git", "log", "-1", "--format=%cI"],
                             echo=False).strip()
def get_timestamp_to_match(args):
    if not args.match_timestamp:
        return None
    with shell.pushd(os.path.join(args.source_root, "swift"),
                     dry_run=False, echo=False):
        return shell.capture(["git", "log", "-1", "--format=%cI"],
                             echo=False).strip()
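# The ISO timestamp returned above is later handed to find_rev_by_timestamp()
# by the update path. That helper is not part of this excerpt; the sketch
# below is only a hedged approximation of such a lookup (name and behavior are
# assumptions, the real implementation may differ): it resolves the newest
# first-parent commit on `refspec` that is not later than `timestamp`.
def find_rev_by_timestamp_sketch(timestamp, repo_name, refspec):
    # Ask git for the last commit hash on the ref before the given timestamp.
    rev = shell.capture(
        ["git", "log", "-1", "--format=%H", "--first-parent",
         "--before=" + timestamp, refspec],
        echo=False).strip()
    if not rev:
        raise RuntimeError("No rev in %s before timestamp %s" %
                           (repo_name, timestamp))
    return rev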
def dump_repo_hashes(config): """ Dumps the current state of the repo into a new config file that contains a master branch scheme with the relevant branches set to the appropriate hashes. """ branch_scheme_name = 'repro' new_config = {} config_copy_keys = ['ssh-clone-pattern', 'https-clone-pattern', 'repos'] for config_copy_key in config_copy_keys: new_config[config_copy_key] = config[config_copy_key] repos = {} branch_scheme = {'aliases': [branch_scheme_name], 'repos': repos} new_config['branch-schemes'] = {branch_scheme_name: branch_scheme} for repo_name, repo_info in sorted(config['repos'].items(), key=lambda x: x[0]): path = os.path.join(SWIFT_SOURCE_ROOT, repo_name) if os.path.isdir(path): with shell.pushd(path, dry_run=False, echo=False): h = shell.capture(["git", "rev-parse", "HEAD"], echo=False).strip() repos[repo_name] = str(h) if not os.path.isdir(path): repos[repo_name] = "dir not exist" json.dump(new_config, sys.stdout, indent=4)
def obtain_all_additional_swift_sources(args, config, with_ssh, scheme_name,
                                        skip_history, skip_tags,
                                        skip_repository_list):
    pool_args = []
    with shell.pushd(args.source_root, dry_run=False, echo=False):
        for repo_name, repo_info in config['repos'].items():
            if repo_name in skip_repository_list:
                print("Skipping clone of '" + repo_name + "', requested by "
                      "user")
                continue

            if os.path.isdir(os.path.join(repo_name, ".git")):
                print("Skipping clone of '" + repo_name + "', directory "
                      "already exists")
                continue

            # If we have a url override, use that url instead of
            # interpolating.
            remote_repo_info = repo_info['remote']
            if 'url' in remote_repo_info:
                remote = remote_repo_info['url']
            else:
                remote_repo_id = remote_repo_info['id']
                if with_ssh is True or 'https-clone-pattern' not in config:
                    remote = config['ssh-clone-pattern'] % remote_repo_id
                else:
                    remote = config['https-clone-pattern'] % remote_repo_id

            repo_branch = None
            repo_not_in_scheme = False
            if scheme_name:
                for v in config['branch-schemes'].values():
                    if scheme_name not in v['aliases']:
                        continue

                    # If repo is not specified in the scheme, skip cloning it.
                    if repo_name not in v['repos']:
                        repo_not_in_scheme = True
                        continue

                    repo_branch = v['repos'][repo_name]
                    break
                else:
                    repo_branch = scheme_name
            if repo_not_in_scheme:
                continue

            pool_args.append([args, repo_name, repo_info, repo_branch,
                              remote, with_ssh, scheme_name, skip_history,
                              skip_tags, skip_repository_list])

    if not pool_args:
        print("Not cloning any repositories.")
        return

    return run_parallel(obtain_additional_swift_sources, pool_args,
                        args.n_processes)
def obtain_all_additional_swift_sources(args, config, with_ssh, scheme_name,
                                        skip_history, skip_repository_list):
    pool_args = []
    with shell.pushd(SWIFT_SOURCE_ROOT, dry_run=False, echo=False):
        for repo_name, repo_info in config['repos'].items():
            if repo_name in skip_repository_list:
                print("Skipping clone of '" + repo_name + "', requested by "
                      "user")
                continue

            if os.path.isdir(os.path.join(repo_name, ".git")):
                print("Skipping clone of '" + repo_name + "', directory "
                      "already exists")
                continue

            # If we have a url override, use that url instead of
            # interpolating.
            remote_repo_info = repo_info['remote']
            if 'url' in remote_repo_info:
                remote = remote_repo_info['url']
            else:
                remote_repo_id = remote_repo_info['id']
                if with_ssh is True or 'https-clone-pattern' not in config:
                    remote = config['ssh-clone-pattern'] % remote_repo_id
                else:
                    remote = config['https-clone-pattern'] % remote_repo_id

            repo_branch = None
            repo_not_in_scheme = False
            if scheme_name:
                for v in config['branch-schemes'].values():
                    if scheme_name not in v['aliases']:
                        continue

                    # If repo is not specified in the scheme, skip cloning it.
                    if repo_name not in v['repos']:
                        repo_not_in_scheme = True
                        continue

                    repo_branch = v['repos'][repo_name]
                    break
                else:
                    repo_branch = scheme_name
            if repo_not_in_scheme:
                continue

            pool_args.append([args, repo_name, repo_info, repo_branch,
                              remote, with_ssh, scheme_name, skip_history,
                              skip_repository_list])

    if not pool_args:
        print("Not cloning any repositories.")
        return

    return shell.run_parallel(obtain_additional_swift_sources, pool_args,
                              args.n_processes)
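# For reference, a minimal illustration of the config shape the clone helpers
# above consume. The repository name, remote id, clone patterns, and scheme
# below are placeholders for illustration only, not the project's actual
# configuration; only the key structure is taken from the code above.
EXAMPLE_CONFIG = {
    'ssh-clone-pattern': 'git@github.com:%s.git',        # '%s' is the remote id
    'https-clone-pattern': 'https://github.com/%s.git',
    'repos': {
        'example-repo': {'remote': {'id': 'example-org/example-repo'}},
    },
    'branch-schemes': {
        'example-scheme': {
            'aliases': ['example-scheme'],
            'repos': {'example-repo': 'main'},
        },
    },
}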
def dump_repo_hashes(config):
    max_len = reduce(lambda acc, x: max(acc, len(x)),
                     config['repos'].keys(), 0)
    fmt = "{:<%r}{}" % (max_len + 5)
    for repo_name, repo_info in sorted(config['repos'].items(),
                                       key=lambda x: x[0]):
        with shell.pushd(os.path.join(SWIFT_SOURCE_ROOT, repo_name),
                         dry_run=False, echo=False):
            h = shell.capture(["git", "log", "--oneline", "-n", "1"],
                              echo=False).strip()
            print(fmt.format(repo_name, h))
def repo_hashes(args, config):
    repos = {}
    for repo_name, repo_info in sorted(config['repos'].items(),
                                       key=lambda x: x[0]):
        repo_path = os.path.join(args.source_root, repo_name)
        if os.path.exists(repo_path):
            with shell.pushd(repo_path, dry_run=False, echo=False):
                h = shell.capture(["git", "rev-parse", "HEAD"],
                                  echo=False).strip()
        else:
            h = 'skip'
        repos[repo_name] = str(h)
    return repos
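# repo_hashes() returns a plain name-to-hash mapping. A caller that just wants
# the hashes on stdout might wrap it as sketched below; this wrapper is a
# hypothetical illustration, not a helper defined in this excerpt.
def print_repo_hashes_sketch(args, config):
    for repo_name, repo_hash in sorted(repo_hashes(args, config).items()):
        # Left-align the repository name so the hashes line up in a column.
        print("{:<35}{}".format(repo_name, repo_hash))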
def obtain_additional_swift_sources(pool_args):
    (args, repo_name, repo_info, repo_branch, remote, with_ssh, scheme_name,
     skip_history, skip_tags, skip_repository_list) = pool_args

    env = dict(os.environ)
    # Environment values must be strings when passed to a subprocess.
    env.update({'GIT_TERMINAL_PROMPT': '0'})

    with shell.pushd(args.source_root, dry_run=False, echo=False):
        print("Cloning '" + repo_name + "'")

        if skip_history:
            shell.run(['git', 'clone', '--recursive', '--depth', '1',
                       '--branch', repo_branch, remote, repo_name] +
                      (['--no-tags'] if skip_tags else []),
                      env=env, echo=True)
        else:
            shell.run(['git', 'clone', '--recursive', remote, repo_name] +
                      (['--no-tags'] if skip_tags else []),
                      env=env, echo=True)

        if scheme_name:
            src_path = os.path.join(args.source_root, repo_name, ".git")
            shell.run(['git', '--git-dir', src_path, '--work-tree',
                       os.path.join(args.source_root, repo_name),
                       'checkout', repo_branch], env=env, echo=False)

        with shell.pushd(os.path.join(args.source_root, repo_name),
                         dry_run=False, echo=False):
            shell.run(["git", "submodule", "update", "--recursive"],
                      env=env, echo=False)
def dump_hashes_config(args, config):
    branch_scheme_name = args.dump_hashes_config
    new_config = {}
    config_copy_keys = ['ssh-clone-pattern', 'https-clone-pattern', 'repos']
    for config_copy_key in config_copy_keys:
        new_config[config_copy_key] = config[config_copy_key]
    repos = {}
    branch_scheme = {'aliases': [branch_scheme_name], 'repos': repos}
    new_config['branch-schemes'] = {args.dump_hashes_config: branch_scheme}
    for repo_name, repo_info in sorted(config['repos'].items(),
                                       key=lambda x: x[0]):
        with shell.pushd(os.path.join(SWIFT_SOURCE_ROOT, repo_name),
                         dry_run=False, echo=False):
            h = shell.capture(["git", "rev-parse", "HEAD"],
                              echo=False).strip()
            repos[repo_name] = str(h)
    print(json.dumps(new_config, indent=4))
def dump_repo_hashes(config): """ Dumps the current state of the repo into a new config file that contains a master branch scheme with the relevant branches set to the appropriate hashes. """ branch_scheme_name = 'repro' new_config = {} config_copy_keys = ['ssh-clone-pattern', 'https-clone-pattern', 'repos'] for config_copy_key in config_copy_keys: new_config[config_copy_key] = config[config_copy_key] repos = {} branch_scheme = {'aliases': [branch_scheme_name], 'repos': repos} new_config['branch-schemes'] = {branch_scheme_name: branch_scheme} for repo_name, repo_info in sorted(config['repos'].items(), key=lambda x: x[0]): with shell.pushd(os.path.join(SWIFT_SOURCE_ROOT, repo_name), dry_run=False, echo=False): h = shell.capture(["git", "rev-parse", "HEAD"], echo=False).strip() repos[repo_name] = str(h) json.dump(new_config, sys.stdout, indent=4)
def update_single_repository(args):
    config, repo_name, scheme_name, scheme_map, tag, timestamp, \
        reset_to_remote, should_clean, cross_repos_pr = args
    repo_path = os.path.join(SWIFT_SOURCE_ROOT, repo_name)
    if not os.path.isdir(repo_path):
        return

    try:
        print("Updating '" + repo_path + "'")

        with shell.pushd(repo_path, dry_run=False, echo=False):
            cross_repo = False
            checkout_target = None
            if tag:
                checkout_target = confirm_tag_in_repo(tag, repo_name)
            elif scheme_name:
                checkout_target, cross_repo = get_branch_for_repo(
                    config, repo_name, scheme_name, scheme_map,
                    cross_repos_pr)
                if timestamp:
                    checkout_target = find_rev_by_timestamp(
                        timestamp, repo_name, checkout_target)
            elif timestamp:
                checkout_target = find_rev_by_timestamp(timestamp, repo_name,
                                                        "HEAD")

            # The clean option restores a repository to pristine condition.
            if should_clean:
                shell.run(['git', 'clean', '-fdx'], echo=True)
                shell.run(['git', 'submodule', 'foreach', '--recursive',
                           'git', 'clean', '-fdx'], echo=True)
                shell.run(['git', 'submodule', 'foreach', '--recursive',
                           'git', 'reset', '--hard', 'HEAD'], echo=True)
                shell.run(['git', 'reset', '--hard', 'HEAD'], echo=True)
                # It is possible to reset --hard and still be mid-rebase.
                try:
                    shell.run(['git', 'rebase', '--abort'], echo=True)
                except Exception:
                    pass

            if checkout_target:
                shell.run(['git', 'status', '--porcelain', '-uno'],
                          echo=False)
                shell.run(['git', 'checkout', checkout_target], echo=True)

            # It's important that we checkout, fetch, and rebase, in order.
            # .git/FETCH_HEAD updates the not-for-merge attributes based on
            # which branch was checked out during the fetch.
            shell.run(["git", "fetch", "--recurse-submodules=yes"],
                      echo=True)

            # If we were asked to reset to the specified branch, do the hard
            # reset and return.
            if checkout_target and reset_to_remote and not cross_repo:
                shell.run(['git', 'reset', '--hard',
                           "origin/%s" % checkout_target], echo=True)
                return

            # Query whether we have a "detached HEAD", which will mean that
            # we previously checked out a tag rather than a branch.
            detached_head = False
            try:
                # This git command returns error code 1 if HEAD is detached.
                # Otherwise there was some other error, and we need to handle
                # it like other command errors.
                shell.run(["git", "symbolic-ref", "-q", "HEAD"], echo=False)
            except Exception as e:
                if e.ret == 1:
                    detached_head = True
                else:
                    raise  # Pass this error up the chain.

            # If we have a detached HEAD in this repository, we don't want
            # to rebase. With a detached HEAD, the fetch will have marked
            # all the branches in FETCH_HEAD as not-for-merge, and the
            # "git rebase FETCH_HEAD" will try to rebase the tree from the
            # default branch's current head, making a mess.
            #
            # Prior to Git 2.6, this is the way to do a "git pull --rebase"
            # that respects rebase.autostash. See
            # http://stackoverflow.com/a/30209750/125349
            if not cross_repo and not detached_head:
                shell.run(["git", "rebase", "FETCH_HEAD"], echo=True)
            elif detached_head:
                print(repo_path,
                      "\nDetached HEAD; probably checked out a tag. No need "
                      "to rebase.\n")

            shell.run(["git", "submodule", "update", "--recursive"],
                      echo=True)
    except Exception:
        (type, value, tb) = sys.exc_info()
        print('Error on repo "%s": %s' % (repo_path, traceback.format_exc()))
        return value
def update_single_repository(pool_args):
    source_root, config, repo_name, scheme_name, scheme_map, tag, timestamp, \
        reset_to_remote, should_clean, cross_repos_pr = pool_args
    repo_path = os.path.join(source_root, repo_name)
    if not os.path.isdir(repo_path) or os.path.islink(repo_path):
        return

    try:
        print("Updating '" + repo_path + "'")

        with shell.pushd(repo_path, dry_run=False, echo=False):
            cross_repo = False
            checkout_target = None
            if tag:
                checkout_target = confirm_tag_in_repo(tag, repo_name)
            elif scheme_name:
                checkout_target, cross_repo = get_branch_for_repo(
                    config, repo_name, scheme_name, scheme_map,
                    cross_repos_pr)
                if timestamp:
                    checkout_target = find_rev_by_timestamp(
                        timestamp, repo_name, checkout_target)

            # The clean option restores a repository to pristine condition.
            if should_clean:
                shell.run(['git', 'clean', '-fdx'], echo=True)
                shell.run(['git', 'submodule', 'foreach', '--recursive',
                           'git', 'clean', '-fdx'], echo=True)
                shell.run(['git', 'submodule', 'foreach', '--recursive',
                           'git', 'reset', '--hard', 'HEAD'], echo=True)
                shell.run(['git', 'reset', '--hard', 'HEAD'], echo=True)
                # It is possible to reset --hard and still be mid-rebase.
                try:
                    shell.run(['git', 'rebase', '--abort'], echo=True)
                except Exception:
                    pass

            if checkout_target:
                shell.run(['git', 'status', '--porcelain', '-uno'],
                          echo=False)
                try:
                    shell.run(['git', 'checkout', checkout_target], echo=True)
                except Exception as originalException:
                    try:
                        result = shell.run(['git', 'rev-parse',
                                            checkout_target])
                        revision = result[0].strip()
                        shell.run(['git', 'checkout', revision], echo=True)
                    except Exception:
                        raise originalException

            # It's important that we checkout, fetch, and rebase, in order.
            # .git/FETCH_HEAD updates the not-for-merge attributes based on
            # which branch was checked out during the fetch.
            shell.run(["git", "fetch", "--recurse-submodules=yes", "--tags"],
                      echo=True)

            # If we were asked to reset to the specified branch, do the hard
            # reset and return.
            if checkout_target and reset_to_remote and not cross_repo:
                full_target = full_target_name('origin', checkout_target)
                shell.run(['git', 'reset', '--hard', full_target], echo=True)
                return

            # Query whether we have a "detached HEAD", which will mean that
            # we previously checked out a tag rather than a branch.
            detached_head = False
            try:
                # This git command returns error code 1 if HEAD is detached.
                # Otherwise there was some other error, and we need to handle
                # it like other command errors.
                shell.run(["git", "symbolic-ref", "-q", "HEAD"], echo=False)
            except Exception as e:
                if e.ret == 1:
                    detached_head = True
                else:
                    raise  # Pass this error up the chain.

            # If we have a detached HEAD in this repository, we don't want
            # to rebase. With a detached HEAD, the fetch will have marked
            # all the branches in FETCH_HEAD as not-for-merge, and the
            # "git rebase FETCH_HEAD" will try to rebase the tree from the
            # default branch's current head, making a mess.
            #
            # Prior to Git 2.6, this is the way to do a "git pull --rebase"
            # that respects rebase.autostash. See
            # http://stackoverflow.com/a/30209750/125349
            if not cross_repo and not detached_head:
                shell.run(["git", "rebase", "FETCH_HEAD"], echo=True)
            elif detached_head:
                print(repo_path,
                      "\nDetached HEAD; probably checked out a tag. No need "
                      "to rebase.\n")

            shell.run(["git", "submodule", "update", "--recursive"],
                      echo=True)
    except Exception:
        (type, value, tb) = sys.exc_info()
        print('Error on repo "%s": %s' % (repo_path, traceback.format_exc()))
        return value
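# full_target_name() is called above but not defined in this excerpt. The
# sketch below is only a hedged guess at such a helper (name, signature, and
# behavior are assumptions, not the project's confirmed implementation): it
# qualifies a branch with its remote for the hard reset, while leaving tags
# unqualified.
def full_target_name_sketch(repository, target):
    # If the target matches a local tag, reset directly to the tag.
    tag = shell.capture(['git', 'tag', '-l', target], echo=False).strip()
    if tag == target:
        return tag

    # If the target matches a local branch, qualify it with the remote name,
    # e.g. 'origin/main'.
    branch = shell.capture(['git', 'branch', '--list', target],
                           echo=False).strip().replace('* ', '')
    if branch == target:
        return '%s/%s' % (repository, target)

    raise RuntimeError("Cannot determine if '%s' is a branch or a tag"
                       % target)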