def get_timestamp_to_match(args):
    """Return the committer date (ISO 8601, '%cI') of the current HEAD of
    the 'swift' repository, or None when timestamp matching is disabled.
    """
    if not args.match_timestamp:
        return None
    swift_repo = os.path.join(SWIFT_SOURCE_ROOT, "swift")
    with shell.pushd(swift_repo, dry_run=False, echo=False):
        raw = shell.capture(["git", "log", "-1", "--format=%cI"], echo=False)
        return raw.strip()
def test_dry_run(self):
    """Dry-run mode must only echo commands, never touch the filesystem."""
    shell.dry_run = True
    basedir = os.getcwd()
    foobar_dir = os.path.join(self.tmpdir, 'foo', 'bar')

    # makedirs is echoed but not performed.
    shell.makedirs(foobar_dir)
    self.assertFalse(os.path.exists(os.path.join(self.tmpdir, 'foo')))
    self.assertFalse(os.path.exists(foobar_dir))

    # pushd neither changes directory nor executes the command.
    with shell.pushd(foobar_dir):
        self.assertEqual(os.getcwd(), basedir)
        shell.call(['touch', 'testfile'])
        self.assertFalse(
            os.path.exists(os.path.join(foobar_dir, 'testfile')))
    self.assertEqual(os.getcwd(), basedir)

    # rmtree is likewise a no-op.
    shell.rmtree(self.tmpdir)
    self.assertTrue(os.path.exists(self.tmpdir))

    self.assertEqual(
        self.stdout.getvalue(), '''\
+ mkdir -p {foobar_dir}
+ pushd {foobar_dir}
+ touch testfile
+ popd
+ rm -rf {tmpdir}
'''.format(foobar_dir=foobar_dir, tmpdir=self.tmpdir))
    self.assertEqual(self.stderr.getvalue(), "")
    # Fix: reset the module-level flag. The previous code did
    # 'self.dry_run = False', which set an unused instance attribute and
    # leaked dry-run mode into any test that ran afterwards.
    shell.dry_run = False
def test_dry_run(self):
    """Dry-run mode must only echo commands, never touch the filesystem."""
    shell.dry_run = True
    basedir = os.getcwd()
    foobar_dir = os.path.join(self.tmpdir, "foo", "bar")

    # makedirs is echoed but not performed.
    shell.makedirs(foobar_dir)
    self.assertFalse(os.path.exists(os.path.join(self.tmpdir, "foo")))
    self.assertFalse(os.path.exists(foobar_dir))

    # pushd neither changes directory nor executes the command.
    with shell.pushd(foobar_dir):
        self.assertEqual(os.getcwd(), basedir)
        shell.call(["touch", "testfile"])
        self.assertFalse(os.path.exists(os.path.join(foobar_dir, "testfile")))
    self.assertEqual(os.getcwd(), basedir)

    # rmtree is likewise a no-op.
    shell.rmtree(self.tmpdir)
    self.assertTrue(os.path.exists(self.tmpdir))

    self.assertEqual(
        self.stdout.getvalue(),
        """\
+ mkdir -p {foobar_dir}
+ pushd {foobar_dir}
+ touch testfile
+ popd
+ rm -rf {tmpdir}
""".format(
            foobar_dir=foobar_dir, tmpdir=self.tmpdir
        ),
    )
    self.assertEqual(self.stderr.getvalue(), "")
    # Fix: reset the module-level flag. The previous code did
    # 'self.dry_run = False', which set an unused instance attribute and
    # leaked dry-run mode into any test that ran afterwards.
    shell.dry_run = False
def test_dry_run(self):
    """Dry-run mode must only echo commands, never touch the filesystem."""
    shell.dry_run = True
    basedir = os.getcwd()
    foobar_dir = os.path.join(self.tmpdir, 'foo', 'bar')

    # makedirs is echoed but not performed.
    shell.makedirs(foobar_dir)
    self.assertFalse(os.path.exists(os.path.join(self.tmpdir, 'foo')))
    self.assertFalse(os.path.exists(foobar_dir))

    # pushd neither changes directory nor executes the command.
    with shell.pushd(foobar_dir):
        self.assertEqual(os.getcwd(), basedir)
        shell.call(['touch', 'testfile'])
        self.assertFalse(os.path.exists(
            os.path.join(foobar_dir, 'testfile')))
    self.assertEqual(os.getcwd(), basedir)

    # rmtree is likewise a no-op.
    shell.rmtree(self.tmpdir)
    self.assertTrue(os.path.exists(self.tmpdir))

    self.assertEqual(self.stdout.getvalue(), '''\
+ mkdir -p {foobar_dir}
+ pushd {foobar_dir}
+ touch testfile
+ popd
+ rm -rf {tmpdir}
'''.format(foobar_dir=foobar_dir, tmpdir=self.tmpdir))
    self.assertEqual(self.stderr.getvalue(), "")
    # Fix: reset the module-level flag. The previous code did
    # 'self.dry_run = False', which set an unused instance attribute and
    # leaked dry-run mode into any test that ran afterwards.
    shell.dry_run = False
def test_pushd(self):
    """pushd must chdir on entry, restore the previous cwd on exit, and
    echo its trace to stderr (never stdout)."""
    shell.dry_run = False
    origin = os.getcwd()

    # Simple enter/exit round trip.
    with shell.pushd(self.tmpdir):
        self.assertEqual(os.getcwd(), self.tmpdir)
    self.assertEqual(os.getcwd(), origin)

    # pushd inside pushd
    with shell.pushd(self.tmpdir):
        self.assertEqual(os.getcwd(), self.tmpdir)
        shell.makedirs("foo")
        with shell.pushd("foo"):
            self.assertEqual(os.getcwd(), os.path.join(self.tmpdir, "foo"))
        self.assertEqual(os.getcwd(), self.tmpdir)
    self.assertEqual(os.getcwd(), origin)

    # cd inside pushd — exit still returns to the original directory.
    with shell.pushd(self.tmpdir):
        os.chdir("foo")
        self.assertEqual(os.getcwd(), os.path.join(self.tmpdir, "foo"))
        os.chdir("..")
        self.assertEqual(os.getcwd(), self.tmpdir)
        shell.rmtree("foo")
    self.assertEqual(os.getcwd(), origin)

    self.assertEqual(self.stdout.getvalue(), "")
    expected_trace = """\
+ pushd {tmpdir}
+ popd
+ pushd {tmpdir}
+ mkdir -p foo
+ pushd foo
+ popd
+ popd
+ pushd {tmpdir}
+ rm -rf foo
+ popd
""".format(
        tmpdir=self.tmpdir
    )
    self.assertEqual(self.stderr.getvalue(), expected_trace)
def dump_repo_hashes(config):
    """Print one '<repo-name>   <one-line git log>' row per configured
    repository, with the hash column aligned past the longest repo name.
    """
    # Width of the name column: longest repo name plus 5 spaces of padding.
    # ([0] guards the empty-repos case, matching the old reduce(..., 0).)
    max_len = max([0] + [len(name) for name in config['repos'].keys()])
    # Build the format spec with %d (the old code used %r, which only
    # worked because repr(int) == str(int)).
    fmt = "{:<%d}{}" % (max_len + 5)
    # Only the names are needed; sorted items by key == sorted keys.
    for repo_name in sorted(config['repos'].keys()):
        with shell.pushd(os.path.join(SWIFT_SOURCE_ROOT, repo_name),
                         dry_run=False, echo=False):
            h = shell.capture(["git", "log", "--oneline", "-n", "1"],
                              echo=False).strip()
            print(fmt.format(repo_name, h))
def test_pushd(self):
    """pushd must chdir on entry, restore the previous cwd on exit, and
    echo its trace to stderr (never stdout)."""
    shell.dry_run = False
    origin = os.getcwd()

    # Simple enter/exit round trip.
    with shell.pushd(self.tmpdir):
        self.assertEqual(os.getcwd(), self.tmpdir)
    self.assertEqual(os.getcwd(), origin)

    # pushd inside pushd
    with shell.pushd(self.tmpdir):
        self.assertEqual(os.getcwd(), self.tmpdir)
        shell.makedirs('foo')
        with shell.pushd('foo'):
            self.assertEqual(os.getcwd(),
                             os.path.join(self.tmpdir, 'foo'))
        self.assertEqual(os.getcwd(), self.tmpdir)
    self.assertEqual(os.getcwd(), origin)

    # cd inside pushd — exit still returns to the original directory.
    with shell.pushd(self.tmpdir):
        os.chdir('foo')
        self.assertEqual(os.getcwd(), os.path.join(self.tmpdir, 'foo'))
        os.chdir('..')
        self.assertEqual(os.getcwd(), self.tmpdir)
        shell.rmtree('foo')
    self.assertEqual(os.getcwd(), origin)

    self.assertEqual(self.stdout.getvalue(), "")
    expected_trace = '''\
+ pushd {tmpdir}
+ popd
+ pushd {tmpdir}
+ mkdir -p foo
+ pushd foo
+ popd
+ popd
+ pushd {tmpdir}
+ rm -rf foo
+ popd
'''.format(tmpdir=self.tmpdir)
    self.assertEqual(self.stderr.getvalue(), expected_trace)
def obtain_additional_swift_sources(pool_args):
    """Worker for run_parallel: clone a single repository into
    SWIFT_SOURCE_ROOT and, when a branch scheme is given, check out its
    branch and update submodules.
    """
    (args, repo_name, repo_info, repo_branch, remote, with_ssh, scheme_name,
     skip_history, skip_repository_list) = pool_args

    with shell.pushd(SWIFT_SOURCE_ROOT, dry_run=False, echo=False):
        print("Cloning '" + repo_name + "'")

        # Assemble the clone command; a shallow clone when history is
        # being skipped.
        clone_cmd = ['git', 'clone', '--recursive']
        if skip_history:
            clone_cmd += ['--depth', '1']
        clone_cmd += [remote, repo_name]
        shell.run(clone_cmd, echo=True)

        if scheme_name:
            worktree = os.path.join(SWIFT_SOURCE_ROOT, repo_name)
            git_dir = os.path.join(worktree, ".git")
            shell.run(['git', '--git-dir', git_dir, '--work-tree', worktree,
                       'checkout', repo_branch], echo=False)
            with shell.pushd(worktree, dry_run=False, echo=False):
                shell.run(["git", "submodule", "update", "--recursive"],
                          echo=False)
def obtain_all_additional_swift_sources(args, config, with_ssh, scheme_name,
                                        skip_history, skip_repository_list):
    """Collect clone jobs for every configured repo that is neither skipped
    nor already present, then run them in parallel.
    """
    pool_args = []
    with shell.pushd(SWIFT_SOURCE_ROOT, dry_run=False, echo=False):
        for repo_name, repo_info in config['repos'].items():
            if repo_name in skip_repository_list:
                print("Skipping clone of '" + repo_name + "', requested by "
                      "user")
                continue
            if os.path.isdir(os.path.join(repo_name, ".git")):
                print("Skipping clone of '" + repo_name + "', directory "
                      "already exists")
                continue

            # If we have a url override, use that url instead of
            # interpolating.
            remote_repo_info = repo_info['remote']
            if 'url' in remote_repo_info:
                remote = remote_repo_info['url']
            else:
                # Prefer HTTPS unless SSH was requested or no HTTPS
                # pattern is configured.
                use_ssh = (with_ssh is True or
                           'https-clone-pattern' not in config)
                pattern = (config['ssh-clone-pattern'] if use_ssh
                           else config['https-clone-pattern'])
                remote = pattern % remote_repo_info['id']

            repo_branch = None
            if scheme_name:
                # Fall back to the scheme name itself when no scheme
                # matches the given alias.
                repo_branch = scheme_name
                for scheme in config['branch-schemes'].values():
                    if scheme_name in scheme['aliases']:
                        repo_branch = scheme['repos'][repo_name]
                        break

            pool_args.append([args, repo_name, repo_info, repo_branch,
                              remote, with_ssh, scheme_name, skip_history,
                              skip_repository_list])

    if not pool_args:
        print("Not cloning any repositories.")
        return

    return shell.run_parallel(obtain_additional_swift_sources, pool_args,
                              args.n_processes)
def dump_hashes_config(args, config):
    """Print (as JSON) a clone config whose single branch scheme pins every
    repository to its current HEAD commit hash.
    """
    branch_scheme_name = args.dump_hashes_config

    # Carry over the clone patterns and repo list unchanged.
    new_config = {key: config[key]
                  for key in ('ssh-clone-pattern', 'https-clone-pattern',
                              'repos')}

    repos = {}
    new_config['branch-schemes'] = {
        branch_scheme_name: {'aliases': [branch_scheme_name],
                             'repos': repos}}

    # Sorted items by key is equivalent to sorted keys; only names matter.
    for repo_name in sorted(config['repos'].keys()):
        with shell.pushd(os.path.join(SWIFT_SOURCE_ROOT, repo_name),
                         dry_run=False, echo=False):
            head = shell.capture(["git", "rev-parse", "HEAD"],
                                 echo=False).strip()
            repos[repo_name] = str(head)

    print(json.dumps(new_config, indent=4))
def update_single_repository(args):
    """Bring one checked-out repository up to date.

    args is a single packed tuple (used as a run_parallel worker payload):
    (config, repo_name, scheme_name, scheme_map, tag, timestamp,
     reset_to_remote, should_clean, cross_repos_pr).

    Silently returns if the repository directory does not exist. On any
    error, prints a traceback and returns the exception value instead of
    raising, so one failing repo does not abort a parallel update.
    """
    config, repo_name, scheme_name, scheme_map, tag, timestamp, \
        reset_to_remote, should_clean, cross_repos_pr = args
    repo_path = os.path.join(SWIFT_SOURCE_ROOT, repo_name)
    if not os.path.isdir(repo_path):
        return

    try:
        print("Updating '" + repo_path + "'")

        with shell.pushd(repo_path, dry_run=False, echo=False):
            cross_repo = False
            # Decide what to check out: an explicit tag wins, then the
            # branch scheme (optionally rewound to a timestamp), then a
            # timestamp relative to HEAD.
            checkout_target = None
            if tag:
                checkout_target = confirm_tag_in_repo(tag, repo_name)
            elif scheme_name:
                checkout_target, cross_repo = get_branch_for_repo(
                    config, repo_name, scheme_name, scheme_map,
                    cross_repos_pr)
                if timestamp:
                    checkout_target = find_rev_by_timestamp(timestamp,
                                                            repo_name,
                                                            checkout_target)
            elif timestamp:
                checkout_target = find_rev_by_timestamp(timestamp, repo_name,
                                                        "HEAD")

            # The clean option restores a repository to pristine condition.
            if should_clean:
                shell.run(['git', 'clean', '-fdx'], echo=True)
                shell.run(['git', 'submodule', 'foreach', '--recursive',
                           'git', 'clean', '-fdx'], echo=True)
                shell.run(['git', 'submodule', 'foreach', '--recursive',
                           'git', 'reset', '--hard', 'HEAD'], echo=True)
                shell.run(['git', 'reset', '--hard', 'HEAD'], echo=True)
                # It is possible to reset --hard and still be mid-rebase.
                try:
                    shell.run(['git', 'rebase', '--abort'], echo=True)
                except Exception:
                    pass

            if checkout_target:
                shell.run(['git', 'status', '--porcelain', '-uno'],
                          echo=False)
                shell.run(['git', 'checkout', checkout_target], echo=True)

            # It's important that we checkout, fetch, and rebase, in order.
            # .git/FETCH_HEAD updates the not-for-merge attributes based on
            # which branch was checked out during the fetch.
            shell.run(["git", "fetch", "--recurse-submodules=yes"],
                      echo=True)

            # If we were asked to reset to the specified branch, do the hard
            # reset and return.
            if checkout_target and reset_to_remote and not cross_repo:
                shell.run(['git', 'reset', '--hard',
                           "origin/%s" % checkout_target], echo=True)
                return

            # Query whether we have a "detached HEAD", which will mean that
            # we previously checked out a tag rather than a branch.
            detached_head = False
            try:
                # This git command returns error code 1 if HEAD is detached.
                # Otherwise there was some other error, and we need to handle
                # it like other command errors.
                shell.run(["git", "symbolic-ref", "-q", "HEAD"], echo=False)
            except Exception as e:
                # NOTE(review): assumes shell.run raises an error object
                # carrying the process exit code as '.ret' — confirm against
                # the shell module.
                if e.ret == 1:
                    detached_head = True
                else:
                    raise  # Pass this error up the chain.

            # If we have a detached HEAD in this repository, we don't want
            # to rebase. With a detached HEAD, the fetch will have marked
            # all the branches in FETCH_HEAD as not-for-merge, and the
            # "git rebase FETCH_HEAD" will try to rebase the tree from the
            # default branch's current head, making a mess.
            #
            # Prior to Git 2.6, this is the way to do a "git pull
            # --rebase" that respects rebase.autostash. See
            # http://stackoverflow.com/a/30209750/125349
            if not cross_repo and not detached_head:
                shell.run(["git", "rebase", "FETCH_HEAD"], echo=True)
            elif detached_head:
                print(repo_path,
                      "\nDetached HEAD; probably checked out a tag. No need "
                      "to rebase.\n")

            shell.run(["git", "submodule", "update", "--recursive"],
                      echo=True)
    except Exception:
        # NOTE(review): 'type' shadows the builtin here; only 'value' is
        # used. Returned (not raised) so parallel callers can collect
        # failures.
        (type, value, tb) = sys.exc_info()
        print('Error on repo "%s": %s' % (repo_path,
                                          traceback.format_exc()))
        return value