def query_local_repository_commits_to_existing_tag_from_id(
        self, git_dir, commit_id, commit_tags):
    """Returns the list of commit messages to the local repository."""
    # pylint: disable=invalid-name

    # Map each commit id to its newest tag; iterating in sorted order
    # lets a later (newer) tag overwrite an earlier one for the same commit.
    id_to_newest_tag = {
        commit_tag.commit_id: commit_tag.tag
        for commit_tag in sorted(commit_tags)
    }

    # If the commit itself is tagged there are no pending messages.
    tag = id_to_newest_tag.get(commit_id)
    if tag is not None:
        return tag, []

    # Walk the history (most recent first) until we hit a tagged commit,
    # counting how many untagged commits precede it.
    history = check_subprocess(
        'git -C "{dir}" log --pretty=oneline {id}'.format(dir=git_dir,
                                                          id=commit_id))
    count = 0
    for line in history.split('\n'):
        tag = id_to_newest_tag.get(line.split(' ', 1)[0])
        if tag:
            break
        count += 1

    if tag is None:
        raise ValueError(
            'There is no baseline tag for commit "{id}" in {dir}.'.format(
                id=commit_id, dir=git_dir))

    # Collect the full messages for only the commits since that tag.
    detail = check_subprocess(
        'git -C "{dir}" log -n {count} --pretty=medium {id}'.format(
            dir=git_dir, count=count, id=commit_id))
    return tag, CommitMessage.make_list_from_result(detail)
def _do_command(self):
    """Implements CommandProcessor interface.

    Ensures a redis-server is available while the parent command runs,
    then stops it again if (and only if) we were the ones who started it.
    """
    self._ensure_templates_directory()
    redis_service = 'redis-server'

    # BUG FIX: probe with "status" rather than "start". Probing with
    # "start" would itself start the service and return 0, so start_redis
    # would be False, the debug log would be wrong, and the temporary
    # server would never be stopped in the finally clause below.
    start_redis = run_subprocess(
        'sudo service {redis} status'.format(redis=redis_service))[0] != 0
    logging.debug('redis-server %s running.',
                  'IS NOT' if start_redis else 'IS')
    try:
        if start_redis:
            check_subprocess(
                'sudo service {redis} start'.format(redis=redis_service))
        super(GenerateApiDocsCommand, self)._do_command()
    finally:
        # Only tear down the service if we started it ourselves.
        if start_redis:
            # pylint: disable=broad-except
            try:
                check_subprocess('sudo service {redis} stop'.format(
                    redis=redis_service))
            except Exception as ex:
                # Best effort cleanup; do not mask the command's outcome.
                maybe_log_exception(
                    self.name, ex,
                    'Ignoring exception while stopping temporary {name}.'.
                    format(name=redis_service))
def publish_halyard_version_commits(self):
    """Publish the halyard build to the bucket.

    This also writes the built version to
        <scratch>/halyard/last_version_commit.yml
    so callers can know what version was written.
    """
    options = self.options
    versions_url = options.halyard_version_commits_url
    if not versions_url:
        # BUG FIX: the template previously read '{base}/(unknown)' which
        # dropped the filename entirely (the filename= kwarg was unused),
        # producing a bogus URL.
        versions_url = '{base}/{filename}'.format(
            base=options.halyard_bucket_base_url,
            filename=self.HALYARD_VERSIONS_BASENAME)

    named_scratch_dir = os.path.join(options.scratch_dir, 'halyard')
    summary_path = os.path.join(named_scratch_dir, 'halyard-summary.yml')

    with open(summary_path, 'r') as stream:
        # NOTE(review): yaml.load without an explicit Loader is deprecated;
        # file is produced by this tool so content is trusted.
        summary_info = yaml.load(stream)

    # This is only because we need a file to gsutil cp
    # We already need gsutil so its easier to just use it again here.
    tmp_path = os.path.join(named_scratch_dir, self.HALYARD_VERSIONS_BASENAME)

    logging.debug('Fetching existing halyard build versions')
    try:
        contents = check_subprocess(
            'gsutil cat {url}'.format(url=versions_url))
        contents += '\n'
    except subprocess.CalledProcessError as error:
        # A missing object is expected on first publish; anything else
        # is a real failure.
        output = error.output
        if output.find('No URLs matched') < 0:
            raise
        contents = ''
        logging.warning('%s did not exist. Creating a new one.',
                        versions_url)

    new_entry = '{version}: {commit}\n'.format(
        version=self.__build_version, commit=summary_info['commit_id'])

    logging.info('Updating %s with %s', versions_url, new_entry)
    # Guarantee a newline separator before appending our entry.
    if contents and contents[-1] != '\n':
        contents += '\n'
    contents = contents + new_entry
    with open(tmp_path, 'w') as stream:
        stream.write(contents)
    check_subprocess('gsutil cp {path} {url}'.format(path=tmp_path,
                                                     url=versions_url))

    # Record what we just published for downstream callers.
    last_version_commit_path = os.path.join(named_scratch_dir,
                                            'last_version_commit.yml')
    with open(last_version_commit_path, 'w') as stream:
        stream.write(new_entry)
def push_branch_to_origin(self, git_dir, branch):
    """Push the given local repository back up to the origin.

    This has no effect if the repository is not in the given branch.
    """
    # Guard: only push when the working tree is actually on the
    # requested branch.
    current_branch = self.query_local_repository_branch(git_dir)
    if current_branch != branch:
        logging.warning(
            'Skipping push %s "%s" to origin because branch is "%s".',
            git_dir, branch, current_branch)
        return

    push_command = 'git -C "{dir}" push origin {branch} --tags'.format(
        dir=git_dir, branch=branch)
    check_subprocess(push_command)
def test_clone_origin(self): git = self.git # Make the origin we're going to test the clone against # This is intentionally different from upstream so that # we can confirm that upstream is also setup properly. origin_user = '******' origin_basedir = os.path.join(self.base_temp_dir, origin_user) os.makedirs(origin_basedir) check_subprocess('git -C "{origin_dir}" clone "{upstream}"'.format( origin_dir=origin_basedir, upstream=self.git_dir)) test_parent = os.path.join(self.base_temp_dir, 'test_clone_origin') os.makedirs(test_parent) test_dir = os.path.join(test_parent, TEST_REPO_NAME) origin_dir = os.path.join(origin_basedir, TEST_REPO_NAME) self.git.clone_repository_to_path(origin_dir, test_dir, upstream_url=self.git_dir) want_tags = git.query_tag_commits(self.git_dir, TAG_VERSION_PATTERN) have_tags = git.query_tag_commits(test_dir, TAG_VERSION_PATTERN) self.assertEquals(want_tags, have_tags) got = check_subprocess('git -C "{dir}" remote -v'.format(dir=test_dir)) remote = git.ORIGIN_REMOTE_NAME decoy = os.path.join(git.options.scratch_dir, 'nebula_decoys', os.path.basename(self.git_dir)) # Upstream repo is configured for pulls, but not for pushes. self.assertEquals( '\n'.join([ '{remote}\t{origin} (fetch)'.format(remote=remote, origin=origin_dir), '{remote}\t{origin} (push)'.format(remote=remote, origin=origin_dir), 'origin\t{decoy} (fetch)'.format(decoy=decoy), 'origin\t{decoy} (push)'.format(decoy=decoy), 'upstream\t{upstream} (fetch)'.format(upstream=self.git_dir), 'upstream\tdisabled (push)' ]), got) reference = git.determine_remote_git_repository(test_dir) self.assertEquals( RemoteGitRepository.make_from_url( origin_dir, upstream_ref=RemoteGitRepository.make_from_url(self.git_dir)), reference)
def refresh_local_repository(self, git_dir, remote_name, branch):
    """Refreshes the given local repository from the remote one.

    Args:
      git_dir: [string] Which local repository to update.
      remote_name: [string] Which remote repository to pull from.
      branch: [string] Which branch to pull.
    """
    repository_name = os.path.basename(git_dir)

    # Probe whether the named remote is configured for fetches at all.
    # pylint: disable=unused-variable
    retcode, stdout = run_subprocess(
        'git -C "{dir}" remote -v | egrep "^{which}.*\\(fetch\\)$"'.format(
            dir=git_dir, which=remote_name))
    if not stdout:
        logging.warning(
            'Skipping pull {remote_name} {branch} in {repository} because'
            ' it does not have a remote "{remote_name}"'.format(
                remote_name=remote_name,
                branch=branch,
                repository=repository_name))
        return

    # Only refresh when the working tree is already on the target branch.
    local_branch = self.query_local_repository_branch(git_dir)
    if local_branch != branch:
        logging.warning(
            'Skipping pull {remote_name} {branch} in {repository} because'
            ' its in branch={local_branch}'.format(
                remote_name=remote_name,
                branch=branch,
                repository=repository_name,
                local_branch=local_branch))
        return

    try:
        logging.debug('Refreshing %s from %s branch %s', git_dir,
                      remote_name, branch)
        command = 'git -C "{dir}" pull {remote_name} {branch} --tags'.format(
            dir=git_dir, remote_name=remote_name, branch=branch)
        result = check_subprocess(command)
        logging.info('%s:\n%s', repository_name, result)
    except RuntimeError:
        # If the remote actually advertises the branch, the pull failure
        # is real and must propagate; otherwise the branch simply does
        # not exist there and we tolerate it with a warning.
        result = check_subprocess(
            'git -C "{dir}" branch -r'.format(dir=git_dir))
        if result.find('{which}/{branch}\n'.format(which=remote_name,
                                                   branch=branch)) >= 0:
            raise
        logging.warning(
            'WARNING {name} branch={branch} is not known to {which}.\n'.
            format(name=repository_name, branch=branch, which=remote_name))
def bom_from_version(self, version):
    """Load a BOM from halyard.

    Args:
      version: [string] The BOM version to ask halyard for.

    Returns:
      The parsed BOM as a dict.
    """
    logging.debug('Loading bom version %s', version)
    bom_yaml_string = check_subprocess(
        'hal version bom {0} --color false --quiet'.format(version),
        echo=False)
    # Use safe_load: plain yaml.load without a Loader is deprecated and
    # can construct arbitrary python objects. A BOM is plain YAML data,
    # so safe_load is sufficient.
    return yaml.safe_load(bom_yaml_string)
def test_clone_upstream(self):
    """Verify clone_repository_to_path wiring when origin IS the upstream."""
    git = self.git
    test_parent = os.path.join(self.base_temp_dir, 'test_clone_upstream')
    os.makedirs(test_parent)
    test_dir = os.path.join(test_parent, TEST_REPO_NAME)

    git.clone_repository_to_path(self.git_dir, test_dir)
    self.assertTrue(os.path.exists(os.path.join(test_dir, 'base_file')))

    # The clone should carry over all the version tags.
    want_tags = git.query_tag_commits(self.git_dir, TAG_VERSION_PATTERN)
    have_tags = git.query_tag_commits(test_dir, TAG_VERSION_PATTERN)
    self.assertEquals(want_tags, have_tags)

    got = check_subprocess('git -C "{dir}" remote -v'.format(dir=test_dir))

    # Disable pushes to the origin
    # No upstream since origin is upstream
    remote = git.ORIGIN_REMOTE_NAME
    decoy = os.path.join(git.options.scratch_dir, 'nebula_decoys',
                         os.path.basename(self.git_dir))
    self.assertEquals(
        '\n'.join([
            '{remote}\t{origin} (fetch)'.format(remote=remote,
                                                origin=self.git_dir),
            '{remote}\tdisabled (push)'.format(remote=remote),
            'origin\t{decoy} (fetch)'.format(decoy=decoy),
            'origin\t{decoy} (push)'.format(decoy=decoy)
        ]), got)

    # With no distinct upstream, the inferred reference has no upstream_ref.
    reference = git.determine_remote_git_repository(test_dir)
    self.assertEquals(RemoteGitRepository.make_from_url(self.git_dir),
                      reference)
def setUpClass(cls):
    """Build a throwaway git repository with tagged branches for the tests.

    Creates branches off a tagged base commit (patch, minor, major) each
    adding one file, then a merged branch that squash-merges them back.
    """
    cls.git = GitRunner(make_default_options())
    cls.base_temp_dir = tempfile.mkdtemp(prefix='git_test')
    cls.git_dir = os.path.join(cls.base_temp_dir, 'commit_message_test')
    os.makedirs(cls.git_dir)

    git_dir = cls.git_dir
    # Helper to prefix commands with the repo location.
    gitify = lambda args: 'git -C "{dir}" {args}'.format(dir=git_dir,
                                                         args=args)
    check_subprocess_sequence([
        gitify('init'),
        'touch "{dir}/base_file"'.format(dir=git_dir),
        gitify('add "{dir}/base_file"'.format(dir=git_dir)),
        gitify('commit -a -m "feat(test): added file"'),
        gitify(
            'tag {base_version} HEAD'.format(base_version=VERSION_BASE)),

        # Patch branch: one fix-type commit.
        gitify('checkout -b {patch_branch}'.format(
            patch_branch=cls.PATCH_BRANCH)),
        'touch "{dir}/patch_file"'.format(dir=git_dir),
        gitify('add "{dir}/patch_file"'.format(dir=git_dir)),
        gitify('commit -a -m "fix(testA): added patch_file"'),

        # Minor branch: one chore-type commit on top of the patch.
        gitify('checkout -b {minor_branch}'.format(
            minor_branch=cls.MINOR_BRANCH)),
        'touch "{dir}/minor_file"'.format(dir=git_dir),
        gitify('add "{dir}/minor_file"'.format(dir=git_dir)),
        gitify('commit -a -m "chore(testB): added minor_file"'),

        # Major branch: a BREAKING CHANGE commit on top of the minor.
        gitify('checkout -b {major_branch}'.format(
            major_branch=cls.MAJOR_BRANCH)),
        'touch "{dir}/major_file"'.format(dir=git_dir),
        gitify('add "{dir}/major_file"'.format(dir=git_dir)),
        gitify('commit -a -m'
               ' "feat(testC): added major_file\n'
               '\nInterestingly enough, this is a BREAKING CHANGE.'
               '"'),

        # Merged branch: squash the three commits above back onto base.
        gitify('checkout -b {merged_branch}'.format(
            merged_branch=cls.MERGED_BRANCH)),
        gitify('reset --hard HEAD~3'),
        gitify('merge --squash HEAD@{1}')
    ])

    # The squash merge needs an editor for the commit message; point
    # EDITOR at /bin/true so the commit goes through non-interactively.
    env = dict(os.environ)
    if os.path.exists('/bin/true'):
        env['EDITOR'] = '/bin/true'
    elif os.path.exists('/usr/bin/true'):
        env['EDITOR'] = '/usr/bin/true'
    else:
        raise NotImplementedError('platform not supported for this test')
    check_subprocess('git -C "{dir}" commit'.format(dir=git_dir), env=env)
def generate_swagger_docs(self, repository, docs_url):
    """Generate the API from the swagger endpoint."""
    options = self.options

    # Lay out <scratch>/<repo>/apidocs/docs.json for the generator.
    scratch_dir_for_repo = os.path.join(options.scratch_dir,
                                        repository.name)
    docs_dir = os.path.abspath(
        os.path.join(scratch_dir_for_repo, 'apidocs'))
    docs_path = os.path.join(docs_dir, 'docs.json')
    ensure_dir_exists(docs_dir)

    logging.info('Generating swagger docs for %s', repository.name)
    # Fetch the swagger spec, then run it through swagger-codegen.
    check_subprocess('curl -s {url} -o {docs_path}'.format(
        url=docs_url, docs_path=docs_path))
    check_subprocess(
        'java -jar {jar_path} generate -i {docs_path} -l html2'
        ' -o {scratch} -t {templates_directory}'.format(
            jar_path=options.swagger_codegen_cli_jar_path,
            docs_path=docs_path,
            scratch=docs_dir,
            templates_directory=self.__templates_directory))
    logging.info('Writing docs to directory %s', docs_dir)
def clone_repository_to_path(self, origin_url, git_dir, upstream_url=None,
                             commit=None, branch=None, default_branch=None):
    """Clone the remote repository at the given commit or branch.

    If requesting a branch and it is not found, then settle for
    the default branch, if one was explicitly specified.

    Args:
      origin_url: [string] The repository to clone from.
      git_dir: [path] Where to clone it to.
      upstream_url: [string] Optional distinct upstream to register
          (with pushes disabled).
      commit: [string] Optional commit to check out after the clone.
      branch: [string] Optional branch to clone; mutually exclusive
          with commit.
      default_branch: [string] Fallback branch if branch is missing.
    """
    # pylint: disable=too-many-arguments
    # Idiom fix: compare against None with "is", not "!=".
    if (commit is not None) and (branch is not None):
        raise ValueError(
            'At most one of commit or branch can be specified.')

    logging.debug('Begin cloning %s', origin_url)
    parent_dir = os.path.dirname(git_dir)
    ensure_dir_exists(parent_dir)

    git_command = 'git -C "{parent_dir}" clone {origin_url}'.format(
        parent_dir=parent_dir, origin_url=origin_url)
    if branch:
        branches = [branch]
        if default_branch:
            branches.append(default_branch)
        self.__check_clone_branch(origin_url, git_command, branches)
    else:
        check_subprocess(git_command)
    logging.info('Cloned %s into %s', origin_url, parent_dir)

    if commit:
        check_subprocess('git -C "{dir}" checkout {commit} -q'.format(
            dir=git_dir, commit=commit),
                         echo=True)

    # Compute once; was previously evaluated twice below.
    has_distinct_upstream = (
        upstream_url and not self.same_repo(upstream_url, origin_url))
    if has_distinct_upstream:
        logging.debug('Adding upstream %s with disabled push',
                      upstream_url)
        check_subprocess(
            'git -C "{dir}" remote add upstream {upstream_url}'.format(
                dir=git_dir, upstream_url=upstream_url))

    # Disable pushes on whichever remote points at the source of truth.
    which = 'upstream' if has_distinct_upstream else 'origin'
    check_subprocess(
        'git -C "{dir}" remote set-url --push {which} disabled'.format(
            dir=git_dir, which=which))
    logging.debug('Finished cloning %s', origin_url)
def __collect_halconfig_files(self, repository):
    """Gets the component config files and writes them into the scratch_path."""
    name = repository.name
    # Only runnable services (and the monitoring daemon) carry halconfig.
    if (name not in SPINNAKER_RUNNABLE_REPOSITORIES.keys()
            and name not in ['spinnaker-monitoring']):
        logging.debug('%s does not use config files -- skipping', name)
        return

    git_dir = self.source_code_manager.get_local_repository_path(name)
    # spinnaker-monitoring keeps its halconfig under the daemon subdir.
    if name == 'spinnaker-monitoring':
        config_root = os.path.join(git_dir, 'spinnaker-monitoring-daemon')
    else:
        config_root = git_dir

    options = self.options
    target_dir = os.path.join(options.scratch_dir, name, 'halconfig')
    ensure_dir_exists(target_dir)

    config_path = os.path.join(config_root, 'halconfig')
    logging.info('Copying configs from %s...', config_path)
    for profile in os.listdir(config_path):
        profile_path = os.path.join(config_path, profile)
        if os.path.isfile(profile_path):
            # Plain files are copied through as-is.
            shutil.copyfile(profile_path,
                            os.path.join(target_dir, profile))
            logging.debug('Copied profile to %s', profile_path)
        elif not os.path.isdir(profile_path):
            logging.warning('%s is neither file nor directory -- ignoring',
                            profile_path)
            continue
        else:
            # Directories are bundled into a single tarball per profile.
            tar_path = os.path.join(
                target_dir, '{profile}.tar.gz'.format(profile=profile))
            file_list = ' '.join(os.listdir(profile_path))

            # NOTE: For historic reasons this is not actually compressed
            # even though the tar_path says ".tar.gz"
            check_subprocess(
                'tar cf {path} -C {profile} {file_list}'.format(
                    path=tar_path, profile=profile_path,
                    file_list=file_list))
            logging.debug('Copied profile to %s', tar_path)
def test_determine_tag_at_patch(self):
    """Pending commits report against the old tag until HEAD is re-tagged."""
    git = self.git
    test_method = git.query_local_repository_commits_to_existing_tag_from_id

    tests = [(BRANCH_A, VERSION_A), (BRANCH_B, VERSION_B)]
    for branch, version in tests:
        # Derive a follow-up patch version by bumping the last digit.
        new_version = str(version)
        new_version = new_version[:-1] + '1'

        self.run_git('checkout ' + branch)
        self.run_git('checkout -b {branch}-patch'.format(branch=branch))
        pending_messages = []
        for change in ['first', 'second']:
            # Add one file per change so each commit is non-empty.
            new_path = os.path.join(self.git_dir, change + '_file')
            check_subprocess('touch "{path}"'.format(path=new_path))
            self.run_git('add "{path}"'.format(path=new_path))
            message = 'fix(test): Made {change} change for testing.'.format(
                change=change)
            self.run_git(
                'commit -a -m "{message}"'.format(message=message))
            # git log indents message bodies by four spaces.
            pending_messages.append(' ' * 4 + message)

        commit_id = git.query_local_repository_commit_id(self.git_dir)

        # The pending change shows up for the old tag (and are most recent first)
        all_tags = git.query_tag_commits(self.git_dir, TAG_VERSION_PATTERN)
        tag, messages = test_method(self.git_dir, commit_id, all_tags)
        self.assertEquals(version, tag)
        self.assertEquals(len(pending_messages), len(messages))
        self.assertEquals(sorted(pending_messages, reverse=True),
                          [m.message for m in messages])

        # When we re-tag at this change,
        # the new tag shows up without pending change.
        self.run_git('tag {version} HEAD'.format(version=new_version))
        all_tags = git.query_tag_commits(self.git_dir, TAG_VERSION_PATTERN)
        tag, messages = test_method(self.git_dir, commit_id, all_tags)
        self.assertEquals(new_version, tag)
        self.assertEquals([], messages)
def query_tag_commits(self, git_dir, tag_pattern):
    """Collect the TagCommit for each tag matching the pattern.

    Returns: list of CommitTag sorted most recent first.
    """
    raw_refs = check_subprocess(
        'git -C "{dir}" show-ref --tags'.format(dir=git_dir))
    matcher = re.compile(tag_pattern)
    commit_tags = (CommitTag.make(line) for line in raw_refs.split('\n'))
    return sorted((ct for ct in commit_tags if matcher.match(ct.tag)),
                  reverse=True)
def determine_remote_git_repository(self, git_dir):
    """Infer a RemoteGitRepository from a local git repository's remotes."""
    remote_text = check_subprocess(
        'git -C "{dir}" remote -v'.format(dir=git_dir))

    # Collect the fetch URL for each configured remote name.
    fetch_urls = {}
    for match in re.finditer(r'(\w+)\s+(\S+)\s+\(fetch\)', remote_text):
        fetch_urls[match.group(1)] = match.group(2)

    origin_url = fetch_urls.get('origin')
    if not origin_url:
        raise ValueError('{0} has no remote "origin"'.format(git_dir))

    # Upstream is optional; only wire it in if the remote exists.
    upstream_url = fetch_urls.get('upstream')
    upstream = (RemoteGitRepository.make_from_url(upstream_url)
                if upstream_url else None)

    return RemoteGitRepository.make_from_url(origin_url,
                                             upstream_ref=upstream)
def reinit_local_repository_with_tag(self, git_dir, git_tag,
                                     initial_commit_message):
    """Recreate the given local repository using the current content.

    The Netflix Nebula gradle plugin that spinnaker uses to build
    the sources is hardcoded with some incompatible constraints
    which we are unable to change (the owner is receptive but won't do
    it for us and we aren't familiar with how it works or exactly
    what needs changed). Therefore we'll wipe out the old tags and
    just put the one tag we want in order to avoid ambiguity.
    We'll detach the old repository to avoid accidental pushes. It's
    much faster to create a new repo than remove all the existing tags.

    Args:
      git_dir: [path] The path to the local git repository.
          If this has an existing .git directory it will be removed.
          The directory will be re initialized as a new repository.
      git_tag: [string] The tag to give the initial commit
      initial_commit_message: [string] The initial commit message
          when commiting the existing directory content to the new repo.
    """
    # Default the tag to the newest tag already on the repository.
    if git_tag is None:
        git_tag = check_subprocess(
            'git -C "{dir}" describe --tags --abbrev=0'.format(
                dir=git_dir))
    # Protect the quotes in the commit message we embed below.
    escaped_commit_message = initial_commit_message.replace('"', '\\"')

    logging.info('Removing old .git from %s and starting new at %s',
                 git_dir, git_tag)
    # Remember origin before wiping, so the new repo can point back at it
    # (with pushes disabled).
    original_origin = self.determine_remote_git_repository(git_dir)
    shutil.rmtree(os.path.join(git_dir, '.git'))
    _git = 'git -C "{dir}" '.format(dir=git_dir)
    check_subprocess_sequence([
        _git + 'init',
        _git + 'add .',
        _git + 'remote add {name} {url}'.format(
            name='origin', url=original_origin.url),
        _git +
        'remote set-url --push {name} disabled'.format(name='origin'),
        _git + 'commit -q -a -m "{message}"'.format(
            message=escaped_commit_message),
        _git + 'tag {tag} HEAD'.format(tag=git_tag)
    ])
def check_git(self, git_dir, command, **kwargs):
    """Wrapper around check_subprocess."""
    # Inject authentication parameters before delegating.
    self.__inject_auth(kwargs)
    full_command = 'git -C "{dir}" {command}'.format(dir=git_dir,
                                                     command=command)
    return check_subprocess(full_command, **kwargs)
def run_git(cls, command):
    """Run the given git command against the class's test repository."""
    git_command = 'git -C "{dir}" {command}'.format(dir=cls.git_dir,
                                                    command=command)
    return check_subprocess(git_command)
def tag_head(self, git_dir, tag):
    """Add tag to the local repository HEAD."""
    tag_command = 'git -C "{dir}" tag {tag} HEAD'.format(dir=git_dir,
                                                         tag=tag)
    check_subprocess(tag_command)
def query_local_repository_commit_id(self, git_dir):
    """Returns the current commit for the repository at git_dir."""
    # HEAD resolves to the commit id of the current checkout.
    return check_subprocess(
        'git -C "{dir}" rev-parse HEAD'.format(dir=git_dir))
def _publish_path(self, path):
    """Publish a bom path via halyard."""
    bom_path = os.path.abspath(path)
    cmd = '{hal} admin publish bom --color false --bom-path {path}'.format(
        hal=self.options.hal_path, path=bom_path)
    check_subprocess(cmd)
def initiate_github_pull_request(self, git_dir, message, base='master',
                                 head=None):
    """Initialize a pull request for the given commit on the given branch.

    Args:
      git_dir: [path] The local repository to initiate the pull request with.
      message: [string] The pull request message. If this is multiple lines
         then the first line will be the title, subsequent lines will
         be the PR description.
      base: [string] The base reference for the pull request.
         The default is master, but this could be a BRANCH or OWNER:BRANCH
      head: [string] The branch to use for the pull request. By default this
         is the current branch state of the the git_dir repository. This
         too can be BRANCH or OWNER:BRANCH. This branch must have alraedy
         been pushed to the ORIGIN repository -- not the local repository.
    """
    options = self.options
    message = message.strip()
    if options.pr_notify_list:
        # BUG FIX: strings have no append() (this previously raised
        # AttributeError), and the split() call was inverted
        # (','.split(list) instead of list.split(',')).
        message += '\n\n@' + ', @'.join(options.pr_notify_list.split(','))

    hub_args = []
    if base:
        hub_args.extend(['-b', base])
    if head:
        hub_args.extend(['-h', head])

    if options.no_pr:
        logging.warning(
            'SKIPPING the creation of a pull request because --no_pr.'
            '\nCommand would have been: %s',
            'hub -C "{dir}" pull-request {args} -m {msg!r}'.format(
                dir=git_dir, args=' '.join(hub_args), msg=message))
        return

    message_path = None
    if message.find('\n') < 0:
        # Single-line messages go straight on the command line.
        hub_args.extend(['-m', message])
    else:
        # Multi-line messages are passed through a temp file.
        fd, message_path = tempfile.mkstemp(prefix='hubmsg')
        # BUG FIX: os.write requires bytes under Python 3
        # (encode is a no-op for ascii text under Python 2).
        os.write(fd, message.encode('utf-8'))
        os.close(fd)
        hub_args.extend(['-F', message_path])

    logging.info(
        'Initiating pull request in %s from %s to %s with message:\n%s',
        git_dir, base, head if head else '<current branch>', message)
    try:
        kwargs = {}
        self.__inject_auth(kwargs)
        output = check_subprocess(
            'hub -C "{dir}" pull-request {args}'.format(
                dir=git_dir, args=' '.join(hub_args)), **kwargs)
        logging.info(output)
    finally:
        # Always clean up the temp message file, even on failure.
        if message_path:
            os.remove(message_path)