def make_repository_spec(self, name, **kwargs):
  """Create GitRepositorySpec based on the name and configuration.

  Args:
    name: [string] The repository name.
    git_dir: if supplied then use it, otherwise default under the root path.
    origin: if supplied then use it, even if None. Otherwise default.
    upstream: if supplied then use it, even if None. Otherwise default.
    kwargs: Additional repository attributes passed through to the spec.
  """
  git_dir = kwargs.pop('git_dir', os.path.join(self.__root_source_dir, name))
  origin = kwargs.pop('origin', self.AUTO)
  upstream = kwargs.pop('upstream', self.AUTO)

  # Resolve the origin default before validating any existing checkout.
  origin = self.determine_origin(name) if origin == self.AUTO else origin

  if os.path.exists(git_dir):
    # An existing local clone must already point at the expected origin.
    logging.info('Confirming existing %s matches expectations', git_dir)
    existing = self.__git.determine_git_repository_spec(git_dir)
    if existing.origin != origin:
      raise_and_log_error(
          UnexpectedError(
              'Repository "{dir}" origin="{have}" expected="{want}"'.format(
                  dir=git_dir, have=existing.origin, want=origin)))

  upstream = self.determine_upstream_url(name) if upstream == self.AUTO else upstream

  return GitRepositorySpec(
      name, origin=origin, upstream=upstream, git_dir=git_dir, **kwargs)
def next(self, at_index):
  """Returns the next SemanticVersion from this when bumping up.

  Bumping a component resets every lower-order component to zero.

  Args:
    at_index: [int] The component *_INDEX to bump at.
  """
  if at_index is None:
    raise_and_log_error(UnexpectedError('Invalid index={0}'.format(at_index)))

  if at_index == self.PATCH_INDEX:
    return SemanticVersion(
        self.series_name, self.major, self.minor, self.patch + 1)
  if at_index == self.MINOR_INDEX:
    return SemanticVersion(self.series_name, self.major, self.minor + 1, 0)
  if at_index == self.MAJOR_INDEX:
    return SemanticVersion(self.series_name, self.major + 1, 0, 0)

  raise_and_log_error(
      UnexpectedError('Invalid index={0}'.format(at_index)))
def push_tag_and_branch(self, repository):
  """Pushes a stable branch and git version tag to the origin repository."""
  if self.__no_changes:
    # An earlier promotion step found HEAD identical to the last tag.
    logging.info('No changes in spin since last tag, skipping branch and tag push.')
    return

  git_dir = repository.git_dir
  git = self.__scm.git

  # Validate that the gate version carries a build-number suffix.
  # NOTE(review): the match result is only used for validation; the
  # branch below is derived from __spinnaker_version and the tag from
  # __stable_version -- confirm this mix of versions is intentional.
  match = re.match(r'(\d+)\.(\d+)\.(\d+)-\d+', self.__gate_version)
  if match is None:
    raise_and_log_error(
        ConfigError('gate version {version} is not X.Y.Z-<buildnum>'
                    .format(version=self.__gate_version)))
  semver_parts = self.__spinnaker_version.split('.')
  if len(semver_parts) != 3:
    raise_and_log_error(
        ConfigError('Expected spinnaker version in the form X.Y.Z-N, got {}'
                    .format(self.__spinnaker_version)))

  # Stable spin releases live on a release-<major>.<minor>.x branch.
  release_branch = 'origin/release-{maj}.{min}.x'.format(
      maj=semver_parts[0], min=semver_parts[1])
  release_tag = 'version-' + self.__stable_version

  logging.info('Pushing branch=%s and tag=%s to %s',
               release_branch, release_tag, repository.origin)
  git.check_run_sequence(
      git_dir,
      [
          'checkout ' + release_branch,
          'tag ' + release_tag,
      ])
  git.push_tag_to_origin(git_dir, release_tag)
def _do_repository(self, repository):
  """Create the spinnaker version branch in |repository| and push to origin.

  Honors skip_existing/delete_existing when the branch is already present.
  """
  git_dir = repository.git_dir
  branch = self.options.spinnaker_version

  logging.debug('Checking for branch="%s" in "%s"', branch, git_dir)
  branch_listing = self.__git.check_run(git_dir, 'branch -r')
  remote_branches = [line.strip() for line in branch_listing.split('\n')]

  if 'origin/' + branch in remote_branches:
    if self.options.skip_existing:
      logging.info('Branch "%s" already exists in "%s" -- skip',
                   branch, repository.origin)
      return
    if not self.options.delete_existing:
      raise_and_log_error(
          ConfigError(
              'Branch "{branch}" already exists in "{repo}"'.format(
                  branch=branch, repo=repository.name),
              cause='branch_exists'))
    logging.warning('Branch "%s" already exists in "%s" -- delete',
                    branch, repository.origin)
    self.__git.delete_branch_on_origin(git_dir, branch)

  logging.info('Creating and pushing branch "%s" to "%s"',
               branch, repository.origin)
  self.__git.check_run(git_dir, 'checkout -b ' + branch)
  self.__git.push_branch_to_origin(git_dir, branch)
def __check_clone_branch(self, remote_url, base_dir, clone_command, branches):
  """Run a git clone, falling back through |branches| until one exists.

  Args:
    remote_url: [string] The remote repository url (for messages only).
    base_dir: [string] Directory to run the clone command from.
    clone_command: [string] Base clone command to append '-b <branch>' to.
    branches: [list of string] Candidate branches in preference order.
       Must be non-empty (list.pop(0) raises IndexError otherwise).
  """
  remaining_branches = list(branches)
  while True:
    branch = remaining_branches.pop(0)
    cmd = '{clone} -b {branch}'.format(clone=clone_command, branch=branch)
    retcode, stdout = self.run_git(base_dir, cmd)
    if not retcode:
      # Clone succeeded.
      return

    # Distinguish "branch missing" (retryable) from any other git failure.
    not_found = stdout.find('Remote branch {branch} not found'
                            .format(branch=branch)) >= 0
    if not not_found:
      full_command = 'git -C "{dir}" {cmd}'.format(dir=base_dir, cmd=cmd)
      raise_and_log_error(ExecutionError(full_command, program='git'),
                          full_command + ' failed with:\n' + stdout)

    if remaining_branches:
      logging.warning(
          'Branch %s does not exist in %s. Retry with %s',
          branch, remote_url, remaining_branches[0])
      continue

    # Out of candidates: log the (indented) git output and give up.
    lines = stdout.split('\n')
    stdout = '\n '.join(lines)
    logging.error('git -C "%s" %s failed with output:\n %s',
                  base_dir, cmd, stdout)
    raise_and_log_error(ConfigError('Branches {0} do not exist in {1}.'
                                    .format(branches, remote_url)))
def __init__(self, factory, options, *pos_args, **kwargs):
  """Construct the command, resolving any base BOM to refresh from.

  Raises:
    ConfigError: if both --refresh_from_bom_path and
       --refresh_from_bom_version were supplied.
  """
  super(BuildBomCommand, self).__init__(factory, options, *pos_args, **kwargs)

  bom_path = options.refresh_from_bom_path
  bom_version = options.refresh_from_bom_version
  if bom_path and bom_version:
    raise_and_log_error(
        ConfigError('Cannot specify both --refresh_from_bom_path="{0}"'
                    ' and --refresh_from_bom_version="{1}"'
                    .format(bom_path, bom_version)))

  base_bom = None
  if bom_path:
    logging.debug('Using base bom from path "%s"', bom_path)
    check_path_exists(bom_path, "refresh_from_bom_path")
    with open(bom_path, 'r') as stream:
      base_bom = yaml.safe_load(stream.read())
  elif bom_version:
    logging.debug('Using base bom version "%s"', bom_version)
    base_bom = HalRunner(options).retrieve_bom_version(bom_version)

  if base_bom:
    logging.info('Creating new bom based on version "%s"',
                 base_bom.get('version', 'UNKNOWN'))
  self.__builder = BomBuilder(self.options, self.scm, self.metrics,
                              base_bom=base_bom)
def prepare_local_repository_files(self, repository):
  """Write the changelog markdown file into the spinnaker.github.io repo.

  Returns:
    [list of string] The paths of files added to the repository.

  Raises:
    UnexpectedError: if called with any repository other than
       spinnaker.github.io.
  """
  if repository.name != SPINNAKER_GITHUB_IO_REPOSITORY_NAME:
    raise_and_log_error(UnexpectedError('Got "%s"' % repository.name))

  # NOTE(review): datetime.now() is timezone-naive, so %Z renders as an
  # empty string here -- confirm whether an explicit tz was intended.
  timestamp = '{:%Y-%m-%d %H:%M:%S %Z}'.format(datetime.datetime.now())
  version = self.options.spinnaker_version
  changelog_filename = '{version}-changelog.md'.format(version=version)
  target_path = os.path.join(repository.git_dir,
                             '_changelogs', changelog_filename)
  major, minor, _ = version.split('.')
  logging.debug('Adding changelog file %s', target_path)
  with open(target_path, 'w') as f:
    # pylint: disable=trailing-whitespace
    # Jekyll front-matter for the changelog page, followed by a script
    # include of the changelog gist.
    header = textwrap.dedent(
        """\
        ---
        title: Version {major}.{minor}
        changelog_title: Version {version}
        date: {timestamp}
        tags: changelogs {major}.{minor}
        version: {version}
        ---
        """.format(
            version=version, timestamp=timestamp, major=major, minor=minor))
    f.write(header)
    f.write('<script src="%s.js"/>' % self.options.changelog_gist_url)
  return [target_path]
def promote_spin(self, repository):
  """Promote an existing build to become the spin CLI stable version."""
  git_dir = repository.git_dir
  git = self.__scm.git

  if len(self.__spinnaker_version.split('.')) != 3:
    raise_and_log_error(
        ConfigError('Expected spinnaker version in the form X.Y.Z-N'))

  # The next stable patch is derived from existing tags matching gate's
  # major.minor (spin versions track gate).
  next_spin_semver = bump_spin_patch(git, git_dir, self.__gate_version)
  gate_major = next_spin_semver.major
  gate_min = next_spin_semver.minor
  # The previously published patch (floored at 0) names the last tag.
  last_patch = str(max(next_spin_semver.patch - 1, 0))
  self.__stable_version = next_spin_semver.to_version()
  logging.info('calculated new stable version: {}'
               .format(self.__stable_version))

  # NOTE: major and minor for spin binaries are dependent on gate versions.
  last_tag = 'version-{maj}.{min}.{patch}'.format(
      maj=gate_major, min=gate_min, patch=last_patch)
  # Nothing to publish if HEAD is exactly the last tagged commit.
  self.__no_changes = git.query_local_repository_commit_id(git_dir) == git.query_commit_at_tag(git_dir, last_tag)

  candidate = self.options.spin_version
  if self.__no_changes:
    logging.info('No changes in spin since last tag, skipping publish.')
  else:
    # Copy each candidate dist/arch binary to its stable GCS location,
    # then refresh the 'latest' pointer files.
    for d in DIST_ARCH_LIST:
      source = 'spin/{}/{}/{}/{}'.format(
          candidate, d.dist, d.arch, d.filename)
      dest = 'spin/{}/{}/{}/{}'.format(
          self.__stable_version, d.dist, d.arch, d.filename)
      self.__gcs_uploader.copy_file(source, dest)
    self.__update_release_latest_file(gate_major, gate_min)
    self.__update_global_latest_file()
def bintray_repo_delete_version(self, repo, package_name, repository,
                                build_version=None):
  """Delete the given bintray repository version if it exists.

  Returns:
    True if the version was deleted, or did not exist (404).

  Raises:
    ResponseError: on any other bintray HTTP failure.
  """
  try:
    bintray_url = self.__to_bintray_url(repo, package_name, repository,
                                        build_version)
    logging.debug('Checking for %s', bintray_url)
    request = Request(url=bintray_url)
    # urllib Request defaults to GET; override the verb to DELETE.
    request.get_method = lambda: 'DELETE'
    self.__add_bintray_auth_header(request)

    labels = {
        'repo': repo,
        'repository': repository.name,
        'artifact': 'debian'
    }
    # Issue the DELETE through the metrics wrapper so the call is counted.
    self.__metrics.count_call(
        'DeleteArtifact', labels, urlopen, request)
    return True
  except HTTPError as ex:
    if ex.code == 404:
      # Already absent -- treat as success.
      return True
    raise_and_log_error(
        ResponseError('Bintray failure: {}'.format(ex),
                      server='bintray.delete'),
        'Failed on url=%s: %s' % (bintray_url, exception_to_message(ex)))
def __init__(self, factory, options, **kwargs):
  """Set up the halyard publisher with pinned branch/hostname overrides.

  Raises:
    ConfigError: if --halyard_version is not in X.Y.Z-<buildnum> form.
  """
  options_copy = copy.copy(options)
  # Publishing always works off master on github.com regardless of the
  # caller's bom/branch options.
  options_copy.bom_path = None
  options_copy.bom_version = None
  options_copy.git_branch = 'master'
  options_copy.github_hostname = 'github.com'
  # Overrides later if --git_allow_publish_master_branch is false
  super(PublishHalyardCommand, self).__init__(factory, options_copy, **kwargs)
  check_options_set(options, ['halyard_version'])
  match = re.match(r'(\d+)\.(\d+)\.(\d+)-\d+', options.halyard_version)
  if match is None:
    raise_and_log_error(
        ConfigError('--halyard_version={version} is not X.Y.Z-<buildnum>'
                    .format(version=options.halyard_version)))
  self.__stable_version = '{major}.{minor}.{patch}'.format(
      major=match.group(1), minor=match.group(2), patch=match.group(3))

  self.__scm = BranchSourceCodeManager(options_copy, self.get_input_dir())
  self.__hal = HalRunner(options_copy)
  self.__gradle = GradleRunner(options_copy, self.__scm, self.metrics)
  self.__halyard_repo_md_path = os.path.join('docs', 'commands.md')

  # NOTE(review): the version string is parsed a second time below; the
  # regex above already validated X.Y.Z-<buildnum>, so this re-derivation
  # of X.Y.Z duplicates __stable_version -- consider consolidating.
  dash = self.options.halyard_version.find('-')
  semver_str = self.options.halyard_version[0:dash]
  semver_parts = semver_str.split('.')
  if len(semver_parts) != 3:
    raise_and_log_error(
        ConfigError('Expected --halyard_version in the form X.Y.Z-N'))
  self.__release_branch = 'release-{maj}.{min}.x'.format(
      maj=semver_parts[0], min=semver_parts[1])
  self.__release_tag = 'version-' + semver_str
  self.__release_version = semver_str
def query_local_repository_branch(self, git_dir):
  """Returns the current branch name for the repository at git_dir.

  Raises:
    ExecutionError: if git could not determine the branch.
  """
  returncode, stdout = self.run_git(git_dir, 'rev-parse --abbrev-ref HEAD')
  if returncode:
    # BUG FIX: the error summary previously read 'Could detmine branch';
    # fixed the typo and grammar.
    raise_and_log_error(
        ExecutionError('Could not determine branch', program='git'),
        'Could not determine branch in {dir}: {output}'.format(
            dir=git_dir, output=stdout))
  return stdout
def check_bom_service(bom, service_name):
  """Return the entry for |service_name| from the BOM's services map.

  Args:
    bom: [dict] A BOM containing a 'services' mapping.
    service_name: [string] The service key to look up.

  Raises:
    ConfigError: if the BOM has no entry for the service.
  """
  entry = bom.get('services', {}).get(service_name)
  if entry is None:
    raise_and_log_error(
        ConfigError('BOM does not contain service "%s"' % service_name,
                    cause='BadBom'),
        'BOM missing "%s": %s' % (service_name,
                                  bom.get('services', {}).keys()))
  return entry
def determine_repository_version(self, repository):
  """Return the base version (X.Y.Z) recorded in the BOM for |repository|.

  BOM versions are of the form X.Y.Z-<buildnum>; the build number suffix
  is stripped.

  Raises:
    UnexpectedError: if the repository's service is not in the BOM.
  """
  service_name = self.repository_name_to_service_name(repository.name)
  if service_name not in self.__bom['services'].keys():
    raise_and_log_error(
        UnexpectedError('"%s" is not a BOM repo' % service_name))
  service = check_bom_service(self.__bom, service_name)
  # BUG FIX: str.find returned -1 when there was no '-' suffix, which
  # silently truncated the last character of the version. partition
  # returns the whole string in that case.
  version = service['version'].partition('-')[0]
  return version
def get_repository_service_build_version(self, repository):
  """Return the full build version the BOM records for |repository|.

  Raises:
    UnexpectedError: if no BOM was loaded.
    ConfigError: if the BOM has no entry for the repository's service.
  """
  if not self.__bom:
    raise_and_log_error(UnexpectedError('Missing bom', cause='NotReachable'))
  service_name = self.repository_name_to_service_name(repository.name)
  services = self.__bom.get('services', {})
  service_entry = services.get(service_name, {})
  if not service_entry:
    raise_and_log_error(ConfigError('BOM missing service %s' % service_name))
  return service_entry['version']
def query_commit_at_tag(self, git_dir, tag):
  """Return the commit for the given tag, or None if tag is not known."""
  retcode, stdout = self.run_git(git_dir, 'show-ref -- ' + tag)
  if retcode != 0:
    # The tag does not exist in this repository.
    return None
  lines = stdout.split('\n')
  if len(lines) != 1:
    # show-ref matched more than one ref for this tag name.
    raise_and_log_error(
        UnexpectedError('"{tag}" -> "{msg}"'.format(tag=tag, msg=stdout)))
  # The commit id is the first whitespace-delimited field.
  commit, _, _ = stdout.partition(' ')
  return commit
def check_repository_is_current(self, repository):
  """Verify the local repository is at the commit recorded in the BOM.

  Returns:
    True if the local HEAD matches the BOM commit.

  Raises:
    UnexpectedError: if the local commit differs from the BOM commit.
  """
  git_dir = repository.git_dir
  service_name = self.repository_name_to_service_name(repository.name)
  have_commit = self.git.query_local_repository_commit_id(git_dir)
  bom_commit = check_bom_service(self.__bom, service_name)['commit']
  if have_commit != bom_commit:
    # BUG FIX: the message previously reported the *expected* (BOM)
    # commit as the commit the repo "is at"; report both instead.
    raise_and_log_error(
        UnexpectedError(
            '"%s" is at commit "%s", expected bom commit "%s"'
            % (git_dir, have_commit, bom_commit)))
  return True
def check_commit_or_no_changes(self, git_dir, commit_commandline_args):
  """A variant of check_run 'commit' that tolerates 'no changes' errors."""
  retcode, stdout = self.run_git(
      git_dir, 'commit ' + commit_commandline_args)
  if retcode == 1:
    final_line = stdout.split('\n')[-1]
    if 'nothing to commit' in final_line.lower():
      # An empty commit is not an error for our purposes.
      logging.debug('No changes to commit -- raw changelog is unchanged.')
      return stdout
    log_embedded_output(logging.ERROR, 'command output', stdout)
    raise_and_log_error(ExecutionError('git failed.'))
  return stdout
def __init__(self, factory, options, **kwargs):
  """Validate bintray credentials and options before building debians.

  Raises:
    ConfigError: if a required environment variable or option is unset.
  """
  options.github_disable_upstream_push = True
  super(BuildDebianCommand, self).__init__(factory, options, **kwargs)
  # Bound the number of concurrent local builds.
  self.__semaphore = Semaphore(options.max_local_builds)

  if not os.environ.get('BINTRAY_KEY'):
    raise_and_log_error(ConfigError('Expected BINTRAY_KEY set.'))
  if not os.environ.get('BINTRAY_USER'):
    raise_and_log_error(ConfigError('Expected BINTRAY_USER set.'))
  check_options_set(options, [
      'bintray_org',
      'bintray_jar_repository',
      'bintray_debian_repository',
      'bintray_publish_wait_secs'
  ])
def clone_repository_to_path(
    self, repository, commit=None, branch=None, default_branch=None):
  """Clone the remote repository at the given commit or branch.

  If requesting a branch and it is not found, then settle for the default
  branch, if one was explicitly specified.

  Args:
    repository: [GitRepositorySpec] The repository to clone.
    commit: [string] Optional commit to check out after the clone.
    branch: [string] Optional branch to clone; mutually exclusive with
       |commit|.
    default_branch: [string] Optional fallback if |branch| is missing.

  Raises:
    ConfigError: if both commit and branch are specified.
  """
  # pylint: disable=too-many-arguments

  # Idiom fix: compare against None with 'is not None' rather than '!='.
  if (commit is not None) and (branch is not None):
    raise_and_log_error(
        ConfigError('At most one of commit or branch can be specified.'))

  pull_url = self.determine_pull_url(repository)
  git_dir = repository.git_dir

  logging.debug('Begin cloning %s', pull_url)
  parent_dir = os.path.dirname(git_dir)
  ensure_dir_exists(parent_dir)

  clone_command = 'clone ' + pull_url
  if branch:
    # Try the requested branch first, falling back to the default.
    branches = [branch]
    if default_branch:
      branches.append(default_branch)
    self.__check_clone_branch(pull_url, parent_dir, clone_command, branches)
  else:
    self.check_run(parent_dir, clone_command)
  logging.info('Cloned %s into %s', pull_url, parent_dir)

  if commit:
    self.check_run(git_dir, 'checkout -q ' + commit, echo=True)

  upstream = repository.upstream_or_none()
  origin = repository.origin
  if upstream and not self.is_same_repo(upstream, origin):
    # Track upstream for reference, but never push to it.
    logging.debug('Adding upstream %s with disabled push', upstream)
    self.check_run(git_dir, 'remote add upstream ' + upstream)

  which = ('upstream'
           if upstream and not self.is_same_repo(upstream, origin)
           else 'origin')
  if self.__options.github_disable_upstream_push:
    self.check_run(
        git_dir, 'remote set-url --push {which} disabled'.format(which=which))
  if which != 'origin' or not self.__options.github_disable_upstream_push:
    parts = self.normalize_repo_url(repository.origin)
    if len(parts) == 3:
      # Origin is not a local path
      logging.debug('Fixing origin push url')
      push_url = self.determine_push_url(repository)
      self.check_run(git_dir, 'remote set-url --push origin ' + push_url)

  logging.debug('Finished cloning %s', pull_url)
def make(tag):
  """Create a new SemanticVersion from the given tag instance.

  Args:
    tag: [string] in the form <series_name>-<major>.<minor>.<patch>
  """
  match = SemanticVersion.SEMVER_MATCHER.match(tag)
  if match is None:
    raise_and_log_error(UnexpectedError('Malformed tag "%s"' % tag))

  # The first group is the series name (kept as a string); the
  # remaining groups are numeric components.
  series_name = match.group(1)
  components = [int(num) for num in match.groups()[1:]]
  return SemanticVersion(series_name, *components)
def check_source_info(self, repository):
  """Ensure cached source info is consistent with current repository."""
  logging.debug('Checking that cached commit is consistent with %s',
                repository.git_dir)
  info = self.lookup_source_info(repository)
  current_commit = self.__git.query_local_repository_commit_id(
      repository.git_dir)
  cached_commit = info.summary.commit_id
  if cached_commit != current_commit:
    raise_and_log_error(
        UnexpectedError(
            'Cached commit {cache} != current commit {id} in {dir}'.format(
                cache=cached_commit, id=current_commit,
                dir=repository.git_dir)))
  return info
def __init__(self, factory, options, **kwargs):
  """Validate GCE options and determine the image project from the BOM.

  Raises:
    ConfigError: if the BOM lacks a googleImageProject artifact source.
  """
  check_options_set(
      options, ['build_gce_service_account', 'build_gce_project'])
  options.github_disable_upstream_push = True
  super(BuildGceComponentImages, self).__init__(factory, options, **kwargs)

  artifact_sources = self.source_code_manager.bom['artifactSources']
  self.__image_project = artifact_sources['googleImageProject']
  if not self.__image_project:
    raise_and_log_error(
        ConfigError('BOM has no artifactSources.googleImageProject'))
def __init__(self, factory, options, **kwargs):
  """Prepare SCM/uploader state and capture gate's version from the BOM.

  Raises:
    ConfigError: if the BOM has no gate service entry.
  """
  super(BuildSpinCommand, self).__init__(
      factory, options,
      source_repository_names=SPIN_REPOSITORY_NAMES, **kwargs)
  options_copy = copy.copy(options)
  self.__gcs_uploader = SpinGcsUploader(options)
  self.__scm = BranchSourceCodeManager(options_copy, self.get_input_dir())
  self.__build_version = None  # recorded after build

  # Spin versions are pinned to gate's version from the BOM.
  bom_contents = BomSourceCodeManager.load_bom(options)
  gate_entry = bom_contents.get('services', {}).get('gate')
  if not gate_entry:
    raise_and_log_error(
        ConfigError('No gate service entry found in bom {}'.format(
            bom_contents)))
  self.__gate_version = gate_entry['version']
def load_halyard_version_commits(self):
  """Fetch the existing halyard build versions file from GCS.

  Returns:
    The file contents (with a trailing newline), or '' if the versions
    file does not exist yet.

  Raises:
    ExecutionError: if gsutil failed for a reason other than the file
       not existing.
  """
  logging.debug('Fetching existing halyard build versions')
  retcode, stdout = run_subprocess('gsutil cat ' + self.__versions_url)
  if not retcode:
    return stdout + '\n'

  if stdout.find('No URLs matched') < 0:
    # gsutil failed for some reason other than a missing file.
    # BUG FIX: the error summary previously read 'No URLs matched',
    # which described the opposite condition.
    raise_and_log_error(
        ExecutionError('gsutil cat failed', program='gsutil'),
        'Could not fetch "%s": %s' % (self.__versions_url, stdout))

  logging.warning(
      '%s did not exist. Creating a new one.', self.__versions_url)
  return ''
def build_swagger_docs(self, repository, json_path):
  """Build the API from the swagger endpoint."""
  if repository.name != 'gate':
    raise_and_log_error(
        UnexpectedError('Repo "%s" != "gate"' % repository.name))
  docs_dir = os.path.dirname(json_path)
  # Render html2 docs alongside the swagger json using the codegen jar.
  generate_command = (
      'java -jar {jar_path} generate -i {json_path} -l html2'
      ' -o {output_dir} -t {templates_directory}'
      .format(jar_path=self.options.swagger_codegen_cli_jar_path,
              json_path=json_path,
              output_dir=docs_dir,
              templates_directory=self.__templates_directory))
  check_subprocess(generate_command)
  logging.info('Writing docs to directory %s', docs_dir)
def wait_for_url(self, url, timeout_secs):
  """Wait for url to be ready or timeout."""
  logging.info('Waiting for %s', url)
  for _ in range(timeout_secs):
    try:
      code = urlopen(url).getcode()
      if 200 <= code < 300:
        logging.info('%s is ready', url)
        return
    except URLError:
      # Not up yet (connection refused or HTTP error); retry shortly.
      time.sleep(1)
  raise_and_log_error(
      TimeoutError('%s not ready' % url),
      '%s not ready after %s secs' % (url, timeout_secs))
def check_property(self, name, want):
  """Check a configuration property meets our needs."""
  have = self.__halyard_runtime_config[name]
  if have != want:
    raise_and_log_error(
        ConfigError(
            'Halyard server is not configured to support this request.\n'
            'It is using {name}={have!r} rather than {want!r}.\n'
            'You will need to modify /opt/spinnaker/config/halyard-local.yml'
            ' and restart the halyard server.'.format(
                name=name, have=have, want=want),
            cause='config/halyard'))
  logging.debug('Confirmed Halyard server is configured with %s="%s"',
                name, have)
def determine_git_repository_spec(self, git_dir):
  """Infer GitRepositorySpec from a local git repository."""
  git_text = self.check_run(git_dir, 'remote -v')
  # Collect the fetch URL of each remote by name.
  remote_urls = {}
  for match in re.finditer(r'(\w+)\s+(\S+)\s+\(fetch\)', git_text):
    remote_urls[match.group(1)] = match.group(2)

  origin_url = remote_urls.get('origin')
  if not origin_url:
    raise_and_log_error(
        UnexpectedError('{0} has no remote "origin"'.format(git_dir)))

  return GitRepositorySpec(os.path.basename(git_dir),
                           git_dir=git_dir,
                           origin=origin_url,
                           upstream=remote_urls.get('upstream'))
def ensure_local_repository(self, repository):
  """Implements RepositoryCommandProcessor interface."""
  options = self.options
  git_dir = repository.git_dir
  if os.path.exists(git_dir):
    # An existing checkout must be explicitly skipped or deleted.
    if options.delete_existing:
      logging.warning('Deleting existing %s', git_dir)
      shutil.rmtree(git_dir)
    elif options.skip_existing:
      logging.debug('Skipping existing %s', git_dir)
    else:
      raise_and_log_error(
          ConfigError('"{dir}" already exists.'
                      ' Enable "skip_existing" or "delete_existing".'
                      .format(dir=git_dir)))
  super(FetchSourceCommand, self).ensure_local_repository(repository)
def __init__(self, options):
  """Capture options and fetch the halyard daemon's runtime configuration.

  Raises:
    ResponseError: if the halyard daemon rejected the request.
  """
  self.__options = options
  self.__hal_path = options.hal_path

  logging.debug('Retrieving halyard runtime configuration.')
  url = 'http://' + options.halyard_daemon + '/resolvedEnv'
  try:
    response = urlopen(url)
  except HTTPError as error:
    # BUG FIX: the handler previously read the body from the unbound
    # 'response' variable (urlopen raised before assigning it), which
    # masked the real failure with a NameError. HTTPError is itself a
    # response object, so read the body from the error. This also makes
    # this constructor consistent with the other HalRunner variant.
    raise_and_log_error(
        ResponseError(
            '{url}: {code}\n{body}'.format(
                url=url, code=error.code, body=error.read()),
            server='halyard'))
  self.__halyard_runtime_config = yaml.safe_load(response)
def query_tag_commits(self, git_dir, tag_pattern):
  """Collect the TagCommit for each tag matching the pattern.

  Returns: list of CommitTag sorted most recent first.
  """
  retcode, stdout = self.run_git(git_dir, 'show-ref --tags')
  if retcode and stdout:
    raise_and_log_error(
        ExecutionError('git failed in %s' % git_dir, program='git'),
        'git -C "%s" show-ref --tags: %s' % (git_dir, stdout))

  matcher = re.compile(tag_pattern)
  matching = []
  for line in stdout.split('\n'):
    if not line:
      continue
    commit_tag = CommitTag.make(line)
    if matcher.match(commit_tag.tag):
      matching.append(commit_tag)
  return sorted(matching, reverse=True)
def __already_have_tag(self, repository, tag):
  """Determine if we already have the tag in the repository.

  Returns:
    True if the tag exists at the local HEAD commit; False if it does
    not exist at all.

  Raises:
    ConfigError: if the tag exists but points at a different commit.
  """
  git_dir = repository.git_dir
  existing_commit = self.__git.query_commit_at_tag(git_dir, tag)
  if not existing_commit:
    return False

  want_commit = self.__git.query_local_repository_commit_id(git_dir)
  if want_commit != existing_commit:
    raise_and_log_error(
        ConfigError(
            '"{tag}" already exists in "{repo}" at commit {have}, not {want}'
            .format(tag=tag, repo=git_dir,
                    have=existing_commit, want=want_commit)))
  logging.debug('Already have "%s" at %s', tag, want_commit)
  return True
def add_extra_arguments(self, test_name, args, commandline):
  """Add extra arguments to the commandline.

  Args:
    test_name: [string] Name of test specifying the options.
    args: [dict] Specification of additional arguments to pass.
       Each key is the name of the argument, the value is the value to pass.
       If the value is preceded with a '$' then it refers to the value of
       an option. If the value is None then just add the key without an arg.
    commandline: [list] The list of command line arguments to append to.
  """
  option_dict = vars(self.options)
  aliases_dict = self.test_suite.get('aliases', {})
  for key, value in args.items():
    if isinstance(value, (int, bool)):
      # Normalize numeric/boolean values to strings for the commandline.
      value = str(value)
    if key == 'alias':
      # 'alias' names argument groups to expand recursively.
      for alias_name in value:
        if not alias_name in aliases_dict:
          raise_and_log_error(
              ConfigError(
                  'Unknown alias "{name}" referenced in args for "{test}"'
                  .format(name=alias_name, test=test_name)))
        self.add_extra_arguments(
            test_name, aliases_dict[alias_name], commandline)
      continue
    elif value is None:
      pass
    elif value.startswith('$'):
      # '$name' resolves in order: command options, extra test bindings,
      # then the process environment. Falsy resolved values become '""'.
      option_name = value[1:]
      if option_name in option_dict:
        value = option_dict[option_name] or '""'
      elif option_name in self.__extra_test_bindings:
        value = self.__extra_test_bindings[option_name] or '""'
      elif option_name in os.environ:
        value = os.environ[option_name]
      else:
        raise_and_log_error(
            ConfigError(
                'Unknown option "{name}" referenced in args for "{test}"'
                .format(name=option_name, test=test_name)))
    if value is None:
      # Flag-only argument: no value follows the key.
      commandline.append('--' + key)
    else:
      commandline.extend(['--' + key, value])
def ingest_bom(self, line):
  """Function to ingest a single bom into the result map.

  Args:
    line: [string] URL of the bom to ingest; the filename component is
       expected to be '<version>.yml'.
  """
  bom = self.load_bom_from_url(line)
  if not bom:
    return
  try:
    # Sanity-check that the bom's recorded version matches its filename.
    if bom['version'] + '.yml' != line[line.rfind('/') + 1:]:
      message = 'BOM version "%s" != filename "%s"' % (bom['version'], line)
      self.__bad_files[self.url_to_bom_name(line.strip())] = message
      logging.warning(message)
      raise_and_log_error(UnexpectedError(message))

    self.analyze_bom(bom)
  except Exception as ex:
    # Best-effort batch analysis boundary: record the failure (including
    # the UnexpectedError raised above, whose message is overwritten
    # with the exception text) and keep processing other boms.
    self.__bad_files[self.url_to_bom_name(line.strip())] = (
        exception_to_message(ex))
    maybe_log_exception('analyze_bom', ex,
                        action_msg='Skipping %s' % line)
def check_property(self, name, want):
  """Check a configuration property meets our needs."""
  have = self.__halyard_runtime_config[name]
  if have == want:
    logging.debug('Confirmed Halyard server is configured with %s="%s"',
                  name, have)
    return
  # The server's resolved configuration conflicts with this request.
  raise_and_log_error(
      ConfigError(
          'Halyard server is not configured to support this request.\n'
          'It is using {name}={have!r} rather than {want!r}.\n'
          'You will need to modify /opt/spinnaker/config/halyard-local.yml'
          ' and restart the halyard server.'.format(name=name, have=have,
                                                    want=want),
          cause='config/halyard'))
def _do_command(self):
  """Implements CommandProcessor interface."""
  options = self.options
  spinnaker_version = options.spinnaker_version
  options_copy = copy.copy(options)
  options_copy.git_branch = "master"  # push to master in spinnaker.io
  publish_changelog_command = PublishChangelogFactory().make_command(
      options_copy)
  changelog_gist_url = options.changelog_gist_url

  # Make sure changelog exists already.
  # If it does not then fail.
  try:
    logging.debug("Verifying changelog ready at %s", changelog_gist_url)
    urlopen(changelog_gist_url)
  except HTTPError as error:
    # BUG FIX: previously this logged the exception_to_message function
    # object itself rather than calling it on the error. Also fixed the
    # 'publising' typo in the error message.
    logging.error(exception_to_message(error))
    raise_and_log_error(
        ConfigError(
            'Changelog gist "{url}" must exist before publishing a release.'
            .format(url=changelog_gist_url),
            cause="ChangelogMissing",
        ))

  bom = self.__hal.retrieve_bom_version(self.options.bom_version)
  bom["version"] = spinnaker_version
  bom_path = os.path.join(self.get_output_dir(), spinnaker_version + ".yml")
  write_to_path(yaml.safe_dump(bom, default_flow_style=False), bom_path)
  self.__hal.publish_bom_path(bom_path)
  self.push_branches_and_tags(bom)

  self.__hal.publish_spinnaker_release(
      spinnaker_version,
      options.spinnaker_release_alias,
      changelog_gist_url,
      options.min_halyard_version,
  )

  # Deprecate the release this one supersedes, if any.
  prior_version = get_prior_version(spinnaker_version)
  if prior_version is not None:
    self.__hal.deprecate_spinnaker_release(prior_version)

  logging.info("Publishing changelog")
  publish_changelog_command()
def register(self, registry, subparsers, defaults):
  """Registers a command factory.

  Args:
    registry: [dict] The registry to add to, keyed by command name.
    subparsers: [ArgumentParser subparsers] for adding command arguments
    defaults: [dict] optional default values for command arguments

  Raises:
    UnexpectedError: if a factory with this name is already registered.
  """
  name = self.name
  if name in registry:
    raise_and_log_error(
        UnexpectedError(
            'CommandFactory "{name}" already exists.'.format(name=name)))

  self.add_argparser(subparsers, defaults)
  registry[name] = self
def consider_debian_on_bintray(self, repository, build_version):
  """Check whether desired version already exists on bintray.

  Returns:
    True if the artifact already exists and the build can be skipped;
    False if a build is needed (any existing artifact was deleted).

  Raises:
    ConfigError: if an artifact exists and neither skip_existing nor
       delete_existing is enabled, or the repos are in inconsistent state.
  """
  options = self.__options
  exists = []
  missing = []

  # technically we publish to both maven and debian repos.
  # we can be in a state where we are in one but not the other.
  # let's not worry about this for now.
  for bintray_repo in [options.bintray_debian_repository]:#,
                       # options.bintray_jar_repository]:
    # Debian packages carry a 'spinnaker-' prefix, except the
    # monitoring daemon which has its own dedicated package name.
    package_name = repository.name
    if bintray_repo == options.bintray_debian_repository:
      if package_name == 'spinnaker-monitoring':
        package_name = 'spinnaker-monitoring-daemon'
      elif not package_name.startswith('spinnaker'):
        package_name = 'spinnaker-' + package_name
    if self.bintray_repo_has_version(
        bintray_repo, package_name, repository, build_version):
      exists.append(bintray_repo)
    else:
      missing.append(bintray_repo)

  if exists:
    if options.skip_existing:
      if missing:
        raise_and_log_error(
            ConfigError('Have {name} version for {exists} but not {missing}'
                        .format(name=repository.name,
                                exists=exists[0], missing=missing[0])))
      logging.info('Already have %s -- skipping build', repository.name)
      labels = {'repository': repository.name, 'artifact': 'debian'}
      self.__metrics.inc_counter(
          'ReuseArtifact', labels,
          'Kept existing desired debian package version.')
      return True

    if options.delete_existing:
      # NOTE(review): package_name here is whatever value the loop above
      # left behind -- fine while the list has a single repo, but fragile
      # if the jar repository is ever re-enabled.
      for repo in exists:
        self.bintray_repo_delete_version(repo, package_name, repository,
                                         build_version=build_version)
    else:
      raise_and_log_error(
          ConfigError('Already have debian for {name}'.format(
              name=repository.name)))
  return False
def __init__(self, options):
  """Capture options and load the halyard daemon's resolved environment.

  Raises:
    ResponseError: if the halyard daemon rejected the request.
  """
  self.__options = options
  self.__hal_path = options.hal_path

  logging.debug("Retrieving halyard runtime configuration.")
  url = "http://" + options.halyard_daemon + "/resolvedEnv"
  try:
    response = urlopen(url)
  except HTTPError as error:
    # HTTPError is itself a response object; its body explains the failure.
    body = error.read()
    raise_and_log_error(
        ResponseError(
            "{url}: {code}\n{body}".format(url=url, code=error.code,
                                           body=body),
            server="halyard",
        ))
  self.__halyard_runtime_config = yaml.safe_load(response)
def fetch_bintray_url(self, bintray_url):
  """GET a bintray API url and return its (headers, decoded json content).

  Raises:
    ResponseError: if bintray rejected the request.
  """
  request = Request(bintray_url)
  if self.__basic_auth:
    request.add_header('Authorization', self.__basic_auth)

  try:
    response = urlopen(request)
    headers = response.info()
    payload = response.read()
    # BUG FIX: json.JSONDecoder no longer accepts an 'encoding' argument
    # (deprecated in 3.1, removed in Python 3.9), so the old
    # JSONDecoder(encoding='utf-8').decode(payload) raised TypeError.
    # Decode the bytes payload explicitly instead. Also removed a no-op
    # 'except Exception: raise' clause.
    content = json.loads(payload.decode('utf-8'))
  except HTTPError as ex:
    raise_and_log_error(
        ResponseError('Bintray failure: {}'.format(ex),
                      server='bintray.api'),
        'Failed on url=%s: %s' % (bintray_url, exception_to_message(ex)))
  return headers, content
def bump_spin_patch(git, git_dir, gate_version):
  '''Calculates the next spin version from the gate version and previous spin tags.

  Spin is coupled to the Gate major and minor version.
  Gate is a routing server, so features and breaking changes
  in Gate must be reflected in spin since it is a client.

  :param git: git support helper class.
  :param git_dir: spin git directory.
  :param gate_version: gate version to align spin version with in
     <maj>.<min>.<patch>-<buildnum> format.
  :return: (SemanticVersion) Next semver spin version.
  '''
  gate_version_parts = gate_version.split('-')
  if len(gate_version_parts) != 2:
    raise_and_log_error(
        ValueError('Malformed gate version {}'.format(gate_version)))

  # SemanticVersion.make() expects a tag, so formulate the input gate
  # version as a tag.
  gate_semver = SemanticVersion.make('version-{}'.format(
      gate_version_parts[0]))

  # BUG FIX: the dots were previously unescaped in the pattern, so they
  # matched any character (e.g. 'version-1x2.3' for gate 1.2).
  tag_pattern = r'version-{maj}\.{min}\.(\d+)'.format(
      maj=gate_semver.major, min=gate_semver.minor)
  tag_matcher = re.compile(tag_pattern)
  tags = git.fetch_tags(git_dir)
  logging.info('searching git tags %s for patterns matching %s',
               tags, tag_pattern)
  matching_semvers = [SemanticVersion.make(t)
                      for t in tags if tag_matcher.match(t)]
  logging.info('found matching semvers: %s', matching_semvers)
  if matching_semvers:
    max_semver = max(matching_semvers)
    patch = max_semver.next(SemanticVersion.PATCH_INDEX).patch
  else:
    # First release for this gate major.minor.
    # (Normalized from the string '0' to an int for consistency with the
    # int patch in the branch above; both format identically below.)
    patch = 0

  # SemanticVersion.make() expects a tag, so formulate the next spin
  # version as a tag.
  spin_semver = SemanticVersion.make(
      'version-{major}.{minor}.{patch}'.format(major=gate_semver.major,
                                               minor=gate_semver.minor,
                                               patch=patch))
  logging.info('calculated next spin patch version: %s', spin_semver)
  return spin_semver
def have_image(self, repository):
  """Determine if we already have an image for the repository or not.

  Returns:
    True if an image exists and the build should be skipped.
    False if no image remains (possibly after deleting an existing one).

  Raises:
    ConfigError: if an image exists and neither skip_existing nor
       delete_existing was enabled.
  """
  bom = self.source_code_manager.bom
  dependencies = bom['dependencies']
  services = bom['services']
  service_name = self.scm.repository_name_to_service_name(repository.name)
  # The version may be recorded either as a dependency or a service.
  if service_name in dependencies:
    build_version = dependencies[service_name]['version']
  else:
    build_version = services[service_name]['version']

  options = self.options
  # '.' and ':' are not valid in GCE image names.
  image_name = 'spinnaker-{repo}-{version}'.format(
      repo=repository.name,
      version=build_version.replace('.', '-').replace(':', '-'))
  lookup_command = ['gcloud', '--account', options.build_gce_service_account,
                    'compute', 'images', 'list', '--filter', image_name,
                    '--project', self.__image_project,
                    '--quiet', '--format=json']
  logging.debug('Checking for existing image for "%s"', repository.name)
  got = check_subprocess(' '.join(lookup_command))
  if got.strip() == '[]':
    # No image found.
    return False

  labels = {'repository': repository.name, 'artifact': 'gce-image'}
  if self.options.skip_existing:
    logging.info('Already have %s -- skipping build', image_name)
    self.metrics.inc_counter('ReuseArtifact', labels)
    return True
  if not self.options.delete_existing:
    raise_and_log_error(
        ConfigError('Already have image "{name}"'.format(name=image_name)))

  # NOTE(review): deletion uses options.gcb_service_account while the
  # lookup above used options.build_gce_service_account -- confirm the
  # different account is intentional.
  delete_command = ['gcloud', '--account', options.gcb_service_account,
                    'compute', 'images', 'delete', image_name,
                    '--project', options.build_gce_project,
                    '--quiet']
  logging.debug('Deleting existing image %s', image_name)
  self.metrics.count_call('DeleteArtifact', labels,
                          'Attempts to delete existing GCE images.',
                          check_subprocess, ' '.join(delete_command))
  return False
def build_swagger_docs(self, repository, json_path):
    """Render HTML API docs from a swagger JSON file using swagger-codegen.

    Only the "gate" repository provides the swagger spec, so anything else
    is rejected as an internal error.
    """
    if repository.name != "gate":
        raise_and_log_error(
            UnexpectedError('Repo "%s" != "gate"' % repository.name)
        )

    # Emit the generated docs alongside the input json file.
    docs_dir = os.path.dirname(json_path)
    generate_command = (
        "java -jar {jar_path} generate -i {json_path} -l html2"
        " -o {output_dir} -t {templates_directory}".format(
            jar_path=self.options.swagger_codegen_cli_jar_path,
            json_path=json_path,
            output_dir=docs_dir,
            templates_directory=self.__templates_directory,
        )
    )
    check_subprocess(generate_command)
    logging.info("Writing docs to directory %s", docs_dir)
def determine_git_repository_spec(self, git_dir):
    """Infer GitRepositorySpec from a local git repository."""
    # Collect each remote's fetch URL from `git remote -v` output.
    remotes_text = self.check_run(git_dir, "remote -v")
    fetch_urls = {}
    for found in re.finditer(r"(\w+)\s+(\S+)\s+\(fetch\)", remotes_text):
        fetch_urls[found.group(1)] = found.group(2)

    origin_url = fetch_urls.get("origin")
    if not origin_url:
        raise_and_log_error(
            UnexpectedError('{0} has no remote "origin"'.format(git_dir))
        )

    # The repository name is taken from the checkout directory's basename;
    # "upstream" is optional and may be absent.
    return GitRepositorySpec(
        os.path.basename(git_dir),
        git_dir=git_dir,
        origin=origin_url,
        upstream=fetch_urls.get("upstream"),
    )
def __init__(self, factory, options, **kwargs):
  """Construct the command, binding the gate version from the bom."""
  super(BuildSpinCommand, self).__init__(
      factory, options,
      source_repository_names=SPIN_REPOSITORY_NAMES, **kwargs)

  self.__gcs_uploader = SpinGcsUploader(options)
  self.__scm = BranchSourceCodeManager(
      copy.copy(options), self.get_input_dir())
  self.__build_version = None  # recorded after build

  # Spin versions are derived from gate's version, so the bom must
  # contain a gate service entry.
  bom_contents = BomSourceCodeManager.load_bom(options)
  gate_entry = bom_contents.get('services', {}).get('gate', {})
  if not gate_entry:
    raise_and_log_error(
        ConfigError('No gate service entry found in bom {}'.format(
            bom_contents)))
  self.__gate_version = gate_entry['version']
def __init__(self, factory, options, **kwargs):
  """Construct the command, pinning source control to master on github.com."""
  # Publishing always works from the master branch on github.com,
  # regardless of what the caller's options say.
  publish_options = copy.copy(options)
  publish_options.git_branch = 'master'
  publish_options.github_hostname = 'github.com'
  super(PublishSpinCommand, self).__init__(factory, publish_options, **kwargs)

  check_options_set(options, ['spin_version'])

  # Ensure we have a version to promote.
  bom_contents = BomSourceCodeManager.load_bom(publish_options)
  gate_entry = bom_contents.get('services', {}).get('gate', {})
  if not gate_entry:
    raise_and_log_error(
        ConfigError('No gate service entry found in bom {}'.format(bom_contents)))

  self.__spinnaker_version = options.bom_version or bom_contents['version']
  self.__gate_version = gate_entry['version']
  self.__stable_version = None  # Set after promote_spin.
  self.__no_changes = False  # Set after promote_spin.
  self.__scm = BranchSourceCodeManager(publish_options, self.get_input_dir())
  self.__gcs_uploader = SpinGcsUploader(options)
def check_repository_is_current(self, repository):
  """Verify the local checkout is at the expected commit or branch.

  When the repository spec pins a commit, the local HEAD must be exactly
  that commit; otherwise the local branch must match options.git_branch
  (defaulting to 'master').

  Returns:
    True when the checkout matches.

  Raises:
    UnexpectedError: on a commit or branch mismatch.
  """
  git_dir = repository.git_dir

  wanted_commit = repository.commit_or_none()
  if wanted_commit is not None:
    actual_commit = self.git.query_local_repository_commit_id(git_dir)
    if actual_commit != wanted_commit:
      raise_and_log_error(
          UnexpectedError(
              '"%s" is at the wrong commit "%s" vs "%s"'
              % (git_dir, actual_commit, wanted_commit)))
    return True

  wanted_branch = self.options.git_branch or 'master'
  actual_branch = self.git.query_local_repository_branch(git_dir)
  if actual_branch != wanted_branch:
    raise_and_log_error(
        UnexpectedError('"%s" is at the wrong branch "%s" vs "%s"'
                        % (git_dir, actual_branch, wanted_branch)))
  return True
def build_swagger_docs(self, repository, docs_url):
  """Fetch the swagger spec from gate and render HTML docs from it.

  Only the 'gate' repository serves the swagger endpoint, so anything
  else is rejected as an internal error.
  """
  if repository.name != 'gate':
    raise_and_log_error(
        UnexpectedError('Repo "%s" != "gate"' % repository.name))

  docs_dir = self.get_output_dir()
  ensure_dir_exists(docs_dir)
  docs_path = os.path.join(docs_dir, 'docs.json')

  logging.info('Generating swagger docs for %s', repository.name)
  # Download the swagger json, then feed it to swagger-codegen.
  fetch_command = 'curl -s {url} -o {docs_path}'.format(
      url=docs_url, docs_path=docs_path)
  check_subprocess(fetch_command)

  generate_command = (
      'java -jar {jar_path} generate -i {docs_path} -l html2'
      ' -o {output_dir} -t {templates_directory}'
      .format(jar_path=self.options.swagger_codegen_cli_jar_path,
              docs_path=docs_path,
              output_dir=docs_dir,
              templates_directory=self.__templates_directory))
  check_subprocess(generate_command)
  logging.info('Writing docs to directory %s', docs_dir)
def bintray_repo_has_version(self, repo, package_name, repository,
                             build_version):
  """See if the given bintray repository has the package version to build.

  Returns:
    True if the version already exists, False if bintray returned 404.

  Raises:
    ResponseError: on any HTTP failure other than 404.
  """
  bintray_url = self.__to_bintray_url(repo, package_name, repository,
                                      build_version)
  logging.debug('Checking for %s', bintray_url)
  request = urllib2.Request(url=bintray_url)
  self.__add_bintray_auth_header(request)
  # Keep the try body to the single call that we classify by HTTP status.
  try:
    urllib2.urlopen(request)
    return True
  except urllib2.HTTPError as ex:
    # 404 simply means the version does not exist yet.
    if ex.code == 404:
      return False
    # BUG FIX: formerly used ex.message, which does not exist on Python 3
    # HTTPError; format the exception itself. Also removed a dead
    # `except Exception: raise` clause that was a no-op.
    raise_and_log_error(
        ResponseError('Bintray failure: {}'.format(ex),
                      server='bintray.check'),
        'Failed on url=%s: %s' % (bintray_url, ex))
def check_subprocess(cmd, stream=None, **kwargs):
  """Run_subprocess and raise ExecutionError if it fails.

  Args:
    cmd: [string] The shell command line to run.
    stream: Optional stream passed through to run_subprocess.
    embed_errors: [bool, kwarg] When True (default) log the full output on
      failure; otherwise log only the last 10 lines.

  Returns:
    The stripped stdout of the command on success.

  Raises:
    ExecutionError: if the command exits with a nonzero status.
  """
  embed_errors = kwargs.pop('embed_errors', True)
  retcode, stdout = run_subprocess(cmd, stream=stream, **kwargs)
  if retcode == 0:
    return stdout.strip()

  if embed_errors:
    log_embedded_output(logging.ERROR, 'command output', stdout)
    logging.error('Command failed. See embedded output above.')
  else:
    lines = stdout.split('\n')
    # BUG FIX: was `if lines > 10:` which compared a list to an int
    # (TypeError on Python 3, always-true on Python 2); the intent was to
    # keep only the last 10 lines of output.
    if len(lines) > 10:
      lines = lines[-10:]
    log_embedded_output(logging.ERROR,
                        'Command failed with last %d lines' % len(lines),
                        '\n'.join(lines))

  program = os.path.basename(shlex.split(cmd)[0])
  raise_and_log_error(ExecutionError(program + ' failed.', program=program))
def __init__(self, factory, options, **kwargs):
  """Construct the command, optionally loading a baseline bom to diff against."""
  if options.relative_to_bom_path and options.relative_to_bom_version:
    raise_and_log_error(
        ConfigError('Cannot specify both --relative_to_bom_path'
                    ' and --relative_to_bom_version.'))

  # Resolve the baseline ("relative") bom, if one was requested at all.
  if options.relative_to_bom_path:
    with open(options.relative_to_bom_path, 'r') as stream:
      relative_bom = yaml.safe_load(stream.read())
  elif options.relative_to_bom_version:
    relative_bom = HalRunner(options).retrieve_bom_version(
        options.relative_to_bom_version)
  else:
    relative_bom = None
  self.__relative_bom = relative_bom

  # Use own repository to avoid race conditions when commands are
  # running concurrently.
  derived_options = copy.copy(options)
  derived_options.github_disable_upstream_push = True
  super(BuildChangelogCommand, self).__init__(
      factory, derived_options, **kwargs)
def __init__(self, factory, options, **kwargs):
    """Construct the command, verifying the changelog gist is reachable.

    Raises:
        ConfigError: if the changelog gist URL cannot be fetched.
    """
    super(PublishChangelogCommand, self).__init__(
        factory,
        make_options_with_fallback(options),
        source_repository_names=[SPINNAKER_IO_REPOSITORY_NAME],
        **kwargs
    )
    check_options_set(options, ["spinnaker_version", "changelog_gist_url"])
    try:
        # Fail fast if the gist we are going to publish against is missing.
        logging.debug(
            'Verifying changelog gist exists at "%s"', options.changelog_gist_url
        )
        urlopen(options.changelog_gist_url)
    except HTTPError as error:
        # BUG FIX: HTTPError has no ".message" attribute on Python 3; format
        # the exception itself so this error path cannot itself crash.
        raise_and_log_error(
            ConfigError(
                'Changelog gist "{url}": {error}'.format(
                    url=options.changelog_gist_url, error=error
                )
            )
        )
def __determine_repo_install_args(self, repository):
  """Determine --spinnaker_dev-github_[owner|user] args for install script.

  The owner defaults to 'spinnaker' for the default/upstream owners; when
  unset it is parsed out of the repository's origin URL. The branch falls
  back to the branch of this script's own local repository.

  Returns:
    A flat list of command-line args for the install script.

  Raises:
    UnexpectedError: if the owner must be derived from the origin URL but
      the URL is not a recognizable github.com URL.
  """
  options = self.options
  branch = options.git_branch
  owner = ('spinnaker'
           if options.github_owner in ('default', 'upstream')
           else options.github_owner)
  git_dir = os.path.dirname(__file__)
  if not branch:
    branch = GitRunner(options).query_local_repository_branch(git_dir)
  if not owner:
    url = repository.origin
    # BUG FIX: escape the '.' so the pattern only matches a literal
    # "github.com" rather than e.g. "githubXcom".
    match = re.search(r'github\.com/([^/]+)/', url)
    if not match:
      raise_and_log_error(
          UnexpectedError('Cannot determine owner from url=%s' % url,
                          cause='BadUrl'))
    owner = match.group(1)
  return [
      '--spinnaker_dev_github_owner', owner,
      '--spinnaker_dev_github_branch', branch
  ]
def _do_repository(self, repository):
    """Commit the generated changelog files and push them to origin.

    Only operates on the spinnaker.io repository. Works on a version-named
    changelog branch (or directly on master when
    git_allow_publish_master_branch is set), re-using any branch left over
    from a previous attempt at the same version.

    Raises:
        UnexpectedError: if called with any other repository.
    """
    if repository.name != SPINNAKER_IO_REPOSITORY_NAME:
        raise_and_log_error(UnexpectedError('Got "%s"' % repository.name))

    base_branch = "master"
    self.scm.ensure_git_path(repository, branch=base_branch)
    version = self.options.spinnaker_version
    # Either publish straight to master, or to a per-version changelog
    # branch (-B resets/creates it if it already exists).
    if self.options.git_allow_publish_master_branch:
        branch_flag = ""
        head_branch = "master"
    else:
        branch_flag = "-B"
        head_branch = version + "-changelog"

    files_added = self.prepare_local_repository_files(repository)
    git_dir = repository.git_dir
    message = "doc(changelog): Spinnaker Version " + version

    local_git_commands = [
        # These commands are accomodating to a branch already existing
        # because the branch is on the version, not build. A rejected
        # build for some reason that is re-tried will have the same version
        # so the branch may already exist from the earlier attempt.
        "fetch origin " + base_branch,
        "checkout " + base_branch,
        "checkout {flag} {branch}".format(flag=branch_flag, branch=head_branch),
        "add " + " ".join([os.path.abspath(path) for path in files_added]),
    ]
    logging.debug(
        'Commiting changes into local repository "%s" branch=%s',
        repository.git_dir,
        head_branch,
    )
    git = self.git
    git.check_run_sequence(git_dir, local_git_commands)
    # A no-change commit is tolerated: a retried build may have already
    # committed identical files on the earlier attempt.
    git.check_commit_or_no_changes(git_dir, '-m "{msg}"'.format(msg=message))

    logging.info('Pushing branch="%s" into "%s"', head_branch, repository.origin)
    git.push_branch_to_origin(git_dir, branch=head_branch)
def __check_clone_branch(self, remote_url, base_dir, clone_command, branches):
    """Run the clone command, trying each candidate branch until one works.

    Args:
        remote_url: [string] URL being cloned (used in log messages only).
        base_dir: [string] Directory in which the git command runs.
        clone_command: [string] The base clone command, without `-b`.
        branches: [list of string] Candidate branch names in preference
            order. Assumed non-empty; an empty list would raise IndexError
            on the first pop.

    Raises:
        ExecutionError: the clone failed for a reason other than the
            branch being absent from the remote.
        ConfigError: none of the candidate branches exist on the remote.
    """
    remaining_branches = list(branches)
    while True:
        branch = remaining_branches.pop(0)
        cmd = "{clone} -b {branch}".format(clone=clone_command, branch=branch)
        retcode, stdout = self.run_git(base_dir, cmd)
        if not retcode:
            # Clone succeeded with this branch.
            return

        # Distinguish "branch missing on the remote" from other failures
        # by scanning git's error output.
        not_found = (
            stdout.find("Remote branch {branch} not found".format(branch=branch))
            >= 0
        )
        if not not_found:
            full_command = 'git -C "{dir}" {cmd}'.format(dir=base_dir, cmd=cmd)
            raise_and_log_error(
                ExecutionError(full_command, program="git"),
                full_command + " failed with:\n" + stdout,
            )

        if remaining_branches:
            logging.warning(
                "Branch %s does not exist in %s. Retry with %s",
                branch,
                remote_url,
                remaining_branches[0],
            )
            continue

        # No candidates left: log git's output (indented) and give up.
        lines = stdout.split("\n")
        stdout = "\n   ".join(lines)
        logging.error(
            'git -C "%s" %s failed with output:\n   %s', base_dir, cmd, stdout
        )
        raise_and_log_error(
            ConfigError(
                "Branches {0} do not exist in {1}.".format(branches, remote_url)
            )
        )
def make(entry):
  """Create a new CommitMessage from an individual entry"""
  match = CommitMessage._MEDIUM_PRETTY_COMMIT_MATCHER.match(entry)
  if match is None:
    raise_and_log_error(
        UnexpectedError('Unexpected commit entry {0}'.format(entry)))

  # Strip trailing whitespace from every line of the message body, which
  # is everything after the matched header.
  body_lines = [line.rstrip() for line in entry[match.end(3):].split('\n')]

  # Trim blank lines from both ends only, so the text keeps any initial
  # indentation but has no leading or trailing blank lines.
  start = 0
  stop = len(body_lines)
  while start < stop and not body_lines[start]:
    start += 1
  while stop > start and not body_lines[stop - 1]:
    stop -= 1
  text = '\n'.join(body_lines[start:stop])

  return CommitMessage(match.group(1), match.group(2), match.group(3), text)
def determine_halyard_commit(self):
    """Determine the commit_id that we want to publish.

    Loads the halyard version->commit mapping either from a local file or
    from the GCS bucket, then looks up options.halyard_version in it.

    Returns:
        The git commit id for options.halyard_version.

    Raises:
        ConfigError: if the requested version is not in the mapping.
    """
    options = self.options
    versions_url = options.halyard_version_commits_url
    if not versions_url:
        # BUG FIX: the format string hardcoded "(unknown)" while passing an
        # unused "filename" kwarg; use the intended filename placeholder.
        versions_url = "{base}/{filename}".format(
            base=options.halyard_bucket_base_url,
            filename=BuildHalyardCommand.HALYARD_VERSIONS_BASENAME,
        )
    if os.path.exists(versions_url):
        logging.debug("Loading halyard version info from file %s", versions_url)
        with open(versions_url, "r") as stream:
            version_data = stream.read()
    else:
        logging.debug("Loading halyard version info from bucket %s", versions_url)
        gsutil_output = check_subprocess(
            "gsutil cat {url}".format(url=versions_url), stderr=subprocess.PIPE)

        # The latest version of gsutil prints a bunch of python warnings to
        # stdout (see b/152449160). This file is a series of lines that look
        # like...
        #   0.41.0-180209172926: 05f1e832ab438e5a980d1102e84cdb348a0ab055
        # ...so we'll just throw out any lines that don't start with digits.
        # BUG FIX: also skip empty lines, which would raise IndexError on
        # line[0].
        valid_lines = [
            line for line in gsutil_output.splitlines()
            if line and line[0].isdigit()
        ]
        version_data = "\n".join(valid_lines)

    commit = yaml.safe_load(version_data).get(options.halyard_version)
    if commit is None:
        raise_and_log_error(
            ConfigError(
                'Unknown halyard version "{version}" in "{url}"'.format(
                    version=options.halyard_version, url=versions_url)))
    return commit
def query_local_repository_commits_to_existing_tag_from_id( self, git_dir, commit_id, commit_tags): """Returns the list of commit messages to the local repository.""" # pylint: disable=invalid-name id_to_newest_tag = {} for commit_tag in sorted(commit_tags): id_to_newest_tag[commit_tag.commit_id] = commit_tag.tag tag = id_to_newest_tag.get(commit_id) if tag is not None: return tag, [] result = self.check_run(git_dir, 'log --pretty=oneline ' + commit_id) lines = result.split('\n') count = 0 for line in lines: line_id = line.split(' ', 1)[0] tag = id_to_newest_tag.get(line_id) if tag: break count += 1 if tag is None: if self.options.git_allow_no_baseline_tag: logging.warning( 'No baseline tag for "%s", but that is allowed.', git_dir) tag = '' else: raise_and_log_error( ConfigError( 'There is no baseline tag for commit "{id}" in {dir}.'. format(id=commit_id, dir=git_dir))) result = self.check_run( git_dir, 'log -n {count} --pretty=medium {id}'.format(count=count, id=commit_id)) messages = CommitMessage.make_list_from_result(result) return tag, messages
def load_bom(options):
  """Helper function for initializing the BOM if one was specified.

  Exactly one of options.bom_path or options.bom_version must be set;
  the bom is then loaded from the file or retrieved via halyard.
  """
  bom_path = getattr(options, 'bom_path', None)
  bom_version = getattr(options, 'bom_version', None)

  # Require exactly one source for the bom.
  if bool(bom_path) == bool(bom_version):
    raise_and_log_error(
        ConfigError('Expected exactly one of: "bom_path", or "bom_version"'))

  if bom_path:
    check_path_exists(bom_path, why='options.bom_path')
    return BomSourceCodeManager.bom_from_path(bom_path)

  # By elimination, bom_version is set.
  logging.debug('Retrieving bom version %s', bom_version)
  return HalRunner(options).retrieve_bom_version(bom_version)
def __init__(self, factory, options, **kwargs):
  """Construct the command, validating the bintray option pairing.

  Raises:
    ConfigError: if exactly one of bintray_org or
      bintray_debian_repository was supplied.
  """
  # BUG FIX: the original `a is None != b is None` was a chained comparison
  # equivalent to `(a is None) and (None != b) and (b is None)`, which can
  # never be True, so the mismatch was never detected. Parenthesize to get
  # the intended exclusive-or check.
  if ((options.bintray_org is None)
      != (options.bintray_debian_repository is None)):
    raise_and_log_error(
        ConfigError('Either neither or both "bintray_org"'
                    ' and "bintray_debian_repository" should be specified'))

  self.__bad_files = {}
  self.__non_standard_boms = {}

  # We're going to have a bunch of threads each writing into different keys
  # in order to deconflict with one another lockless. Then we'll aggregate
  # it all together when we're done processing for a single aggregate result.
  self.__per_thread_result_map = {}

  self.__expect_docker_registry = options.docker_registry
  self.__expect_debian_repository = (
      'https://dl.bintray.com/%s/%s'
      % (options.bintray_org, options.bintray_debian_repository)
      if options.bintray_org else None)

  super(CollectBomVersions, self).__init__(
      factory, options, **kwargs)