def __init__(self, factory, options, **kwargs):
  options_copy = copy.copy(options)
  options_copy.bom_path = None
  options_copy.bom_version = None
  options_copy.git_branch = 'master'
  # Overrides later if --git_allow_publish_master_branch is false
  super(PublishHalyardCommand, self).__init__(factory, options_copy, **kwargs)

  self.__scm = BranchSourceCodeManager(options_copy, self.get_input_dir())
  self.__hal = HalRunner(options_copy)
  self.__gradle = GradleRunner(options_copy, self.__scm, self.metrics)
  self.__halyard_repo_md_path = os.path.join('docs', 'commands.md')

  dash = self.options.halyard_version.find('-')
  semver_str = self.options.halyard_version[0:dash]
  semver_parts = semver_str.split('.')
  if len(semver_parts) != 3:
    raise_and_log_error(
        ConfigError('Expected --halyard_version in the form X.Y.Z-N'))
  self.__release_branch = 'release-{maj}.{min}.x'.format(
      maj=semver_parts[0], min=semver_parts[1])
  self.__release_tag = 'version-' + semver_str
  self.__release_version = semver_str
def __init__(self, factory, options, *pos_args, **kwargs):
  super(BuildBomCommand, self).__init__(factory, options, *pos_args, **kwargs)

  if options.refresh_from_bom_path and options.refresh_from_bom_version:
    raise_and_log_error(
        ConfigError('Cannot specify both --refresh_from_bom_path="{0}"'
                    ' and --refresh_from_bom_version="{1}"'
                    .format(options.refresh_from_bom_path,
                            options.refresh_from_bom_version)))

  if options.refresh_from_bom_path:
    logging.debug('Using base bom from path "%s"',
                  options.refresh_from_bom_path)
    check_path_exists(options.refresh_from_bom_path, "refresh_from_bom_path")
    with open(options.refresh_from_bom_path, 'r') as stream:
      base_bom = yaml.safe_load(stream.read())
  elif options.refresh_from_bom_version:
    logging.debug('Using base bom version "%s"',
                  options.refresh_from_bom_version)
    base_bom = HalRunner(options).retrieve_bom_version(
        options.refresh_from_bom_version)
  else:
    base_bom = None
  if base_bom:
    logging.info('Creating new bom based on version "%s"',
                 base_bom.get('version', 'UNKNOWN'))

  self.__builder = BomBuilder(self.options, self.scm, self.metrics,
                              base_bom=base_bom)
def init_argparser(self, parser, defaults):
  """Adds command-specific arguments."""
  super(BuildChangelogFactory, self).init_argparser(parser, defaults)
  self.add_argument(
      parser, 'include_changelog_details', defaults, False,
      action='store_true',
      help='Include a "details" section with the full commit messages'
           ' in time sequence in the changelog.')

  HalRunner.add_parser_args(parser, defaults)
  self.add_argument(
      parser, 'relative_to_bom_path', defaults, None,
      help='If specified then produce the changelog relative to the'
           ' commits found in the specified bom rather than the previous'
           ' repository tag.')
  self.add_argument(
      parser, 'relative_to_bom_version', defaults, None,
      help='If specified then produce the changelog relative to the'
           ' commits found in the specified bom rather than the previous'
           ' repository tag.')
def init_argparser(self, parser, defaults):
  super(PublishSpinnakerFactory, self).init_argparser(parser, defaults)
  HalRunner.add_parser_args(parser, defaults)
  GitRunner.add_parser_args(parser, defaults)
  GitRunner.add_publishing_parser_args(parser, defaults)
  PublishChangelogFactory().init_argparser(parser, defaults)

  self.add_argument(
      parser, 'spinnaker_release_alias', defaults, None,
      help='The spinnaker version alias to publish as.')
  self.add_argument(
      parser, 'halyard_bom_bucket', defaults, 'halconfig',
      help='The bucket managing halyard BOMs and config profiles.')
  self.add_argument(
      parser, 'bom_version', defaults, None,
      help='The existing bom version used for this release.')
  self.add_argument(
      parser, 'min_halyard_version', defaults, None,
      help='The minimum halyard version required.')
def __init__(self, factory, options, **kwargs):
  super(PublishSpinnakerCommand, self).__init__(factory, options, **kwargs)
  check_options_set(options, [
      'spinnaker_version',
      'spinnaker_release_alias',
      'bom_version',
      'changelog_gist_url',
      'github_owner',
      'min_halyard_version'
  ])

  major, minor, _ = self.options.spinnaker_version.split('.')
  self.__branch = 'release-{major}.{minor}.x'.format(major=major, minor=minor)

  options_copy = copy.copy(options)
  self.__bom_scm = BomSourceCodeManager(options_copy, self.get_input_dir())
  self.__hal = HalRunner(options)
  self.__git = GitRunner(options)
  self.__hal.check_property('spinnaker.config.input.bucket',
                            options.halyard_bom_bucket)
  if options.only_repositories:
    self.__only_repositories = options.only_repositories.split(',')
  else:
    self.__only_repositories = []

  options_copy.git_branch = self.__branch
  self.__branch_scm = BranchSourceCodeManager(options_copy,
                                              self.get_input_dir())
def __init__(self, factory, options, *pos_args, **kwargs):
  super(BuildBomCommand, self).__init__(factory, options, *pos_args, **kwargs)

  if options.refresh_from_bom_path and options.refresh_from_bom_version:
    raise_and_log_error(
        ConfigError('Cannot specify both --refresh_from_bom_path="{0}"'
                    ' and --refresh_from_bom_version="{1}"'.format(
                        options.refresh_from_bom_path,
                        options.refresh_from_bom_version)))

  if options.refresh_from_bom_path:
    logging.debug('Using base bom from path "%s"',
                  options.refresh_from_bom_path)
    check_path_exists(options.refresh_from_bom_path, "refresh_from_bom_path")
    with open(options.refresh_from_bom_path, 'r') as stream:
      # Use safe_load so the BOM file cannot instantiate arbitrary objects.
      base_bom = yaml.safe_load(stream.read())
  elif options.refresh_from_bom_version:
    logging.debug('Using base bom version "%s"',
                  options.refresh_from_bom_version)
    base_bom = HalRunner(options).retrieve_bom_version(
        options.refresh_from_bom_version)
  else:
    base_bom = None
  if base_bom:
    logging.info('Creating new bom based on version "%s"',
                 base_bom.get('version', 'UNKNOWN'))

  self.__builder = BomBuilder(self.options, self.scm, base_bom=base_bom)
def __init__(self, factory, options, **kwargs):
  options_copy = copy.copy(options)
  options_copy.bom_path = None
  options_copy.bom_version = None
  options_copy.git_branch = 'master'
  options_copy.github_hostname = 'github.com'
  # Overrides later if --git_allow_publish_master_branch is false
  super(PublishHalyardCommand, self).__init__(factory, options_copy, **kwargs)

  check_options_set(options, ['halyard_version'])
  match = re.match(r'(\d+)\.(\d+)\.(\d+)-\d+', options.halyard_version)
  if match is None:
    raise_and_log_error(
        ConfigError('--halyard_version={version} is not X.Y.Z-<buildnum>'
                    .format(version=options.halyard_version)))
  self.__stable_version = '{major}.{minor}.{patch}'.format(
      major=match.group(1), minor=match.group(2), patch=match.group(3))

  self.__scm = BranchSourceCodeManager(options_copy, self.get_input_dir())
  self.__hal = HalRunner(options_copy)
  self.__gradle = GradleRunner(options_copy, self.__scm, self.metrics)
  self.__halyard_repo_md_path = os.path.join('docs', 'commands.md')

  dash = self.options.halyard_version.find('-')
  semver_str = self.options.halyard_version[0:dash]
  semver_parts = semver_str.split('.')
  if len(semver_parts) != 3:
    raise_and_log_error(
        ConfigError('Expected --halyard_version in the form X.Y.Z-N'))
  self.__release_branch = 'release-{maj}.{min}.x'.format(
      maj=semver_parts[0], min=semver_parts[1])
  self.__release_tag = 'version-' + semver_str
  self.__release_version = semver_str
def init_argparser(self, parser, defaults):
  super(BuildBomCommandFactory, self).init_argparser(parser, defaults)
  HalRunner.add_parser_args(parser, defaults)
  buildtool.container_commands.add_bom_parser_args(parser, defaults)
  buildtool.debian_commands.add_bom_parser_args(parser, defaults)
  buildtool.image_commands.add_bom_parser_args(parser, defaults)

  self.add_argument(
      parser, 'build_number', defaults, DEFAULT_BUILD_NUMBER,
      help='The build number for this specific bom.')
  self.add_argument(
      parser, 'bom_path', defaults, None,
      help='The path to the local BOM file copy to write out.')
  self.add_argument(
      parser, 'bom_dependencies_path', defaults, None,
      help='The path to a YAML file specifying the BOM dependencies section'
           ' if overriding.')
  self.add_argument(
      parser, 'refresh_from_bom_path', defaults, None,
      help='If specified then use the existing bom_path as a prototype'
           ' to refresh. Use with --only_repositories to create a new BOM'
           ' using only the new versions and build numbers for select repos'
           ' while keeping the existing versions and build numbers for'
           ' others.')
  self.add_argument(
      parser, 'refresh_from_bom_version', defaults, None,
      help='Similar to refresh_from_bom_path but using a version obtained'
           ' from halyard.')
  self.add_argument(
      parser, 'git_fallback_branch', defaults, None,
      help='The branch to pull for the BOM if --git_branch is not found.'
           ' This is intended only for speculative development where'
           ' some repositories are being modified and the remaining are'
           ' to come from a release branch.')
def init_argparser(self, parser, defaults):
  super(PublishBomCommandFactory, self).init_argparser(parser, defaults)
  HalRunner.add_parser_args(parser, defaults)
  self.add_argument(
      parser, 'halyard_bom_bucket', defaults, 'halconfig',
      help='The bucket managing halyard BOMs and config profiles.')
  self.add_argument(
      parser, 'bom_alias', defaults, None,
      help='Also publish the BOM using this alias name.')
def init_argparser(self, parser, defaults):
  super(PublishHalyardCommandFactory, self).init_argparser(parser, defaults)
  GradleCommandFactory.add_bom_parser_args(parser, defaults)
  SpinnakerSourceCodeManager.add_parser_args(parser, defaults)
  GradleRunner.add_parser_args(parser, defaults)
  GitRunner.add_publishing_parser_args(parser, defaults)
  HalRunner.add_parser_args(parser, defaults)

  self.add_argument(
      parser, 'build_number', defaults, DEFAULT_BUILD_NUMBER,
      help='Publishing halyard requires a rebuild. This is the build number'
           ' to use when rebuilding halyard.')
  self.add_argument(
      parser, 'halyard_version', defaults, None,
      help='The semantic version of the release to publish.')
  self.add_argument(
      parser, 'halyard_version_commits_url', defaults, None,
      help='URL to file containing version and git commit for successful'
           ' nightly builds. By default this will be'
           ' "{filename}" in the'
           ' --halyard_bucket_base_url.'.format(
               filename=BuildHalyardCommand.HALYARD_VERSIONS_BASENAME))
  self.add_argument(
      parser, 'halyard_docker_image_base', defaults, None,
      help='Base Docker image name for writing halyard builds.')
  self.add_argument(
      parser, 'halyard_bucket_base_url', defaults, None,
      help='Base Google Cloud Storage URL for writing halyard builds.')
  self.add_argument(
      parser, 'docs_repo_owner', defaults, None,
      help='Owner of the docs repo if one was'
           ' specified. The default is --github_owner.')
  self.add_argument(
      parser, 'skip_existing', defaults, False, type=bool,
      help='Skip builds if the desired version already exists on bintray.')
  self.add_argument(
      parser, 'delete_existing', defaults, None, type=bool,
      help='Delete pre-existing desired versions from bintray.')
  self.add_argument(
      parser, 'gcb_project', defaults, None,
      help='The GCP project ID when using the GCP Container Builder.')
  self.add_argument(
      parser, 'gcb_service_account', defaults, None,
      help='Google Service Account when using the GCP Container Builder.')
  self.add_argument(
      parser, 'artifact_registry', defaults, None,
      help='Artifact registry to push the container images to.')
def init_argparser(self, parser, defaults):
  super(BuildGceComponentImagesFactory, self).init_argparser(parser, defaults)
  HalRunner.add_parser_args(parser, defaults)
  self.add_argument(
      parser, 'halyard_release_track', defaults, 'stable',
      choices=['nightly', 'stable'],
      help='Which halyard release track to use when installing images.')
  self.add_argument(
      parser, 'skip_existing', defaults, False, type=bool,
      help='Skip builds if the desired image already exists in GCE.')
  self.add_argument(
      parser, 'delete_existing', defaults, None, type=bool,
      help='Delete pre-existing desired images from GCE.')
  self.add_argument(
      parser, 'build_gce_service_account', defaults, None,
      help='Service account for building images.')
  self.add_argument(
      parser, 'build_gce_project', defaults, None,
      help='Project to build image in.')
  self.add_argument(
      parser, 'build_gce_zone', defaults, 'us-central1-f',
      help='Zone to build image in.')

  halyard_install_sh = 'dev/halyard_install_component.sh'
  self.add_argument(
      parser, 'install_image_script', defaults, halyard_install_sh,
      help='Script for installing images.')

  publish_image_sh = os.path.join(
      os.path.dirname(__file__), '..', '..', 'google', 'dev',
      'publish_gce_release.sh')
  self.add_argument(
      parser, 'publish_gce_image_script', defaults, publish_image_sh,
      help='Script for publishing images to a project.')

  self.add_argument(
      parser, 'git_branch', defaults, None,
      help='Github branch to get install scripts from.'
           ' If none, then use the source repo branch that this script'
           ' is running from.')
  self.add_argument(
      parser, 'bintray_org', defaults, None,
      help='The bintray organization for the bintray_*_repositories.')
  self.add_argument(
      parser, 'bintray_debian_repository', defaults, None,
      help='Repository where built debians were placed.')
  self.add_argument(
      parser, 'halyard_bom_bucket', defaults, 'halconfig',
      help='The bucket managing halyard BOMs and config profiles.')
def add_parser_args(parser, defaults):
  """Add standard parser arguments used by SourceCodeManager."""
  if hasattr(parser, 'added_bom_scm'):
    return
  parser.added_bom_scm = True
  SpinnakerSourceCodeManager.add_parser_args(parser, defaults)
  HalRunner.add_parser_args(parser, defaults)
  add_parser_argument(
      parser, 'bom_path', defaults, None,
      help='Use the sources specified in the BOM path.')
  add_parser_argument(
      parser, 'bom_version', defaults, None,
      help='Use the sources specified in the BOM version.')
def __init__(self, factory, options, **kwargs):
  options.github_disable_upstream_push = True
  super(PublishBomCommand, self).__init__(factory, options, **kwargs)
  self.__hal_runner = HalRunner(options)
  logging.debug('Verifying halyard server is consistent')

  # Halyard is configured with fixed endpoints, however when we
  # publish we want to be explicit about where we are publishing to.
  # There isn't a way to control this in halyard on a per-request basis
  # so make sure halyard was configured consistent with where we want
  # these BOMs to go.
  self.__hal_runner.check_property('spinnaker.config.input.bucket',
                                   options.halyard_bom_bucket)
def __init__(self, factory, options, **kwargs):
  super(PublishSpinnakerCommand, self).__init__(factory, options, **kwargs)
  check_options_set(options, [
      'spinnaker_version',
      'bom_version',
      'github_owner',
      'min_halyard_version'
  ])

  options_copy = copy.copy(options)
  self.__scm = BomSourceCodeManager(options_copy, self.get_input_dir())
  self.__hal = HalRunner(options)
  self.__git = GitRunner(options)
  self.__hal.check_property('spinnaker.config.input.bucket',
                            options.halyard_bom_bucket)
def __init__(self, factory, options, **kwargs):
  super(PublishSpinnakerCommand, self).__init__(factory, options, **kwargs)
  check_options_set(options, [
      'spinnaker_version',
      'spinnaker_release_alias',
      'bom_version',
      'changelog_gist_url',
      'github_owner',
      'min_halyard_version'
  ])

  major, minor, _ = self.options.spinnaker_version.split('.')
  self.__branch = 'release-{major}.{minor}.x'.format(
      major=major, minor=minor)

  options_copy = copy.copy(options)
  self.__bom_scm = BomSourceCodeManager(options_copy, self.get_input_dir())
  self.__hal = HalRunner(options)
  self.__git = GitRunner(options)
  self.__hal.check_property(
      'spinnaker.config.input.bucket', options.halyard_bom_bucket)
  if options.only_repositories:
    self.__only_repositories = options.only_repositories.split(',')
  else:
    self.__only_repositories = []

  options_copy.git_branch = self.__branch
  self.__branch_scm = BranchSourceCodeManager(
      options_copy, self.get_input_dir())
def init_argparser(self, parser, defaults):
  super(PublishHalyardCommandFactory, self).init_argparser(parser, defaults)
  GradleCommandFactory.add_bom_parser_args(parser, defaults)
  SpinnakerSourceCodeManager.add_parser_args(parser, defaults)
  GradleRunner.add_parser_args(parser, defaults)
  GitRunner.add_publishing_parser_args(parser, defaults)
  HalRunner.add_parser_args(parser, defaults)

  self.add_argument(
      parser, 'build_number', defaults, DEFAULT_BUILD_NUMBER,
      help='Publishing halyard requires a rebuild. This is the build number'
           ' to use when rebuilding halyard.')
  self.add_argument(
      parser, 'halyard_version', defaults, None,
      help='The semantic version of the release to publish.')
  self.add_argument(
      parser, 'halyard_version_commits_url', defaults, None,
      help='URL to file containing version and git commit for successful'
           ' nightly builds. By default this will be'
           ' "{filename}" in the'
           ' --halyard_bucket_base_url.'.format(
               filename=BuildHalyardCommand.HALYARD_VERSIONS_BASENAME))
  self.add_argument(
      parser, 'halyard_docker_image_base', defaults, None,
      help='Base Docker image name for writing halyard builds.')
  self.add_argument(
      parser, 'halyard_bucket_base_url', defaults, None,
      help='Base Google Cloud Storage URL for writing halyard builds.')
  self.add_argument(
      parser, 'docs_repo_owner', defaults, None,
      help='Owner of the docs repo if one was'
           ' specified. The default is --github_owner.')
  self.add_argument(
      parser, 'skip_existing', defaults, False, type=bool,
      help='Skip builds if the desired version already exists on bintray.')
  self.add_argument(
      parser, 'delete_existing', defaults, None, type=bool,
      help='Delete pre-existing desired versions from bintray.')
def init_argparser(self, parser, defaults):
  super(PublishSpinnakerFactory, self).init_argparser(parser, defaults)
  HalRunner.add_parser_args(parser, defaults)
  GitRunner.add_parser_args(parser, defaults)
  GitRunner.add_publishing_parser_args(parser, defaults)
  PublishChangelogFactory().init_argparser(parser, defaults)

  self.add_argument(
      parser, 'spinnaker_release_alias', defaults, None,
      help='The spinnaker version alias to publish as.')
  self.add_argument(
      parser, 'halyard_bom_bucket', defaults, 'halconfig',
      help='The bucket managing halyard BOMs and config profiles.')
  self.add_argument(
      parser, 'bom_version', defaults, None,
      help='The existing bom version used for this release.')
  self.add_argument(
      parser, 'min_halyard_version', defaults, None,
      help='The minimum halyard version required.')
def __init__(self, factory, options, **kwargs):
  options.github_disable_upstream_push = True
  super(PublishBomCommand, self).__init__(factory, options, **kwargs)
  self.__hal_runner = HalRunner(options)
  logging.debug('Verifying halyard server is consistent')

  # Halyard is configured with fixed endpoints, however when we
  # publish we want to be explicit about where we are publishing to.
  # There isn't a way to control this in halyard on a per-request basis
  # so make sure halyard was configured consistent with where we want
  # these BOMs to go.
  self.__hal_runner.check_property(
      'spinnaker.config.input.bucket', options.halyard_bom_bucket)
def init_argparser(self, parser, defaults):
  """Adds command-specific arguments."""
  super(BuildChangelogFactory, self).init_argparser(parser, defaults)
  self.add_argument(
      parser, 'include_changelog_details', defaults, False,
      action='store_true',
      help='Include a "details" section with the full commit messages'
           ' in time sequence in the changelog.')

  HalRunner.add_parser_args(parser, defaults)
  self.add_argument(
      parser, 'relative_to_bom_path', defaults, None,
      help='If specified then produce the changelog relative to the'
           ' commits found in the specified bom rather than the previous'
           ' repository tag.')
  self.add_argument(
      parser, 'relative_to_bom_version', defaults, None,
      help='If specified then produce the changelog relative to the'
           ' commits found in the specified bom rather than the previous'
           ' repository tag.')
def init_argparser(self, parser, defaults):
  super(BuildBomCommandFactory, self).init_argparser(parser, defaults)
  HalRunner.add_parser_args(parser, defaults)
  buildtool.container_commands.add_bom_parser_args(parser, defaults)
  buildtool.debian_commands.add_bom_parser_args(parser, defaults)

  self.add_argument(
      parser, 'publish_gce_image_project', defaults, None,
      help='Project to publish images to.')
  self.add_argument(
      parser, 'build_number', defaults, DEFAULT_BUILD_NUMBER,
      help='The build number for this specific bom.')
  self.add_argument(
      parser, 'bom_path', defaults, None,
      help='The path to the local BOM file copy to write out.')
  self.add_argument(
      parser, 'bom_dependencies_path', defaults, None,
      help='The path to a YAML file specifying the BOM dependencies section'
           ' if overriding.')
  self.add_argument(
      parser, 'refresh_from_bom_path', defaults, None,
      help='If specified then use the existing bom_path as a prototype'
           ' to refresh. Use with --only_repositories to create a new BOM'
           ' using only the new versions and build numbers for select repos'
           ' while keeping the existing versions and build numbers for'
           ' others.')
  self.add_argument(
      parser, 'refresh_from_bom_version', defaults, None,
      help='Similar to refresh_from_bom_path but using a version obtained'
           ' from halyard.')
  self.add_argument(
      parser, 'git_fallback_branch', defaults, None,
      help='The branch to pull for the BOM if --git_branch is not found.'
           ' This is intended only for speculative development where'
           ' some repositories are being modified and the remaining are'
           ' to come from a release branch.')
def __init__(self, factory, options, **kwargs):
  # Use own repository to avoid race conditions when commands are
  # running concurrently.
  if options.relative_to_bom_path and options.relative_to_bom_version:
    raise_and_log_error(
        ConfigError('Cannot specify both --relative_to_bom_path'
                    ' and --relative_to_bom_version.'))

  options_copy = copy.copy(options)
  options_copy.github_disable_upstream_push = True

  if options.relative_to_bom_path:
    with open(options.relative_to_bom_path, 'r') as stream:
      self.__relative_bom = yaml.safe_load(stream.read())
  elif options.relative_to_bom_version:
    self.__relative_bom = HalRunner(options).retrieve_bom_version(
        options.relative_to_bom_version)
  else:
    self.__relative_bom = None
  super(BuildChangelogCommand, self).__init__(factory, options_copy, **kwargs)
def load_bom(options):
  """Helper function for initializing the BOM if one was specified."""
  bom_path = options.bom_path if hasattr(options, 'bom_path') else None
  bom_version = (options.bom_version
                 if hasattr(options, 'bom_version') else None)

  have_bom_path = 1 if bom_path else 0
  have_bom_version = 1 if bom_version else 0
  if have_bom_path + have_bom_version != 1:
    raise_and_log_error(
        ConfigError('Expected exactly one of: "bom_path", or "bom_version"'))

  if bom_path:
    check_path_exists(bom_path, why='options.bom_path')
    return BomSourceCodeManager.bom_from_path(bom_path)

  if bom_version:
    logging.debug('Retrieving bom version %s', bom_version)
    return HalRunner(options).retrieve_bom_version(bom_version)

  raise_and_log_error(UnexpectedError('Not reachable', cause='NotReachable'))
class PublishSpinnakerCommand(CommandProcessor):
  """Implements the publish_spinnaker command."""

  # pylint: disable=too-few-public-methods

  def __init__(self, factory, options, **kwargs):
    super(PublishSpinnakerCommand, self).__init__(factory, options, **kwargs)
    check_options_set(options, [
        'spinnaker_version',
        'bom_version',
        'changelog_gist_url',
        'github_owner',
        'min_halyard_version'
    ])

    options_copy = copy.copy(options)
    self.__scm = BomSourceCodeManager(options_copy, self.get_input_dir())
    self.__hal = HalRunner(options)
    self.__git = GitRunner(options)
    self.__hal.check_property('spinnaker.config.input.bucket',
                              options.halyard_bom_bucket)
    self.__only_repositories = options.only_repositories.split(',')

  def push_branches_and_tags(self, bom):
    """Update the release branches and tags in each of the BOM repositories."""
    major, minor, _ = self.options.spinnaker_version.split('.')
    branch = 'release-{major}.{minor}.x'.format(major=major, minor=minor)
    logging.info('Tagging each of the BOM service repos')

    # Run in two passes so we don't push anything if we hit a problem
    # in the tagging pass. Since we are spread across multiple repositories,
    # we cannot do this atomically. The two passes give us more protection
    # from a partial push due to errors in a repo.
    for which in ['tag', 'push']:
      for name, spec in bom['services'].items():
        if name in ['monitoring-third-party', 'defaultArtifact']:
          # Ignore this, it is redundant to monitoring-daemon
          continue
        if name == 'monitoring-daemon':
          name = 'spinnaker-monitoring'
        if self.__only_repositories and name not in self.__only_repositories:
          logging.debug('Skipping %s because of --only_repositories', name)
          continue
        if spec is None:
          logging.warning('HAVE bom.services.%s = None', name)
          continue

        repository = self.__scm.make_repository_spec(name)
        self.__scm.ensure_local_repository(repository)
        if which == 'tag':
          self.__branch_and_tag_repository(repository, branch)
        else:
          self.__push_branch_and_tag_repository(repository, branch)

  def __branch_and_tag_repository(self, repository, branch):
    """Create a branch and/or version tag in the repository, if needed."""
    version = self.__scm.determine_repository_version(repository)
    tag = 'version-' + version
    self.__git.check_run(repository.git_dir, 'tag ' + tag)

  def __push_branch_and_tag_repository(self, repository, branch):
    """Push the branch and version tag to the origin."""
    source_info = self.__scm.lookup_source_info(repository)
    tag = 'version-' + source_info.summary.version
    self.__git.push_branch_to_origin(repository.git_dir, branch)
    self.__git.push_tag_to_origin(repository.git_dir, tag)

  def _do_command(self):
    """Implements CommandProcessor interface."""
    options = self.options
    spinnaker_version = options.spinnaker_version
    publish_changelog_command = PublishChangelogFactory().make_command(options)
    changelog_gist_url = options.changelog_gist_url

    # Make sure changelog exists already.
    # If it does not then fail.
    try:
      logging.debug('Verifying changelog ready at %s', changelog_gist_url)
      urllib2.urlopen(changelog_gist_url)
    except urllib2.HTTPError as error:
      logging.error(error.message)
      raise_and_log_error(
          ConfigError(
              'Changelog gist "{url}" must exist before publishing a release.'
              .format(url=changelog_gist_url),
              cause='ChangelogMissing'))

    bom = self.__hal.retrieve_bom_version(self.options.bom_version)
    bom['version'] = spinnaker_version
    bom_path = os.path.join(self.get_output_dir(), spinnaker_version + '.yml')
    write_to_path(yaml.dump(bom, default_flow_style=False), bom_path)

    self.push_branches_and_tags(bom)

    self.__hal.publish_spinnaker_release(
        spinnaker_version, options.spinnaker_release_alias,
        changelog_gist_url, options.min_halyard_version)

    logging.info('Publishing changelog')
    publish_changelog_command()
class PublishHalyardCommand(CommandProcessor):
  """Publish halyard version to the public repository."""

  def __init__(self, factory, options, **kwargs):
    options_copy = copy.copy(options)
    options_copy.bom_path = None
    options_copy.bom_version = None
    options_copy.git_branch = 'master'
    options_copy.github_hostname = 'github.com'
    # Overrides later if --git_allow_publish_master_branch is false
    super(PublishHalyardCommand, self).__init__(factory, options_copy, **kwargs)

    check_options_set(options, ['halyard_version'])
    match = re.match(r'(\d+)\.(\d+)\.(\d+)-\d+', options.halyard_version)
    if match is None:
      raise_and_log_error(
          ConfigError('--halyard_version={version} is not X.Y.Z-<buildnum>'
                      .format(version=options.halyard_version)))
    self.__stable_version = '{major}.{minor}.{patch}'.format(
        major=match.group(1), minor=match.group(2), patch=match.group(3))

    self.__scm = BranchSourceCodeManager(options_copy, self.get_input_dir())
    self.__hal = HalRunner(options_copy)
    self.__gradle = GradleRunner(options_copy, self.__scm, self.metrics)
    self.__halyard_repo_md_path = os.path.join('docs', 'commands.md')

    dash = self.options.halyard_version.find('-')
    semver_str = self.options.halyard_version[0:dash]
    semver_parts = semver_str.split('.')
    if len(semver_parts) != 3:
      raise_and_log_error(
          ConfigError('Expected --halyard_version in the form X.Y.Z-N'))
    self.__release_branch = 'release-{maj}.{min}.x'.format(
        maj=semver_parts[0], min=semver_parts[1])
    self.__release_tag = 'version-' + semver_str
    self.__release_version = semver_str

  def determine_halyard_commit(self):
    """Determine the commit_id that we want to publish."""
    options = self.options
    versions_url = options.halyard_version_commits_url
    if not versions_url:
      versions_url = '{base}/{filename}'.format(
          base=options.halyard_bucket_base_url,
          filename=BuildHalyardCommand.HALYARD_VERSIONS_BASENAME)

    if os.path.exists(versions_url):
      logging.debug('Loading halyard version info from file %s', versions_url)
      with open(versions_url, 'r') as stream:
        version_data = stream.read()
    else:
      logging.debug('Loading halyard version info from bucket %s',
                    versions_url)
      version_data = check_subprocess(
          'gsutil cat {url}'.format(url=versions_url))

    commit = yaml.safe_load(version_data).get(options.halyard_version)
    if commit is None:
      raise_and_log_error(
          ConfigError('Unknown halyard version "{version}" in "{url}"'.format(
              version=options.halyard_version, url=versions_url)))
    return commit

  def _prepare_repository(self):
    """Prepare a local repository to build for release.

    We're rebuilding it only to have nebula give a new distribution tag.
    However we will also use the repository to tag and branch the release
    into github so we want to at least clone the repo regardless.
    """
    logging.debug('Preparing repository for publishing a halyard release.')
    commit = self.determine_halyard_commit()
    repository = self.__scm.make_repository_spec(
        SPINNAKER_HALYARD_REPOSITORY_NAME, commit_id=commit)
    git_dir = repository.git_dir
    if os.path.exists(git_dir):
      logging.info('Deleting existing %s to build fresh.', git_dir)
      shutil.rmtree(git_dir)
    git = self.__scm.git
    git.clone_repository_to_path(repository, commit=commit)
    return repository

  def _promote_halyard(self, repository):
    """Promote an existing build to become the halyard stable version."""
    options = self.options
    logfile = self.get_logfile_path('promote-all')
    env = dict(os.environ)
    env.update({
        'PUBLISH_HALYARD_BUCKET_BASE_URL': options.halyard_bucket_base_url,
        'PUBLISH_HALYARD_DOCKER_IMAGE_BASE': options.halyard_docker_image_base
    })
    check_subprocesses_to_logfile(
        'Promote Halyard', logfile,
        ['gcloud docker -a',  # if repo is private it needs authentication
         './release/promote-all.sh {candidate} {stable}'.format(
             candidate=options.halyard_version,
             stable=self.__stable_version),
         './release/promote-all.sh {candidate} stable'.format(
             candidate=options.halyard_version)],
        env=env, cwd=repository.git_dir)

  def _build_release(self, repository):
    """Rebuild the actual release debian package.

    We don't necessarily need to rebuild here. We just need to push as
    debian to the "-stable". However there isn't an easy way to do this.

    Note that this is not the promoted version. For safety[*] and
    simplicity we'll promote the candidate whose version was used to
    build this. Ideally this function can go away.

    [*] Safety because the candidate was tested whereas this build was not.
    """
    # Ideally we would just modify the existing bintray version to add
    # *-stable to the distributions, however it does not appear possible
    # to patch the debian attributes of a bintray version, only the
    # version metadata. Therefore, we'll rebuild it.
    # Alternatively we could download the existing and push a new one,
    # however I don't see how to get at the existing debian metadata and
    # don't want to omit something.
    git_dir = repository.git_dir
    summary = self.__scm.git.collect_repository_summary(git_dir)
    args = self.__gradle.get_common_args()
    args.extend(self.__gradle.get_debian_args(
        'trusty-stable,xenial-stable,bionic-stable'))
    build_number = self.options.build_number
    self.__gradle.check_run(
        args, self, repository, 'candidate', 'build-release',
        version=self.__release_version, build_number=build_number,
        gradle_dir=git_dir)

    info_path = os.path.join(self.get_output_dir(), 'halyard_info.yml')
    logging.debug('Writing build information to %s', info_path)
    write_to_path(summary.to_yaml(), info_path)

  def write_target_docs(self, source_repository, target_repository):
    source_path = os.path.join(source_repository.git_dir,
                               self.__halyard_repo_md_path)
    target_rel_path = os.path.join('reference', 'halyard', 'commands.md')
    target_path = os.path.join(target_repository.git_dir, target_rel_path)
    now = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
    logging.debug('Writing documentation into %s', target_path)
    header = textwrap.dedent(
        """\
        ---
        layout: single
        title: "Commands"
        sidebar:
          nav: reference
        ---
        Published: {now}
        """.format(now=now))
    with open(source_path, 'r') as source:
      body = source.read()
    with open(target_path, 'w') as stream:
      stream.write(header)
      stream.write(body)
    return target_rel_path

  def push_docs(self, repository):
    base_branch = 'master'
    target_repository = self.__scm.make_repository_spec(
        SPINNAKER_GITHUB_IO_REPOSITORY_NAME)
    self.__scm.ensure_git_path(target_repository)
    target_rel_path = self.write_target_docs(repository, target_repository)

    if self.options.git_allow_publish_master_branch:
      head_branch = 'master'
      branch_flag = ''
    else:
      head_branch = self.__release_version + '-haldocs'
      branch_flag = '-b'
    logging.debug('Committing changes into local repository "%s" branch=%s',
                  target_repository.git_dir, head_branch)

    git_dir = target_repository.git_dir
    message = 'docs(halyard): ' + self.__release_version
    local_git_commands = [
        # These commands are accommodating to a branch already existing
        # because the branch is on the version, not build. A rejected
        # build for some reason that is re-tried will have the same version
        # so the branch may already exist from the earlier attempt.
        'checkout ' + base_branch,
        'checkout {flag} {branch}'.format(
            flag=branch_flag, branch=head_branch),
        'add ' + target_rel_path,
    ]
    logging.debug('Committing changes into local repository "%s" branch=%s',
                  target_repository.git_dir, head_branch)
    git = self.__scm.git
    git.check_run_sequence(git_dir, local_git_commands)
    git.check_commit_or_no_changes(
        git_dir, '-m "{msg}" {path}'.format(msg=message, path=target_rel_path))

    logging.info('Pushing halyard docs to %s branch="%s"',
                 target_repository.origin, head_branch)
    git.push_branch_to_origin(
        target_repository.git_dir, branch=head_branch, force=True)

  def _do_command(self):
    """Implements CommandProcessor interface."""
    repository = self._prepare_repository()
    self._build_release(repository)
    self._promote_halyard(repository)
    build_halyard_docs(self, repository)
    self.push_docs(repository)
    self.push_tag_and_branch(repository)
    self.__hal.publish_halyard_release(self.__release_version)

  def push_tag_and_branch(self, repository):
    """Pushes a stable branch and git version tag to the origin repository."""
    git_dir = repository.git_dir
    git = self.__scm.git

    release_url = git.determine_push_url(repository)
    logging.info('Pushing branch=%s and tag=%s to %s',
                 self.__release_branch, self.__release_tag, release_url)

    existing_commit = git.query_commit_at_tag(git_dir, self.__release_tag)
    if existing_commit:
      want_commit = git.query_local_repository_commit_id(git_dir)
      if want_commit == existing_commit:
        logging.debug('Already have "%s" at %s',
                      self.__release_tag, want_commit)
        return

    git.check_run_sequence(git_dir, [
        'checkout -b ' + self.__release_branch,
        'remote add release ' + release_url,
        'push release ' + self.__release_branch,
        'tag ' + self.__release_tag,
        'push release ' + self.__release_tag
    ])
def init_argparser(self, parser, defaults):
    super(BuildGceComponentImagesFactory, self).init_argparser(parser, defaults)
    HalRunner.add_parser_args(parser, defaults)
    self.add_argument(
        parser, "halyard_release_track", defaults, "stable",
        choices=["nightly", "stable"],
        help="Which halyard release track to use when installing images.",
    )
    self.add_argument(
        parser, "skip_existing", defaults, False, type=bool,
        help="Skip builds if the desired image already exists in GCE.",
    )
    self.add_argument(
        parser, "delete_existing", defaults, None, type=bool,
        help="Delete pre-existing desired images from GCE.",
    )
    self.add_argument(
        parser, "build_gce_service_account", defaults, None,
        help="Service account for building images.",
    )
    self.add_argument(
        parser, "build_gce_project", defaults, None,
        help="Project to build image in.",
    )
    self.add_argument(
        parser, "build_gce_zone", defaults, "us-central1-f",
        help="Zone to build image in.",
    )

    halyard_install_sh = "dev/halyard_install_component.sh"
    self.add_argument(
        parser, "install_image_script", defaults, halyard_install_sh,
        help="Script for installing images.",
    )

    publish_image_sh = os.path.join(
        os.path.dirname(__file__), "..", "..", "google", "dev",
        "publish_gce_release.sh",
    )
    self.add_argument(
        parser, "publish_gce_image_script", defaults, publish_image_sh,
        help="Script for publishing images to a project.",
    )

    self.add_argument(
        parser, "git_branch", defaults, None,
        help="Github branch to get install scripts from."
        " If none, then use the source repo branch that this script"
        " is running from.",
    )
    self.add_argument(
        parser, "bintray_org", defaults, None,
        help="The bintray organization for the bintray_*_repositories.",
    )
    self.add_argument(
        parser, "bintray_debian_repository", defaults, None,
        help="Repository where built debians were placed.",
    )
    self.add_argument(
        parser, "halyard_bom_bucket", defaults, "halconfig",
        help="The bucket managing halyard BOMs and config profiles.",
    )
class PublishBomCommand(RepositoryCommandProcessor):
  """Implements publish_bom"""

  def __init__(self, factory, options, **kwargs):
    options.github_disable_upstream_push = True
    super(PublishBomCommand, self).__init__(factory, options, **kwargs)
    self.__hal_runner = HalRunner(options)
    logging.debug('Verifying halyard server is consistent')

    # Halyard is configured with fixed endpoints, however when we
    # publish we want to be explicit about where we are publishing to.
    # There isn't a way to control this in halyard on a per-request basis
    # so make sure halyard was configured consistent with where we want
    # these BOMs to go.
    self.__hal_runner.check_property('spinnaker.config.input.bucket',
                                     options.halyard_bom_bucket)

  def _do_repository(self, repository):
    """Implements RepositoryCommandProcessor interface."""
    self.source_code_manager.ensure_local_repository(repository)
    self.__collect_halconfig_files(repository)

  def _do_postprocess(self, _):
    """Implements RepositoryCommandProcessor interface."""
    options = self.options
    bom_path = _determine_bom_path(self)
    self.__hal_runner.publish_bom_path(bom_path)
    self.__publish_configs(bom_path)

    if options.bom_alias:
      alias = options.bom_alias
      logging.info('Publishing bom alias %s = %s',
                   alias, os.path.basename(bom_path))
      with open(bom_path, 'r') as stream:
        bom = yaml.safe_load(stream)

      alias_path = os.path.join(os.path.dirname(bom_path), alias + '.yml')
      with open(alias_path, 'w') as stream:
        bom['version'] = options.bom_alias
        yaml.dump(bom, stream, default_flow_style=False)
      self.__hal_runner.publish_bom_path(alias_path)

  def __publish_configs(self, bom_path):
    """Publish each of the halconfigs for the bom at the given path."""

    def publish_repo_config(repository):
      """Helper function to publish individual repository."""
      name = self.scm.repository_name_to_service_name(repository.name)
      config_dir = os.path.join(self.get_output_dir(), 'halconfig', name)
      if not os.path.exists(config_dir):
        logging.warning('No profiles for %s', name)
        return

      logging.debug('Publishing profiles for %s', name)
      for profile in os.listdir(config_dir):
        profile_path = os.path.join(config_dir, profile)
        self.__hal_runner.publish_profile(name, profile_path, bom_path)

    logging.info('Publishing halyard configs...')
    self.source_code_manager.foreach_source_repository(
        self.source_repositories, publish_repo_config)

  def __collect_halconfig_files(self, repository):
    """Gets the component config files and writes them into the output_dir."""
    name = repository.name
    if (name not in SPINNAKER_BOM_REPOSITORY_NAMES
        or name in ['spinnaker', 'spin']):
      logging.debug('%s does not use config files -- skipping', name)
      return

    if name == 'spinnaker-monitoring':
      config_root = os.path.join(repository.git_dir,
                                 'spinnaker-monitoring-daemon')
    else:
      config_root = repository.git_dir

    service_name = self.scm.repository_name_to_service_name(repository.name)
    target_dir = os.path.join(self.get_output_dir(), 'halconfig', service_name)
    ensure_dir_exists(target_dir)

    config_path = os.path.join(config_root, 'halconfig')
    logging.info('Copying configs from %s...', config_path)
    for profile in os.listdir(config_path):
      profile_path = os.path.join(config_path, profile)
      if os.path.isfile(profile_path):
        shutil.copyfile(profile_path, os.path.join(target_dir, profile))
        logging.debug('Copied profile to %s', profile_path)
      elif not os.path.isdir(profile_path):
        logging.warning('%s is neither file nor directory -- ignoring',
                        profile_path)
        continue
      else:
        tar_path = os.path.join(
            target_dir, '{profile}.tar.gz'.format(profile=profile))
        file_list = ' '.join(os.listdir(profile_path))

        # NOTE: For historic reasons this is not actually compressed
        # even though the tar_path says ".tar.gz"
        check_subprocess(
            'tar cf {path} -C {profile} {file_list}'.format(
                path=tar_path, profile=profile_path, file_list=file_list))
        logging.debug('Copied profile to %s', tar_path)
class PublishHalyardCommand(CommandProcessor):
  """Publish halyard version to the public repository."""

  def __init__(self, factory, options, **kwargs):
    options_copy = copy.copy(options)
    options_copy.bom_path = None
    options_copy.bom_version = None
    options_copy.git_branch = 'master'
    options_copy.github_hostname = 'github.com'
    # Overrides later if --git_allow_publish_master_branch is false
    super(PublishHalyardCommand, self).__init__(factory, options_copy, **kwargs)

    check_options_set(options, ['halyard_version'])
    match = re.match(r'(\d+)\.(\d+)\.(\d+)-\d+', options.halyard_version)
    if match is None:
      raise_and_log_error(
          ConfigError('--halyard_version={version} is not X.Y.Z-<buildnum>'
                      .format(version=options.halyard_version)))
    self.__stable_version = '{major}.{minor}.{patch}'.format(
        major=match.group(1), minor=match.group(2), patch=match.group(3))

    self.__scm = BranchSourceCodeManager(options_copy, self.get_input_dir())
    self.__hal = HalRunner(options_copy)
    self.__gradle = GradleRunner(options_copy, self.__scm, self.metrics)
    self.__halyard_repo_md_path = os.path.join('docs', 'commands.md')

    dash = self.options.halyard_version.find('-')
    semver_str = self.options.halyard_version[0:dash]
    semver_parts = semver_str.split('.')
    if len(semver_parts) != 3:
      raise_and_log_error(
          ConfigError('Expected --halyard_version in the form X.Y.Z-N'))
    self.__release_branch = 'release-{maj}.{min}.x'.format(
        maj=semver_parts[0], min=semver_parts[1])
    self.__release_tag = 'version-' + semver_str
    self.__release_version = semver_str

  def determine_halyard_commit(self):
    """Determine the commit_id that we want to publish."""
    options = self.options
    versions_url = options.halyard_version_commits_url
    if not versions_url:
      versions_url = '{base}/{filename}'.format(
          base=options.halyard_bucket_base_url,
          filename=BuildHalyardCommand.HALYARD_VERSIONS_BASENAME)

    if os.path.exists(versions_url):
      logging.debug('Loading halyard version info from file %s', versions_url)
      with open(versions_url, 'r') as stream:
        version_data = stream.read()
    else:
      logging.debug('Loading halyard version info from bucket %s',
                    versions_url)
      gsutil_output = check_subprocess(
          'gsutil cat {url}'.format(url=versions_url), stderr=subprocess.PIPE)

      # The latest version of gsutil prints a bunch of python warnings to
      # stdout (see b/152449160). This file is a series of lines that look
      # like...
      #   0.41.0-180209172926: 05f1e832ab438e5a980d1102e84cdb348a0ab055
      # ...so we'll just throw out any lines that don't start with digits.
      valid_lines = [
          line for line in gsutil_output.splitlines() if line[0].isdigit()
      ]
      version_data = "\n".join(valid_lines)

    commit = yaml.safe_load(version_data).get(options.halyard_version)
    if commit is None:
      raise_and_log_error(
          ConfigError('Unknown halyard version "{version}" in "{url}"'.format(
              version=options.halyard_version, url=versions_url)))
    return commit

  def _prepare_repository(self):
    """Prepare a local repository to build for release.

    We're rebuilding it only to have nebula give a new distribution tag.
    However we will also use the repository to tag and branch the release
    into github so we want to at least clone the repo regardless.
    """
    logging.debug('Preparing repository for publishing a halyard release.')
    commit = self.determine_halyard_commit()
    repository = self.__scm.make_repository_spec(
        SPINNAKER_HALYARD_REPOSITORY_NAME, commit_id=commit)
    git_dir = repository.git_dir
    if os.path.exists(git_dir):
      logging.info('Deleting existing %s to build fresh.', git_dir)
      shutil.rmtree(git_dir)
    git = self.__scm.git
    git.clone_repository_to_path(repository, commit=commit)
    return repository

  def _promote_halyard(self, repository):
    """Promote an existing build to become the halyard stable version."""
    options = self.options
    logfile = self.get_logfile_path('promote-all')
    env = dict(os.environ)
    env.update({
        'PUBLISH_HALYARD_ARTIFACT_DOCKER_IMAGE_SRC_BASE':
            options.halyard_artifact_registry_image_base,
        'PUBLISH_HALYARD_BUCKET_BASE_URL': options.halyard_bucket_base_url,
        'PUBLISH_HALYARD_DOCKER_IMAGE_BASE': options.halyard_docker_image_base
    })
    check_subprocesses_to_logfile(
        'Promote Halyard', logfile,
        [
            'gcloud docker -a',  # if repo is private it needs authentication
            './release/promote-all.sh {candidate} {stable}'.format(
                candidate=options.halyard_version,
                stable=self.__stable_version),
            './release/promote-all.sh {candidate} stable'.format(
                candidate=options.halyard_version)
        ],
        env=env, cwd=repository.git_dir)

  def _build_release(self, repository):
    """Rebuild the actual release debian package.

    We don't necessarily need to rebuild here. We just need to push as
    debian to the "-stable". However there isn't an easy way to do this.

    Note that this is not the promoted version. For safety[*] and
    simplicity we'll promote the candidate whose version was used to
    build this. Ideally this function can go away.

    [*] Safety because the candidate was tested whereas this build was not.
    """
    # Ideally we would just modify the existing bintray version to add
    # *-stable to the distributions, however it does not appear possible
    # to patch the debian attributes of a bintray version, only the
    # version metadata. Therefore, we'll rebuild it.
    # Alternatively we could download the existing and push a new one,
    # however I don't see how to get at the existing debian metadata and
    # don't want to omit something.
    options = self.options
    git_dir = repository.git_dir
    summary = self.__scm.git.collect_repository_summary(git_dir)

    config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                               'cloudbuild', 'debs.yml')
    substitutions = {
        '_BRANCH_NAME': options.git_branch,
        '_BRANCH_TAG': re.sub(r'\W', '_', options.git_branch),
        '_BUILD_NUMBER': options.build_number,
        '_IMAGE_NAME': 'halyard',
        '_VERSION': summary.version
    }
    # Convert it to the format expected by gcloud: "_FOO=bar,_BAZ=qux"
    substitutions_arg = ','.join(
        '='.join((str(k), str(v))) for k, v in substitutions.items())
    command = ('gcloud builds submit'
               ' --account={account} --project={project}'
               ' --substitutions={substitutions_arg},'
               ' --config={config} .'.format(
                   account=options.gcb_service_account,
                   project=options.gcb_project,
                   substitutions_arg=substitutions_arg,
                   config=config_path))
    logfile = self.get_logfile_path('build-deb')
    check_subprocesses_to_logfile('building deb with published version',
                                  logfile, [command], cwd=git_dir)

  def write_target_docs(self, source_repository, target_repository):
    source_path = os.path.join(source_repository.git_dir,
                               self.__halyard_repo_md_path)
    target_rel_path = os.path.join('reference', 'halyard', 'commands.md')
    target_path = os.path.join(target_repository.git_dir, target_rel_path)
    now = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
    logging.debug('Writing documentation into %s', target_path)
    header = textwrap.dedent("""\
        ---
        layout: single
        title: "Commands"
        sidebar:
          nav: reference
        ---
        Published: {now}
        """.format(now=now))
    with open(source_path, 'r') as source:
      body = source.read()
    with open(target_path, 'w') as stream:
      stream.write(header)
      stream.write(body)
    return target_rel_path

  def push_docs(self, repository):
    base_branch = 'master'
    target_repository = self.__scm.make_repository_spec(
        SPINNAKER_GITHUB_IO_REPOSITORY_NAME)
    self.__scm.ensure_git_path(target_repository)
    target_rel_path = self.write_target_docs(repository, target_repository)

    if self.options.git_allow_publish_master_branch:
      head_branch = 'master'
      branch_flag = ''
    else:
      head_branch = self.__release_version + '-haldocs'
      branch_flag = '-b'
    logging.debug('Committing changes into local repository "%s" branch=%s',
                  target_repository.git_dir, head_branch)

    git_dir = target_repository.git_dir
    message = 'docs(halyard): ' + self.__release_version
    local_git_commands = [
        # These commands are accommodating to a branch already existing
        # because the branch is on the version, not build. A rejected
        # build for some reason that is re-tried will have the same version
        # so the branch may already exist from the earlier attempt.
        'checkout ' + base_branch,
        'checkout {flag} {branch}'.format(flag=branch_flag,
                                          branch=head_branch),
        'add ' + target_rel_path,
    ]
    logging.debug('Committing changes into local repository "%s" branch=%s',
                  target_repository.git_dir, head_branch)
    git = self.__scm.git
    git.check_run_sequence(git_dir, local_git_commands)
    git.check_commit_or_no_changes(
        git_dir, '-m "{msg}" {path}'.format(msg=message, path=target_rel_path))

    logging.info('Pushing halyard docs to %s branch="%s"',
                 target_repository.origin, head_branch)
    git.push_branch_to_origin(target_repository.git_dir,
                              branch=head_branch, force=True)

  def _do_command(self):
    """Implements CommandProcessor interface."""
    repository = self._prepare_repository()
    self._build_release(repository)
    self._promote_halyard(repository)
    build_halyard_docs(self, repository)
    self.push_docs(repository)
    self.push_tag_and_branch(repository)
    self.__hal.publish_halyard_release(self.__release_version)

  def push_tag_and_branch(self, repository):
    """Pushes a stable branch and git version tag to the origin repository."""
    git_dir = repository.git_dir
    git = self.__scm.git

    release_url = git.determine_push_url(repository.origin)
    logging.info('Pushing branch=%s and tag=%s to %s',
                 self.__release_branch, self.__release_tag, release_url)

    existing_commit = git.query_commit_at_tag(git_dir, self.__release_tag)
    if existing_commit:
      want_commit = git.query_local_repository_commit_id(git_dir)
      if want_commit == existing_commit:
        logging.debug('Already have "%s" at %s',
                      self.__release_tag, want_commit)
        return

    git.check_run_sequence(git_dir, [
        'checkout -b ' + self.__release_branch,
        'remote add release ' + release_url,
        'push release ' + self.__release_branch,
        'tag ' + self.__release_tag,
        'push release ' + self.__release_tag
    ])
def init_argparser(self, parser, defaults):
    super(BuildBomCommandFactory, self).init_argparser(parser, defaults)
    HalRunner.add_parser_args(parser, defaults)

    self.add_argument(
        parser, "publish_gce_image_project", defaults, None,
        help="Project to publish images to.",
    )
    self.add_argument(
        parser, "build_number", defaults, DEFAULT_BUILD_NUMBER,
        help="The build number for this specific bom.",
    )
    self.add_argument(
        parser, "bom_path", defaults, None,
        help="The path to the local BOM file copy to write out.",
    )
    self.add_argument(
        parser, "bom_dependencies_path", defaults, None,
        help="The path to a YAML file specifying the BOM dependencies section"
        " if overriding.",
    )
    self.add_argument(
        parser, "refresh_from_bom_path", defaults, None,
        help="If specified then use the existing bom_path as a prototype"
        " to refresh. Use with --only_repositories to create a new BOM"
        " using only the new versions and build numbers for select repos"
        " while keeping the existing versions and build numbers for"
        " others.",
    )
    self.add_argument(
        parser, "refresh_from_bom_version", defaults, None,
        help="Similar to refresh_from_bom_path but using a version obtained"
        " from halyard.",
    )
    self.add_argument(
        parser, "git_fallback_branch", defaults, None,
        help="The branch to pull for the BOM if --git_branch is not found."
        " This is intended only for speculative development where"
        " some repositories are being modified and the remaining are"
        " to come from a release branch.",
    )
class PublishSpinnakerCommand(CommandProcessor):
  """Implements the publish_spinnaker command."""

  # pylint: disable=too-few-public-methods

  def __init__(self, factory, options, **kwargs):
    super(PublishSpinnakerCommand, self).__init__(factory, options, **kwargs)
    check_options_set(options, [
        'spinnaker_version',
        'spinnaker_release_alias',
        'bom_version',
        'changelog_gist_url',
        'github_owner',
        'min_halyard_version'
    ])

    major, minor, _ = self.options.spinnaker_version.split('.')
    self.__branch = 'release-{major}.{minor}.x'.format(major=major,
                                                       minor=minor)

    options_copy = copy.copy(options)
    self.__bom_scm = BomSourceCodeManager(options_copy, self.get_input_dir())
    self.__hal = HalRunner(options)
    self.__git = GitRunner(options)
    self.__hal.check_property('spinnaker.config.input.bucket',
                              options.halyard_bom_bucket)
    if options.only_repositories:
      self.__only_repositories = options.only_repositories.split(',')
    else:
      self.__only_repositories = []

    options_copy.git_branch = self.__branch
    self.__branch_scm = BranchSourceCodeManager(options_copy,
                                                self.get_input_dir())

  def push_branches_and_tags(self, bom):
    """Update the release branches and tags in each of the BOM repositories."""
    logging.info('Tagging each of the BOM service repos')
    bom_scm = self.__bom_scm
    branch_scm = self.__branch_scm

    # Run in two passes so we don't push anything if we hit a problem
    # in the tagging pass. Since we are spread across multiple repositories,
    # we cannot do this atomically. The two passes give us more protection
    # from a partial push due to errors in a repo.
    names_to_push = set([])
    for which in ['tag', 'push']:
      for name, spec in bom['services'].items():
        if name in ['monitoring-third-party', 'defaultArtifact']:
          # Ignore this, it is redundant to monitoring-daemon
          continue
        if name == 'monitoring-daemon':
          name = 'spinnaker-monitoring'
        if self.__only_repositories and name not in self.__only_repositories:
          logging.debug('Skipping %s because of --only_repositories', name)
          continue
        if spec is None:
          logging.warning('HAVE bom.services.%s = None', name)
          continue

        repository = bom_scm.make_repository_spec(name)
        bom_scm.ensure_local_repository(repository)
        version = bom_scm.determine_repository_version(repository)
        if which == 'tag':
          added = self.__branch_and_tag_repository(
              repository, self.__branch, version)
          if added:
            names_to_push.add(name)
        else:
          self.__push_branch_and_maybe_tag_repository(
              repository, self.__branch, version, name in names_to_push)

    additional_repositories = list(SPINNAKER_PROCESS_REPOSITORY_NAMES)
    for name in additional_repositories:
      if self.__only_repositories and name not in self.__only_repositories:
        logging.debug('Skipping %s because of --only_repositories', name)
        continue
      repository = branch_scm.make_repository_spec(name)
      branch_scm.ensure_local_repository(repository)
      git_summary = self.__git.collect_repository_summary(repository.git_dir)
      version = git_summary.version
      if self.__branch_and_tag_repository(repository, self.__branch, version):
        self.__push_branch_and_maybe_tag_repository(
            repository, self.__branch, version, True)

  def __already_have_tag(self, repository, tag):
    """Determine if we already have the tag in the repository."""
    git_dir = repository.git_dir
    existing_commit = self.__git.query_commit_at_tag(git_dir, tag)
    if not existing_commit:
      return False

    want_commit = self.__git.query_local_repository_commit_id(git_dir)
    if want_commit == existing_commit:
      logging.debug('Already have "%s" at %s', tag, want_commit)
      return True

    raise_and_log_error(
        ConfigError(
            '"{tag}" already exists in "{repo}" at commit {have}, not {want}'
            .format(tag=tag, repo=git_dir,
                    have=existing_commit, want=want_commit)))

  def __branch_and_tag_repository(self, repository, branch, version):
    """Create a branch and/or version tag in the repository, if needed."""
    tag = 'version-' + version
    if self.__already_have_tag(repository, tag):
      return False
    self.__git.check_run(repository.git_dir, 'tag ' + tag)
    return True

  def __push_branch_and_maybe_tag_repository(self, repository, branch,
                                             version, also_tag):
    """Push the branch and version tag to the origin."""
    tag = 'version-' + version
    self.__git.push_branch_to_origin(repository.git_dir, branch)
    if also_tag:
      self.__git.push_tag_to_origin(repository.git_dir, tag)
    else:
      logging.info('%s was already tagged with "%s" -- skip',
                   repository.git_dir, tag)

  def _do_command(self):
    """Implements CommandProcessor interface."""
    options = self.options
    spinnaker_version = options.spinnaker_version
    options_copy = copy.copy(options)
    options_copy.git_branch = 'master'  # push to master in spinnaker.github.io
    publish_changelog_command = PublishChangelogFactory().make_command(
        options_copy)
    changelog_gist_url = options.changelog_gist_url

    # Make sure changelog exists already.
    # If it does not then fail.
    try:
      logging.debug('Verifying changelog ready at %s', changelog_gist_url)
      urlopen(changelog_gist_url)
    except HTTPError as error:
      logging.error(exception_to_message(error))
      raise_and_log_error(
          ConfigError(
              'Changelog gist "{url}" must exist before publishing a release.'
              .format(url=changelog_gist_url),
              cause='ChangelogMissing'))

    bom = self.__hal.retrieve_bom_version(self.options.bom_version)
    bom['version'] = spinnaker_version
    bom_path = os.path.join(self.get_output_dir(), spinnaker_version + '.yml')
    write_to_path(yaml.safe_dump(bom, default_flow_style=False), bom_path)
    self.__hal.publish_bom_path(bom_path)
    self.push_branches_and_tags(bom)

    self.__hal.publish_spinnaker_release(
        spinnaker_version, options.spinnaker_release_alias,
        changelog_gist_url, options.min_halyard_version)

    logging.info('Publishing changelog')
    publish_changelog_command()
  def init_argparser(self, parser, defaults):
    super(BuildGceComponentImagesFactory, self).init_argparser(
        parser, defaults)
    HalRunner.add_parser_args(parser, defaults)
    self.add_argument(
        parser, 'halyard_release_track', defaults, 'stable',
        choices=['nightly', 'stable'],
        help='Which halyard release track to use when installing images.')
    self.add_argument(
        parser, 'skip_existing', defaults, False, type=bool,
        help='Skip builds if the desired image already exists in GCE.')
    self.add_argument(
        parser, 'delete_existing', defaults, None, type=bool,
        help='Delete pre-existing desired images from GCE.')
    self.add_argument(
        parser, 'build_gce_service_account', defaults, None,
        help='Service account for building images.')
    self.add_argument(
        parser, 'build_gce_project', defaults, None,
        help='Project to build image in.')
    self.add_argument(
        parser, 'build_gce_zone', defaults, 'us-central1-f',
        help='Zone to build image in.')

    halyard_install_sh = 'dev/halyard_install_component.sh'
    self.add_argument(
        parser, 'install_image_script', defaults, halyard_install_sh,
        help='Script for installing images.')

    publish_image_sh = os.path.join(
        os.path.dirname(__file__), '..', '..', 'google', 'dev',
        'publish_gce_release.sh')
    self.add_argument(
        parser, 'publish_gce_image_script', defaults, publish_image_sh,
        help='Script for publishing images to a project.')

    self.add_argument(
        parser, 'git_branch', defaults, None,
        help='Github branch to get install scripts from.'
             ' If none, then use the source repo branch that this script'
             ' is running from.')
    self.add_argument(
        parser, 'bintray_org', defaults, None,
        help='The bintray organization for the bintray_*_repositories.')
    self.add_argument(
        parser, 'bintray_debian_repository', defaults, None,
        help='Repository where built debians were placed.')
    self.add_argument(
        parser, 'halyard_bom_bucket', defaults, 'halconfig',
        help='The bucket managing halyard BOMs and config profiles.')
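# The factory above registers every flag through self.add_argument(parser,
# name, defaults, fallback, ...), where an externally supplied "defaults"
# mapping can override the hard-coded fallback. A minimal standalone sketch of
# that pattern using only the standard argparse module; the helper name,
# defaults dict, and sample values are illustrative assumptions, not part of
# the real buildtool API:
import argparse

def add_argument(parser, name, defaults, default_value, **kwargs):
  """Register --<name>, preferring an externally supplied default."""
  parser.add_argument('--' + name,
                      default=defaults.get(name, default_value),
                      **kwargs)

parser = argparse.ArgumentParser()
defaults = {'halyard_bom_bucket': 'my-test-bucket'}  # e.g. loaded from a config file
add_argument(parser, 'halyard_bom_bucket', defaults, 'halconfig',
             help='The bucket managing halyard BOMs and config profiles.')
add_argument(parser, 'build_gce_zone', defaults, 'us-central1-f',
             help='Zone to build image in.')
# parser.parse_args([]) -> Namespace(build_gce_zone='us-central1-f',
#                                    halyard_bom_bucket='my-test-bucket')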
class PublishSpinnakerCommand(CommandProcessor):
  """Implements the publish_spinnaker command."""

  # pylint: disable=too-few-public-methods

  def __init__(self, factory, options, **kwargs):
    super(PublishSpinnakerCommand, self).__init__(factory, options, **kwargs)
    check_options_set(options, [
        'spinnaker_version',
        'bom_version',
        'github_owner',
        'min_halyard_version'
    ])

    options_copy = copy.copy(options)
    self.__scm = BomSourceCodeManager(options_copy, self.get_input_dir())
    self.__hal = HalRunner(options)
    self.__git = GitRunner(options)
    self.__hal.check_property(
        'spinnaker.config.input.bucket', options.halyard_bom_bucket)

  def push_branches_and_tags(self, bom):
    """Update the release branches and tags in each of the BOM repositories."""
    major, minor, _ = self.options.spinnaker_version.split('.')
    branch = 'release-{major}.{minor}.x'.format(major=major, minor=minor)

    logging.info('Tagging each of the BOM service repos')

    # Run in two passes so we don't push anything if we hit a problem
    # in the tagging pass. Since we are spread across multiple repositories,
    # we cannot do this atomically. The two passes give us more protection
    # from a partial push due to errors in a repo.
    for which in ['tag', 'push']:
      for name, spec in bom['services'].items():
        if name in ['monitoring-third-party', 'defaultArtifact']:
          # Ignore this, it is redundant to monitoring-daemon
          continue
        if spec is None:
          logging.warning('HAVE bom.services.%s = None', name)
          continue
        if name == 'monitoring-daemon':
          name = 'spinnaker-monitoring'
        repository = self.__scm.make_repository_spec(name)
        self.__scm.ensure_local_repository(repository)
        if which == 'tag':
          self.__branch_and_tag_repository(repository, branch)
        else:
          self.__push_branch_and_tag_repository(repository, branch)

  def __branch_and_tag_repository(self, repository, branch):
    """Create a branch and/or version tag in the repository, if needed."""
    source_info = self.__scm.lookup_source_info(repository)
    tag = 'version-' + source_info.summary.version
    self.__git.check_run(repository.git_dir, 'tag ' + tag)

  def __push_branch_and_tag_repository(self, repository, branch):
    """Push the branch and version tag to the origin."""
    source_info = self.__scm.lookup_source_info(repository)
    tag = 'version-' + source_info.summary.version
    self.__git.push_branch_to_origin(repository.git_dir, branch)
    self.__git.push_tag_to_origin(repository.git_dir, tag)

  def _do_command(self):
    """Implements CommandProcessor interface."""
    options = self.options
    spinnaker_version = options.spinnaker_version

    bom = self.__hal.retrieve_bom_version(self.options.bom_version)
    bom['version'] = spinnaker_version
    self.push_branches_and_tags(bom)

    bom_path = os.path.join(self.get_output_dir(), spinnaker_version + '.yml')
    changelog_base_url = 'https://www.spinnaker.io/%s' % options.github_owner
    changelog_filename = '%s-changelog' % spinnaker_version.replace('.', '-')
    changelog_uri = '%s/community/releases/versions/%s' % (
        changelog_base_url, changelog_filename)
    write_to_path(yaml.dump(bom, default_flow_style=False), bom_path)

    self.__hal.publish_spinnaker_release(
        spinnaker_version, options.spinnaker_release_alias, changelog_uri,
        options.min_halyard_version)
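# The interesting pattern in push_branches_and_tags() is the two-pass loop:
# tag every repository first, and only push once all tagging succeeded, so an
# error cannot leave origin half-updated. A minimal standalone sketch of the
# same idea using plain git via subprocess; the helper names and repo paths
# are hypothetical, not buildtool code:
import subprocess

def run_git(git_dir, *args):
  """Run a git command inside git_dir and fail loudly on error."""
  subprocess.run(['git', '-C', git_dir] + list(args), check=True)

def tag_then_push(git_dirs, tag, branch):
  # Pass 1: create the tag everywhere. Any failure raises before we push.
  for git_dir in git_dirs:
    run_git(git_dir, 'tag', tag)
  # Pass 2: only now publish the branch and tag to origin.
  for git_dir in git_dirs:
    run_git(git_dir, 'push', 'origin', branch)
    run_git(git_dir, 'push', 'origin', tag)

# tag_then_push(['./clouddriver', './gate'], 'version-1.2.3', 'release-1.2.x')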
class PublishSpinnakerCommand(CommandProcessor):
  """Implements the publish_spinnaker command."""

  # pylint: disable=too-few-public-methods

  def __init__(self, factory, options, **kwargs):
    super(PublishSpinnakerCommand, self).__init__(factory, options, **kwargs)
    check_options_set(options, [
        'spinnaker_version',
        'spinnaker_release_alias',
        'bom_version',
        'changelog_gist_url',
        'github_owner',
        'min_halyard_version'
    ])

    major, minor, _ = self.options.spinnaker_version.split('.')
    self.__branch = 'release-{major}.{minor}.x'.format(
        major=major, minor=minor)

    options_copy = copy.copy(options)
    self.__bom_scm = BomSourceCodeManager(options_copy, self.get_input_dir())
    self.__hal = HalRunner(options)
    self.__git = GitRunner(options)
    self.__hal.check_property(
        'spinnaker.config.input.bucket', options.halyard_bom_bucket)

    if options.only_repositories:
      self.__only_repositories = options.only_repositories.split(',')
    else:
      self.__only_repositories = []

    options_copy.git_branch = self.__branch
    self.__branch_scm = BranchSourceCodeManager(
        options_copy, self.get_input_dir())

  def push_branches_and_tags(self, bom):
    """Update the release branches and tags in each of the BOM repositories."""
    logging.info('Tagging each of the BOM service repos')

    bom_scm = self.__bom_scm
    branch_scm = self.__branch_scm

    # Run in two passes so we don't push anything if we hit a problem
    # in the tagging pass. Since we are spread across multiple repositories,
    # we cannot do this atomically. The two passes give us more protection
    # from a partial push due to errors in a repo.
    names_to_push = set([])
    for which in ['tag', 'push']:
      for name, spec in bom['services'].items():
        if name in ['monitoring-third-party', 'defaultArtifact']:
          # Ignore this, it is redundant to monitoring-daemon
          continue
        if name == 'monitoring-daemon':
          name = 'spinnaker-monitoring'
        if self.__only_repositories and name not in self.__only_repositories:
          logging.debug('Skipping %s because of --only_repositories', name)
          continue
        if spec is None:
          logging.warning('HAVE bom.services.%s = None', name)
          continue

        repository = bom_scm.make_repository_spec(name)
        bom_scm.ensure_local_repository(repository)
        version = bom_scm.determine_repository_version(repository)
        if which == 'tag':
          added = self.__branch_and_tag_repository(
              repository, self.__branch, version)
          if added:
            names_to_push.add(name)
        else:
          self.__push_branch_and_maybe_tag_repository(
              repository, self.__branch, version, name in names_to_push)

    additional_repositories = list(SPINNAKER_PROCESS_REPOSITORY_NAMES)
    for name in additional_repositories:
      if self.__only_repositories and name not in self.__only_repositories:
        logging.debug('Skipping %s because of --only_repositories', name)
        continue
      repository = branch_scm.make_repository_spec(name)
      branch_scm.ensure_local_repository(repository)
      git_summary = self.__git.collect_repository_summary(repository.git_dir)
      version = git_summary.version
      if self.__branch_and_tag_repository(
          repository, self.__branch, version):
        self.__push_branch_and_maybe_tag_repository(
            repository, self.__branch, version, True)

  def __already_have_tag(self, repository, tag):
    """Determine if we already have the tag in the repository."""

    git_dir = repository.git_dir
    existing_commit = self.__git.query_commit_at_tag(git_dir, tag)
    if not existing_commit:
      return False
    want_commit = self.__git.query_local_repository_commit_id(git_dir)
    if want_commit == existing_commit:
      logging.debug('Already have "%s" at %s', tag, want_commit)
      return True

    raise_and_log_error(
        ConfigError(
            '"{tag}" already exists in "{repo}" at commit {have}, not {want}'
            .format(tag=tag, repo=git_dir,
                    have=existing_commit, want=want_commit)))
    return False  # not reached

  def __branch_and_tag_repository(self, repository, branch, version):
    """Create a branch and/or version tag in the repository, if needed."""
    tag = 'version-' + version
    if self.__already_have_tag(repository, tag):
      return False
    self.__git.check_run(repository.git_dir, 'tag ' + tag)
    return True

  def __push_branch_and_maybe_tag_repository(self, repository, branch, version,
                                             also_tag):
    """Push the branch and version tag to the origin."""
    tag = 'version-' + version
    self.__git.push_branch_to_origin(repository.git_dir, branch)
    if also_tag:
      self.__git.push_tag_to_origin(repository.git_dir, tag)
    else:
      logging.info('%s was already tagged with "%s" -- skip',
                   repository.git_dir, tag)

  def _do_command(self):
    """Implements CommandProcessor interface."""
    options = self.options
    spinnaker_version = options.spinnaker_version
    options_copy = copy.copy(options)
    options_copy.git_branch = 'master'  # push to master in spinnaker.github.io
    publish_changelog_command = PublishChangelogFactory().make_command(
        options_copy)
    changelog_gist_url = options.changelog_gist_url

    # Make sure changelog exists already.
    # If it does not then fail.
    try:
      logging.debug('Verifying changelog ready at %s', changelog_gist_url)
      urlopen(changelog_gist_url)
    except HTTPError as error:
      logging.error(exception_to_message(error))
      raise_and_log_error(
          ConfigError(
              'Changelog gist "{url}" must exist before publishing a release.'
              .format(url=changelog_gist_url),
              cause='ChangelogMissing'))

    bom = self.__hal.retrieve_bom_version(self.options.bom_version)
    bom['version'] = spinnaker_version
    bom_path = os.path.join(self.get_output_dir(), spinnaker_version + '.yml')
    write_to_path(yaml.safe_dump(bom, default_flow_style=False), bom_path)
    self.__hal.publish_bom_path(bom_path)
    self.push_branches_and_tags(bom)

    self.__hal.publish_spinnaker_release(
        spinnaker_version, options.spinnaker_release_alias, changelog_gist_url,
        options.min_halyard_version)

    logging.info('Publishing changelog')
    publish_changelog_command()
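# _do_command() above fails fast if the changelog gist is not reachable before
# anything is tagged or published. A minimal standalone sketch of that
# pre-flight URL check using only the standard library; the function name and
# example URL are placeholders, not buildtool code:
import logging
from urllib.error import HTTPError, URLError
from urllib.request import urlopen

def changelog_is_reachable(url):
  """Return True if the changelog URL answers successfully."""
  try:
    urlopen(url)
    return True
  except (HTTPError, URLError) as error:
    logging.error('Cannot reach changelog at %s: %s', url, error)
    return False

# Abort early instead of discovering the problem mid-release, e.g.:
# if not changelog_is_reachable('https://gist.github.com/example/changelog'):
#   raise ValueError('Changelog gist must exist before publishing a release.')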
class PublishHalyardCommand(CommandProcessor):
  """Publish halyard version to the public repository."""

  def __init__(self, factory, options, **kwargs):
    options_copy = copy.copy(options)
    options_copy.bom_path = None
    options_copy.bom_version = None
    options_copy.git_branch = 'master'
    # Overridden later if --git_allow_publish_master_branch is false
    super(PublishHalyardCommand, self).__init__(
        factory, options_copy, **kwargs)

    self.__scm = BranchSourceCodeManager(options_copy, self.get_input_dir())
    self.__hal = HalRunner(options_copy)
    self.__gradle = GradleRunner(options_copy, self.__scm, self.metrics)
    self.__halyard_repo_md_path = os.path.join('docs', 'commands.md')

    dash = self.options.halyard_version.find('-')
    semver_str = self.options.halyard_version[0:dash]
    semver_parts = semver_str.split('.')
    if len(semver_parts) != 3:
      raise_and_log_error(
          ConfigError('Expected --halyard_version in the form X.Y.Z-N'))
    self.__release_branch = 'release-{maj}.{min}.x'.format(
        maj=semver_parts[0], min=semver_parts[1])
    self.__release_tag = 'version-' + semver_str
    self.__release_version = semver_str

  def determine_commit(self, repository):
    """Determine the commit_id that we want to publish."""
    if repository.name != 'halyard':
      raise_and_log_error(
          ConfigError('Unexpected repository "%s"' % repository.name))

    options = self.options
    versions_url = options.halyard_version_commits_url
    if not versions_url:
      versions_url = '{base}/{filename}'.format(
          base=options.halyard_bucket_base_url,
          filename=BuildHalyardCommand.HALYARD_VERSIONS_BASENAME)

    if os.path.exists(versions_url):
      logging.debug('Loading halyard version info from file %s', versions_url)
      with open(versions_url, 'r') as stream:
        version_data = stream.read()
    else:
      logging.debug('Loading halyard version info from bucket %s',
                    versions_url)
      version_data = check_subprocess(
          'gsutil cat {url}'.format(url=versions_url))

    commit = yaml.safe_load(version_data).get(options.halyard_version)
    if commit is None:
      raise_and_log_error(
          ConfigError(
              'Unknown halyard version "{version}" in "{url}"'.format(
                  version=options.halyard_version, url=versions_url)))
    return commit

  def _prepare_repository(self):
    """Prepare a local repository to build for release.

    We're rebuilding it only to have nebula give a new distribution tag.
    However we will also use the repository to tag and branch the release
    into github, so we want to at least clone the repo regardless.
    """
    logging.debug('Preparing repository for publishing a halyard release.')
    repository = self.__scm.make_repository_spec(
        SPINNAKER_HALYARD_REPOSITORY_NAME)
    commit = self.determine_commit(repository)
    git_dir = repository.git_dir
    if os.path.exists(git_dir):
      logging.info('Deleting existing %s to build fresh.', git_dir)
      shutil.rmtree(git_dir)
    git = self.__scm.git
    git.clone_repository_to_path(repository, commit=commit)
    self.__scm.refresh_source_info(repository, self.options.build_number)
    return repository

  def _build_release(self, repository):
    """Rebuild the actual release.

    We don't necessarily need to rebuild here. We just need to push the
    debian to the "-stable" repositories.
    """
    # Ideally we would just modify the existing bintray version to add
    # trusty-stable to the distributions, however it does not appear possible
    # to patch the debian attributes of a bintray version, only the
    # version metadata. Therefore, we'll rebuild it.
    # Alternatively we could download the existing one and push a new one,
    # however I don't see how to get at the existing debian metadata and
    # don't want to omit something.
    git_dir = repository.git_dir
    summary = self.__scm.git.collect_repository_summary(git_dir)
    args = self.__gradle.get_common_args()
    args.extend(
        self.__gradle.get_debian_args('trusty-stable,xenial-stable'))
    build_number = self.options.build_number
    if not self.__gradle.consider_debian_on_bintray(
        repository, build_number=build_number):
      self.__gradle.check_run(
          args, self, repository, 'candidate', 'build-release',
          version=self.__release_version, build_number=build_number,
          gradle_dir=git_dir)

    info_path = os.path.join(self.get_output_dir(), 'halyard_info.yml')
    logging.debug('Writing build information to %s', info_path)
    write_to_path(summary.to_yaml(), info_path)

  def write_target_docs(self, source_repository, target_repository):
    source_path = os.path.join(source_repository.git_dir,
                               self.__halyard_repo_md_path)
    target_rel_path = os.path.join('reference', 'halyard', 'commands.md')
    target_path = os.path.join(target_repository.git_dir, target_rel_path)
    now = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
    logging.debug('Writing documentation into %s', target_path)
    header = textwrap.dedent(
        """\
        ---
        layout: single
        title: "Commands"
        sidebar:
          nav: reference
        ---
        Published: {now}
        """.format(now=now))
    with open(source_path, 'r') as source:
      body = source.read()
    with open(target_path, 'w') as stream:
      stream.write(header)
      stream.write(body)
    return target_rel_path

  def push_docs(self, repository):
    base_branch = 'master'
    target_repository = self.__scm.make_repository_spec(
        SPINNAKER_GITHUB_IO_REPOSITORY_NAME)
    self.__scm.ensure_git_path(target_repository)
    target_rel_path = self.write_target_docs(repository, target_repository)

    if self.options.git_allow_publish_master_branch:
      head_branch = 'master'
      branch_flag = ''
    else:
      head_branch = self.__release_version + '-haldocs'
      branch_flag = '-b'
    logging.debug('Committing changes into local repository "%s" branch=%s',
                  target_repository.git_dir, head_branch)

    git_dir = target_repository.git_dir
    message = 'docs(halyard): ' + self.__release_version
    local_git_commands = [
        # These commands are accommodating to a branch already existing
        # because the branch is on the version, not build. A rejected
        # build for some reason that is re-tried will have the same version
        # so the branch may already exist from the earlier attempt.
        'checkout ' + base_branch,
        'checkout {flag} {branch}'.format(flag=branch_flag,
                                          branch=head_branch),
        'add ' + target_rel_path,
        'commit -m "{msg}" {path}'.format(msg=message, path=target_rel_path),
    ]
    logging.debug('Committing changes into local repository "%s" branch=%s',
                  target_repository.git_dir, head_branch)
    git = self.__scm.git
    git.check_run_sequence(git_dir, local_git_commands)

    logging.info('Pushing halyard docs to %s branch="%s"',
                 target_repository.origin, head_branch)
    git.push_branch_to_origin(target_repository.git_dir, branch=head_branch)

  def _do_command(self):
    """Implements CommandProcessor interface."""
    repository = self._prepare_repository()
    self._build_release(repository)
    build_halyard_docs(self, repository)
    self.push_docs(repository)
    self.push_tag_and_branch(repository)
    self.__hal.publish_halyard_release(self.__release_version)

  def push_tag_and_branch(self, repository):
    """Pushes a stable branch and git version tag to the origin repository."""
    git_dir = repository.git_dir
    git = self.__scm.git

    release_url = repository.origin
    logging.info('Pushing branch=%s and tag=%s to %s',
                 self.__release_branch, self.__release_tag, release_url)
    git.check_run_sequence(
        git_dir,
        [
            'checkout -b ' + self.__release_branch,
            'remote add release ' + release_url,
            'push release ' + self.__release_branch,
            'tag ' + self.__release_tag,
            'push release ' + self.__release_tag
        ])
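# determine_commit() above looks a halyard version up in a small YAML mapping
# of "version -> commit id" that lives either on local disk or in a GCS
# bucket read via "gsutil cat". A minimal standalone sketch of that lookup;
# the file name and sample contents shown in the comments are illustrative
# assumptions only:
import os
import subprocess
import yaml

def lookup_commit(versions_url, version):
  """Return the commit recorded for `version`, or None if unknown."""
  if os.path.exists(versions_url):
    with open(versions_url, 'r') as stream:
      version_data = stream.read()
  else:
    # Fall back to reading the object out of Google Cloud Storage.
    version_data = subprocess.check_output(
        ['gsutil', 'cat', versions_url]).decode('utf-8')
  return yaml.safe_load(version_data).get(version)

# e.g. a local file 'halyard_versions.yml' containing:
#   1.29.0-20200101: 0123456789abcdef0123456789abcdef01234567
# lookup_commit('halyard_versions.yml', '1.29.0-20200101')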
class PublishBomCommand(RepositoryCommandProcessor):
  """Implements publish_bom"""

  def __init__(self, factory, options, **kwargs):
    options.github_disable_upstream_push = True
    super(PublishBomCommand, self).__init__(factory, options, **kwargs)
    self.__hal_runner = HalRunner(options)
    logging.debug('Verifying halyard server is consistent')

    # Halyard is configured with fixed endpoints, however when we
    # publish we want to be explicit about where we are publishing to.
    # There isn't a way to control this in halyard on a per-request basis
    # so make sure halyard was configured consistent with where we want
    # these BOMs to go.
    self.__hal_runner.check_property(
        'spinnaker.config.input.bucket', options.halyard_bom_bucket)

  def _do_repository(self, repository):
    """Implements RepositoryCommandProcessor interface."""
    self.source_code_manager.ensure_local_repository(repository)
    self.__collect_halconfig_files(repository)

  def _do_postprocess(self, _):
    """Implements RepositoryCommandProcessor interface."""
    options = self.options
    bom_path = _determine_bom_path(self)
    self.__hal_runner.publish_bom_path(bom_path)
    self.__publish_configs(bom_path)

    if options.bom_alias:
      alias = options.bom_alias
      logging.info('Publishing bom alias %s = %s',
                   alias, os.path.basename(bom_path))
      with open(bom_path, 'r') as stream:
        bom = yaml.safe_load(stream)

      alias_path = os.path.join(os.path.dirname(bom_path), alias + '.yml')
      with open(alias_path, 'w') as stream:
        bom['version'] = options.bom_alias
        yaml.safe_dump(bom, stream, default_flow_style=False)
      self.__hal_runner.publish_bom_path(alias_path)

  def __publish_configs(self, bom_path):
    """Publish each of the halconfigs for the bom at the given path."""
    def publish_repo_config(repository):
      """Helper function to publish individual repository."""
      name = self.scm.repository_name_to_service_name(repository.name)
      config_dir = os.path.join(self.get_output_dir(), 'halconfig', name)
      if not os.path.exists(config_dir):
        logging.warning('No profiles for %s', name)
        return

      logging.debug('Publishing profiles for %s', name)
      for profile in os.listdir(config_dir):
        profile_path = os.path.join(config_dir, profile)
        self.__hal_runner.publish_profile(name, profile_path, bom_path)

    logging.info('Publishing halyard configs...')
    self.source_code_manager.foreach_source_repository(
        self.source_repositories, publish_repo_config)

  def __collect_halconfig_files(self, repository):
    """Gets the component config files and writes them into the output_dir."""
    name = repository.name
    if (name not in SPINNAKER_BOM_REPOSITORY_NAMES
        or name in ['spin']):
      logging.debug('%s does not use config files -- skipping', name)
      return

    if name == 'spinnaker-monitoring':
      config_root = os.path.join(
          repository.git_dir, 'spinnaker-monitoring-daemon')
    else:
      config_root = repository.git_dir

    service_name = self.scm.repository_name_to_service_name(repository.name)
    target_dir = os.path.join(self.get_output_dir(), 'halconfig', service_name)
    ensure_dir_exists(target_dir)

    config_path = os.path.join(config_root, 'halconfig')
    logging.info('Copying configs from %s...', config_path)
    for profile in os.listdir(config_path):
      profile_path = os.path.join(config_path, profile)
      if os.path.isfile(profile_path):
        shutil.copyfile(profile_path, os.path.join(target_dir, profile))
        logging.debug('Copied profile to %s', profile_path)
      elif not os.path.isdir(profile_path):
        logging.warning('%s is neither file nor directory -- ignoring',
                        profile_path)
        continue
      else:
        tar_path = os.path.join(
            target_dir, '{profile}.tar.gz'.format(profile=profile))
        file_list = ' '.join(os.listdir(profile_path))

        # NOTE: For historic reasons this is not actually compressed
        # even though the tar_path says ".tar.gz"
        check_subprocess(
            'tar cf {path} -C {profile} {file_list}'.format(
                path=tar_path, profile=profile_path, file_list=file_list))
        logging.debug('Copied profile to %s', tar_path)
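# __collect_halconfig_files() above shells out to "tar cf" to bundle a profile
# directory (uncompressed, despite the ".tar.gz" name). A hedged alternative
# sketch of that same bundling step using only the stdlib tarfile module; the
# directory and output names are placeholders, not buildtool behavior:
import os
import tarfile

def bundle_profile_dir(profile_dir, tar_path):
  """Archive the files inside profile_dir (not the directory itself)."""
  with tarfile.open(tar_path, 'w') as archive:  # mode 'w' == uncompressed tar
    for entry in os.listdir(profile_dir):
      archive.add(os.path.join(profile_dir, entry), arcname=entry)

# bundle_profile_dir('halconfig/some-profile', 'some-profile.tar.gz')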
def init_argparser(self, parser, defaults):
    super(PublishHalyardCommandFactory, self).init_argparser(parser, defaults)
    GradleCommandFactory.add_bom_parser_args(parser, defaults)
    SpinnakerSourceCodeManager.add_parser_args(parser, defaults)
    GradleRunner.add_parser_args(parser, defaults)
    GitRunner.add_publishing_parser_args(parser, defaults)
    HalRunner.add_parser_args(parser, defaults)

    self.add_argument(
        parser,
        "build_number",
        defaults,
        DEFAULT_BUILD_NUMBER,
        help="Publishing halyard requires a rebuild. This is the build number"
        " to use when rebuilding halyard.",
    )
    self.add_argument(
        parser,
        "halyard_version",
        defaults,
        None,
        help="The semantic version of the release to publish.",
    )
    self.add_argument(
        parser,
        "halyard_version_commits_url",
        defaults,
        None,
        help="URL to file containing version and git commit for successful"
        " nightly builds. By default this will be"
        ' "{filename}" in the'
        " --halyard_bucket_base_url.".format(
            filename=BuildHalyardCommand.HALYARD_VERSIONS_BASENAME
        ),
    )
    self.add_argument(
        parser,
        "halyard_docker_image_base",
        defaults,
        None,
        help="Base Docker image name for writing halyard builds.",
    )
    self.add_argument(
        parser,
        "halyard_artifact_registry_image_base",
        defaults,
        None,
        help="Base Artifact Registry image name for writing halyard builds.",
    )
    self.add_argument(
        parser,
        "halyard_bucket_base_url",
        defaults,
        None,
        help="Base Google Cloud Storage URL for writing halyard builds.",
    )
    self.add_argument(
        parser,
        "docs_repo_owner",
        defaults,
        None,
        help="Owner of the docs repo if one was"
        " specified. The default is --github_owner.",
    )
    self.add_argument(
        parser,
        "skip_existing",
        defaults,
        False,
        type=bool,
        help="Skip builds if the desired version already exists on bintray.",
    )
    self.add_argument(
        parser,
        "delete_existing",
        defaults,
        None,
        type=bool,
        help="Delete pre-existing desired versions from bintray.",
    )
    self.add_argument(
        parser,
        "gcb_project",
        defaults,
        None,
        help="The GCP project ID when using the GCP Container Builder.",
    )
    self.add_argument(
        parser,
        "gcb_service_account",
        defaults,
        None,
        help="Google Service Account when using the GCP Container Builder.",
    )
    self.add_argument(
        parser,
        "artifact_registry",
        defaults,
        None,
        help="Artifact Registry to push the container images to.",
    )
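# The factory above composes its command-line surface by letting each helper
# (GradleRunner, GitRunner, HalRunner, ...) register its own flags on the same
# parser before adding the command-specific ones. A minimal standalone sketch
# of that composition idea with plain argparse; the component classes and flag
# names here are made up for illustration, not buildtool APIs:
import argparse

class FakeGitRunner(object):
    @staticmethod
    def add_parser_args(parser, defaults):
        parser.add_argument("--github_owner",
                            default=defaults.get("github_owner"))

class FakeHalRunner(object):
    @staticmethod
    def add_parser_args(parser, defaults):
        parser.add_argument("--hal_path",
                            default=defaults.get("hal_path", "hal"))

def build_parser(defaults):
    parser = argparse.ArgumentParser()
    # Each component owns the flags it needs; the factory only stitches them
    # together, so a command automatically picks up new component options.
    for component in (FakeGitRunner, FakeHalRunner):
        component.add_parser_args(parser, defaults)
    return parser

# build_parser({"github_owner": "spinnaker"}).parse_args([])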