Example 1
    def _build_release(self, repository):
        """Rebuild the actual release.

        We don't necessarily need to rebuild here; we just need to push the
        existing debian to the "-stable" distributions.
        """
        # Ideally we would just modify the existing bintray version to add
        # trusty-stable to the distributions, however it does not appear possible
        # to patch the debian attributes of a bintray version, only the
        # version metadata. Therefore, we'll rebuild it.
        # Alternatively we could download the existing package and push a new
        # one, however I don't see how to get at the existing debian metadata
        # and don't want to omit something.

        git_dir = repository.git_dir
        summary = self.__scm.git.collect_repository_summary(git_dir)

        args = self.__gradle.get_common_args()
        args.extend(
            self.__gradle.get_debian_args('trusty-stable,xenial-stable'))
        build_number = self.options.build_number
        if not self.__gradle.consider_debian_on_bintray(
                repository, build_number=build_number):
            self.__gradle.check_run(args,
                                    self,
                                    repository,
                                    'candidate',
                                    'build-release',
                                    version=self.__release_version,
                                    build_number=build_number,
                                    gradle_dir=git_dir)

        info_path = os.path.join(self.get_output_dir(), 'halyard_info.yml')
        logging.debug('Writing build information to %s', info_path)
        write_to_path(summary.to_yaml(), info_path)
Example 2
  def _build_release(self, repository):
    """Rebuild the actual release debian package.

    We don't necessarily need to rebuild here; we just need to push the
    existing debian to the "-stable" distributions. However there isn't an
    easy way to do this.

    Note that this is not the promoted version. For safety[*] and simplicity
    we'll promote the candidate whose version was used to build this.
    Ideally this function can go away.

    [*] Safety because the candidate was tested whereas this build was not.
    """
    # Ideally we would just modify the existing bintray version to add
    # *-stable to the distributions, however it does not appear possible
    # to patch the debian attributes of a bintray version, only the
    # version metadata. Therefore, we'll rebuild it.
    # Alternatively we could download the existing package and push a new
    # one, however I don't see how to get at the existing debian metadata
    # and don't want to omit something.

    git_dir = repository.git_dir
    summary = self.__scm.git.collect_repository_summary(git_dir)
    args = self.__gradle.get_common_args()

    args.extend(self.__gradle.get_debian_args(
        'trusty-stable,xenial-stable,bionic-stable'))
    build_number = self.options.build_number
    self.__gradle.check_run(
        args, self, repository, 'candidate', 'build-release',
        version=self.__release_version, build_number=build_number,
        gradle_dir=git_dir)

    info_path = os.path.join(self.get_output_dir(), 'halyard_info.yml')
    logging.debug('Writing build information to %s', info_path)
    write_to_path(summary.to_yaml(), info_path)
Example 3
  def collect_gcb_versions(self, pool):
    options = self.options
    logging.debug('Collecting GCB versions from %s', options.docker_registry)
    command_parts = ['gcloud',
                     '--format=json',
                     'container images list',
                     '--repository', options.docker_registry]
    if options.gcb_service_account:
      logging.debug('Using account %s', options.gcb_service_account)
      command_parts.extend(['--account', options.gcb_service_account])

    response = check_subprocess(' '.join(command_parts))
    images = [entry['name']
              for entry in json.JSONDecoder(encoding='utf-8').decode(response)]
    image_versions = pool.map(self.query_gcr_image_versions, images)

    image_map = {}
    for name, versions in image_versions:
      image_map[name] = versions

    path = os.path.join(
        self.get_output_dir(),
        options.docker_registry.replace('/', '__') + '__gcb_versions.yml')
    logging.info('Writing %s versions to %s', options.docker_registry, path)
    write_to_path(yaml.dump(image_map,
                            allow_unicode=True,
                            default_flow_style=False), path)
    return image_map
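
A note on the JSON decoding above (the same construct appears in Example 27):
json.JSONDecoder(encoding='utf-8') only works on Python 2; Python 3's
JSONDecoder takes no encoding argument. A Python 3 equivalent of that line:

import json

# json.loads accepts the response string directly; no decoder object or
# encoding argument is needed.
images = [entry['name'] for entry in json.loads(response)]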
Example 4
  def _do_command(self):
    pool = ThreadPool(16)
    bintray_jars, bintray_debians = self.collect_bintray_versions(pool)
    self.collect_gcb_versions(pool)
    self.collect_gce_image_versions()
    pool.close()
    pool.join()

    missing_jars = self.find_missing_jar_versions(
        bintray_jars, bintray_debians)
    missing_debians = self.find_missing_debian_versions(
        bintray_jars, bintray_debians)

    options = self.options
    for which in [(options.bintray_jar_repository, missing_jars),
                  (options.bintray_debian_repository, missing_debians)]:
      if not which[1]:
        logging.info('%s is all accounted for.', which[0])
        continue
      path = os.path.join(self.get_output_dir(), 'missing_%s.yml' % which[0])
      logging.info('Writing to %s', path)
      write_to_path(
          yaml.dump(which[1], allow_unicode=True, default_flow_style=False),
          path)

    config = {
        'bintray_org': options.bintray_org,
        'bintray_jar_repository': options.bintray_jar_repository,
        'bintray_debian_repository': options.bintray_debian_repository,
        'docker_registry': options.docker_registry,
        'googleImageProject': options.publish_gce_image_project
    }
    path = os.path.join(self.get_output_dir(), 'config.yml')
    logging.info('Writing to %s', path)
    write_to_path(yaml.dump(config, default_flow_style=False), path)
Example 5
  def refresh_source_info(self, repository, build_number):
    """Extract the source info from repository and cache with build number.

    We associate the build number because the different builds
    (debian, container, etc.) need to share the same build number so that the
    eventual BOM is consistent. Since we don't build everything at once, we'll
    need to remember it.

    We extract the repository summary info, particularly the commit it is
    at, to ensure that future operations are consistent and operating on the
    same commit.
    """
    summary = self.__git.collect_repository_summary(repository.git_dir)
    expect_build_number = (self.__options.build_number
                           if hasattr(self.__options, 'build_number')
                           else build_number)
    info = SourceInfo(expect_build_number, summary)

    filename = repository.name + '-meta.yml'
    dir_path = os.path.join(self.__options.output_dir, 'source_info')
    cache_path = os.path.join(dir_path, filename)
    logging.debug(
        'Refreshing source info for %s and caching to %s for buildnum=%s',
        repository.name, cache_path, build_number)
    write_to_path(info.summary.to_yaml(), cache_path)
    return info
Example 6
  def _build_release(self, repository):
    """Rebuild the actual release debian package.

    We don't necessarily need to rebuild here; we just need to push the
    existing debian to the "-stable" distributions. However there isn't an
    easy way to do this.

    Note that this is not the promoted version. For safety[*] and simplicity
    we'll promote the candidate whose version was used to build this.
    Ideally this function can go away.

    [*] Safety because the candidate was tested whereas this build was not.
    """
    # Ideally we would just modify the existing bintray version to add
    # trusty-stable to the distributions, however it does not appear possible
    # to patch the debian attributes of a bintray version, only the
    # version metadata. Therefore, we'll rebuild it.
    # Alternatively we could download the existing package and push a new
    # one, however I don't see how to get at the existing debian metadata
    # and don't want to omit something.

    git_dir = repository.git_dir
    summary = self.__scm.git.collect_repository_summary(git_dir)

    args = self.__gradle.get_common_args()
    args.extend(self.__gradle.get_debian_args('trusty-stable,xenial-stable'))
    build_number = self.options.build_number
    self.__gradle.check_run(
        args, self, repository, 'candidate', 'build-release',
        version=self.__release_version, build_number=build_number,
        gradle_dir=git_dir)

    info_path = os.path.join(self.get_output_dir(), 'halyard_info.yml')
    logging.debug('Writing build information to %s', info_path)
    write_to_path(summary.to_yaml(), info_path)
Example 7
  def refresh_source_info(self, repository, build_number):
    """Extract the source info from repository and cache with build number.

    We associate the build number because the different builds
    (debian, container, etc.) need to share the same build number so that the
    eventual BOM is consistent. Since we don't build everything at once, we'll
    need to remember it.

    We extract the repository summary info, particularly the commit it is
    at, to ensure that future operations are consistent and operating on the
    same commit.
    """
    summary = self.__git.collect_repository_summary(repository.git_dir)
    expect_build_number = (self.__options.build_number
                           if hasattr(self.__options, 'build_number')
                           else build_number)
    info = SourceInfo(expect_build_number, summary)

    filename = repository.name + '-meta.yml'
    dir_path = os.path.join(self.__options.output_dir, 'source_info')
    cache_path = os.path.join(dir_path, filename)
    logging.debug(
        'Refreshing source info for %s and caching to %s for buildnum=%s',
        repository.name, cache_path, build_number)
    write_to_path(info.summary.to_yaml(), cache_path)
    return info
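
SourceInfo itself is not shown in these examples. From its usage here,
constructed as SourceInfo(build_number, summary) and read back through
info.summary, a minimal stand-in could look like the sketch below; this is
an inference, and the real buildtool class may carry more state and behavior.

import collections

# Hypothetical minimal stand-in for SourceInfo; 'summary' is expected to
# provide a to_yaml() method, as used above.
SourceInfo = collections.namedtuple('SourceInfo', ['build_number', 'summary'])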
Example 8
    def _do_postprocess(self, _):
        """Construct BOM and write it to the configured path."""
        bom = self.__builder.build()
        bom_text = yaml.dump(bom, default_flow_style=False)

        path = _determine_bom_path(self)
        write_to_path(bom_text, path)
        logging.info('Wrote bom to %s', path)
Example 9
 def maybe_write_log(what, data):
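   # Note: 'self' is not a parameter here, so this helper is presumably
   # nested inside a method and closes over 'self' from the enclosing scope.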
   if not data:
     return
   path = os.path.join(self.get_output_dir(), 'audit_' + what + '.yml')
   logging.info('Writing %s', path)
   write_to_path(
       yaml.dump(data, allow_unicode=True, default_flow_style=False),
       path)
Example 10
    def __build_with_gcb(self, repository):
        source_info = self.source_code_manager.check_source_info(repository)
        if self.__check_gcb_image(repository, source_info.to_build_version()):
            return

        name = repository.name
        gcb_config = self.__derive_gcb_config(repository, source_info)
        if gcb_config is None:
            logging.info('Skipping GCB for %s because there is no config for it',
                         name)
            return

        options = self.options

        # Use an absolute path here because we're going to
        # pass this to the gcloud command, which will be running
        # in a different directory, so relative paths won't hold.
        config_dir = os.path.abspath(self.get_output_dir())
        config_path = os.path.join(config_dir, name + '-gcb.yml')
        write_to_path(gcb_config, config_path)

        # Local .gradle dir stomps on GCB's .gradle directory when the gradle
        # wrapper is installed, so we need to delete the local one.
        # The .gradle dir is transient and will be recreated on the next gradle
        # build, so this is OK.
        #
        # This can still be shared among components as long as the
        # output directory remains around.
        git_dir = repository.git_dir
        if options.force_clean_gradle_cache:
            # If we're going to delete existing ones, then keep each component
            # separate so they don't stomp on one another.
            gradle_cache = os.path.abspath(os.path.join(git_dir, '.gradle'))
        else:
            # Otherwise allow all the components to share a common gradle directory
            gradle_cache = os.path.abspath(
                os.path.join(options.output_dir, '.gradle'))

        if options.force_clean_gradle_cache and os.path.isdir(gradle_cache):
            shutil.rmtree(gradle_cache)

        # Note this command assumes a cwd of git_dir
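        # ('gcloud container builds submit' is the older spelling of what
        # newer gcloud releases call 'gcloud builds submit'; see Example 20.)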
        command = ('gcloud container builds submit '
                   ' --account={account} --project={project}'
                   ' --config="{config_path}" .'.format(
                       account=options.gcb_service_account,
                       project=options.gcb_project,
                       config_path=config_path))

        logfile = self.get_logfile_path(name + '-gcb-build')
        labels = {'repository': repository.name}
        self.metrics.time_call('GcrBuild',
                               labels,
                               'Attempts to build GCR container images.',
                               check_subprocesses_to_logfile,
                               name + ' container build',
                               logfile, [command],
                               cwd=git_dir)
Example 11
    def __flush_snapshot(self, snapshot):
        """Writes metric snapshot to file."""
        text = json.JSONEncoder(indent=2, separators=(",", ": ")).encode(snapshot)

        # Use intermediate temp file to not clobber old snapshot metrics on failure.
        metrics_path = self.__metrics_path
        tmp_path = metrics_path + ".tmp"
        write_to_path(text, tmp_path)
        os.rename(tmp_path, metrics_path)
Example 12
  def __flush_snapshot(self, snapshot):
    """Writes metric snapshot to file."""
    text = json.JSONEncoder(indent=2, separators=(',', ': ')).encode(snapshot)

    # Use intermediate temp file to not clobber old snapshot metrics on failure.
    metrics_path = self.__metrics_path
    tmp_path = metrics_path + '.tmp'
    write_to_path(text, tmp_path)
    os.rename(tmp_path, metrics_path)
Example 13
 def make_test_options(self):
   options = super(TestBomRepositoryCommandProcessor, self).make_test_options()
   options.bom_path = os.path.join(self.test_root, 'bom.yml')
   options.one_at_a_time = False
   options.only_repositories = None
   options.exclude_repositories = None
   options.github_disable_upstream_push = True
   options.git_branch = PATCH_BRANCH
   write_to_path(yaml.safe_dump(self.golden_bom), options.bom_path)
   return options
Example 14
 def make_test_options(self):
     options = super(TestBomRepositoryCommandProcessor,
                     self).make_test_options()
     options.bom_path = os.path.join(self.test_root, 'bom.yml')
     options.one_at_a_time = False
     options.only_repositories = None
     options.github_disable_upstream_push = True
     options.git_branch = PATCH_BRANCH
     write_to_path(yaml.dump(self.golden_bom), options.bom_path)
     return options
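
The functional difference between this variant and Example 13 is yaml.dump
versus yaml.safe_dump (this variant also drops the exclude_repositories
option). safe_dump emits only standard YAML tags and refuses arbitrary
Python objects, which makes it the safer choice for data that will be read
back elsewhere. A quick illustration:

import yaml

class Custom(object):
    pass

yaml.dump(Custom())       # serializes, with a '!!python/object:...' tag
yaml.safe_dump(Custom())  # raises yaml.representer.RepresenterError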
Example 15
    def __flush_snapshot(self, snapshot):
        """Writes metric snapshot to file."""
        text = json.JSONEncoder(indent=2,
                                separators=(',', ': ')).encode(snapshot)

        # Use intermediate temp file to not clobber old snapshot metrics on failure.
        metrics_path = self.__metrics_path
        tmp_path = metrics_path + '.tmp'
        write_to_path(text, tmp_path)
        os.rename(tmp_path, metrics_path)
        logging.debug('Wrote metric snapshot to %s', metrics_path)
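
The write-then-rename idiom in the __flush_snapshot variants above works
because os.rename replaces the destination atomically on POSIX when both
paths are on the same filesystem, so a concurrent reader sees either the old
snapshot or the new one, never a partial file. A generic sketch of the same
pattern (names are illustrative, not part of buildtool):

import json
import os

def atomic_write_json(obj, path):
    # Write to a sibling temp file first, then rename over the target;
    # the rename is atomic on POSIX filesystems.
    tmp_path = path + '.tmp'
    with open(tmp_path, 'w') as f:
        json.dump(obj, f, indent=2, separators=(',', ': '))
    os.rename(tmp_path, path)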
Example 16
    def _do_postprocess(self, _):
        """Construct BOM and write it to the configured path."""
        bom = self.__builder.build()
        if bom == self.__builder.base_bom:
            logging.info('Bom has not changed from version %s @ %s',
                         bom['version'], bom['timestamp'])

        bom_text = yaml.dump(bom, default_flow_style=False)

        path = _determine_bom_path(self)
        write_to_path(bom_text, path)
        logging.info('Wrote bom to %s', path)
Example 17
  def _do_postprocess(self, _):
    """Construct BOM and write it to the configured path."""
    bom = self.__builder.build()
    if bom == self.__builder.base_bom:
      logging.info('Bom has not changed from version %s @ %s',
                   bom['version'], bom['timestamp'])

    bom_text = yaml.safe_dump(bom, default_flow_style=False)

    path = _determine_bom_path(self)
    write_to_path(bom_text, path)
    logging.info('Wrote bom to %s', path)
Example 18
  def _do_postprocess(self, result_dict):
    """Construct changelog from the collected summary, then write it out."""
    options = self.options
    path = os.path.join(self.get_output_dir(), 'changelog.md')

    builder = ChangelogBuilder(with_detail=options.include_changelog_details)
    repository_map = {repository.name: repository
                      for repository in self.source_repositories}
    for name, summary in result_dict.items():
      builder.add_repository(repository_map[name], summary)
    changelog_text = builder.build()
    write_to_path(changelog_text, path)
    logging.info('Wrote changelog to %s', path)
Example 19
  def _do_postprocess(self, result_dict):
    """Construct changelog from the collected summary, then write it out."""
    options = self.options
    path = os.path.join(self.get_output_dir(), 'changelog.md')

    builder = ChangelogBuilder(with_detail=options.include_changelog_details)
    repository_map = {repository.name: repository
                      for repository in self.source_repositories}
    for name, summary in result_dict.items():
      builder.add_repository(repository_map[name], summary)
    changelog_text = builder.build()
    write_to_path(changelog_text, path)
    logging.info('Wrote changelog to %s', path)
Example 20
  def __build_with_gcb(self, repository, build_version):
    name = repository.name
    gcb_config = self.__derive_gcb_config(repository, build_version)
    if gcb_config is None:
      logging.info('Skipping GCB for %s because there is no config for it',
                   name)
      return

    options = self.options

    # Use an absolute path here because we're going to
    # pass this to the gcloud command, which will be running
    # in a different directory, so relative paths won't hold.
    config_dir = os.path.abspath(self.get_output_dir())
    config_path = os.path.join(config_dir, name + '-gcb.yml')
    write_to_path(gcb_config, config_path)

    # Local .gradle dir stomps on GCB's .gradle directory when the gradle
    # wrapper is installed, so we need to delete the local one.
    # The .gradle dir is transient and will be recreated on the next gradle
    # build, so this is OK.
    #
    # This can still be shared among components as long as the
    # output directory remains around.
    git_dir = repository.git_dir
    if options.force_clean_gradle_cache:
      # If we're going to delete existing ones, then keep each component
      # separate so they don't stomp on one another.
      gradle_cache = os.path.abspath(os.path.join(git_dir, '.gradle'))
    else:
      # Otherwise allow all the components to share a common gradle directory
      gradle_cache = os.path.abspath(
          os.path.join(options.output_dir, '.gradle'))

    if options.force_clean_gradle_cache and os.path.isdir(gradle_cache):
      shutil.rmtree(gradle_cache)

    # Note this command assumes a cwd of git_dir
    command = ('gcloud builds submit '
               ' --account={account} --project={project}'
               ' --config="{config_path}" .'
               .format(account=options.gcb_service_account,
                       project=options.gcb_project,
                       config_path=config_path))

    logfile = self.get_logfile_path(name + '-gcb-build')
    labels = {'repository': repository.name}
    self.metrics.time_call(
        'GcrBuild', labels, self.metrics.default_determine_outcome_labels,
        check_subprocesses_to_logfile,
        name + ' container build', logfile, [command], cwd=git_dir)
Example 21
  def suggest_prunings(self):
    path = os.path.join(os.path.dirname(self.get_output_dir()),
                        'collect_bom_versions', 'config.yml')
    with open(path, 'r') as stream:
      bom_config = yaml.load(stream.read())
    path = os.path.join(os.path.dirname(self.get_output_dir()),
                        'collect_artifact_versions', 'config.yml')
    with open(path, 'r') as stream:
      art_config = yaml.load(stream.read())

    urls = []
    if self.__prune_boms:
      path = os.path.join(self.get_output_dir(), 'prune_boms.txt')
      logging.info('Writing to %s', path)
      write_to_path('\n'.join(sorted(self.__prune_boms)), path)

    jar_repo_path = 'packages/%s/%s' % (
        art_config['bintray_org'], art_config['bintray_jar_repository'])
    debian_repo_path = 'packages/%s/%s' % (
        art_config['bintray_org'], art_config['bintray_debian_repository'])
    artifact_prefix_func = {
        'jar': lambda name: 'https://api.bintray.com/%s/%s/versions/' % (
            jar_repo_path, name),
        'debian': lambda name: 'https://api.bintray.com/%s/%s/versions/' % (
            debian_repo_path,
            name if name == 'spinnaker' else 'spinnaker-' + name),
        'container': lambda name: '%s/%s:' % (
            art_config['docker_registry'], name),
        'image': lambda name: 'spinnaker-%s-' % name
    }
    artifact_version_func = {
        'jar': lambda version: version,
        'debian': lambda version: version,
        'container': lambda version: version,
        'image': lambda version: version.replace('.', '-')
    }
    for art_type, art_map in [('jar', self.__prune_jars),
                              ('debian', self.__prune_debians),
                              ('container', self.__prune_containers),
                              ('image', self.__prune_gce_images)]:
      urls = []
      for service, art_list in art_map.items():
        prefix = artifact_prefix_func[art_type](service)
        version_func = artifact_version_func[art_type]
        urls.extend([prefix + version_func(version) for version in art_list])
      if urls:
        path = os.path.join(self.get_output_dir(), 'prune_%ss.txt' % art_type)
        logging.info('Writing to %s', path)
        write_to_path('\n'.join(sorted(urls)), path)
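
One caveat on the config loading above: yaml.load without an explicit Loader
is deprecated in PyYAML 5.1+ and can construct arbitrary Python objects from
tagged input. For plain config files like these, yaml.safe_load is the
drop-in safe alternative:

import yaml

# Restricted to standard YAML tags, so untrusted input cannot
# instantiate arbitrary objects.
with open(path, 'r') as stream:
    art_config = yaml.safe_load(stream)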
Example 22
    def _do_command(self):
        """Implements CommandProcessor interface."""
        options = self.options
        spinnaker_version = options.spinnaker_version
        options_copy = copy.copy(options)
        options_copy.git_branch = "master"  # push to master in spinnaker.io
        publish_changelog_command = PublishChangelogFactory().make_command(
            options_copy)
        changelog_gist_url = options.changelog_gist_url

        # Make sure changelog exists already.
        # If it does not then fail.
        try:
            logging.debug("Verifying changelog ready at %s",
                          changelog_gist_url)
            urlopen(changelog_gist_url)
        except HTTPError as error:
            logging.error(exception_to_message(error))
            raise_and_log_error(
                ConfigError(
                    'Changelog gist "{url}" must exist before publishing a release.'
                    .format(url=changelog_gist_url),
                    cause="ChangelogMissing",
                ))

        bom = self.__hal.retrieve_bom_version(self.options.bom_version)
        bom["version"] = spinnaker_version
        bom_path = os.path.join(self.get_output_dir(),
                                spinnaker_version + ".yml")
        write_to_path(yaml.safe_dump(bom, default_flow_style=False), bom_path)
        self.__hal.publish_bom_path(bom_path)
        self.push_branches_and_tags(bom)

        self.__hal.publish_spinnaker_release(
            spinnaker_version,
            options.spinnaker_release_alias,
            changelog_gist_url,
            options.min_halyard_version,
        )

        prior_version = get_prior_version(spinnaker_version)
        if prior_version is not None:
            self.__hal.deprecate_spinnaker_release(prior_version)

        logging.info("Publishing changelog")
        publish_changelog_command()
Example 23
    def _do_command(self):
        """Implements CommandProcessor interface."""
        options = self.options
        spinnaker_version = options.spinnaker_version
        bom = self.__hal.retrieve_bom_version(self.options.bom_version)
        bom['version'] = spinnaker_version

        self.push_branches_and_tags(bom)
        bom_path = os.path.join(self.get_output_dir(),
                                spinnaker_version + '.yml')
        changelog_base_url = 'https://www.spinnaker.io/%s' % options.github_owner
        changelog_filename = '%s-changelog' % spinnaker_version.replace(
            '.', '-')
        changelog_uri = '%s/community/releases/versions/%s' % (
            changelog_base_url, changelog_filename)

        write_to_path(yaml.dump(bom, default_flow_style=False), bom_path)
        self.__hal.publish_spinnaker_release(spinnaker_version,
                                             options.spinnaker_release_alias,
                                             changelog_uri,
                                             options.min_halyard_version)
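
For concreteness, the URI construction above with hypothetical inputs:

# Hypothetical values, purely to show the string manipulation:
spinnaker_version = '1.5.0'
changelog_filename = '%s-changelog' % spinnaker_version.replace('.', '-')
# changelog_filename -> '1-5-0-changelog'
# With github_owner == 'spinnaker', changelog_uri becomes
# 'https://www.spinnaker.io/spinnaker/community/releases/versions/1-5-0-changelog'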
Example 24
    def _do_command(self):
        """Implements CommandProcessor interface."""
        options = self.options
        spinnaker_version = options.spinnaker_version
        options_copy = copy.copy(options)
        options_copy.git_branch = 'master'  # push to master in spinnaker.github.io
        publish_changelog_command = PublishChangelogFactory().make_command(
            options_copy)
        changelog_gist_url = options.changelog_gist_url

        # Make sure changelog exists already.
        # If it does not then fail.
        try:
            logging.debug('Verifying changelog ready at %s',
                          changelog_gist_url)
            urllib2.urlopen(changelog_gist_url)
        except urllib2.HTTPError as error:
            logging.error('%s', error)
            raise_and_log_error(
                ConfigError(
                    'Changelog gist "{url}" must exist before publishing a release.'
                    .format(url=changelog_gist_url),
                    cause='ChangelogMissing'))

        bom = self.__hal.retrieve_bom_version(self.options.bom_version)
        bom['version'] = spinnaker_version
        bom_path = os.path.join(self.get_output_dir(),
                                spinnaker_version + '.yml')
        write_to_path(yaml.dump(bom, default_flow_style=False), bom_path)
        self.__hal.publish_bom_path(bom_path)
        self.push_branches_and_tags(bom)

        self.__hal.publish_spinnaker_release(spinnaker_version,
                                             options.spinnaker_release_alias,
                                             changelog_gist_url,
                                             options.min_halyard_version)

        logging.info('Publishing changelog')
        publish_changelog_command()
Example 25
  def collect_bintray_versions(self, pool):
    options = self.options
    repos = [('jar', options.bintray_jar_repository),
             ('debian', options.bintray_debian_repository)]
    results = []
    for repo_type, bintray_repo in repos:
      subject_repo = '%s/%s' % (options.bintray_org, bintray_repo)
      packages = self.list_bintray_packages(subject_repo)
      package_versions = pool.map(self.query_bintray_package_versions, packages)

      package_map = {}
      for name, versions in package_versions:
        package_map[name] = versions
      results.append(package_map)

      path = os.path.join(
          self.get_output_dir(),
          '%s__%s_versions.yml' % (bintray_repo, repo_type))
      logging.info('Writing %s versions to %s', bintray_repo, path)
      write_to_path(yaml.dump(package_map,
                              allow_unicode=True,
                              default_flow_style=False), path)
    return results[0], results[1]
Example 26
  def _do_command(self):
    """Implements CommandProcessor interface."""
    options = self.options
    spinnaker_version = options.spinnaker_version
    options_copy = copy.copy(options)
    options_copy.git_branch = 'master'  # push to master in spinnaker.github.io
    publish_changelog_command = PublishChangelogFactory().make_command(
        options_copy)
    changelog_gist_url = options.changelog_gist_url

    # Make sure changelog exists already.
    # If it does not then fail.
    try:
      logging.debug('Verifying changelog ready at %s', changelog_gist_url)
      urlopen(changelog_gist_url)
    except HTTPError as error:
      logging.error(exception_to_message(error))
      raise_and_log_error(
          ConfigError(
              'Changelog gist "{url}" must exist before publishing a release.'
              .format(url=changelog_gist_url),
              cause='ChangelogMissing'))

    bom = self.__hal.retrieve_bom_version(self.options.bom_version)
    bom['version'] = spinnaker_version
    bom_path = os.path.join(self.get_output_dir(), spinnaker_version + '.yml')
    write_to_path(yaml.safe_dump(bom, default_flow_style=False), bom_path)
    self.__hal.publish_bom_path(bom_path)
    self.push_branches_and_tags(bom)

    self.__hal.publish_spinnaker_release(
        spinnaker_version, options.spinnaker_release_alias, changelog_gist_url,
        options.min_halyard_version)

    logging.info('Publishing changelog')
    publish_changelog_command()
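
Examples 22 and 26 use bare urlopen/HTTPError names while Example 24 goes
through urllib2, which exists only on Python 2. A common import shim that
makes either spelling work:

try:
    # Python 3
    from urllib.request import urlopen
    from urllib.error import HTTPError
except ImportError:
    # Python 2
    from urllib2 import urlopen, HTTPError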
Example 27
  def collect_gce_image_versions(self):
    options = self.options
    project = options.publish_gce_image_project
    logging.debug('Collecting GCE image versions from %s', project)
    command_parts = ['gcloud', '--format=json',
                     'compute images list', '--project', project,
                     '--filter spinnaker-']
    if options.build_gce_service_account:
      logging.debug('Using account %s', options.build_gce_service_account)
      command_parts.extend(['--account', options.build_gce_service_account])

    response = check_subprocess(' '.join(command_parts))
    images = [entry['name']
              for entry in json.JSONDecoder(encoding='utf-8').decode(response)]
    image_map = {}
    for name in images:
      parts = name.split('-', 2)
      if len(parts) != 3:
        logging.warning('Skipping malformed %s', name)
        continue
      _, module, build_version = parts
      parts = build_version.split('-')
      if len(parts) != 4:
        logging.warning('Skipping malformed %s', name)
        continue
      version_list = image_map.get(module, [])
      version_list.append('{}.{}.{}-{}'.format(*parts))
      image_map[module] = version_list

    path = os.path.join(
        self.get_output_dir(), project + '__gce_image_versions.yml')
    logging.info('Writing gce image versions to %s', path)
    write_to_path(yaml.dump(image_map,
                            allow_unicode=True,
                            default_flow_style=False), path)
    return image_map
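
A worked example of the image-name parsing above, using a hypothetical image
name of the expected shape:

# Hypothetical image name; real names come from the gcloud listing.
name = 'spinnaker-clouddriver-1-5-2-20180402'
_, module, build_version = name.split('-', 2)  # module == 'clouddriver'
parts = build_version.split('-')               # ['1', '5', '2', '20180402']
version = '{}.{}.{}-{}'.format(*parts)         # '1.5.2-20180402'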
Example 28
 def test_write_to_path_unicode(self):
   path = os.path.join(self.base_temp_dir, 'test_write', 'file')
   content = u'First Line\nSecond Line'
   write_to_path(content, path)
   with open(path, 'r') as f:
     self.assertEqual(content, f.read())
Example 29
 def test_write_to_path_unicode(self):
     path = os.path.join(self.base_temp_dir, "test_write", "file")
     content = "First Line\nSecond Line"
     write_to_path(content, path)
     with open(path, "r") as f:
         self.assertEqual(content, f.read())
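
write_to_path itself is never shown in these examples. From the tests above
and its uses throughout (text content, parent directories such as
'test_write' and 'source_info' that may not exist yet), a minimal sketch
might look like the following; this is an inference, not the actual
buildtool implementation:

import io
import os

def write_to_path(content, path):
    # Sketch only: create any missing parent directories, then write the
    # (possibly unicode) text content to the file.
    directory = os.path.dirname(path)
    if directory and not os.path.isdir(directory):
        os.makedirs(directory)
    with io.open(path, 'w', encoding='utf-8') as f:
        f.write(content)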
Example 30
 def __emit_last_commit_entry(self, entry):
     last_version_commit_path = os.path.join(self.get_output_dir(),
                                             'last_version_commit.yml')
     write_to_path(entry, last_version_commit_path)
Example 31
 def __emit_last_commit_entry(self, entry):
   last_version_commit_path = os.path.join(
       self.get_output_dir(), 'last_version_commit.yml')
   write_to_path(entry, last_version_commit_path)
Example 32
  def _do_command(self):
    """Reads the list of boms, then concurrently processes them.

    Ultimately this writes the analysis out to the *_bom_service_map.yml files.
    """
    options = self.options
    url_prefix = 'gs://%s/bom/' % options.halyard_bom_bucket
    if options.version_name_prefix:
      url_prefix += options.version_name_prefix
    logging.debug('Listing BOM urls')
    results = self.list_bom_urls(url_prefix)
    write_to_path('\n'.join(sorted(results)),
                  os.path.join(self.get_output_dir(), 'bom_list.txt'))
    result_map = self.ingest_bom_list(results)

    path = os.path.join(self.get_output_dir(), 'all_bom_service_map.yml')
    logging.info('Writing bom analysis to %s', path)
    write_to_path(yaml.dump(result_map, default_flow_style=False), path)

    partition_names = ['released', 'unreleased']
    partitions = self.partition_service_map(result_map)
    for index, data in enumerate(partitions):
      path = os.path.join(self.get_output_dir(),
                          partition_names[index] + '_bom_service_map.yml')
      logging.info('Writing bom analysis to %s', path)
      write_to_path(yaml.dump(data, default_flow_style=False), path)

    if self.__bad_files:
      path = os.path.join(self.get_output_dir(), 'bad_boms.txt')
      logging.warning('Writing %d bad URLs to %s', len(self.__bad_files), path)
      write_to_path(yaml.dump(self.__bad_files, default_flow_style=False), path)

    if self.__non_standard_boms:
      path = os.path.join(self.get_output_dir(), 'nonstandard_boms.txt')
      logging.warning('Writing %d nonstandard boms to %s',
                      len(self.__non_standard_boms), path)
      write_to_path(
          yaml.dump(self.__non_standard_boms, default_flow_style=False), path)

    config = {
        'halyard_bom_bucket': options.halyard_bom_bucket
    }
    path = os.path.join(self.get_output_dir(), 'config.yml')
    logging.info('Writing to %s', path)
    write_to_path(yaml.dump(config, default_flow_style=False), path)