Example #1
    def _do_repository(self, repository):
        """Implements RepositoryCommandProcessor interface."""
        name = repository.name
        build_component_image_sh = os.path.join(
            os.path.dirname(__file__), '..', 'build_google_component_image.sh')

        options = self.options
        command_line = [
            build_component_image_sh, '--artifact', name, '--account',
            options.build_gce_service_account, '--build_project',
            options.build_gce_project, '--install_script',
            options.install_image_script, '--publish_project',
            options.publish_gce_image_project, '--publish_script',
            options.publish_gce_image_script, '--version',
            options.spinnaker_version, '--zone', options.build_gce_zone
        ]
        command_line.extend(self.__determine_repo_install_args())

        if options.build_bintray_repository:
            bintray_url = 'https://dl.bintray.com/' + options.build_bintray_repository
            extra_install_args = [
                '--halyard_repository', bintray_url, '--spinnaker_repository',
                bintray_url
            ]
            command_line.extend([
                '--extra_install_script_args',
                '"{0}"'.format(' '.join(extra_install_args))
            ])

        command = ' '.join(command_line)
        logfile = determine_logfile_path(options, name, 'gce-image')

        what = '{name} component image'.format(name=name)
        check_subprocesses_to_logfile(what, logfile, [command])
        return what
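This example leans on two helpers from the surrounding buildtool, determine_logfile_path and check_subprocesses_to_logfile, whose implementations are not shown here. A minimal sketch of what such helpers could look like, assuming the options object exposes a scratch_dir; the signatures and logfile layout are assumptions for illustration only:

import os
import subprocess

def determine_logfile_path(options, name, action):
    # Assumed layout: one logfile per component and action under the scratch dir.
    return os.path.join(options.scratch_dir, name,
                        '{0}-{1}.log'.format(name, action))

def check_subprocesses_to_logfile(what, logfile, commands, cwd=None, env=None):
    # Run each shell command in order, appending stdout/stderr to the logfile,
    # and fail fast if any command exits nonzero.
    log_dir = os.path.dirname(logfile)
    if log_dir and not os.path.isdir(log_dir):
        os.makedirs(log_dir)
    with open(logfile, 'a') as log:
        for command in commands:
            log.write('# {0}: {1}\n'.format(what, command))
            log.flush()
            subprocess.check_call(command, shell=True, cwd=cwd, env=env,
                                  stdout=log, stderr=subprocess.STDOUT)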
Example #2
  def build_all_halyard_deployments(self, name):
    """Helper function for building halyard."""
    options = self.options

    nebula_repo_path = self._get_nebula_repository_path(name)
    raw_version = self._get_nebula_repository_version(name)
    self.__build_version = '{version}-{build}'.format(
        version=raw_version, build=options.build_number)

    cmd = './release/all.sh {version} nightly'.format(
        version=self.__build_version)
    env = dict(os.environ)
    logging.info(
        'Preparing the environment variables for release/all.sh:\n'
        '    PUBLISH_HALYARD_DOCKER_IMAGE_BASE=%s\n'
        '    PUBLISH_HALYARD_BUCKET_BASE_URL=%s',
        options.halyard_docker_image_base,
        options.halyard_bucket_base_url)
    env['PUBLISH_HALYARD_DOCKER_IMAGE_BASE'] = options.halyard_docker_image_base
    env['PUBLISH_HALYARD_BUCKET_BASE_URL'] = options.halyard_bucket_base_url

    logfile = determine_logfile_path(options, name, 'jar-build')
    check_subprocesses_to_logfile(
        '{name} build'.format(name='halyard'), logfile,
        [cmd], cwd=nebula_repo_path, env=env)
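The pattern here is to copy the parent environment and layer the publish settings on top before handing the result to the subprocess. A standalone illustration of that pattern; the image base, bucket URL, path, and version below are placeholders, not the real option values:

import os
import subprocess

env = dict(os.environ)  # copy, so the parent environment stays untouched
env['PUBLISH_HALYARD_DOCKER_IMAGE_BASE'] = 'gcr.io/example-project/halyard'
env['PUBLISH_HALYARD_BUCKET_BASE_URL'] = 'gs://example-halyard-bucket'

# release/all.sh sees the two PUBLISH_* variables in addition to everything
# the parent process already had in its environment.
subprocess.check_call('./release/all.sh 1.2.3-45 nightly',
                      shell=True, cwd='/path/to/halyard', env=env)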
Example #3
    def _do_repository(self, repository):
        """Implements RepositoryCommandProcessor interface."""
        name = repository.name
        options = self.options

        extra_args = self._make_gradle_args(name)

        # Nebula insists on publishing a tag to the ORIGIN when using 'candidate'.
        # We don't want to do that because it doesn't make sense to push the tag
        # before we validate it. The point is moot because nebula also insists on
        # the specific tag it pushes, which is the tag Netflix uses for internal
        # use, so it is not available to us.
        #
        # Supposedly we could use 'snapshot' here instead, which won't push a tag.
        # However 'snapshot' brings its own set of opinions and doesn't even work
        # when pushing to bintray for reasons that aren't understood, so we'll
        # stick with 'candidate'.
        #
        # The implication is that we need to trick nebula by having our remote
        # ORIGIN not be the GitHub "origin", but some bogus Bitbucket remote so
        # that the tag pushes have no actual effect.
        target = 'candidate'
        cmd = './gradlew {extra} {target}'.format(extra=' '.join(extra_args),
                                                  target=target)

        gradle_root = self.determine_gradle_root(repository)
        logfile = determine_logfile_path(options, name, 'debian-build')
        check_subprocesses_to_logfile(
            '{name} gradle build'.format(name=name),
            logfile, [cmd],
            cwd=gradle_root,
            postprocess_hook=self.make_gradle_metric_hook(repository, target))

        return gradle_root
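The comment above describes pointing the repository's ORIGIN at a throwaway remote so that nebula's automatic tag push during the candidate task goes nowhere. A sketch of that trick, with a placeholder remote URL:

import subprocess

def point_origin_at_bogus_remote(repo_dir,
                                 bogus_url='https://bitbucket.org/example/scratch.git'):
    # Rewrite 'origin' before running './gradlew candidate' so the tag push
    # has no effect on the real GitHub repository. The URL is a placeholder.
    subprocess.check_call(['git', 'remote', 'set-url', 'origin', bogus_url],
                          cwd=repo_dir)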
Example #4
    def __build_with_gcb(self, repository):
        name = repository.name
        nebula_dir = self._get_nebula_repository_path(name)

        version = self._get_nebula_repository_version(name)
        gcb_config = self.__derive_gcb_config(name, version)
        if gcb_config is None:
            logging.info('Skipping GCB for %s because there is no config for it',
                         name)
            return

        options = self.options
        log_flags = '--log-http' if options.log_level == 'debug' else ''
        name_scratch_dir = os.path.join(options.scratch_dir, name)

        # Use an absolute path here because we're going to
        # pass this to the gcloud command, which will be running
        # in a different directory, so relative paths won't hold.
        config_path = os.path.abspath(
            os.path.join(name_scratch_dir, '{name}-gcb.yml'.format(name=name)))
        write_to_path(gcb_config, config_path)

        # Local .gradle dir stomps on GCB's .gradle directory when the gradle
        # wrapper is installed, so we need to delete the local one.
        # The .gradle dir is transient and will be recreated on the next gradle
        # build, so this is OK.
        #
        # This can still be shared among components as long as the
        # scratch directory remains around.
        if options.force_clean_gradle_cache:
            # If we're going to delete existing ones, then keep each component
            # separate so they don't stomp on one another.
            gradle_cache = os.path.abspath(os.path.join(nebula_dir, '.gradle'))
        else:
            # Otherwise allow all the components to share a common gradle directory
            gradle_cache = os.path.abspath(
                os.path.join(options.scratch_dir, '.gradle'))

        if options.force_clean_gradle_cache and os.path.isdir(gradle_cache):
            shutil.rmtree(gradle_cache)

        # Note this command assumes a cwd of nebula_dir
        cmd = ('gcloud container builds submit {log_flags}'
               ' --account={account} --project={project}'
               ' --config="{config_path}" .'.format(
                   log_flags=log_flags,
                   account=options.gcb_service_account,
                   project=options.gcb_project,
                   config_path=config_path))

        logfile = os.path.join(name_scratch_dir,
                               '{name}-gcb-build.log'.format(name=name))
        check_subprocesses_to_logfile(
            '{name} container build'.format(name=name),
            logfile, [cmd],
            cwd=nebula_dir)
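Neither write_to_path nor __derive_gcb_config is shown in the example. Below is a sketch of what the write helper might look like, plus an illustrative guess at the shape of Cloud Build config it could be handed; both are assumptions, not the buildtool's actual code:

import os

def write_to_path(content, path):
    # Assumed helper: create parent directories as needed and write the content.
    parent = os.path.dirname(path)
    if parent and not os.path.isdir(parent):
        os.makedirs(parent)
    with open(path, 'w') as f:
        f.write(content)

# Illustrative guess at a minimal GCB config for one component; the real
# config derived by __derive_gcb_config is not shown in the example.
EXAMPLE_GCB_CONFIG = """
steps:
- name: gcr.io/cloud-builders/docker
  args: ['build', '-t', 'gcr.io/$PROJECT_ID/clouddriver:1.2.3-45', '.']
images:
- 'gcr.io/$PROJECT_ID/clouddriver:1.2.3-45'
"""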
Example #5
  def _do_repository(self, repository):
    """Implements RepositoryCommandProcessor interface."""
    name = repository.name
    options = self.options

    gradle_root = self._get_nebula_repository_path(name)
    extra_args = self.__make_gradle_args(name)

    target = 'candidate'
    cmd = './gradlew {extra} {target}'.format(
        extra=' '.join(extra_args), target=target)

    logfile = determine_logfile_path(options, name, 'debian-build')
    check_subprocesses_to_logfile(
        '{name} gradle build'.format(name=name), logfile,
        [cmd], cwd=gradle_root)

    return gradle_root
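The gradle-argument helper (__make_gradle_args here, _make_gradle_args in the other examples) is also not shown. A rough sketch of the kind of flags it might assemble for a nebula candidate build; the property names are illustrative guesses, not the buildtool's actual arguments:

def _make_gradle_args(self, name):
    # Guessed sketch: pin the version the build should use and keep stack
    # traces on for debugging. The real argument list is not shown here.
    version = self._get_nebula_repository_version(name)
    return [
        '--stacktrace',
        '-Prelease.version={0}'.format(version),
    ]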
Example #6
    def __build_with_docker(self, repository):
        logging.warning('DOCKER builds are still under development')
        name = repository.name
        version = self._get_nebula_repository_version(name)
        docker_tag = '{reg}/{name}:{version}'.format(
            reg=self.options.build_docker_registry, name=name, version=version)

        cmds = [
            'docker build -f Dockerfile -t {docker_tag} .'.format(
                docker_tag=docker_tag),
            'docker push {docker_tag}'.format(docker_tag=docker_tag)
        ]

        gradle_root = self.determine_gradle_root(repository)
        logfile = determine_logfile_path(self.options, name, 'docker-build')
        check_subprocesses_to_logfile('{name} docker build'.format(name=name),
                                      logfile,
                                      cmds,
                                      cwd=gradle_root)
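determine_gradle_root appears in several of these examples but its implementation is not included. One plausible reading, given that Example #5 runs gradle directly from the nebula repository path, is that it simply resolves the same checkout directory:

def determine_gradle_root(self, repository):
    # Assumed sketch: resolve the checked-out repository directory that
    # ./gradlew (or the Dockerfile) should run from. This is a guess; the
    # real implementation is not shown.
    return self._get_nebula_repository_path(repository.name)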
Example #7
    def _do_repository(self, repository):
        """Implements RepositoryCommandProcessor interface."""
        name = repository.name
        options = self.options

        extra_args = self._make_gradle_args(name)

        target = 'buildRpm'
        cmd = './gradlew {extra} {target}'.format(extra=' '.join(extra_args),
                                                  target=target)

        gradle_root = self.determine_gradle_root(repository)
        logfile = determine_logfile_path(options, name, 'rpm-build')
        check_subprocesses_to_logfile(
            '{name} gradle build'.format(name=name),
            logfile, [cmd],
            cwd=gradle_root,
            postprocess_hook=self.make_gradle_metric_hook(repository, target))

        return gradle_root
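The postprocess_hook passed in Examples #3 and #7 comes from make_gradle_metric_hook, which is not shown either. A heavily assumed sketch of such a hook factory: a callable that inspects the finished build's logfile and records whether the gradle target succeeded. The real hook's signature and behavior may differ:

import logging

def make_gradle_metric_hook(self, repository, target):
    def hook(logfile):
        # Scan the completed build log for gradle's success banner and emit a
        # simple success/failure record for this repository and target.
        with open(logfile) as f:
            ok = 'BUILD SUCCESSFUL' in f.read()
        logging.info('%s %s %s', repository.name, target,
                     'succeeded' if ok else 'failed')
    return hook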