def _do_repository(self, repository):
  """Implements RepositoryCommandProcessor interface."""
  name = repository.name
  build_component_image_sh = os.path.join(
      os.path.dirname(__file__), '..', 'build_google_component_image.sh')

  options = self.options
  command_line = [
      build_component_image_sh,
      '--artifact', name,
      '--account', options.build_gce_service_account,
      '--build_project', options.build_gce_project,
      '--install_script', options.install_image_script,
      '--publish_project', options.publish_gce_image_project,
      '--publish_script', options.publish_gce_image_script,
      '--version', options.spinnaker_version,
      '--zone', options.build_gce_zone]
  command_line.extend(self.__determine_repo_install_args())

  if options.build_bintray_repository:
    bintray_url = 'https://dl.bintray.com/' + options.build_bintray_repository
    extra_install_args = [
        '--halyard_repository', bintray_url,
        '--spinnaker_repository', bintray_url]
    command_line.extend([
        '--extra_install_script_args',
        '"{0}"'.format(' '.join(extra_install_args))])

  command = ' '.join(command_line)
  logfile = determine_logfile_path(options, name, 'gce-image')

  what = '{name} component image'.format(name=name)
  check_subprocesses_to_logfile(what, logfile, [command])
  return what
def build_all_halyard_deployments(self, name):
  """Helper function for building halyard."""
  options = self.options

  nebula_repo_path = self._get_nebula_repository_path(name)
  raw_version = self._get_nebula_repository_version(name)
  self.__build_version = '{version}-{build}'.format(
      version=raw_version, build=options.build_number)

  cmd = './release/all.sh {version} nightly'.format(
      version=self.__build_version)
  env = dict(os.environ)
  logging.info(
      'Preparing the environment variables for release/all.sh:\n'
      '    PUBLISH_HALYARD_DOCKER_IMAGE_BASE=%s\n'
      '    PUBLISH_HALYARD_BUCKET_BASE_URL=%s',
      options.halyard_docker_image_base,
      options.halyard_bucket_base_url)

  env['PUBLISH_HALYARD_DOCKER_IMAGE_BASE'] = options.halyard_docker_image_base
  env['PUBLISH_HALYARD_BUCKET_BASE_URL'] = options.halyard_bucket_base_url

  logfile = determine_logfile_path(options, name, 'jar-build')
  check_subprocesses_to_logfile(
      '{name} build'.format(name='halyard'), logfile,
      [cmd], cwd=nebula_repo_path, env=env)
def _do_repository(self, repository):
  """Implements RepositoryCommandProcessor interface."""
  name = repository.name
  options = self.options

  extra_args = self._make_gradle_args(name)

  # Nebula insists on publishing a tag to the ORIGIN when using 'candidate'.
  # We don't want to do that because it doesn't make sense to push the tag
  # before we validate it. The point is moot because nebula also insists on
  # the specific tag it pushes, which is the tag netflix uses for internal
  # use so is not available to us.
  #
  # Supposedly we can use 'snapshot' here instead, which won't push a tag.
  # However snapshot brings its own set of opinions and doesn't even work
  # pushing to bintray for reasons I don't understand, so we'll stick with
  # candidate.
  #
  # The implication here is that we need to trick nebula by having our remote
  # ORIGIN not be the github "origin", but some bogus bitbucket so that the
  # tag pushes have no actual effect.
  target = 'candidate'
  cmd = './gradlew {extra} {target}'.format(
      extra=' '.join(extra_args), target=target)

  gradle_root = self.determine_gradle_root(repository)
  logfile = determine_logfile_path(options, name, 'debian-build')
  check_subprocesses_to_logfile(
      '{name} gradle build'.format(name=name), logfile,
      [cmd], cwd=gradle_root,
      postprocess_hook=self.make_gradle_metric_hook(repository, target))
  return gradle_root
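# A minimal sketch of the ORIGIN retargeting described in the comment inside
# _do_repository above, assuming it is done with a plain 'git remote set-url'.
# The helper name and the placeholder URL are illustrative assumptions, not
# part of buildtool; the real retargeting happens in the repository setup.
def _example_point_origin_at_bogus_remote(
    git_dir, bogus_url='https://bitbucket.org/example/placeholder.git'):
  """Repoint 'origin' so nebula's candidate tag push has no real effect."""
  import subprocess  # kept local so the sketch is self-contained
  subprocess.check_call(
      ['git', '-C', git_dir, 'remote', 'set-url', 'origin', bogus_url])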
def __build_with_gcb(self, repository):
  name = repository.name
  nebula_dir = self._get_nebula_repository_path(name)

  version = self._get_nebula_repository_version(name)
  gcb_config = self.__derive_gcb_config(name, version)
  if gcb_config is None:
    logging.info('Skipping GCB for %s because there is no config for it',
                 name)
    return

  options = self.options
  log_flags = '--log-http' if options.log_level == 'debug' else ''
  name_scratch_dir = os.path.join(options.scratch_dir, name)

  # Use an absolute path here because we're going to
  # pass this to the gcloud command, which will be running
  # in a different directory so relative paths won't hold.
  config_path = os.path.abspath(os.path.join(
      name_scratch_dir, '{name}-gcb.yml'.format(name=name)))
  write_to_path(gcb_config, config_path)

  # The local .gradle dir stomps on GCB's .gradle directory when the gradle
  # wrapper is installed, so we need to delete the local one.
  # The .gradle dir is transient and will be recreated on the next gradle
  # build, so this is OK.
  #
  # This can still be shared among components as long as the
  # scratch directory remains around.
  if options.force_clean_gradle_cache:
    # If we're going to delete existing ones, then keep each component
    # separate so they don't stomp on one another.
    gradle_cache = os.path.abspath(os.path.join(nebula_dir, '.gradle'))
  else:
    # Otherwise allow all the components to share a common gradle directory.
    gradle_cache = os.path.abspath(
        os.path.join(options.scratch_dir, '.gradle'))
  if options.force_clean_gradle_cache and os.path.isdir(gradle_cache):
    shutil.rmtree(gradle_cache)

  # Note this command assumes a cwd of nebula_dir.
  cmd = ('gcloud container builds submit {log_flags}'
         ' --account={account} --project={project}'
         ' --config="{config_path}" .'
         .format(log_flags=log_flags, account=options.gcb_service_account,
                 project=options.gcb_project, config_path=config_path))

  logfile = determine_logfile_path(options, name, 'gcb-build')
  check_subprocesses_to_logfile(
      '{name} container build'.format(name=name), logfile,
      [cmd], cwd=nebula_dir)
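# Illustrative only: __derive_gcb_config is not shown here, so the exact shape
# of the generated config is an assumption. A minimal Cloud Build config for
# building and pushing a component container might look like the sketch below;
# the builder image, tag format, and default project are placeholders.
def _example_gcb_config(name, version, project='example-gcb-project'):
  """Return a minimal cloudbuild-style config as a YAML string."""
  import yaml  # assumed available; buildtool writes the config as YAML text
  image = 'gcr.io/{project}/{name}:{version}'.format(
      project=project, name=name, version=version)
  config = {
      'steps': [{
          'name': 'gcr.io/cloud-builders/docker',
          'args': ['build', '-t', image, '.'],
      }],
      'images': [image],
  }
  return yaml.safe_dump(config, default_flow_style=False)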
def _do_repository(self, repository):
  """Implements CommandProcessor interface."""
  docs_url_path = SWAGGER_URL_PATHS[repository.name]
  scm = self.source_code_manager
  git_dir = scm.get_local_repository_path(repository.name)
  if self.options.fetch_bom_path or self.options.fetch_bom_version:
    scm.pull_source_from_bom(repository.name, git_dir, self.__bom)

  env = dict(os.environ)
  port = unused_port()
  env['SERVER_PORT'] = str(port)
  base_url = 'http://localhost:{port}'.format(port=port)

  gate_logfile = determine_logfile_path(
      self.options, repository.name, 'apidocs')
  logging.info(
      'Starting up prototype %s so we can extract docs from it.'
      ' We will log this instance to %s',
      repository.name, gate_logfile)
  boot_run_cmd = './gradlew bootRun'
  ensure_dir_exists(os.path.dirname(gate_logfile))
  gate_logstream = open(gate_logfile, 'w')
  process = start_subprocess(
      boot_run_cmd, stream=gate_logstream, stdout=gate_logstream,
      cwd=git_dir, env=env)

  max_wait_secs = self.options.max_wait_secs_startup

  # pylint: disable=broad-except
  try:
    logging.info('Waiting up to %s secs for %s to be ready on port %d',
                 max_wait_secs, repository.name, port)
    self.wait_for_url(base_url + '/health', max_wait_secs)
    self.generate_swagger_docs(
        repository,
        '{base}/{path}'.format(base=base_url, path=docs_url_path))
  finally:
    try:
      gate_logstream.flush()
      gate_logstream.write(
          '\n{time} ***** buildtool is killing subprocess *****\n'.format(
              time=timestring()))
      logging.info('Killing %s subprocess %s now that we are done with it',
                   repository.name, process.pid)
      process.kill()
      wait_subprocess(process)
      gate_logstream.close()
    except Exception as ex:
      maybe_log_exception(
          self.name, ex,
          'Ignoring exception while stopping {name} subprocess {pid}.'
          .format(name=repository.name, pid=process.pid))
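# A minimal sketch of the readiness polling wait_for_url performs, assuming it
# retries an HTTP GET until success or timeout. The actual helper lives
# elsewhere in buildtool; the function name and poll interval are assumptions.
def _example_wait_for_url(url, max_wait_secs, poll_interval_secs=1):
  """Poll url until it answers with HTTP 200 or max_wait_secs elapses."""
  import time
  try:
    from urllib.request import urlopen  # python3
    from urllib.error import URLError
  except ImportError:
    from urllib2 import urlopen, URLError  # python2
  deadline = time.time() + max_wait_secs
  while time.time() < deadline:
    try:
      if urlopen(url).getcode() == 200:
        return
    except (URLError, IOError):
      pass
    time.sleep(poll_interval_secs)
  raise RuntimeError('{url} was not ready within {secs} secs'.format(
      url=url, secs=max_wait_secs))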
def _do_repository(self, repository):
  """Implements RepositoryCommandProcessor interface."""
  name = repository.name
  options = self.options
  gradle_root = self._get_nebula_repository_path(name)
  extra_args = self.__make_gradle_args(name)

  target = 'candidate'
  cmd = './gradlew {extra} {target}'.format(
      extra=' '.join(extra_args), target=target)
  logfile = determine_logfile_path(options, name, 'debian-build')
  check_subprocesses_to_logfile(
      '{name} gradle build'.format(name=name), logfile,
      [cmd], cwd=gradle_root)
  return gradle_root
def __build_with_docker(self, repository):
  logging.warning('DOCKER builds are still under development')
  name = repository.name
  version = self._get_nebula_repository_version(name)
  docker_tag = '{reg}/{name}:{version}'.format(
      reg=self.options.build_docker_registry, name=name, version=version)
  cmds = [
      'docker build -f Dockerfile -t {docker_tag} .'.format(
          docker_tag=docker_tag),
      'docker push {docker_tag}'.format(docker_tag=docker_tag)
  ]

  gradle_root = self.determine_gradle_root(repository)
  logfile = determine_logfile_path(self.options, name, 'docker-build')
  check_subprocesses_to_logfile(
      '{name} docker build'.format(name=name), logfile,
      cmds, cwd=gradle_root)
def _do_repository(self, repository):
  """Implements RepositoryCommandProcessor interface."""
  name = repository.name
  options = self.options

  extra_args = self._make_gradle_args(name)

  target = 'buildRpm'
  cmd = './gradlew {extra} {target}'.format(
      extra=' '.join(extra_args), target=target)

  gradle_root = self.determine_gradle_root(repository)
  logfile = determine_logfile_path(options, name, 'rpm-build')
  check_subprocesses_to_logfile(
      '{name} gradle build'.format(name=name), logfile,
      [cmd], cwd=gradle_root,
      postprocess_hook=self.make_gradle_metric_hook(repository, target))
  return gradle_root