def find_own_running_build():
    '''Determine the Concourse build job from which this function was invoked.

    Relies on the "meta" contract (see steps/meta), which prints a JSON document
    containing a UUID. By iterating through the currently running builds of this
    pipeline job and comparing the UUID read via the file system with the UUID
    found in each build's log output, it is possible to tell whether or not a
    given build job is the one from which this function was invoked.

    Returns:
        the matching build object retrieved from the Concourse client.

    Raises:
        RuntimeError: if not running on CI infrastructure, or if the own build
            could not be determined among the considered running builds.
    '''
    if not _running_on_ci():
        raise RuntimeError(
            'Can only find own running build if running on CI infrastructure.')

    # read "our" uuid from the meta info file written by the meta step
    meta_dir = os.path.join(
        os.path.abspath(check_env('CC_ROOT_DIR')),
        concourse.model.traits.meta.META_INFO_DIR_NAME,
    )
    meta_info_file = os.path.join(
        meta_dir,
        concourse.steps.meta.jobmetadata_filename,
    )
    with open(meta_info_file, 'r') as f:
        metadata_json = json.load(f)
    build_job_uuid = metadata_json['uuid']

    pipeline_metadata = get_pipeline_metadata()
    config_set = ctx().cfg_factory().cfg_set(
        pipeline_metadata.current_config_set_name)
    concourse_cfg = config_set.concourse()
    client = concourse.client.from_cfg(concourse_cfg, pipeline_metadata.team_name)

    # only consider limited amount of jobs to avoid large number of requests in case we do not
    # find ourself (assumption: there are only few running jobs in parallel at a given time)
    consider_builds = 20
    builds = client.job_builds(pipeline_metadata.pipeline_name, pipeline_metadata.job_name)
    builds = [
        build for build in builds
        if build.status() is concourse.client.model.BuildStatus.RUNNING
    ][:consider_builds]

    # avoid parsing too much output. usually, there will be only one line (our JSON output)
    # sometimes (new image version is retrieved), there will be a few lines more.
    for build in builds:
        build_events = build.events()
        build_plan = build.plan()
        meta_task_id = build_plan.task_id(
            concourse.model.traits.meta.META_STEP_NAME)
        # "our" output is always the last line (ignore download logs from image retrieval).
        # NOTE: the previous implementation wrapped this loop in `except StopIteration`,
        # which is dead code - a for loop consumes StopIteration itself (see PEP 479).
        last_line = None
        for last_line in build_events.iter_buildlog(meta_task_id):
            pass
        if last_line is None:
            # build produced no meta output (yet) - cannot be us; json.loads(None)
            # would raise TypeError otherwise
            continue
        try:
            uuid_json = json.loads(last_line)
        except json.JSONDecodeError:
            # last line is not (yet) the expected JSON document - skip this build
            continue
        if uuid_json['uuid'] == build_job_uuid:
            return build

    raise RuntimeError('Could not determine own Concourse job.')
def _retrieve_model_element(cfg_type: str, cfg_name: str):
    '''Look up a single configuration element of the given type and name.

    Delegates to the (non-public) `_cfg_element` accessor of the global
    configuration factory.
    '''
    factory = ctx().cfg_factory()
    return factory._cfg_element(cfg_type_name=cfg_type, cfg_name=cfg_name)
def deploy_or_upgrade_landscape(
    config_set_name: CliHint(typehint=str, help=CONFIG_SET_HELP),
    components: CliHint(
        type=LandscapeComponent,
        typehint=[LandscapeComponent],
        choices=[component for component in LandscapeComponent],
        help="list of components to deploy. By default, ALL components will be deployed."
    ) = None,
    webhook_dispatcher_chart_dir: CliHint(
        typehint=str,
        help="directory of webhook dispatcher chart",
    ) = None,
    concourse_deployment_name: CliHint(
        typehint=str, help="namespace and deployment name for Concourse") = 'concourse',
    timeout_seconds: CliHint(
        typehint=int, help="how long to wait for concourse startup") = 180,
    webhook_dispatcher_deployment_name: str = 'webhook-dispatcher',
    dry_run: bool = True,
):
    '''Deploys the given components of the Concourse landscape.

    Validates arguments first, prints an overview, and (unless dry_run is set)
    deploys each requested component in a fixed order: secrets server,
    Concourse, webhook dispatcher, monitoring stack, ClamAV. The kubernetes
    cluster context is taken from the ConcourseConfig of the given config set.

    NOTE(review): dry_run defaults to True - callers must explicitly pass
    dry_run=False to actually deploy anything.
    '''
    # handle default (all known components)
    if not components:
        components = [component for component in LandscapeComponent]

    # Validate
    if LandscapeComponent.WHD in components:
        # the webhook dispatcher is deployed from a local helm chart, so its
        # directory is mandatory (and must exist) if WHD is to be deployed
        if not webhook_dispatcher_chart_dir:
            raise ValueError(
                f"--webhook-dispatcher-chart-dir must be given if component "
                f"'{LandscapeComponent.WHD.value}' is to be deployed.")
        else:
            webhook_dispatcher_chart_dir = existing_dir(
                webhook_dispatcher_chart_dir)

    # print planned operation (also in dry-run mode)
    _display_info(
        dry_run=dry_run,
        operation="DEPLOYED",
        deployment_name=concourse_deployment_name,
        components=components,
    )

    if dry_run:
        return

    cfg_factory = ctx().cfg_factory()
    config_set = cfg_factory.cfg_set(config_set_name)
    concourse_cfg = config_set.concourse()

    # Set the global kubernetes cluster context to the cluster specified in the ConcourseConfig
    kubernetes_config_name = concourse_cfg.kubernetes_cluster_config()
    kubernetes_cfg = cfg_factory.kubernetes(kubernetes_config_name)
    kube_ctx.set_kubecfg(kubernetes_cfg.kubeconfig())
    ensure_cluster_version(kubernetes_cfg)

    if LandscapeComponent.SECRETS_SERVER in components:
        info('Deploying Secrets Server')
        deploy_secrets_server(config_set_name=config_set_name, )

    if LandscapeComponent.CONCOURSE in components:
        info('Deploying Concourse')
        deploy_or_upgrade_concourse(
            config_set_name=config_set_name,
            deployment_name=concourse_deployment_name,
            timeout_seconds=timeout_seconds,
        )

    if LandscapeComponent.WHD in components:
        info('Deploying Webhook Dispatcher')
        deploy_or_upgrade_webhook_dispatcher(
            config_set_name=config_set_name,
            chart_dir=webhook_dispatcher_chart_dir,
            deployment_name=webhook_dispatcher_deployment_name,
        )

    if LandscapeComponent.MONITORING in components:
        info('Deploying Monitoring stack')
        deploy_or_upgrade_monitoring(config_set_name=config_set_name, )

    if LandscapeComponent.CLAMAV in components:
        info('Deploying ClamAV')
        deploy_or_upgrade_clamav(config_set_name=config_set_name, )
def render_pipelines(
    cfg_name: str,
    out_dir: str,
    template_path: str=_template_path(),  # NOTE: default is evaluated once, at import time
    org: str=None, # if set, filter for org
    repo: str=None, # if set, filter for repo
    host: str=None, # if set, filter for gh-host
):
    '''Renders all pipeline definitions of the given config set into out_dir.

    Enumerates pipeline definitions from the config set's job mappings
    (optionally filtered by github org, repository name, and/or github host),
    renders them locally and writes the results to the filesystem.

    out_dir is created if absent.
    '''
    # exist_ok avoids the check-then-create race of the previous
    # `if not os.path.isdir(...)` guard
    os.makedirs(out_dir, exist_ok=True)

    cfg_factory = ctx().cfg_factory()
    config_set = cfg_factory.cfg_set(cfg_name=cfg_name)
    concourse_cfg = config_set.concourse()
    job_mapping_set = cfg_factory.job_mapping(concourse_cfg.job_mapping_cfg_name())

    template_include_dir = template_path

    if repo:
        # PEP8 (E731): prefer def over assigning a lambda
        def repository_filter(repo_obj):
            return repo_obj.name == repo
    else:
        repository_filter = None

    def org_names(job_mapping):
        # yields the org names of all github organisations of the job mapping
        for org in job_mapping.github_organisations():
            yield org.org_name()

    def remove_github_org_configs(job_mapping, org: str, host: str):
        # drops github-org configs not matching the given org/host filters
        # (mutates job_mapping.raw in place)
        def want_gh_org(org_cfg: model.concourse.GithubOrganisationConfig):
            if org and org_cfg.org_name() != org:
                return False
            gh_cfg: model.github.GithubConfig = cfg_factory.github(org_cfg.github_cfg_name())
            if host and gh_cfg.hostname() != host:
                return False
            return True

        gh_orgs = {
            ghorg.name(): ghorg.raw
            for ghorg in job_mapping.github_organisations()
            if want_gh_org(ghorg)
        }
        job_mapping.raw['github_orgs'] = gh_orgs

    job_mappings = []
    for job_mapping in job_mapping_set.job_mappings().values():
        job_mapping: ccm.JobMapping
        if org and org not in org_names(job_mapping):
            continue
        if org or host:
            remove_github_org_configs(job_mapping, org, host)
        job_mappings.append(job_mapping)

    def_enumerators = [
        GithubOrganisationDefinitionEnumerator(
            job_mapping=job_mapping,
            cfg_set=config_set,
            repository_filter=repository_filter,
        )
        for job_mapping in job_mappings
    ]

    preprocessor = DefinitionDescriptorPreprocessor()
    template_retriever = TemplateRetriever(template_path=[template_path])
    renderer = Renderer(
        template_retriever=template_retriever,
        template_include_dir=template_include_dir,
        cfg_set=config_set,
        render_origin=RenderOrigin.LOCAL,
    )

    deployer = FilesystemDeployer(base_dir=out_dir)

    replicator = PipelineReplicator(
        definition_enumerators=def_enumerators,
        descriptor_preprocessor=preprocessor,
        definition_renderer=renderer,
        definition_deployer=deployer,
    )

    replicator.replicate()
def _current_concourse_config():
    '''Return the concourse config of the current config set.

    Only available when running on CI infrastructure.
    '''
    if _running_on_ci():
        return ctx().cfg_set().concourse()
    raise RuntimeError(
        'Can only determine own concourse config if running on CI')
def deploy_or_upgrade_landscape(
    config_set_name: CliHint(typehint=str, help=CONFIG_SET_HELP),
    components: CliHint(
        type=LandscapeComponent,
        typehint=[LandscapeComponent],
        choices=[component for component in LandscapeComponent],
        help="list of components to deploy. By default, ALL components will be deployed."
    )=None,
    webhook_dispatcher_chart_dir: CliHint(
        typehint=str,
        help="directory of webhook dispatcher chart",
    )=None,
    gardenlinux_cache_chart_dir: CliHint(
        typehint=str,
        help="directory of gardenlinux-cache chart",
    )=None,
    concourse_deployment_name: CliHint(
        typehint=str, help="namespace and deployment name for Concourse"
    )='concourse',
    whitesource_backend_chart_dir: CliHint(
        typehint=str,
        help="directory of Whitesource Backend chart",
    )=None,
    whitesource_backend_deployment_name: CliHint(
        typehint=str, help="namespace and deployment name for Whitesource"
    )='whitesource-backend',
    whitesource_cfg_name: CliHint(
        typehint=str,
        help='Whitesource Config',
    )='gardener',
    timeout_seconds: CliHint(typehint=int, help="how long to wait for concourse startup")=180,
    webhook_dispatcher_deployment_name: str='webhook-dispatcher',
    gardenlinux_cache_deployment_name: str='gardenlinux-cache',
    dry_run: bool=True,
):
    '''Deploys the given components of the Concourse landscape.

    Validates arguments first, prints an overview, and (unless dry_run is set)
    deploys each requested component in a fixed order: secrets server,
    Concourse, webhook dispatcher, ClamAV, gardenlinux cache, Whitesource
    backend. The kubernetes cluster context is taken from the ConcourseConfig
    of the given config set.

    NOTE(review): dry_run defaults to True - callers must explicitly pass
    dry_run=False to actually deploy anything.
    '''
    # handle default (all known components)
    if not components:
        components = [component for component in LandscapeComponent]

    # Validate
    if LandscapeComponent.WHD in components:
        # webhook dispatcher is deployed from a local helm chart, so its
        # directory is mandatory (and must exist) if WHD is to be deployed
        if not webhook_dispatcher_chart_dir:
            raise ValueError(
                f"--webhook-dispatcher-chart-dir must be given if component "
                f"'{LandscapeComponent.WHD.value}' is to be deployed."
            )
        else:
            webhook_dispatcher_chart_dir = existing_dir(webhook_dispatcher_chart_dir)

    if LandscapeComponent.GARDENLINUX_CACHE in components:
        # same rule for the gardenlinux-cache chart directory
        if not gardenlinux_cache_chart_dir:
            raise ValueError(
                f"--gardenlinux-cache-chart-dir must be given if component "
                f"'{LandscapeComponent.GARDENLINUX_CACHE.value}' is to be deployed."
            )
        else:
            gardenlinux_cache_chart_dir = existing_dir(gardenlinux_cache_chart_dir)

    # print planned operation (also in dry-run mode)
    _display_info(
        dry_run=dry_run,
        operation="DEPLOYED",
        deployment_name=concourse_deployment_name,
        components=components,
    )

    if dry_run:
        return

    cfg_factory = ctx().cfg_factory()
    config_set = cfg_factory.cfg_set(config_set_name)
    concourse_cfg = config_set.concourse()

    # Set the global kubernetes cluster context to the cluster specified in the ConcourseConfig
    kubernetes_config_name = concourse_cfg.kubernetes_cluster_config()
    kubernetes_cfg = cfg_factory.kubernetes(kubernetes_config_name)
    kube_ctx.set_kubecfg(kubernetes_cfg.kubeconfig())

    if LandscapeComponent.SECRETS_SERVER in components:
        info('Deploying Secrets Server')
        deploy_secrets_server(
            config_set_name=config_set_name,
        )

    if LandscapeComponent.CONCOURSE in components:
        info('Deploying Concourse')
        deploy_or_upgrade_concourse(
            config_set_name=config_set_name,
            deployment_name=concourse_deployment_name,
            timeout_seconds=timeout_seconds,
        )

    if LandscapeComponent.WHD in components:
        info('Deploying Webhook Dispatcher')
        deploy_or_upgrade_webhook_dispatcher(
            config_set_name=config_set_name,
            chart_dir=webhook_dispatcher_chart_dir,
            deployment_name=webhook_dispatcher_deployment_name,
        )

    if LandscapeComponent.CLAMAV in components:
        info ('Deploying ClamAV')
        deploy_or_upgrade_clamav(
            config_set_name=config_set_name,
        )

    if LandscapeComponent.GARDENLINUX_CACHE in components:
        info ('Deploying Gardenlinux Cache')
        deploy_or_upgrade_gardenlinux_cache(
            config_set_name=config_set_name,
            chart_dir=gardenlinux_cache_chart_dir,
            deployment_name=gardenlinux_cache_deployment_name,
        )

    if LandscapeComponent.WHITESOURCE_BACKEND in components:
        info ('Deploying Whitesource Backend')
        # only pass overrides that were actually given, so the callee's own
        # defaults apply otherwise
        extra_args = {}
        if whitesource_backend_deployment_name:
            extra_args['deployment_name'] = whitesource_backend_deployment_name
        if whitesource_cfg_name:
            extra_args['whitesource_cfg_name'] = whitesource_cfg_name
        if whitesource_backend_chart_dir:
            extra_args['chart_dir'] = whitesource_backend_chart_dir
        deploy_or_upgrade_whitesource_api_extension(
            config_set_name=config_set_name,
            **extra_args,
        )
def determine_mail_recipients(
    github_cfg_name,
    src_dirs=(),
    components: typing.Sequence[cm.Component] = (),
    component_names=(),
    codeowners_files=(),
    branch_name='master',
    ctx_repo_url=None,
):
    '''
    returns a generator yielding all email addresses for the given (git) repository work tree
    Email addresses are looked up:
    - from head commit: author and committer
    - from *CODEOWNERS files [0]
    Email addresses are not de-duplicated (this should be done by consumers)

    [0] https://help.github.com/articles/about-codeowners/
    '''
    if not any((components, component_names, src_dirs, codeowners_files)):
        return # nothing to do

    if components and component_names:
        raise ValueError('only one of components, component_names must be set')

    if component_names and not ctx_repo_url:
        raise ValueError(
            'If component_names is given, ctx_repo_url must also be given')

    cfg_factory = ctx().cfg_factory()
    github_cfg = cfg_factory.github(github_cfg_name)
    github_api = ccc.github.github_api(github_cfg)

    yield from determine_head_commit_recipients(src_dirs)

    yield from determine_local_repository_codeowners_recipients(
        github_api=github_api,
        src_dirs=src_dirs,
    )

    yield from determine_codeowner_file_recipients(
        github_api=github_api,
        codeowners_files=codeowners_files,
    )

    if component_names:
        entries_and_apis = [
            _codeowners_parser_from_component_name(
                component_name=component_name,
                ctx_repo_url=ctx_repo_url,
                branch_name=branch_name)
            for component_name in component_names
        ]
    elif components:
        entries_and_apis = [
            _codeowners_parser_from_component(component=component, branch_name=branch_name)
            for component in components
        ]
    else:
        # neither components nor component_names given - that is a valid call
        # (src_dirs and/or codeowners_files only); the previous implementation
        # raised ValueError here, which crashed consumers mid-iteration after
        # the recipients above had already been yielded
        return

    for api, codeowner_entries in entries_and_apis:
        yield from github.codeowners.resolve_email_addresses(
            codeowners_entries=codeowner_entries,
            github_api=api,
        )