def render_pipelines(
    cfg_name: str,
    out_dir: str,
    template_path: str = _template_path(),
    org: str = None,  # if set, filter for org
    repo: str = None,  # if set, filter for repo
):
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)

    cfg_factory = ctx().cfg_factory()
    config_set = cfg_factory.cfg_set(cfg_name=cfg_name)
    concourse_cfg = config_set.concourse()
    job_mapping_set = cfg_factory.job_mapping(concourse_cfg.job_mapping_cfg_name())

    template_include_dir = template_path

    if repo:
        repository_filter = lambda repo_name: repo_name == repo
    else:
        repository_filter = None

    def_enumerators = []
    for job_mapping in job_mapping_set.job_mappings().values():
        job_mapping: ccm.JobMapping
        if org and org not in {
            oc.name() for oc in job_mapping.github_organisations()
        }:
            continue
        def_enumerators.append(
            GithubOrganisationDefinitionEnumerator(
                job_mapping=job_mapping,
                cfg_set=config_set,
                repository_filter=repository_filter,
            )
        )

    preprocessor = DefinitionDescriptorPreprocessor()

    template_retriever = TemplateRetriever(template_path=[template_path])
    renderer = Renderer(
        template_retriever=template_retriever,
        template_include_dir=template_include_dir,
        cfg_set=config_set,
    )

    deployer = FilesystemDeployer(base_dir=out_dir)

    replicator = PipelineReplicator(
        definition_enumerators=def_enumerators,
        descriptor_preprocessor=preprocessor,
        definition_renderer=renderer,
        definition_deployer=deployer,
    )

    replicator.replicate()
def render_pipeline(
    definition_file: CliHints.existing_file(),
    cfg_name: str,
    out_dir: CliHints.existing_dir(),
    repo_path: str = 'example/example',
    repo_branch: str = 'master',
    repo_host: str = 'github.com',
    template_path: str = _template_path(),
    template_include_dir: str = None,
):
    cfg_factory = ctx().cfg_factory()
    cfg_set = cfg_factory.cfg_set(cfg_name=cfg_name)
    logger.info(f'Template path: {template_path}')

    repo_url = urllib.parse.urlunparse(('https', repo_host, repo_path, '', '', ''))

    try:
        job_mapping = cfg_set.job_mapping().job_mapping_for_repo_url(repo_url, cfg_set)
        secret_cfg = cfg_factory.secret(job_mapping.secret_cfg())
    except ValueError as e:
        logger.warning(f'An error occurred: {e}. Will use dummy values to render pipeline.')
        job_mapping = None
        secret_cfg = None

    def_enumerators = [
        SimpleFileDefinitionEnumerator(
            definition_file=definition_file,
            cfg_set=cfg_set,
            repo_path=repo_path,
            repo_branch=repo_branch,
            repo_host=repo_host,
            job_mapping=job_mapping,
            secret_cfg=secret_cfg,
        )
    ]

    preprocessor = DefinitionDescriptorPreprocessor()

    if not template_include_dir:
        template_include_dir = template_path

    template_retriever = TemplateRetriever(template_path=template_path)
    renderer = Renderer(
        template_retriever=template_retriever,
        template_include_dir=template_include_dir,
        cfg_set=cfg_set,
        render_origin=RenderOrigin.LOCAL,
    )

    deployer = FilesystemDeployer(base_dir=out_dir)

    replicator = PipelineReplicator(
        definition_enumerators=def_enumerators,
        descriptor_preprocessor=preprocessor,
        definition_renderer=renderer,
        definition_deployer=deployer,
    )

    replicator.replicate()
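# Minimal illustration (not part of the original module) of the repo_url construction used
# in render_pipeline above; the values shown are simply the function's own defaults.
import urllib.parse

assert urllib.parse.urlunparse(
    ('https', 'github.com', 'example/example', '', '', '')
) == 'https://github.com/example/example'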
def replicate_pipelines(
    cfg_set,
    job_mapping,
    template_path=concourse.paths.template_dir,
    template_include_dir=concourse.paths.template_include_dir,
    unpause_pipelines: bool = True,
    expose_pipelines: bool = True,
    unpause_new_pipelines: bool = True,
    remove_pipelines_filter: typing.Callable[[str], bool] = None,
):
    '''
    @param remove_pipelines_filter: pipeline-names the filter does not match are never removed
    '''
    definition_enumerators = [
        GithubOrganisationDefinitionEnumerator(
            job_mapping=job_mapping,
            cfg_set=cfg_set,
            repository_filter=lambda repo: not repo.archived,  # exclude archived repositories
        ),
    ]

    preprocessor = DefinitionDescriptorPreprocessor()

    template_retriever = TemplateRetriever(template_path=template_path)
    renderer = Renderer(
        template_retriever=template_retriever,
        template_include_dir=template_include_dir,
        cfg_set=cfg_set,
        render_origin=RenderOrigin.PIPELINE_REPLICATION,
    )

    deployer = ConcourseDeployer(
        cfg_set=cfg_set,
        unpause_pipelines=unpause_pipelines,
        unpause_new_pipelines=unpause_new_pipelines,
        expose_pipelines=expose_pipelines,
    )

    result_processor = ReplicationResultProcessor(
        cfg_set=cfg_set,
        unpause_new_pipelines=unpause_new_pipelines,
        job_mapping=job_mapping,
        remove_pipelines_filter=remove_pipelines_filter,
    )

    replicator = PipelineReplicator(
        definition_enumerators=definition_enumerators,
        descriptor_preprocessor=preprocessor,
        definition_renderer=renderer,
        definition_deployer=deployer,
        result_processor=result_processor,
    )

    return replicator.replicate()
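# Hedged usage sketch for replicate_pipelines' remove_pipelines_filter parameter: per the
# docstring above, pipeline-names the filter does NOT match are never removed. The prefix
# used here is purely illustrative and not taken from any real configuration.
def example_remove_pipelines_filter(pipeline_name: str) -> bool:
    # only pipelines whose names start with this (hypothetical) prefix may be removed
    return pipeline_name.startswith('example-org-')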
def render_pipeline(
    definition_file: CliHints.existing_file(),
    cfg_name: str,
    out_dir: CliHints.existing_dir(),
    template_path: str = _template_path(),
    template_include_dir: str = None,
    secret_cfg_name: str = None,
):
    cfg_factory = ctx().cfg_factory()
    cfg_set = cfg_factory.cfg_set(cfg_name=cfg_name)

    print(template_path)

    if secret_cfg_name:
        secret_cfg = cfg_factory.secret(secret_cfg_name)
    else:
        secret_cfg = None

    def_enumerators = [
        SimpleFileDefinitionEnumerator(
            definition_file=definition_file,
            cfg_set=cfg_set,
            repo_path='example/example',
            repo_branch='master',
            repo_host='github.com',
        )
    ]

    preprocessor = DefinitionDescriptorPreprocessor()

    if not template_include_dir:
        template_include_dir = template_path

    template_retriever = TemplateRetriever(template_path=template_path)
    renderer = Renderer(
        template_retriever=template_retriever,
        template_include_dir=template_include_dir,
        cfg_set=cfg_set,
        secret_cfg=secret_cfg,
    )

    deployer = FilesystemDeployer(base_dir=out_dir)

    replicator = PipelineReplicator(
        definition_enumerators=def_enumerators,
        descriptor_preprocessor=preprocessor,
        definition_renderer=renderer,
        definition_deployer=deployer,
    )

    replicator.replicate()
def __init__(
    self,
    cfg_set,
    template_retriever: TemplateRetriever = TemplateRetriever(),
    template_include_dir=None,
    render_origin: RenderOrigin = RenderOrigin.UNKNOWN,
):
    self.template_retriever = template_retriever
    self.render_origin = render_origin
    if template_include_dir:
        template_include_dir = os.path.abspath(template_include_dir)
        self.template_include_dir = template_include_dir
        # set up a mako TemplateLookup rooted at the include dir, so templates can be
        # resolved relative to it when rendering
        from mako.lookup import TemplateLookup
        self.lookup = TemplateLookup([template_include_dir])
    self.cfg_set = cfg_set
def render_pipelines(
    template_path: str,
    config_name: str,
    out_dir: str,
    template_include_dir: str = None,
):
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)

    cfg_factory = ctx().cfg_factory()
    config_set = cfg_factory.cfg_set(cfg_name=config_name)
    concourse_cfg = config_set.concourse()
    job_mapping_set = cfg_factory.job_mapping(concourse_cfg.job_mapping_cfg_name())

    if not template_include_dir:
        template_include_dir = template_path

    def_enumerators = []
    for job_mapping in job_mapping_set.job_mappings().values():
        def_enumerators.append(
            GithubOrganisationDefinitionEnumerator(
                job_mapping=job_mapping,
                cfg_set=config_set,
            )
        )

    preprocessor = DefinitionDescriptorPreprocessor()

    template_retriever = TemplateRetriever(template_path=[template_path])
    renderer = Renderer(
        template_retriever=template_retriever,
        template_include_dir=template_include_dir,
        cfg_set=config_set,
    )

    deployer = FilesystemDeployer(base_dir=out_dir)

    replicator = PipelineReplicator(
        definition_enumerators=def_enumerators,
        descriptor_preprocessor=preprocessor,
        definition_renderer=renderer,
        definition_deployer=deployer,
    )

    replicator.replicate()
def replicate_pipelines(
    cfg_set,
    concourse_cfg,
    job_mapping,
    template_path,
    template_include_dir,
    unpause_pipelines: bool = True,
    expose_pipelines: bool = True,
    unpause_new_pipelines: bool = True,
):
    definition_enumerators = [
        GithubOrganisationDefinitionEnumerator(
            job_mapping=job_mapping,
            cfg_set=cfg_set,
        ),
    ]

    preprocessor = DefinitionDescriptorPreprocessor()

    template_retriever = TemplateRetriever(template_path=template_path)
    renderer = Renderer(
        template_retriever=template_retriever,
        template_include_dir=template_include_dir,
        cfg_set=cfg_set,
    )

    deployer = ConcourseDeployer(
        unpause_pipelines=unpause_pipelines,
        expose_pipelines=expose_pipelines,
    )

    result_processor = ReplicationResultProcessor(
        cfg_set=cfg_set,
        unpause_new_pipelines=unpause_new_pipelines,
        job_mapping=job_mapping,
    )

    replicator = PipelineReplicator(
        definition_enumerators=definition_enumerators,
        descriptor_preprocessor=preprocessor,
        definition_renderer=renderer,
        definition_deployer=deployer,
        result_processor=result_processor,
    )

    return replicator.replicate()
def deploy_and_run_smoketest_pipeline(
    config_dir: str,
    config_name: str,
    concourse_team_name: str,
    cc_pipelines_repo_dir: str,
    cc_utils_repo_dir: str,
    wait_for_job_execution: bool = False,
):
    config_factory = ConfigFactory.from_cfg_dir(cfg_dir=config_dir)
    config_set = config_factory.cfg_set(cfg_name=config_name)
    concourse_cfg = config_set.concourse()

    # as this is an integration test, hard-code assumptions about the layout of
    # our pipelines repository
    template_path = os.path.join(cc_utils_repo_dir, 'concourse', 'templates')
    template_include_dir = os.path.join(cc_utils_repo_dir, 'concourse')
    pipeline_name = 'cc-smoketest'

    # retrieve pipeline-definition from github at hardcoded location
    github_cfg = config_set.github()

    githubrepobranch = GitHubRepoBranch(
        github_config=github_cfg,
        repo_owner='kubernetes',
        repo_name='cc-smoketest',
        branch='master',
    )

    helper = GitHubRepositoryHelper.from_githubrepobranch(
        githubrepobranch=githubrepobranch,
    )
    pipeline_definition = yaml.load(
        helper.retrieve_text_file_contents(
            file_path='.ci/smoketest-pipeline.yaml',
        ),
        Loader=yaml.SafeLoader,
    )

    definition_descriptor = DefinitionDescriptor(
        pipeline_name=pipeline_name,
        pipeline_definition=pipeline_definition[pipeline_name],
        main_repo={'path': 'kubernetes/cc-smoketest', 'branch': 'master'},
        concourse_target_cfg=concourse_cfg,
        concourse_target_team=concourse_team_name,
    )

    preprocessor = DefinitionDescriptorPreprocessor()
    template_retriever = TemplateRetriever(template_path=template_path)
    renderer = Renderer(
        template_retriever=template_retriever,
        template_include_dir=template_include_dir,
        cfg_set=config_set,
    )
    deployer = ConcourseDeployer(
        unpause_pipelines=True,
        expose_pipelines=True,
    )

    definition_descriptor = preprocessor.process_definition_descriptor(definition_descriptor)
    rendering_result = renderer.render(definition_descriptor)

    info('deploying pipeline')
    deployment_result = deployer.deploy(rendering_result.definition_descriptor)

    if not deployment_result.deploy_status & DeployStatus.SUCCEEDED:
        fail('deployment failed')
def render_pipelines(
    cfg_name: str,
    out_dir: str,
    template_path: str = _template_path(),
    org: str = None,  # if set, filter for org
    repo: str = None,  # if set, filter for repo
    host: str = None,  # if set, filter for gh-host
):
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)

    cfg_factory = ctx().cfg_factory()
    config_set = cfg_factory.cfg_set(cfg_name=cfg_name)
    concourse_cfg = config_set.concourse()
    job_mapping_set = cfg_factory.job_mapping(concourse_cfg.job_mapping_cfg_name())

    template_include_dir = template_path

    if repo:
        repository_filter = lambda repo_obj: repo_obj.name == repo
    else:
        repository_filter = None

    def org_names(job_mapping):
        for org in job_mapping.github_organisations():
            yield org.org_name()

    def remove_github_org_configs(job_mapping, org: str, host: str):
        # keep only github-org configs matching the given org/host filters
        # (mutates the job_mapping's raw dict)
        def want_gh_org(org_cfg: model.concourse.GithubOrganisationConfig):
            if org and org_cfg.org_name() != org:
                return False
            gh_cfg: model.github.GithubConfig = cfg_factory.github(org_cfg.github_cfg_name())
            if host and gh_cfg.hostname() != host:
                return False
            return True

        gh_orgs = {
            ghorg.name(): ghorg.raw
            for ghorg in job_mapping.github_organisations()
            if want_gh_org(ghorg)
        }

        job_mapping.raw['github_orgs'] = gh_orgs

    job_mappings = []
    for job_mapping in job_mapping_set.job_mappings().values():
        job_mapping: ccm.JobMapping
        if org and org not in org_names(job_mapping):
            continue
        if org or host:
            remove_github_org_configs(job_mapping, org, host)
        job_mappings.append(job_mapping)

    def_enumerators = [
        GithubOrganisationDefinitionEnumerator(
            job_mapping=job_mapping,
            cfg_set=config_set,
            repository_filter=repository_filter,
        )
        for job_mapping in job_mappings
    ]

    preprocessor = DefinitionDescriptorPreprocessor()

    template_retriever = TemplateRetriever(template_path=[template_path])
    renderer = Renderer(
        template_retriever=template_retriever,
        template_include_dir=template_include_dir,
        cfg_set=config_set,
        render_origin=RenderOrigin.LOCAL,
    )

    deployer = FilesystemDeployer(base_dir=out_dir)

    replicator = PipelineReplicator(
        definition_enumerators=def_enumerators,
        descriptor_preprocessor=preprocessor,
        definition_renderer=renderer,
        definition_deployer=deployer,
    )

    replicator.replicate()
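# Hedged usage sketch (assumption: all argument values below are placeholders, not real
# configuration names): render the pipelines of a single GitHub organisation into a local
# output directory using the render_pipelines variant defined above.
if __name__ == '__main__':
    render_pipelines(
        cfg_name='example-cfg',      # hypothetical cfg-set name
        out_dir='./rendered-pipelines',
        org='example-org',           # only enumerate this organisation
        host='github.com',           # only github-org configs on this host
    )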