def render_pipelines(
    cfg_name: str,
    out_dir: str,
    template_path: str = _template_path(),
    org: str = None,  # if set, filter for org
    repo: str = None,  # if set, filter for repo
):
    '''Render all pipeline definitions from the given cfg-set into `out_dir`.

    If `org` is set, job-mappings that do not contain that GitHub organisation
    are skipped; if `repo` is set, only definitions from that repository are
    enumerated.
    '''
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)

    cfg_factory = ctx().cfg_factory()
    config_set = cfg_factory.cfg_set(cfg_name=cfg_name)
    concourse_cfg = config_set.concourse()
    job_mapping_set = cfg_factory.job_mapping(concourse_cfg.job_mapping_cfg_name())

    template_include_dir = template_path

    # PEP 8 (E731): use a named def instead of assigning a lambda to a name
    if repo:
        def repository_filter(repo_name):
            return repo_name == repo
    else:
        repository_filter = None

    def_enumerators = []
    for job_mapping in job_mapping_set.job_mappings().values():
        job_mapping: ccm.JobMapping
        # honour the optional organisation filter
        if org and org not in {
            oc.name() for oc in job_mapping.github_organisations()
        }:
            continue
        def_enumerators.append(
            GithubOrganisationDefinitionEnumerator(
                job_mapping=job_mapping,
                cfg_set=config_set,
                repository_filter=repository_filter,
            )
        )

    preprocessor = DefinitionDescriptorPreprocessor()
    template_retriever = TemplateRetriever(template_path=[template_path])
    renderer = Renderer(
        template_retriever=template_retriever,
        template_include_dir=template_include_dir,
        cfg_set=config_set,
    )
    deployer = FilesystemDeployer(base_dir=out_dir)

    replicator = PipelineReplicator(
        definition_enumerators=def_enumerators,
        descriptor_preprocessor=preprocessor,
        definition_renderer=renderer,
        definition_deployer=deployer,
    )
    replicator.replicate()
def set_teams(config_name: CliHint(typehint=str, help='the cfg_set name to use'), ):
    '''Apply the team configuration of the concourse cfg in the given cfg-set.'''
    factory = ctx().cfg_factory()
    concourse_cfg = factory.cfg_set(cfg_name=config_name).concourse()
    setup_concourse.set_teams(config=concourse_cfg)
def sync_org_webhooks_from_cfg(whd_deployment_config_name: str, ):
    '''
    Set or update all org-webhooks for the given configs.
    '''
    factory = ctx().cfg_factory()
    deployment_cfg = factory.webhook_dispatcher_deployment(
        whd_deployment_config_name
    )
    sync_org_webhooks(deployment_cfg)
def start_worker_resurrector(
    config_name: CliHint(typehint=str, help='the config set name to use'),
    concourse_namespace='concourse',
):
    '''Create kubernetes/concourse clients from the cfg-set and run the
    worker-pod resurrector against the given namespace.
    '''
    factory = ctx().cfg_factory()
    cfg_set = factory.cfg_set(cfg_name=config_name)

    # point the kubernetes client at the cluster configured in the cfg-set
    kube_client = kube.ctx.Ctx()
    kube_client.set_kubecfg(cfg_set.kubernetes().kubeconfig())

    concourse_client = client.from_cfg(
        concourse_cfg=cfg_set.concourse(),
        team_name='main',
    )

    resurrect_pods(
        namespace=concourse_namespace,
        concourse_client=concourse_client,
        kubernetes_client=kube_client,
    )
def render_pipelines(
    template_path: str,
    config_name: str,
    out_dir: str,
    template_include_dir: str = None,
):
    '''Render the pipeline definitions of every job-mapping in the given
    cfg-set into `out_dir` (created if absent).

    `template_include_dir` defaults to `template_path` when not given.
    '''
    os.makedirs(out_dir, exist_ok=True)

    factory = ctx().cfg_factory()
    cfg_set = factory.cfg_set(cfg_name=config_name)
    mapping_cfg_name = cfg_set.concourse().job_mapping_cfg_name()
    job_mapping_set = factory.job_mapping(mapping_cfg_name)

    include_dir = template_include_dir or template_path

    def_enumerators = [
        GithubOrganisationDefinitionEnumerator(
            job_mapping=job_mapping,
            cfg_set=cfg_set
        )
        for job_mapping in job_mapping_set.job_mappings().values()
    ]

    renderer = Renderer(
        template_retriever=TemplateRetriever(template_path=[template_path]),
        template_include_dir=include_dir,
        cfg_set=cfg_set,
    )

    replicator = PipelineReplicator(
        definition_enumerators=def_enumerators,
        descriptor_preprocessor=DefinitionDescriptorPreprocessor(),
        definition_renderer=renderer,
        definition_deployer=FilesystemDeployer(base_dir=out_dir),
    )
    replicator.replicate()
def trigger_resource_check(
    cfg_name: CliHints.non_empty_string(help="cfg_set to use"),
    team_name: CliHints.non_empty_string(help="pipeline's team name"),
    pipeline_name: CliHints.non_empty_string(help="pipeline name"),
    resource_name: CliHints.non_empty_string(help="resource to check"),
):
    '''Triggers a check of the specified Concourse resource
    '''
    concourse_cfg = ctx().cfg_factory().cfg_set(cfg_name).concourse()

    api = client.from_cfg(
        concourse_cfg=concourse_cfg,
        team_name=team_name,
    )
    api.trigger_resource_check(
        pipeline_name=pipeline_name,
        resource_name=resource_name,
    )
def render_pipeline(
    definition_file: CliHints.existing_file(),
    cfg_name: str,
    out_dir: CliHints.existing_dir(),
    template_path: str=_template_path(),
    template_include_dir: str=None,
):
    '''Render a single pipeline definition file into `out_dir`.

    The definition is enumerated with placeholder repository coordinates
    (example/example @ master on github.com). `template_include_dir` defaults
    to `template_path` when not given.
    '''
    cfg_factory = ctx().cfg_factory()
    cfg_set = cfg_factory.cfg_set(cfg_name=cfg_name)

    # NOTE: removed stray debug `print(template_path)` left over from development

    def_enumerators = [
        SimpleFileDefinitionEnumerator(
            definition_file=definition_file,
            cfg_set=cfg_set,
            repo_path='example/example',
            repo_branch='master',
            repo_host='github.com',
        )
    ]

    preprocessor = DefinitionDescriptorPreprocessor()

    if not template_include_dir:
        template_include_dir = template_path

    template_retriever = TemplateRetriever(template_path=template_path)
    renderer = Renderer(
        template_retriever=template_retriever,
        template_include_dir=template_include_dir,
        cfg_set=cfg_set,
    )
    deployer = FilesystemDeployer(base_dir=out_dir)

    replicator = PipelineReplicator(
        definition_enumerators=def_enumerators,
        descriptor_preprocessor=preprocessor,
        definition_renderer=renderer,
        definition_deployer=deployer
    )
    replicator.replicate()
def update_certificate(
    tls_config_name: CliHint(typehint=str, help="TLS config element name to update"),
    certificate_file: CliHints.existing_file(help="certificate file path"),
    key_file: CliHints.existing_file(help="private key file path"),
    output_path: CliHints.existing_dir(help="TLS config file output path")
):
    '''Replace certificate and private key of a TLS config element and dump
    the resulting TLS config file to `output_path`.

    The new PEM data is read from `certificate_file` / `key_file`, written
    into the cfg-factory's config element, and the full tls_config mapping is
    serialised as YAML using block-literal (`|`) style for all values.
    '''
    # Stuff used for yaml formatting, when dumping a dictionary
    class LiteralStr(str):
        """Used to create yaml block style indicator | """

    def literal_str_representer(dumper, data):
        """Used to create yaml block style indicator"""
        return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')

    # read new certificate data
    certificate_file = os.path.abspath(certificate_file)
    private_key_file = os.path.abspath(key_file)
    with open(certificate_file) as f:
        certificate = f.read()
    with open(private_key_file) as f:
        private_key = f.read()

    # set new certificate data to specified argument 'tls_config_name'
    cfg_factory = ctx().cfg_factory()
    tls_config_element = cfg_factory.tls_config(tls_config_name)
    tls_config_element.set_private_key(private_key)
    tls_config_element.set_certificate(certificate)

    # patch tls config dict so that yaml.dump outputs literal strings using '|'
    # NOTE(review): this registers the representer globally on the yaml module
    # and reaches into cfg_factory's private `_configs` mapping
    yaml.add_representer(LiteralStr, literal_str_representer)
    configs = cfg_factory._configs('tls_config')
    for k1, v1 in configs.items():
        for k2, _ in v1.items():
            # wrap every leaf value so the custom representer is used for it
            configs[k1][k2] = LiteralStr(configs[k1][k2])

    # dump updated tls config to given output path; the target file name is
    # taken from the first declared source of the 'tls_config' cfg type
    tls_config_type = cfg_factory._cfg_types()['tls_config']
    tls_config_file = list(tls_config_type.sources())[0].file()
    with open(os.path.join(output_path, tls_config_file), 'w') as f:
        yaml.dump(configs, f, indent=2, default_flow_style=False)