def _enumerate_required_org_webhooks(
    whd_deployment_cfg: WebhookDispatcherDeploymentConfig,
):
    '''Yield ('github orgname', 'github api object', 'webhook url') triples.

    Enumerates all github org configs reachable from the webhook dispatcher's
    concourse configs and derives the webhook url each org must point at.
    '''
    cfg_factory = ctx().cfg_factory()
    whd_cfg_name = whd_deployment_cfg.webhook_dispatcher_config_name()
    whd_cfg = cfg_factory.webhook_dispatcher(whd_cfg_name)

    for concourse_cfg_name in whd_cfg.concourse_config_names():
        concourse_cfg = cfg_factory.concourse(concourse_cfg_name)
        job_mapping_set = cfg_factory.job_mapping(
            concourse_cfg.job_mapping_cfg_name()
        )
        for github_orgname, github_cfg_name in _enumerate_github_org_configs(
            job_mapping_set
        ):
            github_api = _create_github_api_object(
                github_cfg=cfg_factory.github(github_cfg_name),
            )
            # the dispatcher identifies its own cfg via the query parameter
            webhook_url = create_url_from_attributes(
                netloc=whd_deployment_cfg.ingress_host(),
                scheme='https',
                path='github-webhook',
                params='',
                query=f'{github.webhook.DEFAULT_ORG_HOOK_QUERY_KEY}={whd_cfg_name}',
                fragment='',
            )
            yield (github_orgname, github_api, webhook_url)
def generate_release_notes_cli(
    repo_dir: str,
    github_cfg_name: str,
    github_repository_owner: str,
    github_repository_name: str,
    repository_branch: str,
    commit_range: str=None
):
    '''Render the release notes for the given repository and branch as markdown.'''
    github_cfg = ctx().cfg_factory().github(github_cfg_name)
    repo_branch = GitHubRepoBranch(
        github_config=github_cfg,
        repo_owner=github_repository_owner,
        repo_name=github_repository_name,
        branch=repository_branch,
    )
    # both helpers are derived from the same repo/branch reference
    ReleaseNotes.create(
        github_helper=GitHubRepositoryHelper.from_githubrepobranch(
            githubrepobranch=repo_branch,
        ),
        git_helper=GitHelper.from_githubrepobranch(
            repo_path=repo_dir,
            githubrepobranch=repo_branch,
        ),
        repository_branch=repository_branch,
        commit_range=commit_range,
    ).to_markdown()
def delete_file_from_slack(
    slack_cfg_name: str,
    file_id: str,
):
    '''Delete the file with the given id via the named slack cfg; returns the API response.'''
    slack_cfg = ctx().cfg_factory().slack(slack_cfg_name)
    return SlackHelper(slack_cfg).delete_file(file_id=file_id)
def upload_product_images(
    protecode_cfg_name: str,
    product_cfg_file: CliHints.existing_file(),
    processing_mode: CliHint(
        choices=list(ProcessingMode),
        type=ProcessingMode,
    )=ProcessingMode.UPLOAD_IF_CHANGED,
    protecode_group_id: int=5,
    parallel_jobs: int=4,
    cve_threshold: int=7,
    ignore_if_triaged: bool=True,
    reference_group_ids: [int]=[],
):
    '''Upload all container images of the given component descriptor to protecode.'''
    factory = ctx().cfg_factory()
    protecode_cfg = factory.protecode(protecode_cfg_name)

    descriptor = ComponentDescriptor.from_dict(
        raw_dict=parse_yaml_file(product_cfg_file)
    )

    # NOTE(review): the returned results/license report are currently unused here;
    # confirm whether they are intentionally discarded before removing the assignment
    upload_results, license_report = upload_images(
        protecode_cfg=protecode_cfg,
        product_descriptor=descriptor,
        protecode_group_id=protecode_group_id,
        parallel_jobs=parallel_jobs,
        cve_threshold=cve_threshold,
        ignore_if_triaged=ignore_if_triaged,
        processing_mode=processing_mode,
        reference_group_ids=reference_group_ids,
    )
def log_stack_trace_information(resp, *args, **kwargs):
    '''
    This function stores the current stacktrace in elastic search.
    It must not return anything, otherwise the return value is assumed
    to replace the response
    '''
    if not util._running_on_ci():
        return  # only relevant when running within a ci job

    try:
        try:
            elastic_cfg = ctx().cfg_factory().elasticsearch('sap_internal')
        except KeyError:
            # do nothing: external concourse does not have els config
            return

        document = {
            'date': datetime.datetime.utcnow().isoformat(),
            'url': resp.url,
            'req_method': resp.request.method,
            'stacktrace': traceback.format_stack(),
        }
        client = ccc.elasticsearch.from_cfg(elasticsearch_cfg=elastic_cfg)
        client.store_document(
            index='github_access_stacktrace',
            body=document,
        )
    except Exception as e:
        # best-effort logging: never let tracing break the actual request
        info(f'Could not log stack trace information: {e}')
def determine_mail_recipients(src_dir, github_cfg_name):
    '''
    returns a generator yielding all email addresses for the given (git) repository work tree
    Email addresses are looked up:
    - from head commit: author and committer
    - from *CODEOWNERS files [0]
    Email addresses are not de-duplicated (this should be done by consumers)
    [0] https://help.github.com/articles/about-codeowners/
    '''
    cfg_factory = ctx().cfg_factory()
    github_cfg = cfg_factory.github(github_cfg_name)
    github_api = githubutil._create_github_api_object(github_cfg)

    # author / committer from the repository's head commit
    repo = git.Repo(ensure_directory_exists(src_dir))
    head_commit = repo.commit(repo.head)
    for address in (head_commit.author.email, head_commit.committer.email):
        yield address.lower()

    # addresses resolved from CODEOWNERS entries
    entries = CodeownersParser(repo_dir=src_dir).parse_codeowners_entries()
    resolver = CodeOwnerEntryResolver(github_api=github_api)
    yield from resolver.resolve_email_addresses(entries)
def render_pipelines(
    definitions_root_dir: str,
    template_path: [str],
    config_name: str,
    template_include_dir: str,
    out_dir: str
):
    '''Render all pipeline definitions below the root dir into out_dir (one yaml each).'''
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)

    cfg_factory = ctx().cfg_factory()
    config_set = cfg_factory.cfg_set(cfg_name=config_name)
    concourse_cfg = config_set.concourse()
    job_mapping_set = cfg_factory.job_mapping(concourse_cfg.job_mapping_cfg_name())

    for job_mapping in job_mapping_set.job_mappings().values():
        generated = pipelines.generate_pipelines(
            definitions_root_dir=definitions_root_dir,
            job_mapping=job_mapping,
            template_path=template_path,
            template_include_dir=template_include_dir,
            config_set=config_set
        )
        for rendered_pipeline, definition, pipeline_args in generated:
            # one output file per pipeline, named after the pipeline
            out_name = os.path.join(out_dir, pipeline_args.name + '.yaml')
            with open(out_name, 'w') as f:
                f.write(rendered_pipeline)
def set_teams(
    config_name: CliHint(typehint=str, help='Which of the configurations contained in "--config-file" to use.'),
):
    '''Apply team configuration for the concourse cfg contained in the named cfg set.'''
    concourse_cfg = ctx().cfg_factory().cfg_set(cfg_name=config_name).concourse()
    setup.set_teams(config=concourse_cfg)
def deploy_or_upgrade_monitoring(
    config_set_name: CliHint(typehint=str, help=CONFIG_SET_HELP),
):
    '''Deploy (or upgrade) the monitoring landscape for the given cfg set.'''
    factory = ctx().cfg_factory()
    setup_monitoring.deploy_monitoring_landscape(
        cfg_set=factory.cfg_set(config_set_name),
        cfg_factory=factory,
    )
def deploy_secrets_server(config_set_name: CliHint(typehint=str, help=CONFIG_SET_HELP), ):
    '''Deploy the secrets-server configured in the given cfg set.'''
    cfg_set = ctx().cfg_factory().cfg_set(config_set_name)
    setup_secrets_server.deploy_secrets_server(
        secrets_server_config=cfg_set.secrets_server(),
    )
def __init__(
    self,
    cfg_set,
    whd_cfg: WebhookDispatcherConfig
):
    '''Keep cfg set and webhook dispatcher cfg; also cache the global cfg factory.'''
    self.cfg_factory = util.ctx().cfg_factory()
    self.cfg_set = cfg_set
    self.whd_cfg = whd_cfg
def set_teams(
    config_name: CliHint(typehint=str, help='the cfg_set name to use'),
):
    '''Apply concourse team configuration from the named cfg set.'''
    cfg_set = ctx().cfg_factory().cfg_set(cfg_name=config_name)
    setup_concourse.set_teams(config=cfg_set.concourse())
def sync_org_webhooks_from_cfg(
    whd_deployment_config_name: str,
):
    '''
    Set or update all org-webhooks for the given configs.
    '''
    whd_deployment_cfg = ctx().cfg_factory().webhook_dispatcher_deployment(
        whd_deployment_config_name
    )
    sync_org_webhooks(whd_deployment_cfg)
def store(index: str, body: str, cfg_name: str):
    '''Store a single JSON document (given as a string) in elastic search and print the result.'''
    elastic_cfg = util.ctx().cfg_factory().elasticsearch(cfg_name)
    client = ccc.elasticsearch.from_cfg(elasticsearch_cfg=elastic_cfg)
    outcome = client.store_document(
        index=index,
        body=json.loads(body),
    )
    print(outcome)
def store_bulk(file: str, cfg_name: str):
    '''Bulk-store documents read from the given file in elastic search and print the result.'''
    elastic_cfg = util.ctx().cfg_factory().elasticsearch(cfg_name)
    client = ccc.elasticsearch.from_cfg(elasticsearch_cfg=elastic_cfg)
    util.existing_file(file)  # fail early if the file is absent
    with open(file) as file_handle:
        print(client.store_bulk(body=file_handle.read()))
def deploy_or_upgrade_monitoring(cfg_set_name: str, ):
    '''Deploy (or upgrade) the monitoring landscape for the given cfg set.'''
    cfg_factory = ctx().cfg_factory()
    cfg_set = cfg_factory.cfg_set(cfg_set_name)
    setup_monitoring.deploy_monitoring_landscape(
        kubernetes_cfg=cfg_set.kubernetes(),
        concourse_cfg=cfg_set.concourse(),
        cfg_factory=cfg_factory,
    )
def delete_releases(
    github_cfg_name: str,
    github_repository_owner: str,
    github_repository_name: str,
    release_name: [str],
):
    '''Delete the named releases from the given github repository.'''
    github_cfg = ctx().cfg_factory().github(github_cfg_name)
    repository_helper = GitHubRepositoryHelper(
        owner=github_repository_owner,
        name=github_repository_name,
        github_cfg=github_cfg,
    )
    repository_helper.delete_releases(release_names=release_name)
def resolve_component_descriptor(
    component_descriptor_file: CliHints.existing_file(),
):
    '''Resolve all component references in the given descriptor file and print the result.

    The file is parsed with yaml.safe_load: component descriptors consist of plain
    mappings/sequences/scalars only, and yaml.load without an explicit Loader can
    instantiate arbitrary python objects and is deprecated since PyYAML 5.1.
    '''
    cfg_factory = ctx().cfg_factory()
    resolver = ComponentDescriptorResolver(
        cfg_factory=cfg_factory,
    )
    with open(component_descriptor_file) as f:
        component_descriptor = Product.from_dict(yaml.safe_load(f))
    resolved_descriptor = resolver.resolve_component_references(
        product=component_descriptor,
    )
    print(yaml.dump(resolved_descriptor.raw))
def export_kubeconfig(
    kubernetes_config_name: str,
    output_file: str,
):
    '''Write the kubeconfig contained in a kubernetes config to a given path.
    '''
    kubernetes_cfg = ctx().cfg_factory().kubernetes(kubernetes_config_name)
    destination_path = pathlib.Path(output_file).resolve()
    existing_dir(destination_path.parent)  # parent directory must already exist
    with destination_path.open(mode='w') as f:
        yaml.dump(kubernetes_cfg.kubeconfig(), f)
def resolve_component_descriptor(
    component_descriptor: CliHints.existing_file(),
):
    '''Resolve all component references in the given descriptor file and print the result.'''
    resolver = ComponentDescriptorResolver(
        cfg_factory=ctx().cfg_factory(),
    )
    # `component_descriptor` is the path to a yaml file; use a distinct name for
    # the parsed object instead of shadowing the parameter
    parsed_descriptor = ComponentDescriptor.from_dict(parse_yaml_file(component_descriptor))
    resolved = resolver.resolve_component_references(product=parsed_descriptor)
    print(yaml.dump(resolved.raw))
def send_mail(
    email_cfg_name: CliHint(
        help="reference to an email cfg (see repo cc-config / secrets-server)"
    ),
    recipients: CliHint(typehint=[str], help="Recipient email address"),
    mail_template_file: CliHints.existing_file(),
    subject: CliHint(help="email subject"),
    cc_recipients: CliHint(typehint=[str], help="Carbon copy email address") = [],
    replace_token: CliHint(typehint=[str], help="<key>=<value> (replace <key> in body)") = [],
):
    '''
    Sends an email using the specified email_cfg (retrieved from a cfg_factory) to the
    specified recipients. The mail body is read from a file. A simple token-replacement
    is done if (optional) replace-tokens are given.

    @param recipients: mail recipients (email addresses)
    @param mail_template_file: path to the mail template file. Must exist.
    @param subject: email subject
    @param cc_recipients: cc mail recipients
    @param replace_token: format: <token>=<replace-value> - tokens in mail-body are replaced
    '''
    not_empty(email_cfg_name)

    cfg_factory = ctx().cfg_factory()
    email_cfg = cfg_factory.email(email_cfg_name)

    with open(mail_template_file) as f:
        mail_template = f.read()

    # validate replace-tokens; materialise the result as a list: a `filter` object
    # is single-use, so the previous code exhausted it in the len() check and then
    # joined an (already empty) iterator - the error message never listed the tokens
    invalid_tokens = [
        t for t in replace_token if not isinstance(t, str) or '=' not in t
    ]
    if invalid_tokens:
        # str() each token: non-str entries are among the invalid ones
        fail('all replace-tokens must be of form <key>=<value>: ' + ' '.join(
            map(str, invalid_tokens)
        ))

    # parse replace-tokens (split on first '=' only, values may contain '=')
    replace_tokens = dict(t.split('=', 1) for t in replace_token)

    _send_mail(
        email_cfg=email_cfg,
        recipients=recipients,
        mail_template=mail_template,
        subject=subject,
        cc_recipients=cc_recipients,
        replace_tokens=replace_tokens,
    )
def _retrieve_matching_credentials(image_reference: str, privileges: Privileges = None):
    '''Return credentials of the first container-registry cfg matching the image reference.

    Returns None if no configured registry matches.
    '''
    util.check_type(image_reference, str)

    cfg_factory = util.ctx().cfg_factory()
    matching = [
        cfg
        for cfg in cfg_factory._cfg_elements('container_registry')
        if cfg.image_ref_matches(image_reference, privileges=privileges)
    ]
    # first match wins; no match -> None
    if matching:
        return matching[0].credentials()
    return None
def post_to_slack(release_notes: ReleaseNote, github_repository_name: str, slack_cfg_name: str, slack_channel: str, release_version: str):
    '''Post the given release notes to the given slack channel; returns the API response.'''
    # slack can't auto link pull requests, commits or users
    # hence we force the link generation when building the markdown string
    message = release_notes.to_markdown(force_link_generation=True)
    title = f'[{github_repository_name}] {release_version} released'
    slack_cfg = ctx().cfg_factory().slack(slack_cfg_name)
    try:
        return SlackHelper(slack_cfg).post_to_slack(
            channel=slack_channel,
            title=title,
            message=message,
        )
    except RuntimeError as e:
        # best-effort: a failed slack post must not fail the release job
        warning(e)
def store_files(index: str, files: [str], cfg_name: str):
    '''Store each of the given JSON files as a document in elastic search, printing each result.'''
    elastic_cfg = util.ctx().cfg_factory().elasticsearch(cfg_name)
    client = ccc.elasticsearch.from_cfg(elasticsearch_cfg=elastic_cfg)

    # validate all files up-front so we fail before storing anything
    for file in files:
        util.existing_file(file)

    for file in files:
        with open(file) as file_handle:
            document = json.load(file_handle)
        print(client.store_document(index=index, body=document))
def _client():
    '''Create a SecretsServerClient from cli args, falling back to environment variables.

    If the current ctx().args carry `server_endpoint`/`concourse_cfg_name` (and
    optionally `cache_file`), a client is constructed from them; if any of those
    attributes is missing entirely (AttributeError), or none is set, creation is
    delegated to SecretsServerClient.from_env().
    '''
    args = ctx().args
    try:
        # either both or neither of server-endpoint and concourse-cfg-name must be set
        if bool(args.server_endpoint) ^ bool(args.concourse_cfg_name):
            raise ValueError('either all or none of server-endpoint and concourse-cfg-name must be set')
        if args.server_endpoint or args.cache_file:
            return SecretsServerClient(
                endpoint_url=args.server_endpoint,
                concourse_secret_name=args.concourse_cfg_name,
                cache_file=args.cache_file
            )
    except AttributeError:
        # args object lacks the expected attributes - fall through to env-based creation
        pass # ignore
    # fall-back to environment variables
    return SecretsServerClient.from_env()
def notify(
    subject: str,
    body: str,
    email_cfg_name: str,
    recipients: typing.Iterable[str],
):
    '''Send a notification mail with the given subject/body to the (de-duplicated) recipients.'''
    recipients = set(recipients)  # de-duplicate
    email_cfg = ctx().cfg_factory().email(email_cfg_name)
    _send_mail(
        email_cfg=email_cfg,
        recipients=recipients,
        mail_template=body,
        subject=subject,
    )
    info(f'sent email to: {recipients}')
def deploy_or_upgrade_clamav(config_set_name: CliHint(typehint=str, help=CONFIG_SET_HELP), ):
    '''Deploy ClamAV for the given cfg set, if a ClamAV cfg is configured; otherwise log and skip.'''
    cfg_set = ctx().cfg_factory().cfg_set(config_set_name)
    concourse_cfg = cfg_set.concourse()
    kubernetes_cfg_name = concourse_cfg.kubernetes_cluster_config()
    clamav_cfg_name = concourse_cfg.clamav_config()

    # guard clause: nothing to do without a ClamAV cfg
    if clamav_cfg_name is None:
        info(
            f"No ClamAV configured for the Concourse in config set '{config_set_name}'. Will "
            "not deploy ClamAV.")
        return

    setup_clamav.deploy_clam_av(
        clamav_cfg_name=clamav_cfg_name,
        kubernetes_cfg_name=kubernetes_cfg_name,
    )
def retrieve_component_descriptor(
    name: str,
    version: str,
):
    '''Retrieve and print the raw component descriptor for name:version.

    Fails (via fail()) if no descriptor exists for the given reference.
    '''
    resolver = ComponentDescriptorResolver(
        cfg_factory=ctx().cfg_factory(),
    )
    component_reference = ComponentReference.create(name=name, version=version)
    try:
        raw_descriptor = resolver.retrieve_raw_descriptor(component_reference)
    except github3.exceptions.NotFoundError:
        fail(f'no component descriptor found: {name}:{version}')
    print(raw_descriptor)
def render_pipelines(
    template_path: str,
    config_name: str,
    out_dir: str,
    template_include_dir: str = None,
):
    '''Render all pipelines of the given cfg set into out_dir via a filesystem replicator.'''
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)

    cfg_factory = ctx().cfg_factory()
    config_set = cfg_factory.cfg_set(cfg_name=config_name)
    concourse_cfg = config_set.concourse()
    job_mapping_set = cfg_factory.job_mapping(concourse_cfg.job_mapping_cfg_name())

    # default the include dir to the template dir
    if not template_include_dir:
        template_include_dir = template_path

    definition_enumerators = [
        GithubOrganisationDefinitionEnumerator(
            job_mapping=job_mapping,
            cfg_set=config_set,
        )
        for job_mapping in job_mapping_set.job_mappings().values()
    ]

    replicator = PipelineReplicator(
        definition_enumerators=definition_enumerators,
        descriptor_preprocessor=DefinitionDescriptorPreprocessor(),
        definition_renderer=Renderer(
            template_retriever=TemplateRetriever(template_path=[template_path]),
            template_include_dir=template_include_dir,
            cfg_set=config_set,
        ),
        definition_deployer=FilesystemDeployer(base_dir=out_dir),
    )
    replicator.replicate()
def deploy_or_upgrade_concourse(
    config_set_name: CliHint(typehint=str, help=CONFIG_SET_HELP),
    deployment_name: CliHint(
        typehint=str,
        help="namespace and deployment name") = 'concourse',
    timeout_seconds: CliHint(
        typehint=int,
        help="how long to wait for concourse startup") = 180,
):
    '''Deploys a new concourse-instance using the given deployment name and config-directory.'''
    which("helm")  # the helm binary is required for the deployment

    config_set = ctx().cfg_factory().cfg_set(config_set_name)
    setup_concourse.deploy_concourse_landscape(
        config_set=config_set,
        deployment_name=deployment_name,
        timeout_seconds=timeout_seconds,
    )