def deploy_or_upgrade_tekton_dashboard_ingress(
    config_set_name: CliHint(typehint=str, help=CONFIG_SET_HELP),
    tekton_chart_dir: CliHints.existing_dir(help="directory of tekton-dashboard-ingress chart"),
    oauth_proxy_chart_dir: CliHints.existing_dir(help="directory of oauth2-proxy chart"),
    deployment_name: str='tekton-dashboard-ingress',
):
    oauth2_proxy_chart_dir = os.path.abspath(oauth_proxy_chart_dir)
    tekton_chart_dir = os.path.abspath(tekton_chart_dir)

    cfg_factory = ctx().cfg_factory()
    cfg_set = cfg_factory.cfg_set(config_set_name)
    oauth2_proxy_cfg = cfg_set.oauth2_proxy()
    tekton_dashboard_ingress_cfg = cfg_set.tekton_dashboard_ingress()

    setup_oauth2_proxy.deploy_oauth2_proxy(
        oauth2_proxy_config=oauth2_proxy_cfg,
        chart_dir=oauth2_proxy_chart_dir,
        deployment_name=f'{deployment_name}-oauth2-proxy',
    )
    setup_tekton_dashboard_ingress.deploy_tekton_dashboard_ingress(
        tekton_dashboard_ingress_config=tekton_dashboard_ingress_cfg,
        chart_dir=tekton_chart_dir,
        deployment_name=deployment_name,
    )
def render_pipeline(
    definition_file: CliHints.existing_file(),
    cfg_name: str,
    out_dir: CliHints.existing_dir(),
    repo_path: str = 'example/example',
    repo_branch: str = 'master',
    repo_host: str = 'github.com',
    template_path: str=_template_path(),
    template_include_dir: str=None,
):
    cfg_factory = ctx().cfg_factory()
    cfg_set = cfg_factory.cfg_set(cfg_name=cfg_name)

    logger.info(f'Template path: {template_path}')

    repo_url = urllib.parse.urlunparse(('https', repo_host, repo_path, '', '', ''))
    try:
        job_mapping = cfg_set.job_mapping().job_mapping_for_repo_url(repo_url, cfg_set)
        secret_cfg = cfg_factory.secret(job_mapping.secret_cfg())
    except ValueError as e:
        logger.warning(f'An error occurred: {e}. Will use dummy values to render pipeline.')
        job_mapping = None
        secret_cfg = None

    def_enumerators = [
        SimpleFileDefinitionEnumerator(
            definition_file=definition_file,
            cfg_set=cfg_set,
            repo_path=repo_path,
            repo_branch=repo_branch,
            repo_host=repo_host,
            job_mapping=job_mapping,
            secret_cfg=secret_cfg,
        )
    ]

    preprocessor = DefinitionDescriptorPreprocessor()

    if not template_include_dir:
        template_include_dir = template_path

    template_retriever = TemplateRetriever(template_path=template_path)
    renderer = Renderer(
        template_retriever=template_retriever,
        template_include_dir=template_include_dir,
        cfg_set=cfg_set,
        render_origin=RenderOrigin.LOCAL,
    )

    deployer = FilesystemDeployer(base_dir=out_dir)

    replicator = PipelineReplicator(
        definition_enumerators=def_enumerators,
        descriptor_preprocessor=preprocessor,
        definition_renderer=renderer,
        definition_deployer=deployer,
    )

    replicator.replicate()
def diff_pipelines(left_file: CliHints.yaml_file(), right_file: CliHints.yaml_file()):
    from deepdiff import DeepDiff
    from pprint import pprint

    diff = DeepDiff(left_file, right_file, ignore_order=True)
    if diff:
        pprint(diff)
        fail('diffs were found')
    else:
        info('the yaml documents are equivalent')
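# Illustrative invocation (file names are placeholders; when run via the repo's CLI
# wrapper, CliHints.yaml_file() parses the given files before the function is called):
#
#   diff_pipelines(
#       left_file=parse_yaml_file('rendered/pipeline-old.yaml'),
#       right_file=parse_yaml_file('rendered/pipeline-new.yaml'),
#   )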
def render_pipeline(
    definition_file: CliHints.existing_file(),
    cfg_name: str,
    out_dir: CliHints.existing_dir(),
    template_path: str=_template_path(),
    template_include_dir: str=None,
    secret_cfg_name: str = None,
):
    cfg_factory = ctx().cfg_factory()
    cfg_set = cfg_factory.cfg_set(cfg_name=cfg_name)

    print(template_path)

    if secret_cfg_name:
        secret_cfg = cfg_factory.secret(secret_cfg_name)
    else:
        secret_cfg = None

    def_enumerators = [
        SimpleFileDefinitionEnumerator(
            definition_file=definition_file,
            cfg_set=cfg_set,
            repo_path='example/example',
            repo_branch='master',
            repo_host='github.com',
        )
    ]

    preprocessor = DefinitionDescriptorPreprocessor()

    if not template_include_dir:
        template_include_dir = template_path

    template_retriever = TemplateRetriever(template_path=template_path)
    renderer = Renderer(
        template_retriever=template_retriever,
        template_include_dir=template_include_dir,
        cfg_set=cfg_set,
        secret_cfg=secret_cfg,
    )

    deployer = FilesystemDeployer(base_dir=out_dir)

    replicator = PipelineReplicator(
        definition_enumerators=def_enumerators,
        descriptor_preprocessor=preprocessor,
        definition_renderer=renderer,
        definition_deployer=deployer,
    )

    replicator.replicate()
def download_dependencies(
    component_descriptor: CliHints.existing_file(),
    out_dir: str,
):
    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)

    component_descriptor = ComponentDescriptor.from_dict(
        parse_yaml_file(component_descriptor)
    )
    image_references = [
        container_image.image_reference()
        for _, container_image
        in _enumerate_effective_images(component_descriptor=component_descriptor)
    ]

    def mangled_outfile_name(image_reference):
        mangled_fname = image_reference.replace(':', '_').replace('/', '_')
        return os.path.join(out_dir, mangled_fname + '.tar')

    for image_ref in image_references:
        fname = mangled_outfile_name(image_ref)
        with open(fname, 'wb') as f:
            container.registry.retrieve_container_image(
                image_reference=image_ref,
                outfileobj=f,
            )
        print(fname)
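# Illustrative invocation (paths are placeholders): writes one tarball per container
# image referenced by the given component descriptor into out_dir.
#
#   download_dependencies(
#       component_descriptor='component_descriptor.yaml',
#       out_dir='/tmp/images',
#   )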
def upload_grouped_product_images(
    protecode_cfg_name: str,
    product_cfg_file: CliHints.existing_file(),
    processing_mode: CliHint(
        choices=list(ProcessingMode),
        type=ProcessingMode,
    )=ProcessingMode.RESCAN,
    protecode_group_id: int=5,
    parallel_jobs: int=4,
    cve_threshold: int=7,
    ignore_if_triaged: bool=True,
    reference_group_ids: [int]=[],
):
    cfg_factory = ctx().cfg_factory()
    protecode_cfg = cfg_factory.protecode(protecode_cfg_name)

    component_descriptor = ComponentDescriptor.from_dict(
        raw_dict=parse_yaml_file(product_cfg_file)
    )

    upload_results, license_report = upload_grouped_images(
        protecode_cfg=protecode_cfg,
        component_descriptor=component_descriptor,
        protecode_group_id=protecode_group_id,
        parallel_jobs=parallel_jobs,
        cve_threshold=cve_threshold,
        ignore_if_triaged=ignore_if_triaged,
        processing_mode=processing_mode,
        reference_group_ids=reference_group_ids,
    )
def deploy_or_upgrade_tekton_dashboard_ingress(
    config_set_name: CliHint(typehint=str, help=CONFIG_SET_HELP),
    chart_dir: CliHints.existing_dir(help="directory of tekton-dashboard-ingress chart"),
    deployment_name: str = 'tekton-dashboard-ingress',
):
    chart_dir = os.path.abspath(chart_dir)

    cfg_factory = ctx().cfg_factory()
    cfg_set = cfg_factory.cfg_set(config_set_name)

    concourse_cfg = cfg_set.concourse()
    kubernetes_cfg = cfg_factory.kubernetes(concourse_cfg.kubernetes_cluster_config())
    oauth2_proxy_cfg = cfg_set.oauth2_proxy()
    tekton_dashboard_ingress_cfg = cfg_set.tekton_dashboard_ingress()

    setup_oauth2_proxy.deploy_oauth2_proxy(
        oauth2_proxy_config=oauth2_proxy_cfg,
        kubernetes_config=kubernetes_cfg,
        deployment_name=f'{deployment_name}-oauth2-proxy',
    )
    setup_tekton_dashboard.deploy_tekton_dashboard_ingress(
        tekton_dashboard_ingress_config=tekton_dashboard_ingress_cfg,
        kubernetes_config=kubernetes_cfg,
        chart_dir=chart_dir,
        deployment_name=deployment_name,
    )
def serialise_cfg(
    cfg_dir: CliHints.existing_dir(),
    cfg_sets: [str],
    out_file: str,
):
    factory = ConfigFactory.from_cfg_dir(cfg_dir=cfg_dir)
    cfg_sets = [factory.cfg_set(cfg_set) for cfg_set in cfg_sets]
    serialiser = CSS(cfg_sets=cfg_sets, cfg_factory=factory)
    with open(out_file, 'w') as f:
        f.write(serialiser.serialise())
def serialise_cfg(
    cfg_dir: CliHints.existing_dir(),
    out_file: str,
    cfg_sets: [str] = [],
):
    factory = ConfigFactory.from_cfg_dir(cfg_dir=cfg_dir)
    if not cfg_sets:
        cfg_sets = factory._cfg_element_names('cfg_set')
    cfg_sets = [factory.cfg_set(cfg_set) for cfg_set in cfg_sets]
    serialiser = CSS(cfg_sets=cfg_sets, cfg_factory=factory)
    with open(out_file, 'w') as f:
        f.write(serialiser.serialise())
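# Illustrative invocation (paths are placeholders): with no cfg_sets passed, this
# variant serialises every cfg-set discovered in the given cfg dir.
#
#   serialise_cfg(
#       cfg_dir='/path/to/cc-config',
#       out_file='serialised_cfg.out',
#   )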
def update_certificate(
    tls_config_name: CliHint(typehint=str, help="TLS config element name to update"),
    certificate_file: CliHints.existing_file(help="certificate file path"),
    key_file: CliHints.existing_file(help="private key file path"),
    output_path: CliHints.existing_dir(help="TLS config file output path"),
):
    # helpers for yaml formatting when dumping a dictionary
    class LiteralStr(str):
        """Used to create yaml block style indicator | """

    def literal_str_representer(dumper, data):
        """Used to create yaml block style indicator"""
        return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')

    # read new certificate data
    certificate_file = os.path.abspath(certificate_file)
    private_key_file = os.path.abspath(key_file)
    with open(certificate_file) as f:
        certificate = f.read()
    with open(private_key_file) as f:
        private_key = f.read()

    # set new certificate data on the cfg element specified by 'tls_config_name'
    cfg_factory = ctx().cfg_factory()
    tls_config_element = cfg_factory.tls_config(tls_config_name)
    tls_config_element.set_private_key(private_key)
    tls_config_element.set_certificate(certificate)

    # patch tls config dict so that yaml.dump outputs literal strings using '|'
    yaml.add_representer(LiteralStr, literal_str_representer)
    configs = cfg_factory._configs('tls_config')
    for k1, v1 in configs.items():
        for k2, _ in v1.items():
            configs[k1][k2] = LiteralStr(configs[k1][k2])

    # dump updated tls config to given output path
    tls_config_type = cfg_factory._cfg_types()['tls_config']
    tls_config_file = list(tls_config_type.sources())[0].file()
    with open(os.path.join(output_path, tls_config_file), 'w') as f:
        yaml.dump(configs, f, indent=2, default_flow_style=False)
def trigger_resource_check(
    cfg_name: CliHints.non_empty_string(help="cfg_set to use"),
    team_name: CliHints.non_empty_string(help="pipeline's team name"),
    pipeline_name: CliHints.non_empty_string(help="pipeline name"),
    resource_name: CliHints.non_empty_string(help="resource to check"),
):
    '''Triggers a check of the specified Concourse resource'''
    cfg_factory = ctx().cfg_factory()
    cfg_set = cfg_factory.cfg_set(cfg_name)
    concourse_cfg = cfg_set.concourse()
    api = client.from_cfg(
        concourse_cfg=concourse_cfg,
        team_name=team_name,
    )
    api.trigger_resource_check(
        pipeline_name=pipeline_name,
        resource_name=resource_name,
    )
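# Illustrative invocation (all names are placeholders):
#
#   trigger_resource_check(
#       cfg_name='example_cfg_set',
#       team_name='example-team',
#       pipeline_name='example-pipeline',
#       resource_name='example-repository',
#   )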
def resolve_component_descriptor(
    component_descriptor: CliHints.existing_file(),
):
    cfg_factory = ctx().cfg_factory()
    resolver = ComponentDescriptorResolver(cfg_factory=cfg_factory)
    component_descriptor = ComponentDescriptor.from_dict(
        parse_yaml_file(component_descriptor)
    )
    resolved_descriptor = resolver.resolve_component_references(product=component_descriptor)
    print(yaml.dump(resolved_descriptor.raw))
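# Illustrative invocation (path is a placeholder): prints the descriptor with its
# component references resolved.
#
#   resolve_component_descriptor(
#       component_descriptor='component_descriptor.yaml',
#   )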
def add_dependencies(
    descriptor_src_file: CliHints.existing_file(),
    component_name: str,
    component_version: str,
    descriptor_out_file: str=None,
    component_dependencies: CliHint(action='append')=[],
    container_image_dependencies: CliHint(action='append')=[],
    web_dependencies: CliHint(action='append')=[],
    generic_dependencies: CliHint(action='append')=[],
    validation_policies: CliHint(
        type=ValidationPolicy,
        typehint=[ValidationPolicy],
        choices=[policy for policy in ValidationPolicy],
    )=[],
):
    product = ComponentDescriptor.from_dict(parse_yaml_file(descriptor_src_file))
    component = product.component(
        ComponentReference.create(name=component_name, version=component_version)
    )
    if not component:
        fail('component {c}:{v} was not found in {f}'.format(
            c=component_name,
            v=component_version,
            f=descriptor_src_file,
        ))

    # maintain old behaviour
    if not validation_policies:
        validation_policies = [ValidationPolicy.FORBID_EXTRA_ATTRIBUTES]

    dependencies = _parse_dependencies(
        component_dependencies=component_dependencies,
        container_image_dependencies=container_image_dependencies,
        web_dependencies=web_dependencies,
        generic_dependencies=generic_dependencies,
        validation_policies=validation_policies,
    )
    component.add_dependencies(dependencies)

    # round-trip through json to convert custom types into plain dicts
    product_dict = json.loads(json.dumps({'components': [component.raw]}))
    if not descriptor_out_file:
        print(yaml.dump(product_dict, indent=2))
    else:
        with open(descriptor_out_file, 'w') as f:
            yaml.dump(product_dict, f, indent=2)
def send_mail(
    email_cfg_name: CliHint(
        help="reference to an email cfg (see repo cc-config / secrets-server)"
    ),
    recipients: CliHint(typehint=[str], help="recipient email addresses"),
    mail_template_file: CliHints.existing_file(),
    subject: CliHint(help="email subject"),
    cc_recipients: CliHint(typehint=[str], help="carbon copy email addresses") = [],
    replace_token: CliHint(typehint=[str], help="<key>=<value> (replace <key> in body)") = [],
):
    '''
    Sends an email using the specified email_cfg (retrieved from a cfg_factory) to the
    specified recipients. The mail body is read from a file. A simple token-replacement
    is done if (optional) replace-tokens are given.

    @param recipients: mail recipients (email addresses)
    @param mail_template_file: path to the mail template file. Must exist.
    @param subject: email subject
    @param cc_recipients: cc mail recipients
    @param replace_token: format: <token>=<replace-value> - tokens in mail-body are replaced
    '''
    not_empty(email_cfg_name)

    cfg_factory = ctx().cfg_factory()
    email_cfg = cfg_factory.email(email_cfg_name)

    with open(mail_template_file) as f:
        mail_template = f.read()

    # validate template-tokens; materialise the list first, as a generator would be
    # exhausted before the error message could be rendered from it
    invalid_tokens = [t for t in replace_token if not isinstance(t, str) or '=' not in t]
    if invalid_tokens:
        fail('all replace-tokens must be of form <key>=<value>: ' + ' '.join(invalid_tokens))

    # parse replace-tokens
    replace_tokens = dict(t.split('=', 1) for t in replace_token)

    _send_mail(
        email_cfg=email_cfg,
        recipients=recipients,
        mail_template=mail_template,
        subject=subject,
        cc_recipients=cc_recipients,
        replace_tokens=replace_tokens,
    )
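# Illustrative invocation (cfg name, addresses, and token values are placeholders):
#
#   send_mail(
#       email_cfg_name='example_email_cfg',
#       recipients=['jane.doe@example.com'],
#       mail_template_file='mail_body.txt',
#       subject='build status',
#       replace_token=['status=SUCCEEDED'],
#   )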
def component_descriptor_to_xml(
    component_descriptor: CliHints.existing_file(),
    out_file: str,
):
    component_descriptor = ComponentDescriptor.from_dict(
        parse_yaml_file(component_descriptor)
    )
    image_references = [
        container_image
        for _, container_image
        in _enumerate_effective_images(component_descriptor=component_descriptor)
    ]
    result_xml = product.xml.container_image_refs_to_xml(image_references)
    result_xml.write(out_file)
def deploy_or_upgrade_gardenlinux_cache(
    config_set_name: CliHint(typehint=str, help=CONFIG_SET_HELP),
    chart_dir: CliHints.existing_dir(help="directory of gardenlinux-cache chart"),
    deployment_name: str='gardenlinux-cache',
):
    chart_dir = os.path.abspath(chart_dir)

    cfg_factory = ctx().cfg_factory()
    cfg_set = cfg_factory.cfg_set(config_set_name)
    gardenlinux_cache_cfg = cfg_set.gardenlinux_cache()

    setup_gardenlinux_cache.deploy_gardenlinux_cache(
        gardenlinux_cache_config=gardenlinux_cache_cfg,
        chart_dir=chart_dir,
        deployment_name=deployment_name,
    )
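# Illustrative invocation (cfg-set name and chart dir are placeholders); the other
# deploy_or_upgrade_* commands in this module follow the same pattern:
#
#   deploy_or_upgrade_gardenlinux_cache(
#       config_set_name='example_cfg_set',
#       chart_dir='./charts/gardenlinux-cache',
#   )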
def deploy_or_upgrade_webhook_dispatcher(
    config_set_name: CliHint(typehint=str, help=CONFIG_SET_HELP),
    chart_dir: CliHints.existing_dir(help="directory of webhook dispatcher chart"),
    deployment_name: str='webhook-dispatcher',
):
    chart_dir = os.path.abspath(chart_dir)

    cfg_factory = ctx().cfg_factory()
    cfg_set = cfg_factory.cfg_set(config_set_name)
    webhook_dispatcher_deployment_cfg = cfg_set.webhook_dispatcher_deployment()

    setup_whd.deploy_webhook_dispatcher_landscape(
        cfg_set=cfg_set,
        webhook_dispatcher_deployment_cfg=webhook_dispatcher_deployment_cfg,
        chart_dir=chart_dir,
        deployment_name=deployment_name,
    )
def add_dependencies(
    descriptor_src_file: CliHints.existing_file(),
    component_name: str,
    component_version: str,
    descriptor_out_file: str = None,
    component_dependencies: CliHint(typehint=_parse_component_deps, action='append') = [],
    container_image_dependencies: CliHint(
        typehint=_parse_container_image_deps, action='append'
    ) = [],
    web_dependencies: CliHint(typehint=_parse_web_deps, action='append') = [],
    generic_dependencies: CliHint(typehint=_parse_generic_deps, action='append') = [],
):
    product = ComponentDescriptor.from_dict(parse_yaml_file(descriptor_src_file))
    component = product.component(
        ComponentReference.create(name=component_name, version=component_version)
    )
    if not component:
        fail('component {c}:{v} was not found in {f}'.format(
            c=component_name,
            v=component_version,
            f=descriptor_src_file,
        ))

    component_deps = component.dependencies()

    for component_ref in component_dependencies:
        component_deps.add_component_dependency(component_ref)
    for image_dep in container_image_dependencies:
        component_deps.add_container_image_dependency(image_dep)
    for web_dep in web_dependencies:
        component_deps.add_web_dependency(web_dep)
    for generic_dep in generic_dependencies:
        component_deps.add_generic_dependency(generic_dep)

    # round-trip through json to convert custom types into plain dicts
    product_dict = json.loads(json.dumps({'components': [component.raw]}))
    if not descriptor_out_file:
        print(yaml.dump(product_dict, indent=2))
    else:
        with open(descriptor_out_file, 'w') as f:
            yaml.dump(product_dict, f, indent=2)
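# Illustrative invocation (all values are placeholders; when run via the CLI wrapper,
# the *_dependencies arguments are parsed by their CliHint typehints, e.g.
# _parse_container_image_deps, so example_image_dep below stands for such a
# pre-parsed dependency):
#
#   add_dependencies(
#       descriptor_src_file='component_descriptor.yaml',
#       component_name='github.com/example/example-component',
#       component_version='1.2.3',
#       container_image_dependencies=[example_image_dep],
#       descriptor_out_file='component_descriptor.out.yaml',
#   )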