async def test_skipping_hidden_levels(caplog, logstream, logfn, event_queue, event_queue_loop):
    """A message logged via a hidden-level method is captured in the log but not posted as a K8s event."""
    obj_logger = ObjectLogger(body=OBJ1)
    log_via_level = getattr(obj_logger, logfn)
    log_via_level("hello %s", "world")
    obj_logger.info("must be here")
    # Only the info() call reaches the event queue; the hidden-level call is skipped.
    assert event_queue.qsize() == 1  # not 2!
    assert caplog.messages == ["hello world", "must be here"]
def terraform_destroy(planId: str, logger: KopfObjectLogger) -> tuple:
    """Run ``terraform destroy`` against the plan stored under *planId*.

    Returns a ``(response, return_code)`` tuple where *response* is stderr
    when present, stdout otherwise.
    """
    logger.info(f"PLANID: {planId}")
    # TODO: check that planId exists; raise a kopf exception if it does not.
    tf = python_terraform.Terraform(working_dir=planId)
    return_code, stdout, stderr = tf.destroy(
        dir_or_plan=f"{planId}/plan",
        refresh=True,
        auto_approve=True,
    )
    # Prefer the error stream when terraform wrote anything to it.
    response = stderr if stderr else stdout
    logger.info(f"TERRAFORM DESTROY COMPLETE: {return_code} {response}")
    return response, return_code
async def test_skipping_below_config(caplog, logstream, logfn, event_queue, event_queue_loop, mocker):
    """Messages below the configured events_loglevel are logged but not queued as K8s events."""
    obj_logger = ObjectLogger(body=OBJ1)
    log_via_level = getattr(obj_logger, logfn)
    # Raise the posting threshold so the first message is filtered out of the queue.
    mocker.patch.object(EventsConfig, 'events_loglevel', 666)
    log_via_level("hello %s", "world")
    # Drop the threshold back so the second message passes through.
    mocker.patch.object(EventsConfig, 'events_loglevel', 0)
    obj_logger.info("must be here")
    # Only the second message made it into the queue.
    assert event_queue.qsize() == 1  # not 2!
    assert caplog.messages == ["hello world", "must be here"]
async def test_skipping_below_config(settings, caplog, logstream, logfn, event_queue, event_queue_loop, mocker):
    """Messages below ``settings.posting.level`` are logged but not queued as K8s events."""
    obj_logger = ObjectLogger(body=OBJ1, settings=settings)
    log_via_level = getattr(obj_logger, logfn)
    # Raise the posting threshold so the first message is filtered out of the queue.
    settings.posting.level = 666
    log_via_level("hello %s", "world")
    # Drop the threshold back so the second message passes through.
    settings.posting.level = 0
    obj_logger.info("must be here")
    # Only the second message made it into the queue.
    assert event_queue.qsize() == 1  # not 2!
    assert caplog.messages == ["hello world", "must be here"]
async def handle_event(
        registry: registries.BaseRegistry,
        resource: registries.Resource,
        logger: logging_engine.ObjectLogger,
        patch: dict,
        event: dict,
):
    """
    Handle a received raw event; log and swallow all errors.

    This is a lightweight counterpart of the cause handling, applied to raw
    events only: there is no progress persistence and no multi-step calls.
    A failing handler simply fails — it is never retried.

    Note: K8s-event posting is suppressed for `kopf.on.event` handlers
    (``local=True``), since they should stay silent; regular logging
    still happens.
    """
    for handler in registry.get_event_handlers(resource=resource, event=event):
        # Contain exceptions locally so a single failing handler never stops the operator.
        try:
            logger.debug(f"Invoking handler {handler.id!r}.")
            # TODO: also set the context-vars, despite most of them making no sense here.
            result = await invocation.invoke(
                handler.fn,
                event=event,
                patch=patch,
                logger=logger,
            )
        except Exception:
            logger.exception(
                f"Handler {handler.id!r} failed with an exception. Will ignore.",
                local=True)
        else:
            logger.info(f"Handler {handler.id!r} succeeded.", local=True)
            status.store_result(patch=patch, handler=handler, result=result)
def terraform(working_dir: str, data: str, logger: KopfObjectLogger, apply: bool = False, planId: str = '') -> tuple:
    """Write the rendered Terraform JSON config, then run ``terraform init`` and ``terraform plan``.

    :param working_dir: directory in which terraform runs; the config is written
        there as ``main.tf.json`` and the plan file is saved as ``plan``.
    :param data: the Terraform configuration as a JSON string.
    :param logger: per-object kopf logger for progress messages.
    :param apply: unused here — kept for interface compatibility (TODO: confirm callers).
    :param planId: unused here — kept for interface compatibility (TODO: confirm callers).
    :returns: ``(response, return_code)`` where *response* is stderr when
        present, stdout otherwise.
    :raises AssertionError: if ``terraform init`` exits with code 1.
    """
    logger.info(f"WORKING IN DIR: {working_dir}")
    Path(f"{working_dir}/main.tf.json").write_text(data)
    ptf = python_terraform.Terraform(working_dir=working_dir)
    return_code, stdout, stderr = ptf.init()
    # BUG FIX: was a bare `assert`, which is stripped under `python -O` and would
    # silently continue after a failed init. Raise explicitly, keeping the same
    # exception type so existing callers' except-clauses still match.
    if return_code == 1:
        raise AssertionError(f"Terraform Init Failed {stderr}")
    logger.info('TERRAFORM INIT COMPLETE')
    return_code, stdout, stderr = ptf.plan(refresh=True, out='plan')
    response = stdout if not stderr else stderr
    logger.info(f"TERRAFORM PLAN COMPLETE {response}")
    return response, return_code
def process_destroy(planId: str, logger: KopfObjectLogger) -> None:
    """Run a terraform destroy for *planId* and log the outcome.

    :param planId: the working directory holding the plan to destroy.
    :param logger: per-object kopf logger.
    """
    tf_response, tf_code = terraform_destroy(planId=planId, logger=logger)
    # BUG FIX: the original called logger.info(reason=..., message=...), but
    # Logger.info() takes a positional message and no such keywords — that call
    # raises TypeError at runtime. Log in the same style as process_apply().
    # Also corrected the return annotation: nothing is returned.
    logger.info(f"Terraform Destroy result: {tf_response}")
def process_apply(planId: str, logger: KopfObjectLogger) -> None:
    """Run a terraform apply for *planId* and log the outcome.

    :param planId: the working directory holding the plan to apply.
    :param logger: per-object kopf logger.
    """
    # FIX: the original was annotated `-> str` but never returns a value;
    # the annotation now matches the actual behavior. (tf_code is currently
    # unused — presumably kept for future error handling; confirm.)
    tf_response, tf_code = terraform_apply(planId=planId, logger=logger)
    logger.info(f"Terraform Apply result: {tf_response}")
def process(terrayaml: str, metadata: dict, logger: KopfObjectLogger) -> str:
    """Render the terrayaml template, build a Terraform config, and run a plan.

    :param terrayaml: the user-supplied YAML template text.
    :param metadata: the K8s object metadata (its ``selfLink`` is parsed to
        patch the planId back onto the originating custom object).
    :param logger: per-object kopf logger.
    :returns: the working directory of this run, which doubles as the planId.
    """
    #
    # User input YAML: render through Jinja2 with the custom filters.
    #
    env = jinja2.Environment()
    env.filters['random_password'] = random_password
    env.filters['random_name'] = random_name
    template = T(template_text=terrayaml, environment=env).render(**metadata)
    provision = yaml.load(template, Loader=yaml.FullLoader)
    logger.info(f"provision this template {provision}")

    #
    # Start terraform: meta section gives deployment coordinates, with defaults.
    #
    meta = provision.pop('meta', {})
    team = meta.get('team', 'oss')
    profile = meta.get('profile', PROFILE)
    environment = meta.get('environment', 'testing')
    application = meta.get('application', 'wurkflow')
    statefile_region = meta.get('statefile_region', 'eu-west-1')
    ts = Terrascript()
    ts += Terraform(required_version=">= 0.12.7").backend(
        "s3",
        bucket=REMOTE_STATE_S3_BUCKET,
        key=f"k8/terrayaml-operator/{team}/{environment}/{application}-terraform.tfstate",
        region=statefile_region,
        profile=ROOT_PROFILE)

    #
    # Extract the optional notify component.
    # BUG FIX: the original used provision.pop('notify') (KeyError when the key
    # is absent) and left `recipients` undefined whenever `notify` was falsy,
    # causing a NameError at the `if recipients:` check below.
    #
    recipients = []
    notify = provision.pop('notify', None)
    if notify:
        # tuple of email, key
        recipient_emails = notify.get('email', [])
        # append our infra provisioner email
        recipient_emails.append('*****@*****.**')
        recipients = get_recipients_from_pgp(recipient_emails=recipient_emails)
        logger.info(f"notify these emails: {recipient_emails}")

    #
    # Parse the yaml: translate each provider/resource/item into Terrascript.
    #
    for provider in provision:
        # Renamed the inner loop variable (was `data`) so it no longer shadows
        # the `data = ts.dump()` assignment below.
        for resource, resource_data in provision.get(provider).items():
            for item in resource_data.get('items', []):
                api = TF_YAML_MAP.get(resource)
                outputs = item.pop('outputs', [])
                item_name = item.pop('name', random_name(value=resource))
                tf_resource = api(item_name, **item)
                ts.add(tf_resource)
                # Handle terraform outputs declared on the item.
                for opt in outputs:
                    # Explicit raise (was a bare `assert`, stripped under -O);
                    # same exception type is preserved for callers.
                    if not getattr(tf_resource, opt.get('value')):
                        raise AssertionError(
                            f"{tf_resource} has no attribute {opt.get('value')}")
                    ts.add(Output(opt.get('name'),
                                  value=getattr(tf_resource, opt.get('value'))))

    # Add a provider (+= syntax)
    ts += Provider('aws', skip_metadata_api_check=True, profile=profile, region=REGION)
    data = ts.dump()

    # Plan: each run gets its own scratch directory, which doubles as the planId.
    working_dir = tempfile.mkdtemp(dir='./runs')
    crd_api = kubernetes.client.CustomObjectsApi()
    selfLink = metadata.get('selfLink').split('/')
    # Record the planId back onto the custom object that triggered this run.
    logger.info(f"planId: {working_dir}")
    crd_api.patch_namespaced_custom_object(
        group=selfLink[2],
        version=selfLink[3],
        name=selfLink[7],
        namespace=selfLink[5],
        plural=selfLink[6],
        body={"spec": {"planId": working_dir}})

    tf_response, tf_code = terraform(working_dir=working_dir, data=data, logger=logger)
    logger.info(f"Terraform Plan result: {tf_response}")

    if recipients:
        logger.info(f"Send email to {recipients}")
        send_email(to=recipients,
                   attachment=tf_response,
                   message_type='success' if tf_code != 1 else 'error')
    else:
        logger.info('No recipients defined')

    logger.info(f"PlanId is {working_dir}")
    return f"{working_dir}"