class SNSNotificationPlugin(ExpirationNotificationPlugin):
    """Expiration notification plugin that publishes messages to a pre-configured AWS SNS topic."""

    title = "AWS SNS"
    slug = "aws-sns"
    description = "Sends notifications to AWS SNS"
    version = aws.VERSION

    author = "Jasmine Schladen <*****@*****.**>"
    author_url = "https://github.com/Netflix/lemur"

    additional_options = [
        {
            "name": "accountNumber",
            "type": "str",
            "required": True,
            "validation": check_validation("[0-9]{12}"),
            "helpMessage": "A valid AWS account number with permission to access the SNS topic",
        },
        {
            "name": "region",
            "type": "str",
            "required": True,
            "validation": check_validation("[0-9a-z\\-]{1,25}"),
            "helpMessage": "Region in which the SNS topic is located, e.g. \"us-east-1\"",
        },
        {
            "name": "topicName",
            "type": "str",
            "required": True,
            # base topic name is 1-256 characters (alphanumeric plus underscore and hyphen)
            "validation": check_validation("^[a-zA-Z0-9_\\-]{1,256}$"),
            "helpMessage": "The name of the topic to use for expiration notifications",
        }
    ]

    def send(self, notification_type, message, excluded_targets, options, **kwargs):
        """
        Publish an expiration notification to the configured SNS topic.

        While we receive a `targets` parameter here, it is unused, as the SNS topic is pre-configured in the
        plugin configuration, and can't reasonably be changed dynamically.
        """
        region = self.get_option("region", options)
        account = self.get_option("accountNumber", options)
        topic = self.get_option("topicName", options)
        partition = current_app.config.get("LEMUR_AWS_PARTITION", "aws")

        topic_arn = f"arn:{partition}:sns:{region}:{account}:{topic}"

        current_app.logger.info(f"Publishing {notification_type} notification to topic {topic_arn}")
        sns.publish(topic_arn, message, notification_type, options, region_name=region)
class ExpirationNotificationPlugin(NotificationPlugin):
    """
    Base class for all expiration notification plugins.

    Supplies the option definitions every expiration plugin shares (the alert
    interval and its unit); concrete subclasses contribute their own settings
    via ``additional_options`` and must implement ``send``.
    """

    default_options = [
        {
            "name": "interval",
            "type": "int",
            "required": True,
            "validation": check_validation(r"^\d+$"),
            "helpMessage": "Number of days to be alert before expiration.",
        },
        {
            "name": "unit",
            "type": "select",
            "required": True,
            "validation": check_validation(""),
            "available": ["days", "weeks", "months"],
            "helpMessage": "Interval unit",
        },
    ]

    @property
    def options(self):
        """
        Gets/sets options for the plugin.

        :return: the shared default options followed by the subclass's own options.
        """
        return [*self.default_options, *self.additional_options]

    def send(self, notification_type, message, excluded_targets, options, **kwargs):
        # Delivery is plugin-specific; subclasses must override.
        raise NotImplementedError
class AWSDestinationPlugin(DestinationPlugin):
    """Destination plugin that uploads certificates into AWS IAM."""

    title = "AWS"
    slug = "aws-destination"
    description = "Allow the uploading of certificates to AWS IAM"
    version = aws.VERSION
    sync_as_source = True
    sync_as_source_name = AWSSourcePlugin.slug

    author = "Kevin Glisson"
    author_url = "https://github.com/netflix/lemur"

    options = [
        {
            "name": "accountNumber",
            "type": "str",
            "required": True,
            "validation": check_validation("[0-9]{12}"),
            "helpMessage": "Must be a valid AWS account number!",
        },
        {
            "name": "path",
            "type": "str",
            "validation": r"^(?:|/|/\S+/)$",
            "default": "/",
            "helpMessage": "Path prefix for uploaded certificates.",
        },
    ]

    def upload(self, name, body, private_key, cert_chain, options, **kwargs):
        """Upload a certificate (with key and chain) to IAM; AWS failures are reported, not raised."""
        account = self.get_option("accountNumber", options)
        upload_path = self.get_option("path", options)

        try:
            iam.upload_cert(
                name,
                body,
                private_key,
                upload_path,
                cert_chain=cert_chain,
                account_number=account,
            )
        except ClientError:
            # Best effort: record the AWS error without failing the caller.
            capture_exception()

    def deploy(self, elb_name, account, region, certificate):
        # Deployment is not supported by this destination.
        pass

    def clean(self, certificate, options, **kwargs):
        """Remove the certificate from IAM in the configured account."""
        iam.delete_cert(
            certificate.name,
            account_number=self.get_option("accountNumber", options),
        )
class JavaTruststoreExportPlugin(ExportPlugin):
    """Export plugin that builds a Java (JKS) truststore from a certificate and its chain."""

    title = "Java Truststore (JKS)"
    slug = "java-truststore-jks"
    description = "Generates a JKS truststore"
    requires_key = False
    version = jks.VERSION

    author = "Marti Raudsepp"
    author_url = "https://github.com/intgr"

    options = [
        {
            "name": "alias",
            "type": "str",
            "required": False,
            "helpMessage": "Enter the alias you wish to use for the truststore.",
        },
        {
            "name": "passphrase",
            "type": "str",
            "required": False,
            "helpMessage": "If no passphrase is given one will be generated for you, we highly recommend this.",
            "validation": check_validation(""),
        },
    ]

    def export(self, body, chain, key, options, **kwargs):
        """
        Generates a Java Truststore.

        :return: a ``("jks", passphrase, raw_bytes)`` tuple. The alias falls
            back to the certificate's common name, and the passphrase falls
            back to a freshly generated random key, when not configured.
        """
        alias = self.get_option("alias", options) or common_name(parse_certificate(body))
        passphrase = self.get_option("passphrase", options) or Fernet.generate_key().decode("utf-8")

        raw = create_truststore(body, chain, alias, passphrase)

        return "jks", passphrase, raw
class AWSSourcePlugin(SourcePlugin):
    """
    Source plugin that discovers IAM server certificates and, optionally, the
    ELB/ELBv2 or CloudFront endpoints that use them, within one AWS account.
    """

    title = "AWS"
    slug = "aws-source"
    description = "Discovers all SSL certificates and ELB or Cloudfront endpoints in an AWS account"
    version = aws.VERSION

    author = "Kevin Glisson"
    author_url = "https://github.com/netflix/lemur"

    options = [
        {
            "name": "accountNumber",
            "type": "str",
            "required": True,
            "validation": check_validation("^[0-9]{12,12}$"),
            "helpMessage": "Must be a valid AWS account number!",
        },
        {
            "name": "regions",
            "type": "str",
            "helpMessage": "Comma separated list of regions to search in, if no region is specified we look in all regions.",
        },
        {
            "name": "path",
            "type": "str",
            "validation": r"^(?:|/|/\S+/)$",
            "default": "/",
            "helpMessage": "Only discover certificates with this path prefix. Must begin and end with slash. "
                           "For CloudFront sources, use '/cloudfront/'.",
        },
        {
            "name": "endpointType",
            "type": "select",
            "available": [
                "elb",  # Discover IAM certs, elb and elbv2 in this account and regions
                "cloudfront",  # Discover IAM certs, CloudFront distributions in this account and regions
                "none",  # Discover IAM certs only in this account and regions
            ],
            "default": "elb",
            "helpMessage": "Type of AWS endpoint to discover. Defaults to elb if not set.",
        },
    ]

    def get_certificates(self, options, **kwargs):
        """
        List IAM server certificates in the configured account, restricted to
        the configured path prefix.

        :return: list of dicts with ``body``, ``chain`` and ``name`` keys.
        """
        cert_data = iam.get_all_certificates(
            restrict_path=self.get_option("path", options),
            account_number=self.get_option("accountNumber", options))
        return [
            dict(
                body=c["CertificateBody"],
                chain=c.get("CertificateChain"),
                name=c["ServerCertificateMetadata"]["ServerCertificateName"],
            ) for c in cert_data
        ]

    def get_endpoints(self, options, **kwargs):
        """Dispatch endpoint discovery based on the configured ``endpointType`` option."""
        endpoint_type = self.get_option("endpointType", options)
        if endpoint_type == "cloudfront":
            return self.get_distributions(options, **kwargs)
        elif endpoint_type == "none":
            # Certificate-only source: no endpoints are discovered.
            return []
        else:
            return self.get_load_balancers(options, **kwargs)

    def get_load_balancers(self, options, **kwargs):
        """
        Discover classic (ELB) and advanced (ELBv2) load balancer endpoints in
        the configured account, across the configured (or all) regions.
        Per-load-balancer failures are captured and skipped.
        """
        endpoints = []
        account_number = self.get_option("accountNumber", options)
        regions = self.get_option("regions", options)
        if not regions:
            # No explicit region list configured: search every region.
            regions = ec2.get_regions(account_number=account_number)
        else:
            # Strip all whitespace, then split the comma-separated list.
            regions = "".join(regions.split()).split(",")

        for region in regions:
            elbs = elb.get_all_elbs(account_number=account_number, region=region)
            current_app.logger.info({
                "message": "Describing classic load balancers",
                "account_number": account_number,
                "region": region,
                "number_of_load_balancers": len(elbs)
            })

            for e in elbs:
                try:
                    endpoints.extend(
                        get_elb_endpoints(account_number, region, e))
                except Exception as e:  # noqa
                    capture_exception()
                    continue

            # fetch advanced ELBs
            elbs_v2 = elb.get_all_elbs_v2(account_number=account_number,
                                          region=region)

            current_app.logger.info({
                "message": "Describing advanced load balancers",
                "account_number": account_number,
                "region": region,
                "number_of_load_balancers": len(elbs_v2)
            })

            for e in elbs_v2:
                try:
                    endpoints.extend(
                        get_elb_endpoints_v2(account_number, region, e))
                except Exception as e:  # noqa
                    capture_exception()
                    continue

        return endpoints

    def get_distributions(self, options, **kwargs):
        """
        Discover CloudFront distribution endpoints in the configured account.
        Returns an empty list if the initial IAM/CloudFront listing fails;
        per-distribution failures are captured and skipped.
        """
        endpoints = []
        account_number = self.get_option("accountNumber", options)
        try:
            # Map of IAM certificate ids to names, used to resolve which cert a distribution serves.
            iam_cert_dict = iam.get_certificate_id_to_name(
                account_number=account_number)
            distributions = cloudfront.get_all_distributions(
                account_number=account_number)
        except Exception as e:  # noqa
            capture_exception()
            return endpoints

        current_app.logger.info({
            "message": "Describing CloudFront distributions",
            "account_number": account_number,
            "number_of_distributions": len(distributions)
        })

        for d in distributions:
            try:
                endpoint = get_distribution_endpoint(account_number,
                                                     iam_cert_dict, d)
                if endpoint:
                    endpoints.append(endpoint)
            except Exception as e:  # noqa
                capture_exception()
                continue
        return endpoints

    def update_endpoint(self, endpoint, certificate):
        """
        Attach ``certificate`` to ``endpoint`` (CloudFront distribution, ELB
        listener, or ELBv2 listener).

        :raises NotImplementedError: for unknown endpoint types.
        :raises Exception: when the endpoint's registry type is not ``iam``.
        """
        options = endpoint.source.options
        account_number = self.get_option("accountNumber", options)

        if endpoint.type == "cloudfront":
            cert = iam.get_certificate(certificate.name,
                                       account_number=account_number)
            if not cert:
                # Certificate not present in IAM; nothing to attach.
                return None
            cert_id = cert["ServerCertificateMetadata"]["ServerCertificateId"]
            cloudfront.attach_certificate(endpoint.name, cert_id,
                                          account_number=account_number)
            return

        if endpoint.type not in ["elb", "elbv2"]:
            raise NotImplementedError()

        # relies on the fact that region is included in DNS name
        region = get_region_from_dns(endpoint.dnsname)
        if endpoint.registry_type == 'iam':
            arn = iam.create_arn_from_cert(account_number, region,
                                           certificate.name,
                                           endpoint.certificate_path)
        else:
            raise Exception(
                f"Lemur doesn't support rotating certificates on {endpoint.registry_type} registry"
            )

        if endpoint.type == "elbv2":
            listener_arn = elb.get_listener_arn_from_endpoint(
                endpoint.name,
                endpoint.port,
                account_number=account_number,
                region=region,
            )
            elb.attach_certificate_v2(
                listener_arn,
                endpoint.port,
                [{
                    "CertificateArn": arn
                }],
                account_number=account_number,
                region=region,
            )
        elif endpoint.type == "elb":
            elb.attach_certificate(
                endpoint.name,
                endpoint.port,
                arn,
                account_number=account_number,
                region=region,
            )

    def clean(self, certificate, options, **kwargs):
        """Delete the certificate from IAM in the configured account."""
        account_number = self.get_option("accountNumber", options)
        iam.delete_cert(certificate.name, account_number=account_number)

    def get_certificate_by_name(self, certificate_name, options):
        """
        Fetch one IAM server certificate by name.

        :return: dict with ``body``, ``chain`` and ``name`` keys, or None when
            not found or on AWS client error (which is logged and metered).
        """
        account_number = self.get_option("accountNumber", options)
        # certificate name may contain path, in which case we remove it
        if "/" in certificate_name:
            certificate_name = certificate_name.split('/')[-1]
        try:
            cert = iam.get_certificate(certificate_name,
                                       account_number=account_number)
            if cert:
                return dict(
                    body=cert["CertificateBody"],
                    chain=cert.get("CertificateChain"),
                    name=cert["ServerCertificateMetadata"]
                    ["ServerCertificateName"],
                )
        except ClientError:
            current_app.logger.warning(
                "get_elb_certificate_failed: Unable to get certificate for {0}"
                .format(certificate_name))
            capture_exception()
            metrics.send("get_elb_certificate_failed", "counter", 1,
                         metric_tags={
                             "certificate_name": certificate_name,
                             "account_number": account_number
                         })
        return None

    def get_endpoint_certificate_names(self, endpoint):
        """
        Resolve the IAM certificate names currently attached to ``endpoint``
        (ELB, ELBv2 or CloudFront).

        :raises NotImplementedError: for unknown endpoint types.
        """
        options = endpoint.source.options
        account_number = self.get_option("accountNumber", options)
        # relies on the fact that region is included in DNS name
        region = get_region_from_dns(endpoint.dnsname)
        certificate_names = []

        if endpoint.type == "elb":
            elb_details = elb.get_elbs(
                account_number=account_number,
                region=region,
                LoadBalancerNames=[endpoint.name],
            )

            for lb_description in elb_details["LoadBalancerDescriptions"]:
                for listener_description in lb_description[
                        "ListenerDescriptions"]:
                    listener = listener_description.get("Listener")
                    if not listener.get("SSLCertificateId"):
                        continue

                    certificate_names.append(
                        iam.get_name_from_arn(
                            listener.get("SSLCertificateId")))
        elif endpoint.type == "elbv2":
            listeners = elb.describe_listeners_v2(
                account_number=account_number,
                region=region,
                LoadBalancerArn=elb.get_load_balancer_arn_from_endpoint(
                    endpoint.name,
                    account_number=account_number,
                    region=region),
            )
            for listener in listeners["Listeners"]:
                if not listener.get("Certificates"):
                    continue

                for certificate in listener["Certificates"]:
                    certificate_names.append(
                        iam.get_name_from_arn(certificate["CertificateArn"]))
        elif endpoint.type == "cloudfront":
            cert_id_to_name = iam.get_certificate_id_to_name(
                account_number=account_number)
            dist = cloudfront.get_distribution(account_number=account_number,
                                               distribution_id=endpoint.name)
            loaded = get_distribution_endpoint(account_number,
                                               cert_id_to_name, dist)
            if loaded:
                certificate_names.append(loaded["certificate_name"])
        else:
            raise NotImplementedError()
        return certificate_names
class ACMEIssuerPlugin(IssuerPlugin):
    """Issuer plugin that orders certificates from ACME CAs using the DNS-01 challenge."""

    title = "Acme"
    slug = "acme-issuer"
    description = (
        "Enables the creation of certificates via ACME CAs (including Let's Encrypt), using the DNS-01 challenge"
    )
    version = acme.VERSION

    author = "Netflix"
    author_url = "https://github.com/netflix/lemur.git"

    options = [
        {
            "name": "acme_url",
            "type": "str",
            "required": True,
            "validation": check_validation(r"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+"),
            "helpMessage": "ACME resource URI. Must be a valid web url starting with http[s]://",
        },
        {
            "name": "telephone",
            "type": "str",
            "default": "",
            "helpMessage": "Telephone to use",
        },
        {
            "name": "email",
            "type": "str",
            "default": "",
            "validation": EMAIL_RE.pattern,
            "helpMessage": "Email to use",
        },
        {
            "name": "certificate",
            "type": "textarea",
            "default": "",
            "validation": check_validation("^-----BEGIN CERTIFICATE-----"),
            "helpMessage": "ACME root certificate",
        },
        {
            "name": "store_account",
            "type": "bool",
            "required": False,
            "helpMessage": "Disable to create a new account for each ACME request",
            "default": False,
        },
        {
            "name": "eab_kid",
            "type": "str",
            "required": False,
            "helpMessage": "Key identifier for the external account.",
        },
        {
            "name": "eab_hmac_key",
            "type": "str",
            "required": False,
            "helpMessage": "HMAC key for the external account.",
        },
        {
            "name": "acme_private_key",
            "type": "textarea",
            "default": "",
            "required": False,
            "helpMessage": "Account Private Key. Will be encrypted.",
        },
        {
            "name": "acme_regr",
            "type": "textarea",
            "default": "",
            "required": False,
            "helpMessage": "Account Registration",
        }
    ]

    def __init__(self, *args, **kwargs):
        super(ACMEIssuerPlugin, self).__init__(*args, **kwargs)

    def get_ordered_certificate(self, pending_cert):
        """
        Resolve a single pending certificate with the ACME CA.

        :param pending_cert: pending certificate carrying the CSR, authority
            and (optionally) a DNS provider id.
        :return: dict with ``body``, ``chain`` and ``external_id`` keys, or
            False when the authorizations could not be resolved.
        :raises Exception: when the CA does not support wildcard certificates.
        """
        self.acme = AcmeDnsHandler()
        acme_client, registration = self.acme.setup_acme_client(pending_cert.authority)
        order_info = authorization_service.get(pending_cert.external_id)
        if pending_cert.dns_provider_id:
            dns_provider = dns_provider_service.get(pending_cert.dns_provider_id)

            for domain in order_info.domains:
                # Currently, we only support specifying one DNS provider per certificate, even if that
                # certificate has multiple SANs that may belong to different providers.
                self.acme.dns_providers_for_domain[domain] = [dns_provider]
        else:
            for domain in order_info.domains:
                self.acme.autodetect_dns_providers(domain)

        try:
            order = acme_client.new_order(pending_cert.csr)
        except WildcardUnsupportedError:
            metrics.send("get_ordered_certificate_wildcard_unsupported", "counter", 1)
            raise Exception(
                "The currently selected ACME CA endpoint does"
                " not support issuing wildcard certificates."
            )
        try:
            authorizations = self.acme.get_authorizations(
                acme_client, order, order_info
            )
        except ClientError:
            capture_exception()
            metrics.send("get_ordered_certificate_error", "counter", 1)
            current_app.logger.error(
                f"Unable to resolve pending cert: {pending_cert.name}", exc_info=True
            )
            return False

        authorizations = self.acme.finalize_authorizations(acme_client, authorizations)
        pem_certificate, pem_certificate_chain = self.acme.request_certificate(
            acme_client, authorizations, order
        )
        cert = {
            "body": "\n".join(str(pem_certificate).splitlines()),
            "chain": "\n".join(str(pem_certificate_chain).splitlines()),
            "external_id": str(pending_cert.external_id),
        }
        return cert

    def get_ordered_certificates(self, pending_certs):
        """
        Resolve a batch of pending certificates with the ACME CA.

        Runs in two phases: first create orders and gather authorizations for
        every pending certificate, then finalize each order and download the
        issued certificate. Failures never abort the batch; failed entries are
        returned with ``"cert": False`` and a ``last_error`` message.

        :param pending_certs: iterable of pending certificates.
        :return: list of dicts, one per pending certificate.
        """
        self.acme = AcmeDnsHandler()
        self.acme_dns_challenge = AcmeDnsChallenge()
        pending = []
        certs = []

        # Phase 1: create ACME orders and collect authorizations.
        for pending_cert in pending_certs:
            # Initialized before the try so the except clause can safely tell
            # whether an order was created before the failure.
            order = None
            try:
                acme_client, registration = self.acme.setup_acme_client(
                    pending_cert.authority
                )
                order_info = authorization_service.get(pending_cert.external_id)
                if pending_cert.dns_provider_id:
                    dns_provider = dns_provider_service.get(
                        pending_cert.dns_provider_id
                    )

                    for domain in order_info.domains:
                        # Currently, we only support specifying one DNS provider per certificate, even if that
                        # certificate has multiple SANs that may belong to different providers.
                        self.acme.dns_providers_for_domain[domain] = [dns_provider]
                else:
                    for domain in order_info.domains:
                        self.acme.autodetect_dns_providers(domain)

                try:
                    order = acme_client.new_order(pending_cert.csr)
                except WildcardUnsupportedError:
                    capture_exception()
                    metrics.send(
                        "get_ordered_certificates_wildcard_unsupported_error",
                        "counter",
                        1,
                    )
                    raise Exception(
                        "The currently selected ACME CA endpoint does"
                        " not support issuing wildcard certificates."
                    )

                authorizations = self.acme.get_authorizations(
                    acme_client, order, order_info
                )

                pending.append(
                    {
                        "acme_client": acme_client,
                        "authorizations": authorizations,
                        "pending_cert": pending_cert,
                        "order": order,
                    }
                )
            except Exception as e:
                capture_exception()
                metrics.send(
                    "get_ordered_certificates_pending_creation_error", "counter", 1
                )
                current_app.logger.error(
                    f"Unable to resolve pending cert: {pending_cert}", exc_info=True
                )

                # Build a string error message: exceptions don't support "+="
                # with str, and `order` is a local (not a global), so the
                # original `globals().get("order")` check never fired.
                error = str(e)
                if order is not None:
                    error += f" Order uri: {order.uri}"
                certs.append(
                    {"cert": False, "pending_cert": pending_cert, "last_error": error}
                )

        # Phase 2: finalize authorizations and request the certificates.
        for entry in pending:
            try:
                entry["authorizations"] = self.acme.finalize_authorizations(
                    entry["acme_client"], entry["authorizations"]
                )
                pem_certificate, pem_certificate_chain = self.acme.request_certificate(
                    entry["acme_client"], entry["authorizations"], entry["order"]
                )

                cert = {
                    "body": "\n".join(str(pem_certificate).splitlines()),
                    "chain": "\n".join(str(pem_certificate_chain).splitlines()),
                    "external_id": str(entry["pending_cert"].external_id),
                }
                certs.append({"cert": cert, "pending_cert": entry["pending_cert"]})
            except Exception as e:
                capture_exception()
                metrics.send("get_ordered_certificates_resolution_error", "counter", 1)
                # Use this entry's order; the loop variable `order` from phase 1
                # would be the *last* pending cert's order, not this one's.
                order_url = entry["order"].uri
                error = f"{e}. Order URI: {order_url}"
                current_app.logger.error(
                    f"Unable to resolve pending cert: {entry['pending_cert']}. "
                    f"Check out {order_url} for more information.",
                    exc_info=True,
                )
                certs.append(
                    {
                        "cert": False,
                        "pending_cert": entry["pending_cert"],
                        "last_error": error,
                    }
                )
                # Ensure DNS records get deleted
                self.acme_dns_challenge.cleanup(
                    entry["authorizations"], entry["acme_client"]
                )
        return certs

    def create_certificate(self, csr, issuer_options):
        """
        Creates an ACME certificate using the DNS-01 challenge.

        :param csr:
        :param issuer_options:
        :return:
        :raise Exception:
        """
        acme_dns_challenge = AcmeDnsChallenge()

        return acme_dns_challenge.create_certificate(csr, issuer_options)

    @staticmethod
    def create_authority(options):
        """
        Creates an authority, this authority is then used by Lemur to allow a user
        to specify which Certificate Authority they want to sign their certificate.

        :param options:
        :return:
        """
        name = "acme_" + "_".join(options['name'].split(" ")) + "_admin"
        role = {"username": "", "password": "", "name": name}

        plugin_options = options.get("plugin", {}).get("plugin_options")
        if not plugin_options:
            error = "Invalid options for lemur_acme plugin: {}".format(options)
            current_app.logger.error(error)
            raise InvalidConfiguration(error)
        # Define static acme_root based off configuration variable by default. However, if user has passed a
        # certificate, use this certificate as the root.
        acme_root = current_app.config.get("ACME_ROOT")
        for option in plugin_options:
            if option.get("name") == "certificate":
                acme_root = option.get("value")
        return acme_root, "", [role]

    def cancel_ordered_certificate(self, pending_cert, **kwargs):
        # Needed to override issuer function.
        pass

    def revoke_certificate(self, certificate, reason):
        """Revoke a certificate, mapping the optional ``crl_reason`` onto the CRLReason enum."""
        self.acme = AcmeDnsHandler()
        crl_reason = CRLReason.unspecified
        if "crl_reason" in reason:
            crl_reason = CRLReason[reason["crl_reason"]]

        return self.acme.revoke_certificate(certificate, crl_reason.value)
class ACMEHttpIssuerPlugin(IssuerPlugin):
    """Issuer plugin that orders certificates from ACME CAs using the HTTP-01 challenge."""

    title = "Acme HTTP-01"
    slug = "acme-http-issuer"
    description = (
        "Enables the creation of certificates via ACME CAs (including Let's Encrypt), using the HTTP-01 challenge"
    )
    version = acme.VERSION

    author = "Netflix"
    author_url = "https://github.com/netflix/lemur.git"

    options = [
        {
            "name": "acme_url",
            "type": "str",
            "required": True,
            "validation": check_validation(r"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+"),
            "helpMessage": "Must be a valid web url starting with http[s]://",
        },
        {
            "name": "telephone",
            "type": "str",
            "default": "",
            "helpMessage": "Telephone to use",
        },
        {
            "name": "email",
            "type": "str",
            "default": "",
            "validation": EMAIL_RE.pattern,
            "helpMessage": "Email to use",
        },
        {
            "name": "certificate",
            "type": "textarea",
            "default": "",
            "validation": check_validation("^-----BEGIN CERTIFICATE-----"),
            "helpMessage": "ACME root Certificate",
        },
        {
            "name": "store_account",
            "type": "bool",
            "required": False,
            "helpMessage": "Disable to create a new account for each ACME request",
            "default": False,
        },
        {
            "name": "eab_kid",
            "type": "str",
            "default": "",
            "required": False,
            "helpMessage": "Key identifier for the external account.",
        },
        {
            "name": "eab_hmac_key",
            "type": "str",
            "default": "",
            "required": False,
            "helpMessage": "HMAC key for the external account.",
        },
        {
            "name": "acme_private_key",
            "type": "textarea",
            "default": "",
            "required": False,
            "helpMessage": "Account Private Key. Will be encrypted.",
        },
        {
            "name": "acme_regr",
            "type": "textarea",
            "default": "",
            "required": False,
            "helpMessage": "Account Registration",
        },
        {
            "name": "tokenDestination",
            "type": "destinationSelect",
            "required": True,
            "helpMessage": "The destination to use to deploy the token.",
        }
    ]

    def __init__(self, *args, **kwargs):
        super(ACMEHttpIssuerPlugin, self).__init__(*args, **kwargs)

    def create_certificate(self, csr, issuer_options):
        """
        Creates an ACME certificate using the HTTP-01 challenge.

        :param csr:
        :param issuer_options:
        :return:
        :raise Exception:
        """
        acme_http_challenge = AcmeHttpChallenge()

        return acme_http_challenge.create_certificate(csr, issuer_options)

    @staticmethod
    def create_authority(options):
        """
        Creates an authority, this authority is then used by Lemur to allow a user
        to specify which Certificate Authority they want to sign their certificate.

        :param options:
        :return:
        """
        name = "acme_" + "_".join(options['name'].split(" ")) + "_admin"
        role = {"username": "", "password": "", "name": name}

        plugin_options = options.get("plugin", {}).get("plugin_options")
        if not plugin_options:
            error = "Invalid options for lemur_acme plugin: {}".format(options)
            current_app.logger.error(error)
            raise InvalidConfiguration(error)
        # Define static acme_root based off configuration variable by default. However, if user has passed a
        # certificate, use this certificate as the root.
        acme_root = current_app.config.get("ACME_ROOT")
        for option in plugin_options:
            if option.get("name") == "certificate":
                acme_root = option.get("value")
        return acme_root, "", [role]

    def cancel_ordered_certificate(self, pending_cert, **kwargs):
        # Needed to override issuer function.
        pass

    def revoke_certificate(self, certificate, reason):
        """Revoke a certificate, mapping the optional ``crl_reason`` onto the CRLReason enum."""
        self.acme = AcmeHandler()

        crl_reason = CRLReason.unspecified
        if "crl_reason" in reason:
            crl_reason = CRLReason[reason["crl_reason"]]

        return self.acme.revoke_certificate(certificate, crl_reason.value)
class SFTPDestinationPlugin(DestinationPlugin):
    """Destination plugin that uploads certificates (and ACME HTTP-01 tokens) over SFTP."""

    title = "SFTP"
    slug = "sftp-destination"
    description = "Allow the uploading of certificates to SFTP"
    version = lemur_sftp.VERSION

    author = "Dmitry Zykov"
    author_url = "https://github.com/DmitryZykov"

    options = [
        {
            "name": "host",
            "type": "str",
            "required": True,
            "helpMessage": "The SFTP host.",
        },
        {
            "name": "port",
            "type": "int",
            "required": True,
            "helpMessage": "The SFTP port, default is 22.",
            "validation": check_validation(r"^(6553[0-5]|655[0-2][0-9]\d|65[0-4](\d){2}|6[0-4](\d){3}|[1-5](\d){4}|[1-9](\d){0,3})"),
            "default": "22",
        },
        {
            "name": "user",
            "type": "str",
            "required": True,
            "helpMessage": "The SFTP user. Default is root.",
            "default": "root",
        },
        {
            "name": "password",
            "type": "str",
            "required": False,
            "helpMessage": "The SFTP password (optional when the private key is used).",
            "default": None,
        },
        {
            "name": "privateKeyPath",
            "type": "str",
            "required": False,
            "helpMessage": "The path to the RSA private key on the Lemur server (optional).",
            "default": None,
        },
        {
            "name": "privateKeyPass",
            "type": "str",
            "required": False,
            "helpMessage": "The password for the encrypted RSA private key (optional).",
            "default": None,
        },
        {
            "name": "destinationPath",
            "type": "str",
            "required": True,
            "helpMessage": "The SFTP path where certificates will be uploaded.",
            "default": "/etc/nginx/certs",
        },
        {
            "name": "exportFormat",
            "required": True,
            "value": "NGINX",
            "helpMessage": "The export format for certificates.",
            "type": "select",
            "available": ["NGINX", "Apache"],
        },
    ]

    def open_sftp_connection(self, options):
        """
        Open an SSH connection and an SFTP session using the plugin options.

        :return: ``(sftp, ssh)`` pair; the caller must close ``ssh``.
        :raises AuthenticationException: when no credentials are configured or
            authentication fails.
        :raises NoValidConnectionsError: when the host is unreachable.
        """
        host = self.get_option("host", options)
        port = self.get_option("port", options)
        user = self.get_option("user", options)
        password = self.get_option("password", options)
        ssh_priv_key = self.get_option("privateKeyPath", options)
        ssh_priv_key_pass = self.get_option("privateKeyPass", options)

        try:
            current_app.logger.debug(
                "Connecting to {0}@{1}:{2}".format(user, host, port)
            )
            ssh = paramiko.SSHClient()

            # reject connections to hosts that are not in the known-hosts file
            ssh.set_missing_host_key_policy(paramiko.RejectPolicy())

            # open the ssh connection
            if password:
                current_app.logger.debug("Using password")
                ssh.connect(host, username=user, port=port, password=password)
            elif ssh_priv_key:
                current_app.logger.debug("Using RSA private key")
                pkey = paramiko.RSAKey.from_private_key_file(
                    ssh_priv_key, ssh_priv_key_pass
                )
                ssh.connect(host, username=user, port=port, pkey=pkey)
            else:
                current_app.logger.error(
                    "No password or private key provided. Can't proceed"
                )
                raise AuthenticationException

            # open the sftp session inside the ssh connection
            return ssh.open_sftp(), ssh
        except AuthenticationException as e:
            current_app.logger.error("ERROR in {0}: {1}".format(e.__class__, e))
            # Fill in the host placeholder (it was previously left literal).
            raise AuthenticationException(
                "Couldn't connect to {0}, due to an Authentication exception.".format(host)
            )
        except NoValidConnectionsError as e:
            current_app.logger.error("ERROR in {0}: {1}".format(e.__class__, e))
            # Re-raise the original error: NoValidConnectionsError must be
            # constructed from an errors dict, not a message string.
            raise

    # this is called when using this as a default destination plugin
    def upload(self, name, body, private_key, cert_chain, options, **kwargs):
        """
        Upload the certificate, key and (optionally) chain to the destination
        path, laid out for the configured web-server export format.
        """
        current_app.logger.debug("SFTP destination plugin is started")

        cn = common_name(parse_certificate(body))
        dst_path = self.get_option("destinationPath", options)
        dst_path_cn = dst_path + "/" + cn
        export_format = self.get_option("exportFormat", options)

        # prepare files for upload
        files = {cn + ".key": private_key, cn + ".pem": body}

        if cert_chain:
            if export_format == "NGINX":
                # assemble body + chain in the single file
                files[cn + ".pem"] += "\n" + cert_chain
            elif export_format == "Apache":
                # store chain in the separate file
                files[cn + ".ca.bundle.pem"] = cert_chain

        self.upload_file(dst_path_cn, files, options)

    # this is called from the acme http challenge
    def upload_acme_token(self, token_path, token, options, **kwargs):
        """Upload an ACME HTTP-01 challenge token to the destination path."""
        current_app.logger.debug("SFTP destination plugin is started for HTTP-01 challenge")

        dst_path = self.get_option("destinationPath", options)

        _, filename = path.split(token_path)

        # prepare the token file for upload
        files = {filename: token}

        self.upload_file(dst_path, files, options)

    # this is called from the acme http challenge
    def delete_acme_token(self, token_path, options, **kwargs):
        """Delete a previously uploaded ACME HTTP-01 challenge token."""
        dst_path = self.get_option("destinationPath", options)

        _, filename = path.split(token_path)

        # prepare the token file for deletion (content is irrelevant)
        files = {filename: None}

        self.delete_file(dst_path, files, options)

    # here the file is deleted
    def delete_file(self, dst_path, files, options):
        """
        Delete the given filenames from ``dst_path``. Connection errors
        propagate; other errors are logged and swallowed (best effort).
        """
        ssh = None
        try:
            # open the ssh and sftp sessions
            sftp, ssh = self.open_sftp_connection(options)

            # delete files
            for filename, _ in files.items():
                current_app.logger.debug(
                    "Deleting {0} from {1}".format(filename, dst_path)
                )
                try:
                    sftp.remove(path.join(dst_path, filename))
                except PermissionError as permerror:
                    if permerror.errno == 13:
                        current_app.logger.debug(
                            "Deleting {0} from {1} returned Permission Denied Error, making file writable and retrying".format(
                                filename, dst_path)
                        )
                        sftp.chmod(path.join(dst_path, filename), 0o600)
                        sftp.remove(path.join(dst_path, filename))

            ssh.close()

        except (AuthenticationException, NoValidConnectionsError):
            raise
        except Exception as e:
            current_app.logger.error("ERROR in {0}: {1}".format(e.__class__, e))
            # ssh may never have been opened if open_sftp_connection failed
            if ssh is not None:
                try:
                    ssh.close()
                except Exception:
                    pass

    # here the file is uploaded for real, this helps to keep this class DRY
    def upload_file(self, dst_path, files, options):
        """
        Upload the given ``{filename: data}`` mapping to ``dst_path``, creating
        the destination directory tree as needed.

        :raises Exception: with a descriptive message when the upload fails
            (connection errors propagate unchanged).
        """
        ssh = None
        try:
            # open the ssh and sftp sessions
            sftp, ssh = self.open_sftp_connection(options)

            # split the path into its segments, so we can create it recursively
            allparts = []
            path_copy = dst_path
            while True:
                parts = path.split(path_copy)
                if parts[0] == path_copy:  # sentinel for absolute paths
                    allparts.insert(0, parts[0])
                    break
                elif parts[1] == path_copy:  # sentinel for relative paths
                    allparts.insert(0, parts[1])
                    break
                else:
                    path_copy = parts[0]
                    allparts.insert(0, parts[1])

            # make sure that the destination path exists, recursively
            remote_path = allparts[0]
            for part in allparts:
                try:
                    if part != "/" and part != "":
                        remote_path = path.join(remote_path, part)
                    sftp.stat(remote_path)
                except IOError:
                    current_app.logger.debug("{0} doesn't exist, trying to create it".format(remote_path))
                    try:
                        sftp.mkdir(remote_path)
                    except IOError as ioerror:
                        current_app.logger.debug(
                            "Couldn't create {0}, error message: {1}".format(remote_path, ioerror))

            # upload certificate files to the sftp destination
            for filename, data in files.items():
                current_app.logger.debug(
                    "Uploading {0} to {1}".format(filename, dst_path)
                )
                try:
                    with sftp.open(path.join(dst_path, filename), "w") as f:
                        f.write(data)
                except PermissionError as permerror:
                    if permerror.errno == 13:
                        current_app.logger.debug(
                            "Uploading {0} to {1} returned Permission Denied Error, making file writable and retrying".format(
                                filename, dst_path)
                        )
                        sftp.chmod(path.join(dst_path, filename), 0o600)
                        with sftp.open(path.join(dst_path, filename), "w") as f:
                            f.write(data)
                # most likely the upload user isn't the webuser, -rw-r--r--
                sftp.chmod(path.join(dst_path, filename), 0o644)

            ssh.close()

        except (AuthenticationException, NoValidConnectionsError):
            raise
        except Exception as e:
            current_app.logger.error("ERROR in {0}: {1}".format(e.__class__, e))
            # ssh may never have been opened if open_sftp_connection failed
            if ssh is not None:
                try:
                    ssh.close()
                except Exception:
                    pass
            message = ''
            if hasattr(e, 'errors'):
                for _, error in e.errors.items():
                    message = error.strerror
            raise Exception(
                'Couldn\'t upload file to {}, error message: {}'.format(self.get_option("host", options), message))
class KubernetesDestinationPlugin(DestinationPlugin):
    """Uploads issued certificates into a Kubernetes cluster as Secret objects.

    Authentication material (bearer token, server CA certificate, namespace)
    can each be supplied either directly via an option or read from a file
    path option, defaulting to the in-cluster service-account paths.
    """
    title = "Kubernetes"
    slug = "kubernetes-destination"
    description = "Allow the uploading of certificates to Kubernetes as secret"

    author = "Mikhail Khodorovskiy"
    author_url = "https://github.com/mik373/lemur"

    options = [
        {
            "name": "secretNameFormat",
            "type": "str",
            "required": False,
            # Validation is difficult. This regex is used by kubectl to validate secret names:
            #  [a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*
            # Allowing the insertion of "{common_name}" (or any other such placeholder}
            # at any point in the string proved very challenging and had a tendency to
            # cause my browser to hang. The specified expression will allow any valid string
            # but will also accept many invalid strings.
            "validation": check_validation("(?:[a-z0-9.-]|\\{common_name\\})+"),
            "helpMessage": 'Must be a valid secret name, possibly including "{common_name}"',
            "default": "{common_name}",
        },
        {
            "name": "kubernetesURL",
            "type": "str",
            "required": False,
            "validation": check_validation("https?://[a-zA-Z0-9.-]+(?::[0-9]+)?"),
            "helpMessage": "Must be a valid Kubernetes server URL!",
            "default": "https://kubernetes.default",
        },
        {
            "name": "kubernetesAuthToken",
            "type": "str",
            "required": False,
            "validation": check_validation("[0-9a-zA-Z-_.]+"),
            "helpMessage": "Must be a valid Kubernetes server Token!",
        },
        {
            "name": "kubernetesAuthTokenFile",
            "type": "str",
            "required": False,
            "validation": check_validation("(/[^/]+)+"),
            "helpMessage": "Must be a valid file path!",
            "default": "/var/run/secrets/kubernetes.io/serviceaccount/token",
        },
        {
            "name": "kubernetesServerCertificate",
            "type": "textarea",
            "required": False,
            "validation": check_validation(
                "-----BEGIN CERTIFICATE-----[a-zA-Z0-9/+\\s\\r\\n]+-----END CERTIFICATE-----"
            ),
            "helpMessage": "Must be a valid Kubernetes server Certificate!",
        },
        {
            "name": "kubernetesServerCertificateFile",
            "type": "str",
            "required": False,
            "validation": check_validation("(/[^/]+)+"),
            "helpMessage": "Must be a valid file path!",
            "default": "/var/run/secrets/kubernetes.io/serviceaccount/ca.crt",
        },
        {
            "name": "kubernetesNamespace",
            "type": "str",
            "required": False,
            "validation": check_validation("[a-z0-9]([-a-z0-9]*[a-z0-9])?"),
            "helpMessage": "Must be a valid Kubernetes Namespace!",
        },
        {
            "name": "kubernetesNamespaceFile",
            "type": "str",
            "required": False,
            "validation": check_validation("(/[^/]+)+"),
            "helpMessage": "Must be a valid file path!",
            "default": "/var/run/secrets/kubernetes.io/serviceaccount/namespace",
        },
        {
            "name": "secretFormat",
            "type": "select",
            "required": True,
            "available": ["Full", "TLS", "Certificate"],
            "helpMessage": "The type of Secret to create.",
            "default": "Full",
        },
    ]

    def __init__(self, *args, **kwargs):
        super(KubernetesDestinationPlugin, self).__init__(*args, **kwargs)

    def upload(self, name, body, private_key, cert_chain, options, **kwargs):
        """Create or update a Kubernetes Secret holding the certificate.

        :param name: Lemur certificate name (unused; the secret name is
            derived from the certificate's common name via secretNameFormat)
        :param body: PEM certificate body
        :param private_key: PEM private key
        :param cert_chain: PEM chain
        :param options: plugin options
        :raises Exception: on any API failure, or when ensure_resource
            reports a non-None error.
        """
        try:
            k8_base_uri = self.get_option("kubernetesURL", options)
            secret_format = self.get_option("secretFormat", options)
            k8s_api = K8sSession(self.k8s_bearer(options), self.k8s_cert(options))
            cn = common_name(parse_certificate(body))
            secret_name_format = self.get_option("secretNameFormat", options)
            # e.g. "{common_name}" -> the certificate's CN
            secret_name = secret_name_format.format(common_name=cn)
            secret = build_secret(secret_format, secret_name, body, private_key, cert_chain)
            err = ensure_resource(
                k8s_api,
                k8s_base_uri=k8_base_uri,
                namespace=self.k8s_namespace(options),
                kind="secret",
                name=secret_name,
                data=secret,
            )
        except Exception as e:
            current_app.logger.exception("Exception in upload: {}".format(e), exc_info=True)
            raise

        # ensure_resource signals failure via a return value rather than raising
        if err is not None:
            current_app.logger.error("Error deploying resource: %s", err)
            raise Exception("Error uploading secret: " + err)

    def k8s_bearer(self, options):
        """Return the bearer token, either from options or the token file.

        :raises Exception: when neither source yields a token.
        """
        bearer = self.get_option("kubernetesAuthToken", options)
        if not bearer:
            bearer_file = self.get_option("kubernetesAuthTokenFile", options)
            with open(bearer_file, "r") as file:
                # NOTE(review): readline() keeps a trailing newline if the
                # token file ends with one — presumably token files here are
                # single-line without newline; confirm before relying on it.
                bearer = file.readline()
            if bearer:
                current_app.logger.debug("Using token read from %s", bearer_file)
            else:
                raise Exception("Unable to locate token in options or from %s", bearer_file)
        else:
            current_app.logger.debug("Using token from options")
        return bearer

    def k8s_cert(self, options):
        """Return a path to the Kubernetes server CA certificate file.

        When the certificate is given inline via options it is written to a
        "k8.cert" file next to this module, and that path is returned;
        otherwise the configured file path is used as-is.
        """
        cert_file = self.get_option("kubernetesServerCertificateFile", options)
        cert = self.get_option("kubernetesServerCertificate", options)
        if cert:
            cert_file = os.path.join(
                os.path.abspath(os.path.dirname(__file__)), "k8.cert")
            with open(cert_file, "w") as text_file:
                text_file.write(cert)
            current_app.logger.debug("Using certificate from options")
        else:
            current_app.logger.debug("Using certificate from %s", cert_file)
        return cert_file

    def k8s_namespace(self, options):
        """Return the target namespace, either from options or the namespace file.

        :raises Exception: when neither source yields a namespace.
        """
        namespace = self.get_option("kubernetesNamespace", options)
        if not namespace:
            namespace_file = self.get_option("kubernetesNamespaceFile", options)
            with open(namespace_file, "r") as file:
                namespace = file.readline()
            if namespace:
                current_app.logger.debug("Using namespace %s from %s", namespace, namespace_file)
            else:
                raise Exception(
                    "Unable to locate namespace in options or from %s", namespace_file)
        else:
            current_app.logger.debug("Using namespace %s from options", namespace)
        return namespace
class AzureDestinationPlugin(DestinationPlugin):
    """Azure Keyvault Destination plugin for Lemur.

    Imports the certificate plus its PKCS#8 private key into an Azure Key
    Vault instance via the Key Vault REST API (api-version 7.1), using an
    OAuth bearer token obtained from the configured tenant/app credentials.
    """

    title = "Azure"
    slug = "azure-keyvault-destination"
    description = "Allow the uploading of certificates to Azure key vault"

    author = "Sirferl"
    author_url = "https://github.com/sirferl/lemur"

    options = [
        {
            "name": "vaultUrl",
            "type": "str",
            "required": True,
            "validation": check_validation("^https?://[a-zA-Z0-9.:-]+$"),
            "helpMessage": "Valid URL to Azure key vault instance",
        },
        {
            "name": "azureTenant",
            "type": "str",
            "required": True,
            "validation": check_validation("^([a-zA-Z0-9/-/?])+$"),
            "helpMessage": "Tenant for the Azure Key Vault",
        },
        {
            "name": "appID",
            "type": "str",
            "required": True,
            "validation": check_validation("^([a-zA-Z0-9/-/?])+$"),
            "helpMessage": "AppID for the Azure Key Vault",
        },
        {
            "name": "azurePassword",
            "type": "str",
            "required": True,
            "validation": check_validation("[0-9a-zA-Z.:_-~]+"),
            "helpMessage": "Tenant password for the Azure Key Vault",
        }
    ]

    def __init__(self, *args, **kwargs):
        self.session = requests.Session()
        super(AzureDestinationPlugin, self).__init__(*args, **kwargs)

    def upload(self, name, body, private_key, cert_chain, options, **kwargs):
        """Upload certificate and private key to Azure Key Vault.

        :param name: Lemur certificate name (unused; the vault name is
            derived from the certificate common name)
        :param body: PEM certificate body
        :param private_key: PEM private key
        :param cert_chain: PEM chain (currently not included in the upload)
        :param options: plugin options
        :raises requests.exceptions.RequestException: when the POST to the
            Key Vault import endpoint fails.
        """
        # we use the common name to identify the certificate
        # Azure does not allow "." in the certificate name, so replace with "-"
        cert = parse_certificate(body)
        certificate_name = common_name(cert).replace(".", "-")

        vault_URI = self.get_option("vaultUrl", options)
        tenant = self.get_option("azureTenant", options)
        app_id = self.get_option("appID", options)
        password = self.get_option("azurePassword", options)

        access_token = get_access_token(tenant, app_id, password, self)

        cert_url = f"{vault_URI}/certificates/{certificate_name}/import?api-version=7.1"
        post_header = {
            "Authorization": f"Bearer {access_token}"
        }

        # Key Vault expects the key in PKCS#8 PEM form bundled with the cert
        key_pkcs8 = parse_private_key(private_key).private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.PKCS8,
            encryption_algorithm=serialization.NoEncryption(),
        )
        key_pkcs8 = key_pkcs8.decode("utf-8").replace('\\n', '\n')
        cert_package = f"{body}\n{key_pkcs8}"

        post_body = {
            "value": cert_package,
            "policy": {
                "key_props": {
                    "exportable": True,
                    "kty": "RSA",
                    "key_size": bitstrength(cert),
                    "reuse_key": True
                },
                "secret_props": {
                    "contentType": "application/x-pem-file"
                }
            }
        }

        try:
            response = self.session.post(cert_url, headers=post_header, json=post_body)
        except requests.exceptions.RequestException as e:
            current_app.logger.exception(f"AZURE: Error for POST {e}")
            # BUG FIX: previously execution fell through to handle_response()
            # with `response` unbound, raising a NameError that masked the
            # real transport error. Propagate the original exception instead.
            raise
        return_value = handle_response(response)
class VaultSourcePlugin(SourcePlugin):
    """ Class for importing certificates from Hashicorp Vault"""

    title = "Vault"
    slug = "vault-source"
    description = "Discovers all certificates in a given path"

    author = "Christopher Jolley"
    author_url = "https://github.com/alwaysjolley/lemur"

    options = [
        {
            "name": "vaultUrl",
            "type": "str",
            "required": True,
            "validation": url_regex.pattern,
            "helpMessage": "Valid URL to Hashi Vault instance",
        },
        {
            "name": "vaultKvApiVersion",
            "type": "select",
            "value": "2",
            "available": ["1", "2"],
            "required": True,
            "helpMessage": "Version of the Vault KV API to use",
        },
        {
            "name": "authenticationMethod",
            "type": "select",
            "value": "token",
            "available": ["token", "kubernetes"],
            "required": True,
            "helpMessage": "Authentication method to use",
        },
        {
            "name": "tokenFileOrVaultRole",
            "type": "str",
            "required": True,
            "validation": check_validation("^([a-zA-Z0-9/._-]+/?)+$"),
            "helpMessage": "Must be vaild file path for token based auth and valid role if k8s based auth",
        },
        {
            "name": "vaultMount",
            "type": "str",
            "required": True,
            "validation": check_validation(r"^\S+$"),
            "helpMessage": "Must be a valid Vault secrets mount name!",
        },
        {
            "name": "vaultPath",
            "type": "str",
            "required": True,
            "validation": check_validation("^([a-zA-Z0-9._-]+/?)+$"),
            "helpMessage": "Must be a valid Vault secrets path",
        },
        {
            "name": "objectName",
            "type": "str",
            "required": True,
            "validation": check_validation("[0-9a-zA-Z.:_-]+"),
            "helpMessage": "Object Name to search",
        },
    ]

    def get_certificates(self, options, **kwargs):
        """Pull certificates from objects in Hashicorp Vault.

        Each entry under the secret may store its PEM data under "crt",
        "pem", or any key whose value starts with a PEM certificate header.
        An optional chain is taken from a "chain" key or from the second
        certificate in the bundle.

        :return: list of dicts with keys ``body``, ``chain`` and ``name``.
        """
        data = []
        cert_filter = "-----BEGIN CERTIFICATE-----"
        cert_delimiter = "-----END CERTIFICATE-----"

        url = self.get_option("vaultUrl", options)
        auth_method = self.get_option("authenticationMethod", options)
        auth_key = self.get_option("tokenFileOrVaultRole", options)
        mount = self.get_option("vaultMount", options)
        path = self.get_option("vaultPath", options)
        obj_name = self.get_option("objectName", options)
        api_version = self.get_option("vaultKvApiVersion", options)

        client = hvac.Client(url=url)
        if auth_method == 'token':
            with open(auth_key, "r") as tfile:
                token = tfile.readline().rstrip("\n")
            client.token = token
        if auth_method == 'kubernetes':
            token_path = '/var/run/secrets/kubernetes.io/serviceaccount/token'
            with open(token_path, 'r') as f:
                jwt = f.read()
            client.auth_kubernetes(auth_key, jwt)

        client.secrets.kv.default_kv_version = api_version

        path = "{0}/{1}".format(path, obj_name)

        secret = get_secret(client, mount, path)
        for cname in secret["data"]:
            # BUG FIX: reset per entry; previously `cert` (and thus `body`)
            # leaked from the prior iteration when an entry held no
            # certificate data, and the very first such entry raised
            # IndexError on `cert[0]`.
            cert = []
            if "crt" in secret["data"][cname]:
                cert = secret["data"][cname]["crt"].split(cert_delimiter + "\n")
            elif "pem" in secret["data"][cname]:
                cert = secret["data"][cname]["pem"].split(cert_delimiter + "\n")
            else:
                # fall back to scanning every key for PEM-looking content
                for key in secret["data"][cname]:
                    if secret["data"][cname][key].startswith(cert_filter):
                        cert = secret["data"][cname][key].split(
                            cert_delimiter + "\n")
                        break
            if not cert:
                # entry holds no certificate material; skip it
                continue
            body = cert[0] + cert_delimiter
            if "chain" in secret["data"][cname]:
                chain = secret["data"][cname]["chain"]
            elif len(cert) > 1 and cert[1].startswith(cert_filter):
                # second PEM block in the bundle is the chain
                chain = cert[1] + cert_delimiter
            else:
                chain = None
            data.append({"body": body, "chain": chain, "name": cname})
        return [
            dict(body=c["body"], chain=c.get("chain"), name=c["name"])
            for c in data
        ]

    def get_endpoints(self, options, **kwargs):
        """ Not implemented yet """
        endpoints = []
        return endpoints
class VaultDestinationPlugin(DestinationPlugin):
    """Hashicorp Vault Destination plugin for Lemur.

    Writes the certificate (and optionally chain/key, depending on the
    selected bundle style) into a Vault KV secret whose path and object name
    may contain {CN|OU|O|L|S|C} placeholders filled from the certificate's
    subject.
    """

    title = "Vault"
    slug = "hashi-vault-destination"
    description = "Allow the uploading of certificates to Hashi Vault as secret"

    author = "Christopher Jolley"
    author_url = "https://github.com/alwaysjolley/lemur"

    options = [
        {
            "name": "vaultUrl",
            "type": "str",
            "required": True,
            "validation": url_regex.pattern,
            "helpMessage": "Valid URL to Hashi Vault instance",
        },
        {
            "name": "vaultKvApiVersion",
            "type": "select",
            "value": "2",
            "available": ["1", "2"],
            "required": True,
            "helpMessage": "Version of the Vault KV API to use",
        },
        {
            "name": "authenticationMethod",
            "type": "select",
            "value": "token",
            "available": ["token", "kubernetes"],
            "required": True,
            "helpMessage": "Authentication method to use",
        },
        {
            "name": "tokenFileOrVaultRole",
            "type": "str",
            "required": True,
            "validation": check_validation("^([a-zA-Z0-9/._-]+/?)+$"),
            "helpMessage": "Must be vaild file path for token based auth and valid role if k8s based auth",
        },
        {
            "name": "vaultMount",
            "type": "str",
            "required": True,
            "validation": check_validation(r"^\S+$"),
            "helpMessage": "Must be a valid Vault secrets mount name!",
        },
        {
            "name": "vaultPath",
            "type": "str",
            "required": True,
            "validation": check_validation("^(([a-zA-Z0-9._-]+|{(CN|OU|O|L|S|C)})+/?)+$"),
            "helpMessage": "Must be a valid Vault secrets path. Support vars: {CN|OU|O|L|S|C}",
        },
        {
            "name": "objectName",
            "type": "str",
            "required": False,
            "validation": check_validation("^([0-9a-zA-Z.:_-]+|{(CN|OU|O|L|S|C)})+$"),
            "helpMessage": "Name to bundle certs under, if blank use {CN}. Support vars: {CN|OU|O|L|S|C}",
        },
        {
            "name": "bundleChain",
            "type": "select",
            "value": "cert only",
            "available": ["Nginx", "Apache", "PEM", "no chain"],
            "required": True,
            "helpMessage": "Bundle the chain into the certificate",
        },
        {
            "name": "sanFilter",
            "type": "str",
            "value": ".*",
            "required": False,
            "validation": check_validation(".*"),
            "helpMessage": "Valid regex filter",
        },
    ]

    def __init__(self, *args, **kwargs):
        super(VaultDestinationPlugin, self).__init__(*args, **kwargs)

    def upload(self, name, body, private_key, cert_chain, options, **kwargs):
        """Upload certificate and private key to a Vault KV secret.

        :param name: Lemur certificate name (unused; path/object name come
            from plugin options and the certificate subject)
        :param body: PEM certificate body
        :param private_key: PEM private key
        :param cert_chain: PEM chain (may be empty/None)
        :param options: plugin options
        :raises Exception: when a SAN does not match the configured filter.
        """
        cert = parse_certificate(body)
        cname = common_name(cert)

        url = self.get_option("vaultUrl", options)
        auth_method = self.get_option("authenticationMethod", options)
        auth_key = self.get_option("tokenFileOrVaultRole", options)
        mount = self.get_option("vaultMount", options)
        path = self.get_option("vaultPath", options)
        bundle = self.get_option("bundleChain", options)
        obj_name = self.get_option("objectName", options)
        api_version = self.get_option("vaultKvApiVersion", options)
        san_filter = self.get_option("sanFilter", options)

        san_list = get_san_list(body)
        if san_filter:
            for san in san_list:
                try:
                    if not re.match(san_filter, san, flags=re.IGNORECASE):
                        current_app.logger.exception(
                            "Exception uploading secret to vault: invalid SAN: {}"
                            .format(san),
                            exc_info=True,
                        )
                        # BUG FIX: this previously called os._exit(1), which
                        # terminated the entire server process from inside a
                        # plugin. Raise so the caller can handle the failure.
                        raise Exception(
                            "Exception uploading secret to vault: invalid SAN: {}".format(san)
                        )
                except re.error:
                    # invalid filter regex: log and continue (best effort)
                    current_app.logger.exception(
                        "Exception compiling regex filter: invalid filter",
                        exc_info=True,
                    )

        client = hvac.Client(url=url)
        if auth_method == 'token':
            with open(auth_key, "r") as tfile:
                token = tfile.readline().rstrip("\n")
            client.token = token
        if auth_method == 'kubernetes':
            token_path = '/var/run/secrets/kubernetes.io/serviceaccount/token'
            with open(token_path, 'r') as f:
                jwt = f.read()
            client.auth_kubernetes(auth_key, jwt)

        client.secrets.kv.default_kv_version = api_version

        # expand {CN|OU|O|L|S|C} placeholders from the certificate subject
        t_path = path.format(
            CN=cname,
            OU=organizational_unit(cert),
            O=organization(cert),  # noqa: E741
            L=location(cert),
            S=state(cert),
            C=country(cert))
        if not obj_name:
            obj_name = '{CN}'
        f_obj_name = obj_name.format(
            CN=cname,
            OU=organizational_unit(cert),
            O=organization(cert),  # noqa: E741
            L=location(cert),
            S=state(cert),
            C=country(cert))

        path = "{0}/{1}".format(t_path, f_obj_name)

        # merge into any existing secret data at this path
        secret = get_secret(client, mount, path)
        secret["data"][cname] = {}

        if not cert_chain:
            chain = ''
        else:
            chain = cert_chain

        if bundle == "Nginx":
            # Nginx expects cert + chain concatenated in one file
            secret["data"][cname]["crt"] = "{0}\n{1}".format(body, chain)
            secret["data"][cname]["key"] = private_key
        elif bundle == "Apache":
            secret["data"][cname]["crt"] = body
            secret["data"][cname]["chain"] = chain
            secret["data"][cname]["key"] = private_key
        elif bundle == "PEM":
            secret["data"][cname]["pem"] = "{0}\n{1}\n{2}".format(
                body, chain, private_key)
        else:
            # "no chain" (and any other value): cert and key only
            secret["data"][cname]["crt"] = body
            secret["data"][cname]["key"] = private_key
        if isinstance(san_list, list):
            secret["data"][cname]["san"] = san_list
        try:
            client.secrets.kv.create_or_update_secret(path=path,
                                                      mount_point=mount,
                                                      secret=secret["data"])
        except ConnectionError as err:
            current_app.logger.exception(
                "Exception uploading secret to vault: {0}".format(err),
                exc_info=True)
class SlackNotificationPlugin(ExpirationNotificationPlugin):
    """Posts certificate expiration/rotation notifications to a Slack webhook."""

    title = "Slack"
    slug = "slack-notification"
    description = "Sends notifications to Slack"
    version = slack.VERSION

    author = "Harm Weites"
    author_url = "https://github.com/netflix/lemur"

    additional_options = [
        {
            "name": "webhook",
            "type": "str",
            "required": True,
            "validation": check_validation(r"^https:\/\/hooks\.slack\.com\/services\/.+$"),
            "helpMessage": "The url Slack told you to use for this integration",
        },
        {
            "name": "username",
            "type": "str",
            "validation": check_validation("^.+$"),
            "helpMessage": "The great storyteller",
            "default": "Lemur",
        },
        {
            "name": "recipients",
            "type": "str",
            "required": True,
            "validation": check_validation("^(@|#).+$"),
            "helpMessage": "Where to send to, either @username or #channel",
        },
    ]

    def send(self, notification_type, message, targets, options, **kwargs):
        """
        A typical check can be performed using the notify command:
        `lemur notify`

        While we receive a `targets` parameter here, it is unused,
        as Slack webhooks do not allow dynamic re-targeting of messages.
        The webhook itself specifies a channel.
        """
        # map the notification type to its attachment builder
        attachment_builders = {
            "expiration": create_expiration_attachments,
            "rotation": create_rotation_attachments,
        }
        builder = attachment_builders.get(notification_type)
        attachments = builder(message) if builder else None

        if not attachments:
            raise Exception("Unable to create message attachments")

        payload = {
            "text": f"Lemur {notification_type.capitalize()} Notification",
            "attachments": attachments,
            "channel": self.get_option("recipients", options),
            "username": self.get_option("username", options),
        }

        webhook_url = self.get_option("webhook", options)
        response = requests.post(webhook_url, json.dumps(payload))

        # anything other than HTTP 200 from Slack is treated as a failure
        if response.status_code not in [200]:
            raise Exception(
                f"Failed to send message. Slack response: {response.status_code} {payload}"
            )

        current_app.logger.info(
            f"Slack response: {response.status_code} Message Body: {payload}")
class OpenSSLExportPlugin(ExportPlugin):
    title = "OpenSSL"
    slug = "openssl-export"
    description = "Is a loose interface to openssl and support various formats"
    version = openssl.VERSION

    author = "Kevin Glisson"
    author_url = "https://github.com/netflix/lemur"

    options = [
        {
            "name": "type",
            "type": "select",
            "required": True,
            "available": ["PKCS12 (.p12)"],
            "helpMessage": "Choose the format you wish to export",
        },
        {
            "name": "passphrase",
            "type": "str",
            "required": False,
            "helpMessage": "If no passphrase is given one will be generated for you, we highly recommend this.",
            "validation": check_validation(""),
        },
        {
            "name": "alias",
            "type": "str",
            "required": False,
            "helpMessage": "Enter the alias you wish to use for the keystore.",
        },
    ]

    def export(self, body, chain, key, options, **kwargs):
        """
        Generates a PKCS#12 archive.

        :param key: PEM private key (required for PKCS12)
        :param chain: PEM chain
        :param body: PEM certificate body
        :param options: plugin options (type, passphrase, alias)
        :param kwargs:
        :return: tuple of (extension, passphrase, raw archive bytes)
        :raises Exception: for unsupported types or a missing private key
        """
        # fall back to a generated passphrase / CN-derived alias when unset
        if self.get_option("passphrase", options):
            passphrase = self.get_option("passphrase", options)
        else:
            passphrase = get_psuedo_random_string()

        if self.get_option("alias", options):
            alias = self.get_option("alias", options)
        else:
            alias = common_name(parse_certificate(body))

        # FIX: renamed local from `type` to avoid shadowing the builtin
        export_type = self.get_option("type", options)

        with mktemppath() as output_tmp:
            if export_type == "PKCS12 (.p12)":
                if not key:
                    raise Exception("Private Key required by {0}".format(export_type))

                create_pkcs12(body, chain, output_tmp, key, alias, passphrase)
                extension = "p12"
            else:
                raise Exception(
                    "Unable to export, unsupported type: {0}".format(export_type))

            with open(output_tmp, "rb") as f:
                raw = f.read()

        return extension, passphrase, raw
def create_default_expiration_notifications(name, recipients, intervals=None):
    """
    Will create standard 30, 10 and 2 day notifications for a given owner unless an alternate set
    of intervals is supplied. If standard notifications already exist these will be returned instead of
    new notifications.

    :param name: label prefix for the created notifications
    :param recipients: list of e-mail addresses to notify
    :param intervals: optional list of day counts; defaults to the app config
    :return: list of notification objects (existing or newly created)
    """
    # nothing to notify -> nothing to create
    if not recipients:
        return []

    base_options = [
        {
            "name": "unit",
            "type": "select",
            "required": True,
            "validation": check_validation(""),
            "available": ["days", "weeks", "months"],
            "helpMessage": "Interval unit",
            "value": "days",
        },
        {
            "name": "recipients",
            "type": "str",
            "required": True,
            "validation": EMAIL_RE.pattern,
            "helpMessage": EMAIL_RE_HELP,
            "value": ",".join(recipients),
        },
    ]

    if intervals is None:
        intervals = current_app.config.get(
            "LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS", [30, 15, 2]
        )

    notifications = []
    for days in intervals:
        label = f"{name}_{days}_DAY"
        existing = get_by_label(label)
        if existing:
            # reuse the previously created notification for this interval
            notifications.append(existing)
            continue

        interval_option = {
            "name": "interval",
            "type": "int",
            "required": True,
            "validation": check_validation(r"^\d+$"),
            "helpMessage": "Number of days to be alert before expiration.",
            "value": days,
        }
        created = create(
            label=label,
            plugin_name=current_app.config.get(
                "LEMUR_DEFAULT_NOTIFICATION_PLUGIN", "email-notification"
            ),
            options=[interval_option] + base_options,
            description=f"Default {days} day expiration notification",
            certificates=[],
        )
        notifications.append(created)

    return notifications
def test_upload_acme_token(app):
    """End-to-end check of the aws-s3 plugin's ACME token round trip:
    upload the HTTP-01 challenge object, read it back directly from S3,
    then delete it. Relies on the mocked S3 backend provided by the
    test app fixture (bucket is created locally via boto3).
    """
    from lemur.common.utils import check_validation
    from lemur.plugins.base import plugins
    from lemur.plugins.lemur_aws.s3 import get

    bucket = "public-bucket"
    account = "123456789012"
    prefix = "some-path/more-path/"
    token_content = "Challenge"
    token_name = "TOKEN"
    token_path = ".well-known/acme-challenge/" + token_name

    # mirrors the S3DestinationPlugin options, with concrete values filled in
    additional_options = [
        {
            "name": "bucket",
            "value": bucket,
            "type": "str",
            "required": True,
            "validation": check_validation(r"[0-9a-z.-]{3,63}"),
            "helpMessage": "Must be a valid S3 bucket name!",
        },
        {
            "name": "accountNumber",
            "type": "str",
            "value": account,
            "required": True,
            "validation": check_validation(r"[0-9]{12}"),
            "helpMessage": "A valid AWS account number with permission to access S3",
        },
        {
            "name": "region",
            "type": "str",
            "default": "us-east-1",
            "required": False,
            "helpMessage": "Region bucket exists",
            "available": ["us-east-1", "us-west-2", "eu-west-1"],
        },
        {
            "name": "encrypt",
            "type": "bool",
            "value": False,
            "required": False,
            "helpMessage": "Enable server side encryption",
            "default": True,
        },
        {
            "name": "prefix",
            "type": "str",
            "value": prefix,
            "required": False,
            "helpMessage": "Must be a valid S3 object prefix!",
        },
    ]

    s3_client = boto3.client('s3')
    s3_client.create_bucket(Bucket=bucket)
    p = plugins.get("aws-s3")

    response = p.upload_acme_token(token_path=token_path,
                                   token_content=token_content,
                                   token=token_content,
                                   options=additional_options)
    assert response

    # read the object back directly to verify content round-trips
    response = get(bucket_name=bucket,
                   prefixed_object_name=prefix + token_name,
                   encrypt=False,
                   account_number=account)

    # put data, and getting the same data
    assert (response == token_content)

    response = p.delete_acme_token(token_path=token_path,
                                   options=additional_options,
                                   account_number=account)
    assert response
class S3DestinationPlugin(ExportDestinationPlugin): title = "AWS-S3" slug = "aws-s3" description = "Allow the uploading of certificates to Amazon S3" author = "Mikhail Khodorovskiy, Harm Weites <*****@*****.**>" author_url = "https://github.com/Netflix/lemur" additional_options = [ { "name": "bucket", "type": "str", "required": True, "validation": check_validation("[0-9a-z.-]{3,63}"), "helpMessage": "Must be a valid S3 bucket name!", }, { "name": "accountNumber", "type": "str", "required": True, "validation": check_validation("[0-9]{12}"), "helpMessage": "A valid AWS account number with permission to access S3", }, { "name": "region", "type": "str", "default": "us-east-1", "required": False, "helpMessage": "Region bucket exists", "available": ["us-east-1", "us-west-2", "eu-west-1"], }, { "name": "encrypt", "type": "bool", "required": False, "helpMessage": "Enable server side encryption", "default": True, }, { "name": "prefix", "type": "str", "required": False, "helpMessage": "Must be a valid S3 object prefix!", }, ] def __init__(self, *args, **kwargs): super(S3DestinationPlugin, self).__init__(*args, **kwargs) def upload(self, name, body, private_key, chain, options, **kwargs): files = self.export(body, private_key, chain, options) for ext, passphrase, data in files: s3.put( self.get_option("bucket", options), self.get_option("region", options), "{prefix}/{name}.{extension}".format(prefix=self.get_option( "prefix", options), name=name, extension=ext), data, self.get_option("encrypt", options), account_number=self.get_option("accountNumber", options), ) def upload_acme_token(self, token_path, token, options, **kwargs): """ This is called from the acme http challenge :param self: :param token_path: :param token: :param options: :param kwargs: :return: """ current_app.logger.debug( "S3 destination plugin is started to upload HTTP-01 challenge") function = f"{__name__}.{sys._getframe().f_code.co_name}" account_number = self.get_option("accountNumber", options) 
bucket_name = self.get_option("bucket", options) prefix = self.get_option("prefix", options) region = self.get_option("region", options) filename = token_path.split("/")[-1] if not prefix.endswith("/"): prefix + "/" response = s3.put(bucket_name=bucket_name, region_name=region, prefix=prefix + filename, data=token, encrypt=False, account_number=account_number) res = "Success" if response else "Failure" log_data = { "function": function, "message": "upload acme token challenge", "result": res, "bucket_name": bucket_name, "filename": filename } current_app.logger.info(log_data) metrics.send(f"{function}", "counter", 1, metric_tags={ "result": res, "bucket_name": bucket_name, "filename": filename }) return response def delete_acme_token(self, token_path, options, **kwargs): current_app.logger.debug( "S3 destination plugin is started to delete HTTP-01 challenge") function = f"{__name__}.{sys._getframe().f_code.co_name}" account_number = self.get_option("accountNumber", options) bucket_name = self.get_option("bucket", options) prefix = self.get_option("prefix", options) filename = token_path.split("/")[-1] response = s3.delete(bucket_name=bucket_name, prefixed_object_name=prefix + filename, account_number=account_number) res = "Success" if response else "Failure" log_data = { "function": function, "message": "delete acme token challenge", "result": res, "bucket_name": bucket_name, "filename": filename } current_app.logger.info(log_data) metrics.send(f"{function}", "counter", 1, metric_tags={ "result": res, "bucket_name": bucket_name, "filename": filename }) return response
class AWSSourcePlugin(SourcePlugin):
    title = "AWS"
    slug = "aws-source"
    description = "Discovers all SSL certificates and ELB endpoints in an AWS account"
    version = aws.VERSION

    author = "Kevin Glisson"
    author_url = "https://github.com/netflix/lemur"

    options = [
        {
            "name": "accountNumber",
            "type": "str",
            "required": True,
            "validation": check_validation("^[0-9]{12,12}$"),
            "helpMessage": "Must be a valid AWS account number!",
        },
        {
            "name": "regions",
            "type": "str",
            "helpMessage": "Comma separated list of regions to search in, if no region is specified we look in all regions.",
        },
    ]

    def get_certificates(self, options, **kwargs):
        """Return all IAM server certificates in the configured account as
        dicts with keys body/chain/name."""
        cert_data = iam.get_all_certificates(
            account_number=self.get_option("accountNumber", options))
        return [
            dict(
                body=c["CertificateBody"],
                chain=c.get("CertificateChain"),
                name=c["ServerCertificateMetadata"]["ServerCertificateName"],
            ) for c in cert_data
        ]

    def get_endpoints(self, options, **kwargs):
        """Discover classic (ELB) and advanced (ELBv2) load balancer
        endpoints across the configured regions.

        Failures per region / per load balancer are reported to the error
        tracker and skipped, so one bad region does not abort discovery.
        """
        endpoints = []
        account_number = self.get_option("accountNumber", options)
        regions = self.get_option("regions", options)

        if not regions:
            # no explicit regions configured: scan every region in the account
            regions = ec2.get_regions(account_number=account_number)
        else:
            # strip all whitespace, then split the comma-separated list
            regions = "".join(regions.split()).split(",")

        for region in regions:
            try:
                elbs = elb.get_all_elbs(account_number=account_number, region=region)
                current_app.logger.info({
                    "message": "Describing classic load balancers",
                    "account_number": account_number,
                    "region": region,
                    "number_of_load_balancers": len(elbs)
                })
            except Exception as e:  # noqa
                capture_exception()
                continue

            # NOTE(review): the loop variable `e` shadows the exception
            # binding above (both are throwaway, marked noqa)
            for e in elbs:
                try:
                    endpoints.extend(
                        get_elb_endpoints(account_number, region, e))
                except Exception as e:  # noqa
                    capture_exception()
                    continue

            # fetch advanced ELBs
            try:
                elbs_v2 = elb.get_all_elbs_v2(account_number=account_number,
                                              region=region)
            except Exception as e:  # noqa
                capture_exception()
                continue

            current_app.logger.info({
                "message": "Describing advanced load balancers",
                "account_number": account_number,
                "region": region,
                "number_of_load_balancers": len(elbs_v2)
            })

            for e in elbs_v2:
                try:
                    endpoints.extend(
                        get_elb_endpoints_v2(account_number, region, e))
                except Exception as e:  # noqa
                    capture_exception()
                    continue

        return endpoints

    def update_endpoint(self, endpoint, certificate):
        """Attach the given certificate to the endpoint's listener (ELB or
        ELBv2), creating the expected IAM certificate ARN first."""
        options = endpoint.source.options
        account_number = self.get_option("accountNumber", options)

        # relies on the fact that region is included in DNS name
        region = get_region_from_dns(endpoint.dnsname)
        arn = iam.create_arn_from_cert(account_number, region, certificate.name)

        if endpoint.type == "elbv2":
            listener_arn = elb.get_listener_arn_from_endpoint(
                endpoint.name,
                endpoint.port,
                account_number=account_number,
                region=region,
            )
            elb.attach_certificate_v2(
                listener_arn,
                endpoint.port,
                [{
                    "CertificateArn": arn
                }],
                account_number=account_number,
                region=region,
            )
        else:
            elb.attach_certificate(
                endpoint.name,
                endpoint.port,
                arn,
                account_number=account_number,
                region=region,
            )

    def clean(self, certificate, options, **kwargs):
        """Delete the IAM server certificate backing this Lemur certificate."""
        account_number = self.get_option("accountNumber", options)
        iam.delete_cert(certificate.name, account_number=account_number)

    def get_certificate_by_name(self, certificate_name, options):
        """Fetch a single IAM server certificate by name.

        :return: dict with body/chain/name, or None when not found or on a
            client error (the error is logged and counted).
        """
        account_number = self.get_option("accountNumber", options)
        # certificate name may contain path, in which case we remove it
        if "/" in certificate_name:
            certificate_name = certificate_name.split('/')[-1]
        try:
            cert = iam.get_certificate(certificate_name,
                                       account_number=account_number)
            if cert:
                return dict(
                    body=cert["CertificateBody"],
                    chain=cert.get("CertificateChain"),
                    name=cert["ServerCertificateMetadata"]
                    ["ServerCertificateName"],
                )
        except ClientError:
            current_app.logger.warning(
                "get_elb_certificate_failed: Unable to get certificate for {0}"
                .format(certificate_name))
            capture_exception()
            metrics.send("get_elb_certificate_failed", "counter", 1,
                         metric_tags={
                             "certificate_name": certificate_name,
                             "account_number": account_number
                         })
        return None

    def get_endpoint_certificate_names(self, endpoint):
        """Return the IAM certificate names currently attached to the
        endpoint's listeners (classic ELB or ELBv2)."""
        options = endpoint.source.options
        account_number = self.get_option("accountNumber", options)
        region = get_region_from_dns(endpoint.dnsname)
        certificate_names = []

        if endpoint.type == "elb":
            elb_details = elb.get_elbs(
                account_number=account_number,
                region=region,
                LoadBalancerNames=[endpoint.name],
            )

            for lb_description in elb_details["LoadBalancerDescriptions"]:
                for listener_description in lb_description[
                        "ListenerDescriptions"]:
                    listener = listener_description.get("Listener")
                    # only SSL listeners carry a certificate
                    if not listener.get("SSLCertificateId"):
                        continue

                    certificate_names.append(
                        iam.get_name_from_arn(
                            listener.get("SSLCertificateId")))
        elif endpoint.type == "elbv2":
            listeners = elb.describe_listeners_v2(
                account_number=account_number,
                region=region,
                LoadBalancerArn=elb.get_load_balancer_arn_from_endpoint(
                    endpoint.name,
                    account_number=account_number,
                    region=region),
            )
            for listener in listeners["Listeners"]:
                if not listener.get("Certificates"):
                    continue

                for certificate in listener["Certificates"]:
                    certificate_names.append(
                        iam.get_name_from_arn(certificate["CertificateArn"]))

        return certificate_names