def should_backup() -> bool:
    """Check if we should be doing backups"""
    in_kubernetes = SERVICE_HOST_ENV_NAME in environ
    # In Kubernetes the only supported backup target is S3; without a
    # configured bucket there is nowhere to store the backup.
    if in_kubernetes and not CONFIG.y("postgresql.s3_backup.bucket"):
        LOGGER.info("Running in k8s and s3 backups are not configured, skipping")
        return False
    # Backups can also be disabled globally via configuration.
    if not CONFIG.y_bool("postgresql.backup.enabled"):
        return False
    return True
def get_reference_object(self) -> V1Deployment:
    """Get deployment object for outpost"""
    # Generate V1ContainerPort objects
    container_ports = [
        V1ContainerPort(
            container_port=port.port,
            name=port.name,
            protocol=port.protocol.upper(),
        )
        for port in self.controller.deployment_ports
    ]
    meta = self.get_object_meta(name=self.name)
    secret_name = f"authentik-outpost-{self.controller.outpost.uuid.hex}-api"
    image_prefix = CONFIG.y("outposts.docker_image_base")

    def env_from_secret(var_name: str, key: str) -> V1EnvVar:
        # All outpost settings are injected from the per-outpost API secret
        return V1EnvVar(
            name=var_name,
            value_from=V1EnvVarSource(
                secret_key_ref=V1SecretKeySelector(
                    name=secret_name,
                    key=key,
                )
            ),
        )

    container = V1Container(
        name=str(self.outpost.type),
        image=f"{image_prefix}-{self.outpost.type}:{__version__}",
        ports=container_ports,
        env=[
            env_from_secret("AUTHENTIK_HOST", "authentik_host"),
            env_from_secret("AUTHENTIK_TOKEN", "token"),
            env_from_secret("AUTHENTIK_INSECURE", "authentik_host_insecure"),
        ],
    )
    pod_template = V1PodTemplateSpec(
        metadata=V1ObjectMeta(labels=self.get_pod_meta()),
        spec=V1PodSpec(containers=[container]),
    )
    return V1Deployment(
        metadata=meta,
        spec=V1DeploymentSpec(
            replicas=self.outpost.config.kubernetes_replicas,
            selector=V1LabelSelector(match_labels=self.get_pod_meta()),
            template=pod_template,
        ),
    )
def certificate_discovery(self: MonitoredTask):
    """Discover, import and update certificates from the filesystem

    Recursively scans `cert_discovery_dir`, parsing every regular file as
    either a private key (if the body contains "PRIVATE KEY") or a
    certificate, then creates/updates the matching managed
    CertificateKeyPair objects. Unreadable or invalid files are logged and
    skipped.
    """
    certs = {}
    private_keys = {}
    discovered = 0
    for file in glob(CONFIG.y("cert_discovery_dir") + "/**", recursive=True):
        path = Path(file)
        if not path.exists():
            continue
        if path.is_dir():
            continue
        # For certbot setups, we want to ignore archive.
        if "archive" in file:
            continue
        # Support certbot's directory structure
        if path.name in ["fullchain.pem", "privkey.pem"]:
            cert_name = path.parent.name
        else:
            cert_name = path.name.replace(path.suffix, "")
        try:
            # Open read-only: we never write back, and "r+" would fail on
            # files the authentik user can only read (common for certbot).
            with open(path, "r", encoding="utf-8") as _file:
                body = _file.read()
                if "PRIVATE KEY" in body:
                    private_keys[cert_name] = ensure_private_key_valid(body)
                else:
                    certs[cert_name] = ensure_certificate_valid(body)
        except (OSError, ValueError) as exc:
            LOGGER.warning("Failed to open file or invalid format", exc=exc, file=path)
        discovered += 1
    for name, cert_data in certs.items():
        cert = CertificateKeyPair.objects.filter(managed=MANAGED_DISCOVERED % name).first()
        if not cert:
            cert = CertificateKeyPair(
                name=name,
                managed=MANAGED_DISCOVERED % name,
            )
        # Only save when something actually changed, to avoid needless writes
        dirty = False
        if cert.certificate_data != cert_data:
            cert.certificate_data = cert_data
            dirty = True
        if name in private_keys:
            if cert.key_data != private_keys[name]:
                cert.key_data = private_keys[name]
                dirty = True
        if dirty:
            cert.save()
    self.set_status(
        TaskResult(
            TaskResultStatus.SUCCESSFUL,
            messages=[_("Successfully imported %(count)d files." % {"count": discovered})],
        )
    )
class OutpostConfig:
    """Configuration an outpost uses to configure itself"""

    # URL the outpost uses to reach the authentik core server
    authentik_host: str
    # Skip TLS verification when talking to authentik_host
    authentik_host_insecure: bool = False

    # Defaults below are read from the static configuration once, at import time
    log_level: str = CONFIG.y("log_level")
    error_reporting_enabled: bool = CONFIG.y_bool("error_reporting.enabled")
    error_reporting_environment: str = CONFIG.y("error_reporting.environment", "customer")

    # %-style template used to name objects created for this outpost
    object_naming_template: str = field(default="ak-outpost-%(name)s")

    # Settings only used by the Kubernetes controller
    kubernetes_replicas: int = field(default=1)
    kubernetes_namespace: str = field(default="default")
    kubernetes_ingress_annotations: dict[str, str] = field(default_factory=dict)
    kubernetes_ingress_secret_name: str = field(default="authentik-outpost-tls")
    kubernetes_service_type: str = field(default="ClusterIP")
def get_container_image(self) -> str:
    """Get container image to use for this outpost"""
    # An explicitly configured image always wins over the templated default.
    override = self.outpost.config.container_image
    if override is not None:
        return override
    template: str = CONFIG.y("outposts.container_image_base")
    return template % {
        "type": self.outpost.type,
        "version": __version__,
        "build_hash": get_build_hash(),
    }
def __open(self):
    """Get GeoIP Reader, if configured, otherwise none"""
    path = CONFIG.y("geoip")
    # Nothing configured - leave the reader unset
    if not path:
        return
    try:
        self.__reader = Reader(path)
        # Remember the database mtime so a newer file can be detected later
        self.__last_mtime = stat(path).st_mtime
        LOGGER.info("Loaded GeoIP database", last_write=self.__last_mtime)
    except OSError as exc:
        LOGGER.warning("Failed to load GeoIP database", exc=exc)
def get_geoip_reader() -> Optional[Reader]:
    """Get GeoIP Reader, if configured, otherwise none"""
    path = CONFIG.y("authentik.geoip")
    # Unset or empty path means GeoIP is disabled
    if not path:
        return None
    try:
        reader = Reader(path)
    except OSError:
        # Database missing/unreadable - degrade gracefully
        return None
    LOGGER.info("Enabled GeoIP support")
    return reader
def context_processor(request: HttpRequest) -> dict[str, Any]:
    """Context Processor that injects tenant object into every template"""
    # Fall back to the default tenant when the middleware didn't set one
    tenant = getattr(request, "tenant", DEFAULT_TENANT)
    span = Hub.current.scope.span
    # Propagate the current sentry trace into templates when a span is active
    trace = span.to_traceparent() if span else ""
    return {
        "tenant": tenant,
        "footer_links": CONFIG.y("footer_links"),
        "sentry_trace": trace,
    }
def test_current_tenant(self):
    """Test Current tenant API"""
    tenant = create_test_tenant()
    response = self.client.get(reverse("authentik_api:tenant-current"))
    expected = {
        "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
        "branding_favicon": "/static/dist/assets/icons/icon.png",
        "branding_title": "authentik",
        "matched_domain": tenant.domain,
        "ui_footer_links": CONFIG.y("footer_links"),
    }
    self.assertJSONEqual(response.content.decode(), expected)
def test_fallback(self):
    """Test fallback tenant"""
    # With no tenants in the database, the fallback tenant must be served
    Tenant.objects.all().delete()
    response = self.client.get(reverse("authentik_api:tenant-current"))
    expected = {
        "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
        "branding_favicon": "/static/dist/assets/icons/icon.png",
        "branding_title": "authentik",
        "matched_domain": "fallback",
        "ui_footer_links": CONFIG.y("footer_links"),
    }
    self.assertJSONEqual(response.content.decode(), expected)
def test_current_tenant(self):
    """Test Current tenant API"""
    response = self.client.get(reverse("authentik_api:tenant-current"))
    expected = {
        "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
        "branding_title": "authentik",
        "matched_domain": "authentik-default",
        "ui_footer_links": CONFIG.y("authentik.footer_links"),
    }
    self.assertJSONEqual(force_str(response.content), expected)
class CurrentTenantSerializer(PassiveSerializer):
    """Partial tenant information for styling"""

    # Domain that was matched for the requesting host
    matched_domain = CharField(source="domain")
    branding_title = CharField()
    branding_logo = CharField()
    # Footer links come from static configuration, not from the tenant;
    # the default is evaluated once at import time
    ui_footer_links = ListField(
        child=FooterLinkSerializer(),
        read_only=True,
        default=CONFIG.y("authentik.footer_links"),
    )
    # Slug of the configured unenrollment flow, omitted when not set
    flow_unenrollment = CharField(source="flow_unenrollment.slug", required=False)
def test_tenant_subdomain(self):
    """Test Current tenant API"""
    # A request for foo.bar.baz must match the tenant with domain bar.baz
    Tenant.objects.all().delete()
    Tenant.objects.create(domain="bar.baz", branding_title="custom")
    response = self.client.get(
        reverse("authentik_api:tenant-current"), HTTP_HOST="foo.bar.baz"
    )
    expected = {
        "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
        "branding_favicon": "/static/dist/assets/icons/icon.png",
        "branding_title": "custom",
        "matched_domain": "bar.baz",
        "ui_footer_links": CONFIG.y("footer_links"),
    }
    self.assertJSONEqual(response.content.decode(), expected)
def get_capabilities(self) -> list[Capabilities]:
    """Get all capabilities this server instance supports"""
    caps = []
    if settings.DEBUG or settings.TEST or path.ismount(settings.MEDIA_ROOT):
        caps.append(Capabilities.CAN_SAVE_MEDIA)
    if GEOIP_READER.enabled:
        caps.append(Capabilities.CAN_GEO_IP)
    if SERVICE_HOST_ENV_NAME not in environ:
        # Running in compose, backup is always supported
        caps.append(Capabilities.CAN_BACKUP)
    elif CONFIG.y("postgresql.s3_backup"):
        # Running in k8s, only s3 backup is supported
        caps.append(Capabilities.CAN_BACKUP)
    return caps
def get(self, request: Request) -> Response:
    """Retrieve public configuration options"""
    error_reporting = {
        # Error reporting is forced off while running in debug mode
        "enabled": CONFIG.y("error_reporting.enabled") and not settings.DEBUG,
        "environment": CONFIG.y("error_reporting.environment"),
        "send_pii": CONFIG.y("error_reporting.send_pii"),
        "traces_sample_rate": float(CONFIG.y("error_reporting.sample_rate", 0.4)),
    }
    config = ConfigSerializer(
        {
            "error_reporting": error_reporting,
            "capabilities": self.get_capabilities(),
            "cache_timeout": int(CONFIG.y("redis.cache_timeout")),
            "cache_timeout_flows": int(CONFIG.y("redis.cache_timeout_flows")),
            "cache_timeout_policies": int(CONFIG.y("redis.cache_timeout_policies")),
            "cache_timeout_reputation": int(CONFIG.y("redis.cache_timeout_reputation")),
        }
    )
    return Response(config.data)
def avatar(self) -> str:
    """Get avatar, depending on authentik.avatar setting"""
    mode: str = CONFIG.y("avatars", "none")
    if mode == "none":
        return DEFAULT_AVATAR
    # gravatar uses md5 for their URLs, so md5 can't be avoided
    mail_hash = md5(self.email.lower().encode("utf-8")).hexdigest()  # nosec
    if mode == "gravatar":
        query = urlencode([("s", "158"), ("r", "g")], doseq=True)
        return escape(f"{GRAVATAR_URL}/avatar/{mail_hash}?{query}")
    # Any other value is treated as a %-style URL template
    return mode % {
        "username": self.username,
        "mail_hash": mail_hash,
        "upn": self.attributes.get("upn", ""),
    }
def list(self, request: Request) -> Response:
    """Retrieve public configuration options"""
    settings_map = {
        "branding_logo": CONFIG.y("authentik.branding.logo"),
        "branding_title": CONFIG.y("authentik.branding.title"),
        "error_reporting_enabled": CONFIG.y("error_reporting.enabled"),
        "error_reporting_environment": CONFIG.y("error_reporting.environment"),
        "error_reporting_send_pii": CONFIG.y("error_reporting.send_pii"),
        "ui_footer_links": CONFIG.y("authentik.footer_links"),
    }
    return Response(ConfigSerializer(settings_map).data)
def backup_database(self: MonitoredTask):  # pragma: no cover
    """Database backup"""
    self.result_timeout_hours = 25
    in_kubernetes = SERVICE_HOST_ENV_NAME in environ
    # In Kubernetes only S3 backups are supported; skip with a warning otherwise
    if in_kubernetes and not CONFIG.y("postgresql.s3_backup"):
        LOGGER.info("Running in k8s and s3 backups are not configured, skipping")
        self.set_status(
            TaskResult(
                TaskResultStatus.WARNING,
                [
                    (
                        "Skipping backup as authentik is running in Kubernetes "
                        "without S3 backups configured."
                    ),
                ],
            )
        )
        return
    try:
        start = datetime.now()
        out = StringIO()
        management.call_command("dbbackup", quiet=True, stdout=out)
        self.set_status(
            TaskResult(
                TaskResultStatus.SUCCESSFUL,
                [
                    f"Successfully finished database backup {naturaltime(start)} {out.getvalue()}",
                ],
            )
        )
        LOGGER.info("Successfully backed up database.")
    except (
        IOError,
        BotoCoreError,
        ClientError,
        Boto3Error,
        PermissionError,
        CommandConnectorError,
        ValueError,
    ) as exc:
        # Record the failure on the task status instead of raising
        self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
# Well-known keys used in the flow plan context
PLAN_CONTEXT_REDIRECT = "redirect"
PLAN_CONTEXT_APPLICATION = "application"
PLAN_CONTEXT_SOURCE = "source"
# Is set by the Flow Planner when a FlowToken was used, and the currently active flow plan
# was restored.
PLAN_CONTEXT_IS_RESTORED = "is_restored"

# Prometheus metrics for flow planning
GAUGE_FLOWS_CACHED = Gauge(
    "authentik_flows_cached",
    "Cached flows",
)
HIST_FLOWS_PLAN_TIME = Histogram(
    "authentik_flows_plan_time",
    "Duration to build a plan for a flow",
    ["flow_slug"],
)
# Plan cache TTL, read from configuration once at import time
CACHE_TIMEOUT = int(CONFIG.y("redis.cache_timeout_flows"))


def cache_key(flow: Flow, user: Optional[User] = None) -> str:
    """Generate Cache key for flow

    Keys are scoped per flow, and additionally per user when one is given,
    so per-user plans don't collide in the cache.
    """
    prefix = f"flow_{flow.pk}"
    if user:
        prefix += f"#{user.pk}"
    return prefix


@dataclass
class FlowPlan:
    """This data-class is the output of a FlowPlanner. It holds a flat list
    of all Stages that should be run."""
def j_print(event: str, log_level: str = "info", **kwargs):
    """Print event in the same format as structlog with JSON.
    Used before structlog is configured.

    :param event: human-readable event message
    :param log_level: level string included in the JSON payload
    :param kwargs: extra fields merged into the payload
    """
    data = {
        "event": event,
        "level": log_level,
        "logger": __name__,
        "timestamp": time(),
    }
    data.update(**kwargs)
    # Emit to stderr so stdout stays clean for the wrapped process
    print(dumps(data), file=stderr)


# Sanity check, ensure SECRET_KEY is set before we even check for database connectivity
if CONFIG.y("secret_key") is None or len(CONFIG.y("secret_key")) == 0:
    j_print(
        "Secret key missing, check https://goauthentik.io/docs/installation/.")
    sysexit(1)

# Block until a PostgreSQL connection can be established
while True:
    try:
        conn = connect(
            dbname=CONFIG.y("postgresql.name"),
            user=CONFIG.y("postgresql.user"),
            password=CONFIG.y("postgresql.password"),
            host=CONFIG.y("postgresql.host"),
            port=int(CONFIG.y("postgresql.port")),
        )
        # Creating a cursor verifies the connection is actually usable
        conn.cursor()
        break
"""Print event in the same format as structlog with JSON. Used before structlog is configured.""" data = { "event": event, "level": log_level, "logger": __name__, "timestamp": time(), } data.update(**kwargs) print(dumps(data), file=stderr) while True: try: conn = connect( dbname=CONFIG.y("postgresql.name"), user=CONFIG.y("postgresql.user"), password=CONFIG.y("postgresql.password"), host=CONFIG.y("postgresql.host"), ) conn.cursor() break except OperationalError: sleep(1) j_print("PostgreSQL Connection failed, retrying...") while True: try: redis = Redis( host=CONFIG.y("redis.host"), port=6379,
"""authentik reputation request signals""" from django.contrib.auth.signals import user_logged_in, user_login_failed from django.core.cache import cache from django.dispatch import receiver from django.http import HttpRequest from structlog.stdlib import get_logger from authentik.lib.config import CONFIG from authentik.lib.utils.http import get_client_ip from authentik.policies.reputation.models import CACHE_KEY_PREFIX from authentik.policies.reputation.tasks import save_reputation from authentik.stages.identification.signals import identification_failed LOGGER = get_logger() CACHE_TIMEOUT = int(CONFIG.y("redis.cache_timeout_reputation")) def update_score(request: HttpRequest, identifier: str, amount: int): """Update score for IP and User""" remote_ip = get_client_ip(request) try: # We only update the cache here, as its faster than writing to the DB score = cache.get_or_set( CACHE_KEY_PREFIX + remote_ip + identifier, { "ip": remote_ip, "identifier": identifier, "score": 0 }, CACHE_TIMEOUT,
} data.update(**kwargs) print(dumps(data), file=sys.stderr) LOGGER = structlog.get_logger() # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname( os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) STATIC_ROOT = BASE_DIR + "/static" STATICFILES_DIRS = [BASE_DIR + "/web"] MEDIA_ROOT = BASE_DIR + "/media" SECRET_KEY = CONFIG.y( "secret_key", "9$@r!d^1^jrn#fk#1#@ks#9&i$^s#1)_13%$rwjrhd=e8jfi_s") # noqa Debug DEBUG = CONFIG.y_bool("debug") INTERNAL_IPS = ["127.0.0.1"] ALLOWED_HOSTS = ["*"] SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") LOGIN_URL = "authentik_flows:default-authentication" # Custom user model AUTH_USER_MODEL = "authentik_core.User" _cookie_suffix = "_debug" if DEBUG else "" CSRF_COOKIE_NAME = "authentik_csrf" LANGUAGE_COOKIE_NAME = f"authentik_language{_cookie_suffix}"
"""Print event in the same format as structlog with JSON. Used before structlog is configured.""" data = { "event": event, "level": log_level, "logger": __name__, "timestamp": time(), } data.update(**kwargs) print(dumps(data), file=stderr) while True: try: conn = connect( dbname=CONFIG.y("postgresql.name"), user=CONFIG.y("postgresql.user"), password=CONFIG.y("postgresql.password"), host=CONFIG.y("postgresql.host"), ) conn.cursor() break except OperationalError as exc: sleep(1) j_print(f"PostgreSQL Connection failed, retrying... ({exc})") while True: try: redis = Redis.from_url( f"redis://:{CONFIG.y('redis.password')}@{CONFIG.y('redis.host')}:6379" f"/{CONFIG.y('redis.ws_db')}"
from prometheus_client import Histogram
from sentry_sdk.hub import Hub
from sentry_sdk.tracing import Span
from structlog.stdlib import get_logger

from authentik.events.models import Event, EventAction
from authentik.lib.config import CONFIG
from authentik.lib.utils.errors import exception_to_string
from authentik.policies.exceptions import PolicyException
from authentik.policies.models import PolicyBinding
from authentik.policies.types import PolicyRequest, PolicyResult

LOGGER = get_logger()
# Policy processes are spawned via an explicit fork context
FORK_CTX = get_context("fork")
# Policy result cache TTL, read from configuration once at import time
CACHE_TIMEOUT = int(CONFIG.y("redis.cache_timeout_policies"))
PROCESS_CLASS = FORK_CTX.Process
# Histogram tracking per-policy execution time, labelled by binding and target
HIST_POLICIES_EXECUTION_TIME = Histogram(
    "authentik_policies_execution_time",
    "Execution times for single policies",
    [
        "binding_order",
        "binding_target_type",
        "binding_target_name",
        "object_name",
        "object_type",
        "user",
    ],
)
} data.update(**kwargs) print(dumps(data), file=sys.stderr) LOGGER = structlog.get_logger() # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname( os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) STATIC_ROOT = BASE_DIR + "/static" STATICFILES_DIRS = [BASE_DIR + "/web"] MEDIA_ROOT = BASE_DIR + "/media" DEBUG = CONFIG.y_bool("debug") SECRET_KEY = CONFIG.y("secret_key") INTERNAL_IPS = ["127.0.0.1"] ALLOWED_HOSTS = ["*"] SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") SECURE_CROSS_ORIGIN_OPENER_POLICY = None LOGIN_URL = "authentik_flows:default-authentication" # Custom user model AUTH_USER_MODEL = "authentik_core.User" _cookie_suffix = "_debug" if DEBUG else "" CSRF_COOKIE_NAME = "authentik_csrf" CSRF_HEADER_NAME = "HTTP_X_AUTHENTIK_CSRF" LANGUAGE_COOKIE_NAME = f"authentik_language{_cookie_suffix}" SESSION_COOKIE_NAME = f"authentik_session{_cookie_suffix}"