async def get(self):
    """
    Provide information about site configuration for the frontend

    :return:
    """
    is_contractor = config.config_plugin().is_contractor(self.user)

    # Visibility flags for each frontend page, keyed by page name.
    header_section = {
        "custom_header_message_title": config.get(
            "headers.custom_header_message.title", ""
        ),
        "custom_header_message_text": config.get(
            "headers.custom_header_message.text", ""
        ),
    }
    page_visibility = {
        "header": header_section,
        "groups": {"enabled": config.get("headers.group_access.enabled", False)},
        "users": {"enabled": config.get("headers.group_access.enabled", False)},
        "policies": {
            "enabled": config.get("headers.policies.enabled", True)
            and not is_contractor
        },
        "self_service": {
            "enabled": config.get("enable_self_service", True) and not is_contractor
        },
        "api_health": {
            "enabled": is_in_group(
                self.user,
                self.groups,
                config.get("groups.can_edit_health_alert", []),
            )
        },
        "audit": {
            "enabled": is_in_group(
                self.user, self.groups, config.get("groups.can_audit", [])
            )
        },
        "config": {"enabled": can_edit_dynamic_config(self.user, self.groups)},
    }

    # Branding, tracking, and support links surfaced in the UI chrome.
    site_config = {
        "consoleme_logo": await get_random_security_logo(),
        "google_tracking_uri": config.get("google_analytics.tracking_url"),
        "documentation_url": config.get("documentation_page"),
        "support_contact": config.get("support_contact"),
        "support_chat_url": config.get("support_chat_url"),
        "security_logo": config.get("security_logo.image"),
        "security_url": config.get("security_logo.url"),
    }

    # Per-user authorization capabilities for the frontend to gate actions on.
    permissions = {
        "can_edit_policies": can_admin_policies(self.user, self.groups),
        "can_create_roles": can_create_roles(self.user, self.groups),
        "can_delete_roles": can_delete_roles(self.user, self.groups),
    }

    user_profile = {
        "site_config": site_config,
        "user": self.user,
        "can_logout": config.get("auth.set_auth_cookie"),
        "is_contractor": is_contractor,
        "employee_photo_url": config.config_plugin().get_employee_photo_url(self.user),
        "employee_info_url": config.config_plugin().get_employee_info_url(self.user),
        "authorization": permissions,
        "pages": page_visibility,
        "accounts": await get_account_id_to_name_mapping(),
    }

    self.set_header("Content-Type", "application/json")
    self.write(user_profile)
async def generate_honeybee_request_from_change_model_array(
    request_creation: RequestCreationModel, user: str, extended_request_uuid: str
) -> ExtendedRequestModel:
    """Turn a set of requested template changes into a git branch + pull request.

    For every change in ``request_creation``: clone the matching configured
    repository, merge (or overwrite) the requested policy into the template's
    YAML file on a freshly generated branch, and record a before/after diff as
    a ``GenericFileChangeModel``. Unless ``request_creation.dry_run`` is set,
    the branch is committed, pushed, and a pull request is opened.

    :param request_creation: The requested changes; all changes must target the
        same principal (enforced below).
    :param user: Email/username of the requester; used for the branch name,
        commit message, and requester info on the returned request.
    :param extended_request_uuid: Identifier assigned to the returned request.
    :return: An ``ExtendedRequestModel`` in ``pending`` status describing the
        generated changes (and PR URL when not a dry run).
    :raises Exception: If changes span multiple principals, no configured
        repository matches a change, the repository provider is unsupported,
        or no PR URL could be generated on a non-dry-run request.
    :raises ValueError: If the target policy has IncludeAccounts/ExcludeAccounts.
    """
    repositories_for_request = {}
    primary_principal = None
    t = int(time.time())
    # Branch name is unique per user per second.
    generated_branch_name = f"{user}-{t}"
    policy_name = config.get(
        "generate_honeybee_request_from_change_model_array.policy_name",
        "self_service_generated",
    )
    repo_config = None
    # Checkout Git Repo and generate a branch name for the user's change
    for change in request_creation.changes.changes:
        # All changes in one request must affect a single principal.
        if primary_principal and change.principal != primary_principal:
            raise Exception("Changes must all affect the same principal")
        primary_principal = change.principal
        discovered_repository_for_change = False
        # Repository already cloned for an earlier change; reuse it.
        if repositories_for_request.get(change.principal.repository_name):
            continue
        # Find repo
        for r in config.get("cache_resource_templates.repositories", []):
            if r["name"] == change.principal.repository_name:
                # NOTE(review): repo_config keeps only the last matched repo;
                # the commit/push below also uses only the last loop's
                # git_client, so multi-repository requests appear unsupported
                # here — confirm.
                repo_config = r
                repo = Repository(
                    r["repo_url"], r["name"], r["authentication_settings"]["email"]
                )
                # Shallow clone, then branch off for this request's edits.
                await repo.clone(depth=1)
                git_client = repo.git
                git_client.reset()
                git_client.checkout(b=generated_branch_name)
                repositories_for_request[change.principal.repository_name] = {
                    "main_branch_name": r["main_branch_name"],
                    "repo": repo,
                    "git_client": git_client,
                    "config": r,
                }
                discovered_repository_for_change = True
                break
        if not discovered_repository_for_change:
            raise Exception(
                "No matching repository found for change in ConsoleMe's configuration"
            )
    request_changes = ChangeModelArray(changes=[])
    affected_templates = []
    for change in request_creation.changes.changes:
        git_client = repositories_for_request[change.principal.repository_name][
            "git_client"
        ]
        repo = repositories_for_request[change.principal.repository_name]["repo"].repo
        main_branch_name = repositories_for_request[change.principal.repository_name][
            "main_branch_name"
        ]
        # Start each file edit from the pristine copy on origin's main branch.
        git_client.checkout(
            f"origin/{main_branch_name}", change.principal.resource_identifier
        )
        change_file_path = f"{repo.working_dir}/{change.principal.resource_identifier}"
        with open(change_file_path, "r") as f:
            # NOTE(review): `yaml` here is presumably a ruamel.yaml round-trip
            # instance (CommentedSeq is used below and dump targets a stream) —
            # confirm against the module imports.
            yaml_content = yaml.load(f)
        # Original
        buf = io.BytesIO()
        yaml.dump(yaml_content, buf)
        original_text = buf.getvalue()
        successfully_merged_statement = False
        # Normalize Policies to a list so the merge loop below is uniform.
        if not yaml_content.get("Policies"):
            yaml_content["Policies"] = []
        if isinstance(yaml_content["Policies"], dict):
            yaml_content["Policies"] = [yaml_content["Policies"]]
        # The PolicyModel is a representation of a single (usually inline) policy that a user has requested be merged
        # into a given template. If the policy is provided as a string, it's the contents of the full file (which
        # should include the user's requested change)
        if isinstance(change.policy, PolicyModel):
            if isinstance(change.policy.policy_document["Statement"], str):
                change.policy.policy_document["Statement"] = [
                    change.policy.policy_document["Statement"]
                ]
            for i in range(len(yaml_content.get("Policies", []))):
                policy = yaml_content["Policies"][i]
                # Only merge into the designated self-service policy.
                if policy.get("PolicyName") != policy_name:
                    continue
                if policy.get("IncludeAccounts") or policy.get("ExcludeAccounts"):
                    raise ValueError(
                        f"The {policy_name} policy has IncludeAccounts or ExcludeAccounts set"
                    )
                successfully_merged_statement = True
                policy["Statement"].extend(
                    CommentedSeq(change.policy.policy_document["Statement"])
                )
                # Round-trip through JSON to strip YAML node types before
                # minimizing the combined statements.
                yaml_content["Policies"][i][
                    "Statement"
                ] = await minimize_iam_policy_statements(
                    json.loads(json.dumps(policy["Statement"]))
                )
            # No existing self-service policy: append a new one.
            if not successfully_merged_statement:
                yaml_content["Policies"].append(
                    {
                        "PolicyName": policy_name,
                        "Statement": change.policy.policy_document["Statement"],
                    }
                )
            with open(change_file_path, "w") as f:
                yaml.dump(yaml_content, f)
            # New
            buf = io.BytesIO()
            yaml.dump(yaml_content, buf)
            updated_text = buf.getvalue()
        elif isinstance(change.policy, str):
            # If the change is provided as a string, it represents the full change
            updated_text = change.policy
            with open(change_file_path, "w") as f:
                f.write(updated_text)
        else:
            raise Exception(
                "Unable to parse change from Honeybee templated role change request"
            )
        # Record a before/after pair for display in the request UI.
        request_changes.changes.append(
            GenericFileChangeModel(
                principal=primary_principal,
                action="attach",
                change_type="generic_file",
                policy=updated_text,
                old_policy=original_text,
                encoding="yaml",
            )
        )
        git_client.add(change.principal.resource_identifier)
        affected_templates.append(change.principal.resource_identifier)
    pull_request_url = ""
    if not request_creation.dry_run:
        commit_title = f"ConsoleMe Generated PR for {', '.join(affected_templates)}"
        commit_message = (
            f"This request was made through ConsoleMe Self Service\n\nUser: {user}\n\n"
            f"Justification: {request_creation.justification}"
        )
        # NOTE(review): uses the git_client left over from the last loop
        # iteration; see the multi-repository note above.
        git_client.commit(m=commit_message)
        git_client.push(u=["origin", generated_branch_name])
        if repo_config["code_repository_provider"] == "bitbucket":
            # Credentials are resolved indirectly: the repo config names the
            # configuration keys that hold username/password.
            bitbucket = BitBucket(
                repo_config["code_repository_config"]["url"],
                config.get(
                    repo_config["code_repository_config"]["username_config_key"]
                ),
                config.get(
                    repo_config["code_repository_config"]["password_config_key"]
                ),
            )
            pull_request_url = await bitbucket.create_pull_request(
                repo_config["project_key"],
                repo_config["name"],
                repo_config["project_key"],
                repo_config["name"],
                generated_branch_name,
                repo_config["main_branch_name"],
                commit_title,
                commit_message,
            )
        else:
            raise Exception(
                f"Unsupported `code_repository_provider` specified in configuration: {repo_config}"
            )
    # Remove local clones regardless of dry-run status.
    for repo_name, repo_details in repositories_for_request.items():
        await repo_details["repo"].cleanup()
    if not pull_request_url and not request_creation.dry_run:
        raise Exception("Unable to generate pull request URL")
    return ExtendedRequestModel(
        id=extended_request_uuid,
        request_url=pull_request_url,
        principal=primary_principal,
        timestamp=int(time.time()),
        requester_email=user,
        approvers=[],
        request_status=RequestStatus.pending,
        changes=request_changes,
        requester_info=UserModel(
            email=user,
            extended_info=await auth.get_user_info(user),
            details_url=config.config_plugin().get_employee_info_url(user),
            photo_url=config.config_plugin().get_employee_photo_url(user),
        ),
        comments=[],
        cross_account=False,
    )
async def authorization_flow(
    self, user: str = None, console_only: bool = True, refresh_cache: bool = False
) -> None:
    """Perform high level authorization flow.

    Determines the requesting user and their groups by trying, in order:
    a ConsoleMe-signed auth cookie, development-mode overrides, SAML, OIDC,
    AWS ALB auth, a username/password redirect, and finally a pluggable
    header-based lookup. It then resolves the user's eligible roles/accounts
    (optionally from a server-side Redis cache), refreshes that cache, and
    sets the auth cookie when configured to do so.

    Side effects: populates ``self.user``, ``self.groups``,
    ``self.eligible_roles``, ``self.eligible_accounts``,
    ``self.user_role_name``, ``self.contractor``, ``self.red``, ``self.ip``,
    ``self.request_uuid``, and ``self.auth_cookie_expiration``.

    :param user: Pre-resolved username, if the caller already knows it.
    :param console_only: Restrict eligible roles to console-capable ones;
        also part of the Redis cache key.
    :param refresh_cache: Skip the server-side role cache and recompute.
    :raises SilentException: When the user is being redirected to an
        authentication endpoint (SAML/OIDC/password flows).
    :raises NoUserException: Re-raised after logging when no user could be
        determined by any method.
    :raises Exception: When ALB auth is enabled but fails.
    """
    self.eligible_roles = []
    self.eligible_accounts = []
    self.request_uuid = str(uuid.uuid4())
    # A refresh_cache query argument (first value) also forces a refresh.
    refresh_cache = (
        self.request.arguments.get("refresh_cache", [False])[0] or refresh_cache
    )
    attempt_sso_authn = await self.attempt_sso_authn()
    refreshed_user_roles_from_cache = False
    if not refresh_cache and config.get(
        "dynamic_config.role_cache.always_refresh_roles_cache", False
    ):
        refresh_cache = True
    self.red = await RedisHandler().redis()
    self.ip = self.get_request_ip()
    self.user = user
    self.groups = None
    self.user_role_name = None
    self.auth_cookie_expiration = 0
    log_data = {
        "function": "Basehandler.authorization_flow",
        "ip": self.ip,
        "request_path": self.request.uri,
        "user-agent": self.request.headers.get("User-Agent"),
        "request_id": self.request_uuid,
        "message": "Incoming request",
    }
    log.debug(log_data)
    # Check to see if user has a valid auth cookie
    if config.get("auth_cookie_name", "consoleme_auth"):
        auth_cookie = self.get_cookie(config.get("auth_cookie_name", "consoleme_auth"))
        # Validate auth cookie and use it to retrieve group information
        if auth_cookie:
            res = await validate_and_return_jwt_token(auth_cookie)
            if res and isinstance(res, dict):
                self.user = res.get("user")
                self.groups = res.get("groups")
                self.auth_cookie_expiration = res.get("exp")
    if not self.user:
        # Check for development mode and a configuration override that specify the user and their groups.
        if config.get("development") and config.get("_development_user_override"):
            self.user = config.get("_development_user_override")
        if config.get("development") and config.get("_development_groups_override"):
            self.groups = config.get("_development_groups_override")
    if not self.user:
        # SAML flow. If user has a JWT signed by ConsoleMe, and SAML is enabled in configuration, user will go
        # through this flow.
        if config.get("auth.get_user_by_saml", False) and attempt_sso_authn:
            res = await authenticate_user_by_saml(self)
            if not res:
                # /saml/acs and /auth? are part of the SAML handshake itself,
                # so don't redirect those requests again.
                if self.request.uri != "/saml/acs" and not self.request.uri.startswith(
                    "/auth?"
                ):
                    raise SilentException(
                        "Unable to authenticate the user by SAML. "
                        "Redirecting to authentication endpoint"
                    )
                return
    if not self.user:
        if config.get("auth.get_user_by_oidc", False) and attempt_sso_authn:
            res = await authenticate_user_by_oidc(self)
            if not res:
                raise SilentException(
                    "Unable to authenticate the user by OIDC. "
                    "Redirecting to authentication endpoint"
                )
            if res and isinstance(res, dict):
                self.user = res.get("user")
                self.groups = res.get("groups")
    if not self.user:
        if config.get("auth.get_user_by_aws_alb_auth", False):
            res = await authenticate_user_by_alb_auth(self)
            if not res:
                raise Exception("Unable to authenticate the user by ALB Auth")
            if res and isinstance(res, dict):
                self.user = res.get("user")
                self.groups = res.get("groups")
    if not self.user:
        # Username/Password authn flow
        if config.get("auth.get_user_by_password", False):
            after_redirect_uri = self.request.arguments.get("redirect_url", [""])[0]
            if after_redirect_uri and isinstance(after_redirect_uri, bytes):
                after_redirect_uri = after_redirect_uri.decode("utf-8")
            self.set_status(403)
            # Tell the frontend to send the user to the login page, then back.
            self.write(
                {
                    "type": "redirect",
                    "redirect_url": f"/login?redirect_after_auth={after_redirect_uri}",
                    "reason": "unauthenticated",
                    "message": "User is not authenticated. Redirect to authenticate",
                }
            )
            await self.finish()
            raise SilentException(
                "Redirecting user to authenticate by username/password."
            )
    if not self.user:
        try:
            # Get user. Config options can specify getting username from headers or
            # OIDC, but custom plugins are also allowed to override this.
            self.user = await auth.get_user(headers=self.request.headers)
            if not self.user:
                raise NoUserException(
                    f"User not detected. Headers: {self.request.headers}"
                )
            log_data["user"] = self.user
        except NoUserException:
            self.clear()
            self.set_status(403)
            stats.count(
                "Basehandler.authorization_flow.no_user_detected",
                tags={
                    "request_path": self.request.uri,
                    "ip": self.ip,
                    "user_agent": self.request.headers.get("User-Agent"),
                },
            )
            log_data["message"] = "No user detected. Check configuration."
            log.error(log_data)
            await self.finish(log_data["message"])
            raise
    self.contractor = config.config_plugin().is_contractor(self.user)
    if config.get("auth.cache_user_info_server_side", True) and not refresh_cache:
        # Best-effort read of the server-side role cache; a Redis outage just
        # falls through to a full recompute.
        try:
            cache_r = self.red.get(f"USER-{self.user}-CONSOLE-{console_only}")
        except redis.exceptions.ConnectionError:
            cache_r = None
        if cache_r:
            log_data["message"] = "Loading from cache"
            log.debug(log_data)
            cache = json.loads(cache_r)
            self.groups = cache.get("groups")
            self.eligible_roles = cache.get("eligible_roles")
            self.eligible_accounts = cache.get("eligible_accounts")
            self.user_role_name = cache.get("user_role_name")
            refreshed_user_roles_from_cache = True
    try:
        if not self.groups:
            self.groups = await auth.get_groups(self.user, headers=self.request.headers)
        if not self.groups:
            raise NoGroupsException(
                f"Groups not detected. Headers: {self.request.headers}"
            )
    except NoGroupsException:
        # Missing groups is logged but not fatal; the flow continues.
        stats.count("Basehandler.authorization_flow.no_groups_detected")
        log_data["message"] = "No groups detected. Check configuration."
        log.error(log_data)
    # Set Per-User Role Name (This logic is not used in OSS deployment)
    if (
        config.get("user_roles.opt_in_group")
        and config.get("user_roles.opt_in_group") in self.groups
    ):
        # Get or create user_role_name attribute
        self.user_role_name = await auth.get_or_create_user_role_name(self.user)
    self.eligible_roles = await group_mapping.get_eligible_roles(
        self.user, self.groups, self.user_role_name, console_only=console_only
    )
    if not self.eligible_roles:
        log_data["message"] = "No eligible roles detected for user. But letting them continue"
        log.error(log_data)
    log_data["eligible_roles"] = len(self.eligible_roles)
    if not self.eligible_accounts:
        try:
            self.eligible_accounts = await group_mapping.get_eligible_accounts(
                self.eligible_roles
            )
            log_data["eligible_accounts"] = len(self.eligible_accounts)
            log_data["message"] = "Successfully authorized user."
            log.debug(log_data)
        except Exception:
            stats.count("Basehandler.authorization_flow.exception")
            log.error(log_data, exc_info=True)
            raise
    if (
        config.get("auth.cache_user_info_server_side", True)
        and self.groups
        # Only set role cache if we didn't retrieve user's existing roles from cache
        and not refreshed_user_roles_from_cache
    ):
        # Best-effort cache write; ignore Redis connectivity failures.
        try:
            self.red.setex(
                f"USER-{self.user}-CONSOLE-{console_only}",
                config.get("dynamic_config.role_cache.cache_expiration", 60),
                json.dumps(
                    {
                        "groups": self.groups,
                        "eligible_roles": self.eligible_roles,
                        "eligible_accounts": self.eligible_accounts,
                        "user_role_name": self.user_role_name,
                    }
                ),
            )
        except redis.exceptions.ConnectionError:
            pass
    # Issue an auth cookie only if one isn't already present.
    if (
        config.get("auth.set_auth_cookie")
        and config.get("auth_cookie_name", "consoleme_auth")
        and not self.get_cookie(config.get("auth_cookie_name", "consoleme_auth"))
    ):
        expiration = datetime.utcnow().replace(tzinfo=pytz.UTC) + timedelta(
            minutes=config.get("jwt.expiration_minutes", 60)
        )
        encoded_cookie = await generate_jwt_token(self.user, self.groups, exp=expiration)
        self.set_cookie(
            config.get("auth_cookie_name", "consoleme_auth"),
            encoded_cookie,
            expires=expiration,
            # Default secure flag follows whether the configured URL is HTTPS.
            secure=config.get(
                "auth.cookie.secure",
                "https://" in config.get("url"),
            ),
            httponly=config.get("auth.cookie.httponly", True),
            samesite=config.get("auth.cookie.samesite", True),
        )
    if self.tracer:
        await self.tracer.set_additional_tags({"USER": self.user})
async def get(self):
    """
    Provide information about site configuration for the frontend

    :return:
    """
    is_contractor = config.config_plugin().is_contractor(self.user)

    # Branding, analytics, support links, and feature toggles for the UI.
    analytics = {
        "tracking_id": config.get("google_analytics.tracking_id"),
        "options": config.get("google_analytics.options", {}),
    }
    notifications = {
        "enabled": config.get("site_config.notifications.enabled"),
        "request_interval": config.get(
            "site_config.notifications.request_interval", 60
        ),
    }
    site_config = {
        "consoleme_logo": await get_random_security_logo(),
        "google_analytics": analytics,
        "documentation_url": config.get(
            "documentation_page", "https://hawkins.gitbook.io/consoleme/"
        ),
        "support_contact": config.get("support_contact"),
        "support_chat_url": config.get(
            "support_chat_url", "https://discord.com/invite/nQVpNGGkYu"
        ),
        "security_logo": config.get("security_logo.image"),
        "security_url": config.get("security_logo.url"),
        # If site_config.landing_url is set, users will be redirected to the
        # landing URL after authenticating on the frontend.
        "landing_url": config.get("site_config.landing_url"),
        "temp_policy_support": config.get("policies.temp_policy_support"),
        "notifications": notifications,
        "cloudtrail_denies_policy_generation": config.get(
            "celery.cache_cloudtrail_denies.enabled", False
        ),
    }

    custom_page_header: Dict[str, str] = await get_custom_page_header(
        self.user, self.groups
    )
    header_section = {
        "custom_header_message_title": custom_page_header.get(
            "custom_header_message_title", ""
        ),
        "custom_header_message_text": custom_page_header.get(
            "custom_header_message_text", ""
        ),
        "custom_header_message_route": custom_page_header.get(
            "custom_header_message_route", ""
        ),
    }

    # Per-page visibility flags for the frontend navigation.
    page_visibility = {
        "header": header_section,
        "groups": {"enabled": config.get("headers.group_access.enabled", False)},
        "users": {"enabled": config.get("headers.group_access.enabled", False)},
        "policies": {
            "enabled": config.get("headers.policies.enabled", True)
            and not is_contractor
        },
        "self_service": {
            "enabled": config.get("enable_self_service", True) and not is_contractor
        },
        "api_health": {
            "enabled": is_in_group(
                self.user,
                self.groups,
                config.get("groups.can_edit_health_alert", []),
            )
        },
        "audit": {
            "enabled": is_in_group(
                self.user, self.groups, config.get("groups.can_audit", [])
            )
        },
        "config": {"enabled": can_edit_dynamic_config(self.user, self.groups)},
    }

    user_profile = {
        "site_config": site_config,
        "user": self.user,
        "can_logout": config.get("auth.set_auth_cookie", False),
        "is_contractor": is_contractor,
        "employee_photo_url": config.config_plugin().get_employee_photo_url(self.user),
        "employee_info_url": config.config_plugin().get_employee_info_url(self.user),
        "authorization": {
            "can_edit_policies": can_admin_policies(self.user, self.groups),
            "can_create_roles": can_create_roles(self.user, self.groups),
            "can_delete_iam_principals": can_delete_iam_principals(
                self.user, self.groups
            ),
        },
        "pages": page_visibility,
        "accounts": await get_account_id_to_name_mapping(),
    }

    self.set_header("Content-Type", "application/json")
    self.write(user_profile)