Example #1
    def load_config_from_dynamo(self):
        """If enabled, we can load a configuration dynamically from Dynamo at a certain time interval. This reduces
        the need for code redeploys to make configuration changes"""
        from consoleme.lib.dynamo import UserDynamoHandler
        from consoleme.lib.redis import RedisHandler

        ddb = UserDynamoHandler()
        red = RedisHandler().redis_sync()

        while True:
            dynamic_config = refresh_dynamic_config(ddb)
            if dynamic_config and dynamic_config != self.config.get("dynamic_config"):
                red.set(
                    "DYNAMIC_CONFIG_CACHE",
                    json.dumps(dynamic_config),
                )
                self.get_logger("config").debug(
                    {
                        "function": f"{__name__}.{self.__class__.__name__}.{sys._getframe().f_code.co_name}",
                        "message": "Dynamic configuration changes detected and loaded",
                        "dynamic_config": dynamic_config,
                    }
                )
                self.config["dynamic_config"] = dynamic_config
            time.sleep(self.get("dynamic_config.dynamo_load_interval", 60))
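
The loop above never returns, so it is normally started once at application startup on its own thread rather than called inline. A minimal sketch of that startup pattern, assuming CONFIG is the configuration object whose method is shown above:

import threading

from consoleme.config.config import CONFIG

# Run the Dynamo config poller off the main thread; daemon=True means the
# sleeping loop will not keep the process alive at shutdown.
threading.Thread(target=CONFIG.load_config_from_dynamo, daemon=True).start()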
Example #2
    def test_post(self):
        mock_request_data = [
            {
                "request_id": 12345,
                "username": "******",
                "request_time": 22345,
            },
            {
                "request_id": 12346,
                "username": "******",
                "request_time": 12345,
            },
        ]

        from consoleme.lib.redis import RedisHandler

        # Mocked by fakeredis
        red = RedisHandler().redis_sync()
        red.set(
            config.get("cache_policy_requests.redis_key", "ALL_POLICY_REQUESTS"),
            json.dumps(mock_request_data),
        )

        headers = {
            config.get("auth.user_header_name"): "*****@*****.**",
            config.get("auth.groups_header_name"): "groupa,groupb,groupc",
        }
        response = self.fetch(
            "/api/v2/requests", method="POST", headers=headers, body="{}"
        )
        self.assertEqual(response.code, 200)
        diff = DeepDiff(json.loads(response.body), mock_request_data)
        self.assertFalse(diff)
Example #3
    def load_dynamic_config_from_redis(self, log_data: Dict[str, Any], red=None):
        if not red:
            from consoleme.lib.redis import RedisHandler

            red = RedisHandler().redis_sync()
        dynamic_config = red.get("DYNAMIC_CONFIG_CACHE")
        if not dynamic_config:
            self.get_logger("config").warning(
                {
                    **log_data,
                    "error": (
                        "Unable to retrieve Dynamic Config from Redis. "
                        "This can be safely ignored if your dynamic config is empty."
                    ),
                }
            )
            return
        dynamic_config_j = json.loads(dynamic_config)
        if self.config.get("dynamic_config", {}) != dynamic_config_j:
            self.get_logger("config").debug(
                {
                    **log_data,
                    "message": "Refreshing dynamic configuration from Redis",
                }
            )
            self.config["dynamic_config"] = dynamic_config_j
Example #4
    def test_post_limit(self):
        mock_request_data = [
            {
                "request_id": 12345,
                "username": "******"
            },
            {
                "request_id": 12346,
                "username": "******"
            },
        ]

        from consoleme.lib.redis import RedisHandler

        # Mocked by fakeredis
        red = RedisHandler().redis_sync()
        red.set(
            self.config.get("cache_policy_requests.redis_key",
                            "ALL_POLICY_REQUESTS"),
            json.dumps(mock_request_data),
        )

        headers = {
            self.config.get("auth.user_header_name"): "*****@*****.**",
            self.config.get("auth.groups_header_name"): "groupa,groupb,groupc",
        }
        response = self.fetch(
            "/api/v2/requests",
            method="POST",
            headers=headers,
            body=json.dumps({"limit": 1}),
        )
        self.assertEqual(response.code, 200)
        self.assertEqual(len(json.loads(response.body)), 3)
        self.assertEqual(len(json.loads(response.body)["data"]), 1)
Example #5
    def test_post_request(self):
        mock_request_data = {
            "justification": "test asdf",
            "admin_auto_approve": False,
            "changes": {
                "changes": [{
                    "principal": {
                        "principal_arn":
                        "arn:aws:iam::123456789012:role/TestInstanceProfile",
                        "principal_type": "AwsResource",
                    },
                    "change_type": "inline_policy",
                    "action": "attach",
                    "policy": {
                        "policy_document": {
                            "Version":
                            "2012-10-17",
                            "Statement": [{
                                "Action": ["sqs:SetQueueAttributes"],
                                "Effect":
                                "Allow",
                                "Resource":
                                ["arn:aws:sqs:us-east-1:223456789012:queue"],
                            }],
                        }
                    },
                }]
            },
        }

        from consoleme.lib.redis import RedisHandler

        # Mocked by fakeredis
        red = RedisHandler().redis_sync()
        red.set(
            self.config.get("cache_policy_requests.redis_key",
                            "ALL_POLICY_REQUESTS"),
            json.dumps(mock_request_data),
        )

        headers = {
            self.config.get("auth.user_header_name"): "*****@*****.**",
            self.config.get("auth.groups_header_name"): "groupa,groupb,groupc",
        }
        response = self.fetch(
            "/api/v2/request",
            method="POST",
            headers=headers,
            body=json.dumps(mock_request_data),
        )
        self.assertEqual(response.code, 200)
        response_d = json.loads(response.body)
        self.assertEqual(response_d["errors"], 0)
        self.assertEqual(response_d["request_created"], True)
        self.assertIn("/policies/request/", response_d["request_url"])
Example #6
def redis_prereqs(redis):
    from consoleme.lib.redis import RedisHandler

    red = RedisHandler().redis_sync()
    red.hmset(
        "AWSCONFIG_RESOURCE_CACHE",
        {
            "arn:aws:ec2:us-west-2:123456789013:security-group/12345": "{}",
            "arn:aws:sqs:us-east-1:123456789012:rolequeue": "{}",
            "arn:aws:sns:us-east-1:123456789012:roletopic": "{}",
            "arn:aws:iam::123456789012:role/role": "{}",
        },
    )
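
Note that hmset is deprecated in redis-py 3.x and removed in 4.x; on newer client versions the equivalent call is hset with a mapping keyword, roughly:

# Rough equivalent on redis-py >= 3.5, where hmset is deprecated:
red.hset(
    "AWSCONFIG_RESOURCE_CACHE",
    mapping={
        "arn:aws:ec2:us-west-2:123456789013:security-group/12345": "{}",
        "arn:aws:sqs:us-east-1:123456789012:rolequeue": "{}",
    },
)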
Example #7
    def purge_redislite_cache(self):
        """
        Purges the redislite cache in the primary DB periodically. This forces a cache refresh, and it is
        convenient for cases where you cannot securely run shared Redis (e.g., AWS App Runner).
        """
        if not self.get("redis.use_redislite"):
            return
        from consoleme.lib.redis import RedisHandler

        red = RedisHandler().redis_sync()
        while True:
            red.flushdb()
            time.sleep(self.get("redis.purge_redislite_cache_interval", 1800))
Example #8
    def purge_redislite_cache(self):
        """
        Purges the redislite cache in the primary DB periodically. This forces a cache refresh, and it is
        convenient for cases where you cannot securely run shared Redis (e.g., AWS App Runner).
        """
        if not self.get("redis.use_redislite"):
            return
        from consoleme.lib.redis import RedisHandler

        red = RedisHandler().redis_sync()
        while threading.main_thread().is_alive():
            red.flushdb()
            # Wait till main exit flag is set OR a fixed timeout
            if main_exit_flag.wait(timeout=self.get(
                    "redis.purge_redislite_cache_interval", 1800)):
                break
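
This variant assumes a module-level main_exit_flag threading.Event that the application sets on shutdown; waiting on the event doubles as the sleep, so the thread wakes immediately instead of running out a long timeout. A minimal sketch of that assumed setup:

import threading

# Assumed module-level shutdown signal shared with background threads.
main_exit_flag = threading.Event()

def request_shutdown():
    # Called from a signal handler or atexit hook; wakes any thread blocked
    # in main_exit_flag.wait(timeout=...) so it can exit promptly.
    main_exit_flag.set()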
Example #9
def create_default_resources(s3, iam, redis, iam_sync_principals, iamrole_table):
    from asgiref.sync import async_to_sync

    from consoleme.config import config
    from consoleme.lib.cache import store_json_results_in_redis_and_s3

    global all_roles
    buckets = [config.get("consoleme_s3_bucket")]
    for bucket in buckets:
        s3.create_bucket(Bucket=bucket)

    if all_roles:
        async_to_sync(store_json_results_in_redis_and_s3)(
            all_roles,
            s3_bucket=config.get(
                "cache_iam_resources_across_accounts.all_roles_combined.s3.bucket"
            ),
            s3_key=config.get(
                "cache_iam_resources_across_accounts.all_roles_combined.s3.file",
                "account_resource_cache/cache_all_roles_v1.json.gz",
            ),
        )
        return
    from consoleme.celery_tasks.celery_tasks import cache_iam_resources_for_account
    from consoleme.lib.account_indexers import get_account_id_to_name_mapping
    from consoleme.lib.redis import RedisHandler

    red = RedisHandler().redis_sync()

    accounts_d = async_to_sync(get_account_id_to_name_mapping)()
    for account_id in accounts_d.keys():
        cache_iam_resources_for_account(account_id)

    cache_key = config.get("aws.iamroles_redis_key", "IAM_ROLE_CACHE")
    all_roles = red.hgetall(cache_key)
    async_to_sync(store_json_results_in_redis_and_s3)(
        all_roles,
        s3_bucket=config.get(
            "cache_iam_resources_across_accounts.all_roles_combined.s3.bucket"
        ),
        s3_key=config.get(
            "cache_iam_resources_across_accounts.all_roles_combined.s3.file",
            "account_resource_cache/cache_all_roles_v1.json.gz",
        ),
    )
Example #10
    def load_config_from_dynamo(self, ddb=None, red=None):
        if not ddb:
            from consoleme.lib.dynamo import UserDynamoHandler

            ddb = UserDynamoHandler()
        if not red:
            from consoleme.lib.redis import RedisHandler

            red = RedisHandler().redis_sync()

        dynamic_config = refresh_dynamic_config(ddb)
        if dynamic_config and dynamic_config != self.config.get("dynamic_config"):
            red.set(
                "DYNAMIC_CONFIG_CACHE",
                json.dumps(dynamic_config),
            )
            self.get_logger("config").debug(
                {
                    "function": f"{__name__}.{self.__class__.__name__}.{sys._getframe().f_code.co_name}",
                    "message": "Dynamic configuration changes detected and loaded",
                }
            )
            self.config["dynamic_config"] = dynamic_config
Example #11
    def load_config_from_dynamo_bg_thread(self):
        """If enabled, we can load a configuration dynamically from Dynamo at a certain time interval. This reduces
        the need for code redeploys to make configuration changes"""
        from consoleme.lib.dynamo import UserDynamoHandler
        from consoleme.lib.redis import RedisHandler

        ddb = UserDynamoHandler()
        red = RedisHandler().redis_sync()

        while threading.main_thread().is_alive():
            self.load_config_from_dynamo(ddb, red)
            # Wait till main exit flag is set OR a fixed timeout
            if main_exit_flag.wait(timeout=self.get(
                    "dynamic_config.dynamo_load_interval", 60)):
                break
Example #12
    def __init__(self):
        self.red = RedisHandler().redis_sync()
        self.redis_key = config.get("aws.iamroles_redis_key", "IAM_ROLE_CACHE")
        self.dynamo = IAMRoleDynamoHandler()
Example #13
class Aws:
    """The AWS class handles all interactions with AWS."""

    def __init__(self):
        self.red = RedisHandler().redis_sync()
        self.redis_key = config.get("aws.iamroles_redis_key", "IAM_ROLE_CACHE")
        self.dynamo = IAMRoleDynamoHandler()

    @retry(
        stop_max_attempt_number=3,
        wait_exponential_multiplier=1000,
        wait_exponential_max=1000,
    )
    def _add_role_to_redis(self, role_entry: dict):
        """Add the role to redis with a retry.

        :param role_entry:
        :return:
        """
        self.red.hset(self.redis_key, role_entry["arn"], json.dumps(role_entry))

    @retry(
        stop_max_attempt_number=3,
        wait_exponential_multiplier=1000,
        wait_exponential_max=1000,
    )
    def _fetch_role_from_redis(self, role_arn: str):
        """Fetch the role from redis with a retry.

        :param role_arn:
        :return:
        """
        return self.red.hget(self.redis_key, role_arn)

    @retry(
        stop_max_attempt_number=3,
        wait_exponential_multiplier=1000,
        wait_exponential_max=1000,
    )
    def _invoke_lambda(self, client: object, function_name: str, payload: bytes):
        """Invoke the lambda function for creating the user-roles."""
        return client.invoke(
            FunctionName=function_name,
            InvocationType="RequestResponse",
            Payload=payload,
        )

    async def _cloudaux_to_aws(self, role):
        """Convert the cloudaux get_role into the get_account_authorization_details equivalent."""
        # Pop out the fields that are not required:
        # Arn and RoleName will be popped off later:
        unrequired_fields = ["_version", "MaxSessionDuration"]

        for uf in unrequired_fields:
            role.pop(uf, None)

        # Fix the Managed Policies:
        role["AttachedManagedPolicies"] = list(
            map(
                lambda x: {"PolicyName": x["name"], "PolicyArn": x["arn"]},
                role.get("ManagedPolicies", []),
            )
        )
        role.pop("ManagedPolicies", None)

        # Fix the tags:
        if isinstance(role.get("Tags", {}), dict):
            role["Tags"] = list(
                map(
                    lambda key: {"Key": key, "Value": role["Tags"][key]},
                    role.get("Tags", {}),
                )
            )

        # Note: the instance profile list is verbose -- not transforming it (outside of renaming the field)!
        role["InstanceProfileList"] = role.pop("InstanceProfiles", [])

        # Inline Policies:
        role["RolePolicyList"] = list(
            map(
                lambda name: {
                    "PolicyName": name,
                    "PolicyDocument": role["InlinePolicies"][name],
                },
                role.get("InlinePolicies", {}),
            )
        )
        role.pop("InlinePolicies", None)

        return role

    async def fetch_iam_role(
        self, account_id: str, role_arn: str, force_refresh: bool = False
    ) -> dict:
        """Fetch the IAM Role template from Redis and/or Dynamo.

        :param account_id:
        :param role_arn:
        :return:
        """
        log_data: dict = {
            "function": f"{__name__}.{self.__class__.__name__}.{sys._getframe().f_code.co_name}",
            "role_arn": role_arn,
            "account_id": account_id,
            "force_refresh": force_refresh,
        }

        result: dict = {}

        if not force_refresh:
            # First check redis:
            result: str = await sync_to_async(self._fetch_role_from_redis)(role_arn)

            if result:
                result: dict = json.loads(result)

                # If this item is less than an hour old, then return it from Redis.
                if result["ttl"] > int(
                    (datetime.utcnow() - timedelta(hours=1)).timestamp()
                ):
                    log_data["message"] = "Role not in Redis -- fetching from DDB."
                    log.debug(log_data)
                    stats.count(
                        "aws.fetch_iam_role.in_redis",
                        tags={"account_id": account_id, "role_arn": role_arn},
                    )
                    result["policy"] = json.loads(result["policy"])
                    return result

            # If not in Redis or it's older than an hour, proceed to DynamoDB:
            result = await sync_to_async(self.dynamo.fetch_iam_role)(
                role_arn, account_id
            )

        # If it's NOT in dynamo, or if we're forcing a refresh, we need to reach out to AWS and fetch:
        if force_refresh or not result.get("Item"):
            if force_refresh:
                log_data["message"] = "Force refresh is enabled. Going out to AWS."
                stats.count(
                    "aws.fetch_iam_role.force_refresh",
                    tags={"account_id": account_id, "role_arn": role_arn},
                )
            else:
                log_data["message"] = "Role is missing in DDB. Going out to AWS."
                stats.count(
                    "aws.fetch_iam_role.missing_dynamo",
                    tags={"account_id": account_id, "role_arn": role_arn},
                )
            log.debug(log_data)
            try:
                tasks = []
                role_name = role_arn.split("/")[-1]
                # Instantiate a cached CloudAux client
                client = await sync_to_async(boto3_cached_conn)(
                    "iam",
                    account_number=account_id,
                    assume_role=config.get("policies.role_name"),
                    retry_max_attempts=2,
                )
                conn = {
                    "account_number": account_id,
                    "assume_role": config.get("policies.role_name"),
                    "region": config.region,
                }

                role_details = asyncio.ensure_future(
                    sync_to_async(client.get_role)(RoleName=role_name)
                )
                tasks.append(role_details)

                all_tasks = [
                    get_role_managed_policies,
                    get_role_inline_policies,
                    list_role_tags,
                ]

                for t in all_tasks:
                    tasks.append(
                        asyncio.ensure_future(
                            sync_to_async(t)({"RoleName": role_name}, **conn)
                        )
                    )

                responses = asyncio.gather(*tasks)
                result = await responses
                role = result[0]["Role"]
                role["ManagedPolicies"] = result[1]
                role["InlinePolicies"] = result[2]
                role["Tags"] = result[3]

            except ClientError as ce:
                if ce.response["Error"]["Code"] == "NoSuchEntity":
                    # The role does not exist:
                    log_data["message"] = "Role does not exist in AWS."
                    log.error(log_data)
                    stats.count(
                        "aws.fetch_iam_role.missing_in_aws",
                        tags={"account_id": account_id, "role_arn": role_arn},
                    )
                    return None

                else:
                    log_data["message"] = f"Some other error: {ce.response}"
                    log.error(log_data)
                    stats.count(
                        "aws.fetch_iam_role.aws_connection_problem",
                        tags={"account_id": account_id, "role_arn": role_arn},
                    )
                    raise

            # Format the role for DynamoDB and Redis:
            await self._cloudaux_to_aws(role)
            result = {
                "arn": role.get("Arn"),
                "name": role.pop("RoleName"),
                "resourceId": role.pop("RoleId"),
                "accountId": account_id,
                "ttl": int((datetime.utcnow() + timedelta(hours=36)).timestamp()),
                "policy": self.dynamo.convert_role_to_json(role),
                "permissions_boundary": role.get("PermissionsBoundary", {}),
                "templated": self.red.hget(
                    config.get("templated_roles.redis_key", "TEMPLATED_ROLES_v2"),
                    role.get("Arn").lower(),
                ),
            }

            # Sync with DDB:
            await sync_to_async(self.dynamo.sync_iam_role_for_account)(result)
            log_data["message"] = "Role fetched from AWS, and synced with DDB."
            stats.count(
                "aws.fetch_iam_role.fetched_from_aws",
                tags={"account_id": account_id, "role_arn": role_arn},
            )

        else:
            log_data["message"] = "Role fetched from DDB."
            stats.count(
                "aws.fetch_iam_role.in_dynamo",
                tags={"account_id": account_id, "role_arn": role_arn},
            )

            # Fix the TTL:
            result["Item"]["ttl"] = int(result["Item"]["ttl"])
            result = result["Item"]

        # Update the redis cache:
        stats.count(
            "aws.fetch_iam_role.in_dynamo",
            tags={"account_id": account_id, "role_arn": role_arn},
        )
        await sync_to_async(self._add_role_to_redis)(result)

        log_data["message"] += " Updated Redis."
        log.debug(log_data)

        result["policy"] = json.loads(result["policy"])
        return result

    async def call_user_lambda(
        self, role: str, user_email: str, account_id: str, user_role_name: str = "user"
    ) -> str:
        """Call out to the lambda function to provision the per-user role for the account."""
        # Get the template's name based on the account and user role name:
        accounts = await get_account_id_to_name_mapping()
        account_name = accounts[account_id]
        role_to_fetch = (
            f"arn:aws:iam::{account_id}:role/{account_name}_{user_role_name}"
        )

        # Fetch the role
        role_details = await self.fetch_iam_role(account_id, role_to_fetch)

        # If we did not receive any role details, raise an exception:
        if not role_details:
            raise NoRoleTemplateException(f"Unable to locate {role_to_fetch}")

        # Prepare the payload for the lambda and send it out:
        payload = json.dumps(
            {
                "user_role_short_name": role.split("role/")[1],
                "user_email": user_email,
                "account_number": account_id,
                "primary_policies": role_details["policy"].get("RolePolicyList", []),
                "managed_policy_arns": role_details["policy"].get(
                    "AttachedManagedPolicies", []
                ),
            }
        ).encode()

        client = boto3.client("lambda", region_name=config.region)

        lambda_result = await sync_to_async(self._invoke_lambda)(
            client,
            config.get("lambda_role_creator.function_name", "UserRoleCreator"),
            payload,
        )
        lambda_result = json.loads(lambda_result["Payload"].read().decode())

        if not lambda_result.get("success", False):
            raise UserRoleLambdaException(f"Received invalid response: {lambda_result}")

        return f'arn:aws:iam::{lambda_result["account_number"]}:role/{lambda_result["role_name"]}'

    @tenacity.retry(
        wait=tenacity.wait_fixed(2),
        stop=tenacity.stop_after_attempt(5),
        retry=tenacity.retry_if_exception_type(UserRoleNotAssumableYet),
    )
    async def get_credentials(
        self,
        user: str,
        role: str,
        enforce_ip_restrictions: bool = True,
        user_role: bool = False,
        account_id: str = None,
        custom_ip_restrictions: list = None,
    ) -> dict:
        """Get Credentials will return the list of temporary credentials from AWS."""
        log_data = {
            "function": f"{__name__}.{self.__class__.__name__}.{sys._getframe().f_code.co_name}",
            "user": user,
            "role": role,
            "enforce_ip_restrictions": enforce_ip_restrictions,
            "custom_ip_restrictions": custom_ip_restrictions,
            "message": "Generating credentials",
        }
        session = boto3.Session()
        client = session.client(
            "sts",
            region_name=config.region,
            endpoint_url=f"https://sts.{config.region}.amazonaws.com",
        )

        ip_restrictions = config.get("aws.ip_restrictions")
        stats.count("aws.get_credentials", tags={"role": role, "user": user})

        # If this is a dynamic request, we need to fetch the role details, call out to the lambda,
        # wait for it to complete, assume the role, and then return the assumed credentials.
        if user_role:
            stats.count("aws.call_user_lambda", tags={"role": role, "user": user})
            try:
                role = await self.call_user_lambda(role, user, account_id)
            except Exception as e:
                raise e

        try:
            if enforce_ip_restrictions and ip_restrictions:
                policy = json.dumps(
                    dict(
                        Version="2012-10-17",
                        Statement=[
                            dict(
                                Effect="Deny",
                                Action="*",
                                Resource="*",
                                Condition=dict(
                                    NotIpAddress={"aws:SourceIP": ip_restrictions}
                                ),
                            ),
                            dict(Effect="Allow", Action="*", Resource="*"),
                        ],
                    )
                )

                credentials = await sync_to_async(client.assume_role)(
                    RoleArn=role,
                    RoleSessionName=user.lower(),
                    Policy=policy,
                    DurationSeconds=config.get("aws.session_duration", 3600),
                )
                credentials["Credentials"]["Expiration"] = int(
                    credentials["Credentials"]["Expiration"].timestamp()
                )
                return credentials
            if custom_ip_restrictions:
                policy = json.dumps(
                    dict(
                        Version="2012-10-17",
                        Statement=[
                            dict(
                                Effect="Deny",
                                Action="*",
                                Resource="*",
                                Condition=dict(
                                    NotIpAddress={
                                        "aws:SourceIP": custom_ip_restrictions
                                    }
                                ),
                            ),
                            dict(Effect="Allow", Action="*", Resource="*"),
                        ],
                    )
                )

                credentials = await sync_to_async(client.assume_role)(
                    RoleArn=role,
                    RoleSessionName=user.lower(),
                    Policy=policy,
                    DurationSeconds=config.get("aws.session_duration", 3600),
                )
                credentials["Credentials"]["Expiration"] = int(
                    credentials["Credentials"]["Expiration"].timestamp()
                )
                return credentials

            credentials = await sync_to_async(client.assume_role)(
                RoleArn=role,
                RoleSessionName=user.lower(),
                DurationSeconds=config.get("aws.session_duration", 3600),
            )
            credentials["Credentials"]["Expiration"] = int(
                credentials["Credentials"]["Expiration"].timestamp()
            )
            log.debug(log_data)
            return credentials
        except ClientError as e:
            # TODO(ccastrapel): Determine if user role was really just created, or if this is an older role.
            if user_role:
                raise UserRoleNotAssumableYet(e.response["Error"])
            raise

    async def generate_url(
        self,
        user: str,
        role: str,
        region: str = "us-east-1",
        user_role: bool = False,
        account_id: str = None,
    ) -> str:
        """Generate URL will get temporary credentials and craft a URL with those credentials."""
        function = (
            f"{__name__}.{self.__class__.__name__}.{sys._getframe().f_code.co_name}"
        )
        log_data = {
            "function": function,
            "user": user,
            "role": role,
            "message": "Generating authenticated AWS console URL",
        }
        log.debug(log_data)
        credentials = await self.get_credentials(
            user,
            role,
            user_role=user_role,
            account_id=account_id,
            enforce_ip_restrictions=False,
        )

        credentials_d = {
            "sessionId": credentials.get("Credentials", {}).get("AccessKeyId"),
            "sessionKey": credentials.get("Credentials", {}).get("SecretAccessKey"),
            "sessionToken": credentials.get("Credentials", {}).get("SessionToken"),
        }

        req_params = {
            "Action": "getSigninToken",
            "Session": bleach.clean(json.dumps(credentials_d)),
            "DurationSeconds": config.get("aws.session_duration", 3600),
        }

        http_client = AsyncHTTPClient(force_instance=True)

        url_with_params: str = url_concat(
            config.get(
                "aws.federation_url", "https://signin.aws.amazon.com/federation"
            ),
            req_params,
        )
        r = await http_client.fetch(url_with_params, ssl_options=ssl.SSLContext())
        token = json.loads(r.body)

        login_req_params = {
            "Action": "login",
            "Issuer": config.get("aws.issuer"),
            "Destination": (
                "{}".format(
                    config.get(
                        "aws.console_url", "https://{}.console.aws.amazon.com"
                    ).format(region)
                )
            ),
            "SigninToken": bleach.clean(token.get("SigninToken")),
            "SessionDuration": config.get("aws.session_duration", 3600),
        }

        r2 = requests_sync.Request(
            "GET",
            config.get(
                "aws.federation_url", "https://signin.aws.amazon.com/federation"
            ),
            params=login_req_params,
        )
        url = r2.prepare().url
        return url

    async def sns_publisher_group_requests(
        self, user, group, justification, request_id, bg_check_passed
    ):
        raise NotImplementedError()

    async def sns_publish_policy_requests(self, request, request_uri):
        raise NotImplementedError()

    async def send_communications_policy_change_request(self, request, send_sns=False):
        """
        Optionally send a notification when there's a new policy change request

        :param request:
        :param send_sns:
        :return:
        """
        log_data: dict = {
            "function": f"{__name__}.{self.__class__.__name__}.{sys._getframe().f_code.co_name}",
            "message": "Function is not configured.",
        }
        log.warning(log_data)
        return

    async def send_communications_new_policy_request(
        self, extended_request, admin_approved, approval_probe_approved
    ):
        """
        Optionally send a notification when there's a new policy change request

        :param approval_probe_approved:
        :param admin_approved:
        :param extended_request:
        :return:
        """
        log_data: dict = {
            "function": f"{__name__}.{self.__class__.__name__}.{sys._getframe().f_code.co_name}",
            "message": "Function is not configured.",
        }
        log.warning(log_data)
        return

    @staticmethod
    def handle_detected_role(role):
        pass

    async def should_auto_approve_policy_v2(self, extended_request, user, user_groups):
        return {"approved": False}
Example #14
import sys
from typing import Dict

from consoleme.config import config
from consoleme.lib.cloud_credential_authorization_mapping.models import (
    CredentialAuthzMappingGenerator,
    RoleAuthorizations,
    user_or_group,
)
from consoleme.lib.redis import RedisHandler

red = RedisHandler().redis_sync()


class DynamicConfigAuthorizationMappingGenerator(CredentialAuthzMappingGenerator):
    async def generate_credential_authorization_mapping(
        self, authorization_mapping: Dict[user_or_group, RoleAuthorizations]
    ) -> Dict[user_or_group, RoleAuthorizations]:
        """This will list accounts that meet the account attribute search criteria."""
        function = f"{__name__}.{sys._getframe().f_code.co_name}"
        log_data = {
            "function": function,
        }
        config.CONFIG.load_dynamic_config_from_redis(log_data, red)
        group_mapping_configuration = config.get("dynamic_config.group_mapping")

        if not group_mapping_configuration:
            return authorization_mapping

        for group, role_mapping in group_mapping_configuration.items():
            if config.get("auth.force_groups_lowercase", False):
Example #15
    def test_typeahead_get(self):
        from consoleme.config import config

        headers = {
            config.get("auth.user_header_name"): "*****@*****.**",
            config.get("auth.groups_header_name"): "groupa,groupb,groupc",
        }
        from consoleme.lib.redis import RedisHandler

        red = RedisHandler().redis_sync()
        red.hmset(
            "AWSCONFIG_RESOURCE_CACHE",
            {
                "arn:aws:ec2:us-west-2:123456789013:security-group/12345":
                "{}",
                "arn:aws:sqs:us-east-1:123456789012:rolequeue": "{}",
                "arn:aws:sns:us-east-1:123456789012:roletopic": "{}",
                "arn:aws:iam::123456789012:role/role": "{}",
            },
        )
        # Return all the things
        response = self.fetch("/api/v2/typeahead/resources",
                              method="GET",
                              headers=headers)
        self.assertEqual(response.code, 200)
        responseJSON = json.loads(response.body)

        self.assertEqual(len(responseJSON), 4)
        # Filter for a specific query
        response = self.fetch("/api/v2/typeahead/resources?typeahead=role",
                              method="GET",
                              headers=headers)
        self.assertEqual(response.code, 200)
        responseJSON = json.loads(response.body)
        self.assertEqual(len(responseJSON), 3)

        # Filter for a specific limit
        response = self.fetch(
            "/api/v2/typeahead/resources?typeahead=role&limit=1",
            method="GET",
            headers=headers,
        )
        self.assertEqual(response.code, 200)
        responseJSON = json.loads(response.body)
        self.assertEqual(len(responseJSON), 1)

        # Filter for a specific account
        response = self.fetch(
            "/api/v2/typeahead/resources?account_id=123456789013",
            method="GET",
            headers=headers,
        )
        self.assertEqual(response.code, 200)
        responseJSON = json.loads(response.body)
        self.assertEqual(len(responseJSON), 1)

        # Filter for a specific resource type
        response = self.fetch(
            "/api/v2/typeahead/resources?resource_type=sqs",
            method="GET",
            headers=headers,
        )
        self.assertEqual(response.code, 200)
        responseJSON = json.loads(response.body)
        self.assertEqual(len(responseJSON), 1)

        # filter for region
        response = self.fetch(
            "/api/v2/typeahead/resources?region=us-east-1",
            method="GET",
            headers=headers,
        )
        self.assertEqual(response.code, 200)
        responseJSON = json.loads(response.body)
        self.assertEqual(len(responseJSON), 2)

        # multifilter
        response = self.fetch(
            "/api/v2/typeahead/resources?region=us-east-1&account_id=123456789012&typeahead=role&limit=5",
            method="GET",
            headers=headers,
        )
        self.assertEqual(response.code, 200)
        responseJSON = json.loads(response.body)
        self.assertEqual(len(responseJSON), 2)
Example #16
from typing import Optional

import sentry_sdk
import ujson as json
from asgiref.sync import async_to_sync, sync_to_async

from consoleme.config import config
from consoleme.exceptions.exceptions import DataNotRetrievable
from consoleme.handlers.base import BaseAPIV2Handler
from consoleme.lib.cache import retrieve_json_data_from_redis_or_s3
from consoleme.lib.redis import RedisHandler
from consoleme.models import ArnArray

red = async_to_sync(RedisHandler().redis)()


class ResourceTypeAheadHandlerV2(BaseAPIV2Handler):
    async def get(self):
        try:
            type_ahead: Optional[str] = (self.request.arguments.get(
                "typeahead")[0].decode("utf-8").lower())
        except TypeError:
            type_ahead = None

        try:
            account_id: Optional[str] = self.request.arguments.get(
                "account_id")[0].decode("utf-8")
        except TypeError:
            account_id = None

        try:
Example #17
    def test_clear_old_redis_iam_cache(self):
        from consoleme.config.config import CONFIG
        from consoleme.lib.redis import RedisHandler

        red = RedisHandler().redis_sync()

        self.celery.REDIS_IAM_COUNT = 3

        # Clear out the existing cache from Redis:
        red.delete("test_cache_roles_for_account_expiration")

        # Set the config value for the redis cache location
        old_value = CONFIG.config["aws"].pop("iamroles_redis_key", None)
        CONFIG.config["aws"][
            "iamroles_redis_key"
        ] = "test_cache_roles_for_account_expiration"

        # Add in some dummy IAM roles with a TTL that is more than 6 hours old:
        old_ttl = int((datetime.utcnow() - timedelta(hours=6, seconds=5)).timestamp())

        # 13 items / 3 = 5 iterations -- all of these roles should be cleaned up:
        for i in range(0, 13):
            role_entry = {
                "arn": f"arn:aws:iam::123456789012:role/RoleNumber{i}",
                "name": f"RoleNumber{i}",
                "accountId": "123456789012",
                "ttl": old_ttl,
                "policy": "{}",
            }
            self.celery._add_role_to_redis(
                "test_cache_roles_for_account_expiration", role_entry
            )

        # Add a role with a current TTL -- this should not be cleaned up:
        role_entry = {
            "arn": "arn:aws:iam::123456789012:role/RoleNumber99",
            "name": "RoleNumber99",
            "accountId": "123456789012",
            "ttl": int(datetime.utcnow().timestamp()),
            "policy": "{}",
        }
        self.celery._add_role_to_redis(
            "test_cache_roles_for_account_expiration", role_entry
        )

        # Nothing should happen if we are not in us-west-2:
        old_conf_region = self.celery.config.region
        self.celery.config.region = "us-east-1"

        self.celery.clear_old_redis_iam_cache()
        self.assertEqual(red.hlen("test_cache_roles_for_account_expiration"), 14)

        # With the proper region:
        self.celery.config.region = "us-west-2"
        self.celery.clear_old_redis_iam_cache()

        # Verify:
        self.assertEqual(red.hlen("test_cache_roles_for_account_expiration"), 1)
        self.assertIsNotNone(
            red.hget(
                "test_cache_roles_for_account_expiration",
                "arn:aws:iam::123456789012:role/RoleNumber99",
            )
        )

        # Clear out the existing cache from Redis:
        red.delete("test_cache_roles_for_account_expiration")

        # Reset the config values:
        self.celery.config.region = old_conf_region
        self.celery.REDIS_IAM_COUNT = 1000
        if not old_value:
            del CONFIG.config["aws"]["iamroles_redis_key"]
        else:
            CONFIG.config["aws"]["iamroles_redis_key"] = old_value
Example #18
    def test_cache_roles_for_account(self):
        from consoleme.config.config import CONFIG
        from consoleme.lib.dynamo import IAMRoleDynamoHandler
        from consoleme.lib.redis import RedisHandler

        red = RedisHandler().redis_sync()

        # Set the config value for the redis cache location
        old_value = CONFIG.config["aws"].pop("iamroles_redis_key", None)
        CONFIG.config["aws"]["iamroles_redis_key"] = "test_cache_roles_for_account"
        # Clear out the existing cache from Redis:
        red.delete("test_cache_roles_for_account")
        # Run it:
        self.celery.cache_roles_for_account("123456789012")

        # Verify that everything is there:
        dynamo = IAMRoleDynamoHandler()

        results = dynamo.role_table.scan(TableName="consoleme_iamroles_global")

        remaining_roles = [
            "arn:aws:iam::123456789012:role/ConsoleMe",
            "arn:aws:iam::123456789012:role/cm_someuser_N",
            "arn:aws:iam::123456789012:role/awsaccount_user",
            "arn:aws:iam::123456789012:role/TestInstanceProfile",
            "arn:aws:iam::123456789012:role/rolename",
        ] + [f"arn:aws:iam::123456789012:role/RoleNumber{num}" for num in range(0, 10)]

        self.assertEqual(results["Count"], len(remaining_roles))
        self.assertEqual(results["Count"], red.hlen("test_cache_roles_for_account"))

        for i in results["Items"]:
            remaining_roles.remove(i["arn"])
            self.assertEqual(i["accountId"], "123456789012")
            self.assertGreater(int(i["ttl"]), 0)
            self.assertIsNotNone(json.loads(i["policy"]))
            self.assertEqual(
                json.loads(red.hget("test_cache_roles_for_account", i["arn"]))[
                    "policy"
                ],
                i["policy"],
            )

        # Should all be accounted for:
        self.assertEqual(remaining_roles, [])

        # We should have the same data in Redis in all regions, this time coming from DDB.
        old_conf_region = self.celery.config.region
        self.celery.config.region = "us-east-1"

        # Clear out the existing cache from Redis:
        red.delete("test_cache_roles_for_account")

        # This should spin off extra fake celery tasks
        res = self.celery.cache_roles_across_accounts()
        self.assertEqual(
            res,
            {
                "function": "consoleme.celery_tasks.celery_tasks.cache_roles_across_accounts",
                "cache_key": "test_cache_roles_for_account",
                "num_roles": 0,
                "num_accounts": 1,
            },
        )

        # Reset the config value:
        self.celery.config.region = old_conf_region
        if not old_value:
            del CONFIG.config["aws"]["iamroles_redis_key"]
        else:
            CONFIG.config["aws"]["iamroles_redis_key"] = old_value