    def configure(cls, settings):
        kwargs = super(AWSSecretsManagerAccessBackend, cls).configure(settings)
        kwargs["secret_id"] = settings["auth.secret_id"]
        kwargs["kms_key_id"] = settings.get("auth.kms_key_id")
        session = boto3.session.Session(**get_settings(
            settings,
            "auth.",
            region_name=str,
            aws_access_key_id=str,
            aws_secret_access_key=str,
            aws_session_token=str,
            profile_name=str,
        ))
        kwargs["client"] = session.client("secretsmanager")

        return kwargs
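For reference, a minimal sketch of the 'auth.' settings this configure() consumes; the key names are taken from the snippet above, the values are placeholders, and only 'auth.secret_id' is read unconditionally:

# Hypothetical settings; the optional keys are forwarded to boto3.session.Session
# by get_settings(), so any of the Session keywords listed above can be supplied this way.
settings = {
    "auth.secret_id": "pypicloud/credentials",   # placeholder secret name (required)
    "auth.region_name": "us-east-1",
    "auth.profile_name": "default",
}
kwargs = AWSSecretsManagerAccessBackend.configure(settings)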
Example #2
    def configure(cls, settings):
        kwargs = super(AWSSecretsManagerAccessBackend, cls).configure(settings)
        kwargs['secret_id'] = settings['auth.secret_id']
        kwargs['kms_key_id'] = settings.get('auth.kms_key_id')

        kwargs['client'] = boto3.client(
            'secretsmanager',
            **get_settings(
                settings,
                'auth.',
                region_name=str,
                aws_access_key_id=str,
                aws_secret_access_key=str,
                aws_session_token=str,
                profile_name=str,
            ))

        return kwargs
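This variant builds the client with boto3.client() directly instead of going through a boto3.session.Session as in Example #1. Note that profile_name is a Session-level argument and is not part of the client() signature, so a configured 'auth.profile_name' only works with the Session-based variant. Roughly, the snippet boils down to the following (placeholder region, illustrative only):

# Equivalent direct construction; credentials are resolved from the environment here.
client = boto3.client("secretsmanager", region_name="us-east-1")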
Example #3
    def get_bucket(cls, bucket_name, settings):
        config_settings = get_settings(
            settings,
            "storage.",
            region_name=str,
            signature_version=str,
            user_agent=str,
            user_agent_extra=str,
            connect_timeout=int,
            read_timeout=int,
            parameter_validation=asbool,
            max_pool_connections=int,
            proxies=asdict,
        )
        config_settings["s3"] = get_settings(
            settings,
            "storage.",
            use_accelerate_endpoint=asbool,
            payload_signing_enabled=asbool,
            addressing_style=str,
            signature_version=str,
        )
        config = Config(**config_settings)

        def verify_value(val):
            """ Verify can be a boolean (False) or a string """
            s = str(val).strip().lower()
            if s in falsey:
                return False
            else:
                return str(val)

        s3conn = boto3.resource("s3",
                                config=config,
                                **get_settings(
                                    settings,
                                    "storage.",
                                    region_name=str,
                                    api_version=str,
                                    use_ssl=asbool,
                                    verify=verify_value,
                                    endpoint_url=str,
                                    aws_access_key_id=str,
                                    aws_secret_access_key=str,
                                    aws_session_token=str,
                                ))

        bucket = s3conn.Bucket(bucket_name)
        try:
            # Check whether the bucket already exists; a 404 error means we create it below
            s3conn.meta.client.head_bucket(Bucket=bucket_name)
        except ClientError as e:
            if e.response["Error"]["Code"] == "404":
                LOG.info("Creating S3 bucket %s", bucket_name)

                if config.region_name:
                    location = {"LocationConstraint": config.region_name}
                    bucket.create(CreateBucketConfiguration=location)
                else:
                    bucket.create()

                bucket.wait_until_exists()
            else:
                if e.response["Error"]["Code"] == "301":
                    LOG.error("Bucket found in different region. Check that "
                              "the S3 bucket specified in 'storage.bucket' is "
                              "in 'storage.region_name'")
                raise
        return bucket
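A minimal sketch of the 'storage.' settings this get_bucket() reads, assuming the enclosing class is the S3 storage backend (called S3Storage here for illustration); key names come from the snippet, values are placeholders for a custom S3-compatible endpoint:

# Hypothetical values; only keys that appear in the get_settings() calls above are used.
settings = {
    "storage.region_name": "eu-west-1",        # also used as the LocationConstraint on create
    "storage.endpoint_url": "https://s3.example.internal",
    "storage.signature_version": "s3v4",
    "storage.verify": "false",                 # assumes "false" is in the falsey set -> False
}
bucket = S3Storage.get_bucket("my-packages", settings)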
Example #4
    def configure(cls, settings):
        kwargs = super(S3Storage, cls).configure(settings)
        kwargs['expire_after'] = int(
            settings.get('storage.expire_after', 60 * 60 * 24))
        kwargs['bucket_prefix'] = settings.get('storage.prefix', '')
        kwargs['prepend_hash'] = asbool(
            settings.get('storage.prepend_hash', True))
        kwargs['sse'] = sse = settings.get('storage.server_side_encryption')
        if sse not in [None, 'AES256', 'aws:kms']:
            LOG.warning(
                "Unrecognized value %r for 'storage.server_side_encryption'. See "
                "https://boto3.readthedocs.io/en/latest/reference/services/s3.html#S3.Object.put "
                "for more details", sse)
        kwargs['object_acl'] = settings.get('storage.object_acl', None)
        kwargs['redirect_urls'] = asbool(
            settings.get('storage.redirect_urls', False))

        config_settings = get_settings(
            settings,
            'storage.',
            region_name=str,
            signature_version=str,
            user_agent=str,
            user_agent_extra=str,
            connect_timeout=int,
            read_timeout=int,
            parameter_validation=asbool,
            max_pool_connections=int,
            proxies=asdict,
        )
        config_settings['s3'] = get_settings(
            settings,
            'storage.',
            use_accelerate_endpoint=asbool,
            payload_signing_enabled=asbool,
            addressing_style=str,
        )
        config = Config(**config_settings)

        def verify_value(val):
            """ Verify can be a boolean (False) or a string """
            s = str(val).strip().lower()
            if s in falsey:
                return False
            else:
                return str(val)

        s3conn = boto3.resource('s3',
                                config=config,
                                **get_settings(
                                    settings,
                                    'storage.',
                                    region_name=str,
                                    api_version=str,
                                    use_ssl=asbool,
                                    verify=verify_value,
                                    endpoint_url=str,
                                    aws_access_key_id=str,
                                    aws_secret_access_key=str,
                                    aws_session_token=str,
                                ))

        bucket_name = settings.get('storage.bucket')
        if bucket_name is None:
            raise ValueError("You must specify the 'storage.bucket'")
        bucket = s3conn.Bucket(bucket_name)
        try:
            bucket.load()
        except ClientError as e:
            if e.response['Error']['Code'] == '404':
                LOG.info("Creating S3 bucket %s", bucket_name)
                bucket.create()
                bucket.wait_until_exists()
            else:
                if e.response['Error']['Code'] == '301':
                    LOG.warn("Bucket found in different region. Check that "
                             "the S3 bucket specified in 'storage.bucket' is "
                             "in 'storage.region_name'")
                raise
        kwargs['region_name'] = config_settings.get('region_name')
        kwargs['bucket'] = bucket
        return kwargs
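A minimal sketch of the settings S3Storage.configure() expects; only 'storage.bucket' is strictly required, the other keys are optional and taken from the snippet above. Keep in mind configure() actually contacts S3 to load or create the bucket:

# Hypothetical values; key names come from the snippet above.
settings = {
    "storage.bucket": "my-pypicloud-packages",      # required
    "storage.prefix": "packages/",                  # stored as bucket_prefix
    "storage.expire_after": "86400",                # int() is applied by the snippet
    "storage.server_side_encryption": "AES256",     # or "aws:kms"
    "storage.redirect_urls": "true",
    "storage.region_name": "us-east-1",
}
kwargs = S3Storage.configure(settings)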
Example #5
    def configure(cls, settings):
        kwargs = super(OpenStackSwiftStorage, cls).configure(settings)

        # noinspection PyTypeChecker
        config = get_settings(settings,
                              "storage.",
                              auth_url=str,
                              auth_version=str,
                              password=str,
                              username=str,
                              user_id=str,
                              tenant_name=str,
                              tenant_id=str,
                              project_name=str,
                              project_id=str,
                              user_domain_name=str,
                              user_domain_id=str,
                              project_domain_name=str,
                              project_domain_id=str,
                              endpoint_type=str,
                              region_name=str,
                              auth_token=str,
                              storage_url=str,
                              storage_policy=str,
                              container=str)

        options = {
            'authurl': config.get('auth_url'),
            'auth_version': config.get('auth_version', None),
            'user': config.get('username'),
            'key': config.get('password'),
            'preauthtoken': config.get('auth_token', None),
            'preauthurl': config.get('storage_url', None),
            'os_options': {
                'username': config.get('username', None),
                'user_id': config.get('user_id', None),
                'user_domain_name': config.get('user_domain_name', None),
                'user_domain_id': config.get('user_domain_id', None),
                'project_domain_name': config.get('project_domain_name', None),
                'project_domain_id': config.get('project_domain_id', None),
                'tenant_id': config.get('tenant_id', None),
                'tenant_name': config.get('tenant_name', None),
                'project_id': config.get('project_id', None),
                'project_name': config.get('project_name', None),
                'endpoint_type': config.get('endpoint_type', None),
                'region_name': config.get('region_name', None),
            },
            'force_auth_retry': True
        }

        client = Connection(**options)
        container = config.get('container')
        storage_policy = config.get('storage_policy', None)

        if storage_policy:
            try:
                caps = client.get_capabilities()
                LOG.debug('Swift capabilities: %s', caps)
            except ClientException as e:
                LOG.warning("Can't get swift capabilities: %s", e)
            else:
                policies = set()
                for policy in caps.get('swift', {}).get('policies', []):
                    policies.add(policy.get('name', '').lower())
                    for alias in policy.get('aliases', '').split(','):
                        policies.add(alias.strip().lower())
                if policies and storage_policy.lower() not in policies:
                    LOG.warning("Storage policy %r is not advertised by the "
                                "Swift cluster", storage_policy)
                else:
                    kwargs['storage_policy'] = storage_policy

        try:
            headers = client.head_container(container,
                                            headers={'user-agent': USER_AGENT})
            LOG.info('Container exists: object_count = %s, bytes_used = %s',
                     headers['x-container-object-count'],
                     headers['x-container-bytes-used'])
        except ClientException as e:
            if e.http_status != 404:
                LOG.error('Failed to check container existence "%s": %s',
                          container, e)
                raise
            create_container(client, container, storage_policy)

        kwargs['client'] = client
        kwargs['container'] = container
        return kwargs
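A minimal sketch of the 'storage.' settings the Swift backend reads, assuming a Keystone v3 style deployment; key names mirror the get_settings() call above and all values are placeholders:

# Hypothetical values; keys that are not needed can simply be omitted.
settings = {
    "storage.auth_url": "https://keystone.example.com/v3",
    "storage.auth_version": "3",
    "storage.username": "pypicloud",
    "storage.password": "s3cret",
    "storage.project_name": "packages",
    "storage.user_domain_name": "Default",
    "storage.project_domain_name": "Default",
    "storage.region_name": "RegionOne",
    "storage.container": "pypicloud",
    "storage.storage_policy": "standard",   # optional; checked against the cluster capabilities
}
kwargs = OpenStackSwiftStorage.configure(settings)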
Example #6
    def get_bucket(cls, bucket_name, settings):
        config_settings = get_settings(
            settings,
            "storage.",
            region_name=str,
            signature_version=str,
            user_agent=str,
            user_agent_extra=str,
            connect_timeout=int,
            read_timeout=int,
            parameter_validation=asbool,
            max_pool_connections=int,
            proxies=asdict,
        )
        config_settings["s3"] = get_settings(
            settings,
            "storage.",
            use_accelerate_endpoint=asbool,
            payload_signing_enabled=asbool,
            addressing_style=str,
        )
        config = Config(**config_settings)

        def verify_value(val):
            """ Verify can be a boolean (False) or a string """
            s = str(val).strip().lower()
            if s in falsey:
                return False
            else:
                return str(val)

        s3conn = boto3.resource(
            "s3",
            config=config,
            **get_settings(
                settings,
                "storage.",
                region_name=str,
                api_version=str,
                use_ssl=asbool,
                verify=verify_value,
                endpoint_url=str,
                aws_access_key_id=str,
                aws_secret_access_key=str,
                aws_session_token=str,
            )
        )

        bucket = s3conn.Bucket(bucket_name)
        try:
            # Check whether the bucket already exists; a 404 error means we create it below
            s3conn.meta.client.head_bucket(Bucket=bucket_name)
        except ClientError as e:
            if e.response["Error"]["Code"] == "404":
                LOG.info("Creating S3 bucket %s", bucket_name)
                bucket.create()
                bucket.wait_until_exists()
            else:
                if e.response["Error"]["Code"] == "301":
                    LOG.error(
                        "Bucket found in different region. Check that "
                        "the S3 bucket specified in 'storage.bucket' is "
                        "in 'storage.region_name'"
                    )
                raise
        return bucket
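Example #6 differs from Example #3 mainly in bucket creation: it always calls bucket.create() without a CreateBucketConfiguration, so a missing bucket ends up in the client's default region. For completeness, a small sketch of how 'storage.verify' is interpreted by verify_value() above (assuming the module-level falsey set holds the usual false-like strings):

# Placeholder endpoint; pick exactly one of the verify values.
settings = {
    "storage.endpoint_url": "https://s3.example.internal",
    "storage.verify": "no",                               # -> False, TLS verification disabled
    # "storage.verify": "/etc/ssl/certs/internal-ca.pem", # -> passed through as a CA bundle path
}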