Example #1
 def test_object_acl(self):
     """Can specify an object ACL for S3 objects"""
     settings = dict(self.settings)
     settings["storage.object_acl"] = "authenticated-read"
     kwargs = S3Storage.configure(settings)
     storage = S3Storage(MagicMock(), **kwargs)
     package = make_package()
     storage.upload(package, BytesIO())
     acl = list(self.bucket.objects.all())[0].Object().Acl()
     self.assertItemsEqual(
         acl.grants,
         [
             {
                 "Grantee": {
                     "Type": "CanonicalUser",
                     "ID": ANY
                 },
                 "Permission": "FULL_CONTROL",
             },
             {
                 "Grantee": {
                     "Type":
                     "Group",
                     "URI":
                     "http://acs.amazonaws.com/groups/global/AuthenticatedUsers",
                 },
                 "Permission": "READ",
             },
         ],
     )
Example #2
 def test_create_bucket(self):
     """ If S3 bucket doesn't exist, create it """
     settings = {
         'storage.bucket': 'new_bucket',
         'storage.region_name': 'us-east-1',
     }
     S3Storage.configure(settings)
     bucket = self.s3.Bucket('new_bucket')
     bucket.load()
Example #3
 def test_create_bucket(self):
     """ If S3 bucket doesn't exist, create it """
     settings = {
         "storage.bucket": "new_bucket",
         "storage.region_name": "us-east-1"
     }
     S3Storage.configure(settings)
     bucket = self.s3.Bucket("new_bucket")
     bucket.load()
Example #4
    def test_create_bucket_eu(self):
        """If S3 bucket doesn't exist, create it"""
        settings = {
            "storage.bucket": "new_bucket",
            "storage.region_name": "eu-central-1",
            "signature_version": "s3v4",
        }
        S3Storage.configure(settings)

        bucket = self.s3.Bucket("new_bucket")
        bucket.load()
Example #5
 def test_storage_class(self):
     """Can specify a storage class for S3 objects"""
     settings = dict(self.settings)
     settings["storage.storage_class"] = "STANDARD_IA"
     kwargs = S3Storage.configure(settings)
     storage = S3Storage(MagicMock(), **kwargs)
     package = make_package()
     storage.upload(package, BytesIO())
     storage_class = list(
         self.bucket.objects.all())[0].Object().storage_class
     self.assertItemsEqual(storage_class, "STANDARD_IA")
Example #6
 def test_create_bucket(self, boto_mock):
     """ If S3 bucket doesn't exist, create it """
     conn = boto_mock.connect_s3()
     conn.lookup.return_value = None
     settings = {
         'aws.bucket': 'new_bucket',
         'aws.region': 'us-east-1',
     }
     S3Storage.configure(settings)
     conn.create_bucket.assert_called_with('new_bucket',
                                           location='us-east-1')
Example #7
 def setUp(self):
     super(TestS3Storage, self).setUp()
     self.s3_mock = mock_s3()
     self.s3_mock.start()
     self.settings = {
         'storage.bucket': 'mybucket',
         'storage.access_key': 'abc',
         'storage.secret_key': 'bcd',
     }
     conn = boto.connect_s3()
     self.bucket = conn.create_bucket('mybucket')
     patch.object(S3Storage, 'test', True).start()
     kwargs = S3Storage.configure(self.settings)
     self.storage = S3Storage(MagicMock(), **kwargs)
Example #8
 def setUp(self):
     super(TestS3Storage, self).setUp()
     self.s3_mock = mock_s3()
     self.s3_mock.start()
     self.settings = {
         "storage.bucket": "mybucket",
         "storage.aws_access_key_id": "abc",
         "storage.aws_secret_access_key": "bcd",
     }
     self.s3 = boto3.resource("s3")
     self.bucket = self.s3.create_bucket(Bucket="mybucket")
     patch.object(S3Storage, "test", True).start()
     kwargs = S3Storage.configure(self.settings)
     self.storage = S3Storage(MagicMock(), **kwargs)
Example #9
 def setUp(self):
     super(TestS3Storage, self).setUp()
     self.s3_mock = mock_s3()
     self.s3_mock.start()
     self.settings = {
         'storage.bucket': 'mybucket',
         'storage.aws_access_key_id': 'abc',
         'storage.aws_secret_access_key': 'bcd',
     }
     self.s3 = boto3.resource('s3')
     self.bucket = self.s3.create_bucket(Bucket='mybucket')
     patch.object(S3Storage, 'test', True).start()
     kwargs = S3Storage.configure(self.settings)
     self.storage = S3Storage(MagicMock(), **kwargs)
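The two setUp variants above create their fixtures under mock_s3, which comes from the moto library: once the mock is started, boto/boto3 S3 calls are served by an in-memory backend instead of a real AWS account, so create_bucket and the uploads in the other tests never leave the process. A minimal standalone sketch of the same pattern, assuming a moto release that still exposes mock_s3 (moto 5.x renamed it to mock_aws) and using dummy credentials so boto3 can sign its requests:

    from io import BytesIO

    import boto3
    from moto import mock_s3

    @mock_s3  # decorator form; equivalent to mock_s3().start() / .stop() around the body
    def demo():
        # Dummy credentials and region: moto fakes the HTTP layer, but boto3
        # still needs credentials to sign the (intercepted) requests.
        s3 = boto3.resource(
            "s3",
            region_name="us-east-1",
            aws_access_key_id="testing",
            aws_secret_access_key="testing",
        )
        bucket = s3.create_bucket(Bucket="mybucket")
        bucket.put_object(Key="example-1.0.tar.gz", Body=BytesIO(b"data"))
        # The object only exists in moto's in-memory bucket.
        return [obj.key for obj in bucket.objects.all()]

    print(demo())  # ['example-1.0.tar.gz']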
Example #10
    def test_create_bucket(self, boto_mock):
        """ If S3 bucket doesn't exist, create it """
        conn = boto_mock.s3.connect_to_region()
        boto_mock.exception.S3ResponseError = boto.exception.S3ResponseError

        def raise_not_found(*_, **__):
            """ Raise a 'bucket not found' exception """
            e = boto.exception.S3ResponseError(400, "missing")
            e.error_code = "NoSuchBucket"
            raise e

        conn.get_bucket = raise_not_found
        settings = {"storage.bucket": "new_bucket", "storage.region": "us-east-1"}
        S3Storage.configure(settings)
        conn.create_bucket.assert_called_with("new_bucket", location="us-east-1")
Example #11
 def test_object_acl(self):
     """ Can specify an object ACL for S3 objects """
     settings = dict(self.settings)
     settings["storage.object_acl"] = "authenticated-read"
     kwargs = S3Storage.configure(settings)
     storage = S3Storage(MagicMock(), **kwargs)
     package = make_package()
     storage.upload(package, BytesIO())
     acl = list(self.bucket.objects.all())[0].Object().Acl()
     self.assertItemsEqual(
         acl.grants,
         [
             {
                 "Grantee": {"Type": "CanonicalUser", "ID": ANY},
                 "Permission": "FULL_CONTROL",
             },
             {
                 "Grantee": {
                     "Type": "Group",
                     "URI": "http://acs.amazonaws.com/groups/global/AuthenticatedUsers",
                 },
                 "Permission": "READ",
             },
         ],
     )
Example #12
 def setUp(self):
     super(TestS3Storage, self).setUp()
     self.s3_mock = mock_s3()
     self.s3_mock.start()
     config = MagicMock()
     self.settings = {
         'aws.bucket': 'mybucket',
         'aws.access_key': 'abc',
         'aws.secret_key': 'bcd',
     }
     config.get_settings.return_value = self.settings
     conn = boto.connect_s3()
     self.bucket = conn.create_bucket('mybucket')
     patch.object(S3Storage, 'test', True).start()
     S3Storage.configure(config)
     self.storage = S3Storage(MagicMock())
Example #13
    def test_create_bucket(self, boto_mock):
        """ If S3 bucket doesn't exist, create it """
        conn = boto_mock.s3.connect_to_region()
        boto_mock.exception.S3ResponseError = boto.exception.S3ResponseError

        def raise_not_found(*_, **__):
            """ Raise a 'bucket not found' exception """
            e = boto.exception.S3ResponseError(400, 'missing')
            e.error_code = 'NoSuchBucket'
            raise e
        conn.get_bucket = raise_not_found
        settings = {
            'storage.bucket': 'new_bucket',
            'storage.region': 'us-east-1',
        }
        S3Storage.configure(settings)
        conn.create_bucket.assert_called_with('new_bucket',
                                              location='us-east-1')
Example #14
 def test_storage_class(self):
     """ Can specify a storage class for S3 objects """
     settings = dict(self.settings)
     settings["storage.storage_class"] = "STANDARD_IA"
     kwargs = S3Storage.configure(settings)
     storage = S3Storage(MagicMock(), **kwargs)
     package = make_package()
     storage.upload(package, BytesIO())
     storage_class = list(self.bucket.objects.all())[0].Object().storage_class
     self.assertItemsEqual(storage_class, "STANDARD_IA")
Example #15
 def setUp(self):
     super(TestS3Storage, self).setUp()
     self.s3_mock = mock_s3()
     self.s3_mock.start()
     self.settings = {"storage.bucket": "mybucket", "storage.access_key": "abc", "storage.secret_key": "bcd"}
     conn = boto.connect_s3()
     self.bucket = conn.create_bucket("mybucket")
     patch.object(S3Storage, "test", True).start()
     kwargs = S3Storage.configure(self.settings)
     self.storage = S3Storage(MagicMock(), **kwargs)
Example #16
 def setUp(self):
     super(TestS3Storage, self).setUp()
     self.s3_mock = mock_s3()
     self.s3_mock.start()
     self.settings = {
         'storage.bucket': 'mybucket',
         'storage.access_key': 'abc',
         'storage.secret_key': 'bcd',
     }
     conn = boto.connect_s3()
     self.bucket = conn.create_bucket('mybucket')
     patch.object(S3Storage, 'test', True).start()
     kwargs = S3Storage.configure(self.settings)
     self.storage = S3Storage(MagicMock(), **kwargs)
Example #17
 def test_object_acl(self):
     """ Can specify an object ACL for S3 objects """
     settings = dict(self.settings)
     settings['storage.object_acl'] = 'authenticated-read'
     kwargs = S3Storage.configure(settings)
     storage = S3Storage(MagicMock(), **kwargs)
     package = make_package()
     storage.upload(package, BytesIO())
     acl = list(self.bucket.objects.all())[0].Object().Acl()
     self.assertItemsEqual(acl.grants, [{
         'Grantee': {
             'Type': 'CanonicalUser',
             'ID': ANY
         },
         'Permission': 'FULL_CONTROL'
     }, {
         'Grantee': {
             'Type': 'Group',
             'URI': 'http://acs.amazonaws.com/groups/global/AuthenticatedUsers'
         },
         'Permission': 'READ'
     }])
Example #18
 def test_create_bucket(self):
     """ If S3 bucket doesn't exist, create it """
     settings = {"storage.bucket": "new_bucket", "storage.region_name": "us-east-1"}
     S3Storage.configure(settings)
     bucket = self.s3.Bucket("new_bucket")
     bucket.load()
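All of the snippets above are shown without their import lines. Judging from the names they use, a plausible minimal set is sketched below; S3Storage and make_package are helpers from the project under test, so the paths given for them are placeholders, not the real ones. Note also that assertItemsEqual is the Python 2 spelling of the unittest assertion; on Python 3 the equivalent method is assertCountEqual.

    from io import BytesIO
    from unittest.mock import ANY, MagicMock, patch  # on Python 2 these come from the mock package

    import boto    # older examples: boto.connect_s3(), boto.exception.S3ResponseError
    import boto3   # newer examples: boto3.resource("s3")
    from moto import mock_s3

    # Placeholder paths; the real imports depend on the package under test:
    # from mypackage.storage import S3Storage
    # from mypackage.tests import make_package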