def test_override_settings(self):
    """The AWS_LOCATION setting should drive ``storage.location`` at instantiation."""
    for expected in ('foo1', 'foo2'):
        with override_settings(AWS_LOCATION=expected):
            self.assertEqual(s3boto3.S3Boto3Storage().location, expected)
def test_location_leading_slash(self):
    """A location beginning with '/' must raise ImproperlyConfigured.

    Note: the original test passed ``msg=`` to ``assertRaises``, which is only
    the *failure* message shown when nothing raises — it never verified the
    exception text. Capture the exception and compare its message explicitly.
    """
    msg = (
        "S3Boto3Storage.location cannot begin with a leading slash. "
        "Found '/'. Use '' instead."
    )
    with self.assertRaises(ImproperlyConfigured) as cm:
        s3boto3.S3Boto3Storage(location='/')
    self.assertEqual(str(cm.exception), msg)
def __init__(self, _location, params):
    """
    Cache backend wrapping S3Boto3Storage.

    location is not used but otherwise Django crashes.

    :param _location: ignored; Django's cache framework always passes it.
    :param params: the CACHES[...] configuration dict; options are read
        from ``params['OPTIONS']``.
    """
    BaseCache.__init__(self, params)

    # Amazon and boto have a maximum limit of 1000 for get_all_keys(). See:
    # http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
    # This implementation of the GET operation returns some or all (up to 1000)
    # of the objects in a bucket....
    if self._max_entries > 1000:
        self._max_entries = 1000

    self._options = params.get('OPTIONS', {})

    # Backward-compatible syntax for s3cache users before v1.2, for easy
    # upgrades: in v1.2 we updated to django-storages 1.1.8, which renamed
    # these settings in a non-backward-compatible fashion.  Each new-style
    # key falls back to the old-style one (or None) when absent.
    if 'ACCESS_KEY' not in self._options.keys():
        self._options['ACCESS_KEY'] = self._options.get(
            'ACCESS_KEY_ID', None)
    if 'SECRET_KEY' not in self._options.keys():
        self._options['SECRET_KEY'] = self._options.get(
            'SECRET_ACCESS_KEY', None)
    if 'BUCKET_NAME' not in self._options.keys():
        self._options['BUCKET_NAME'] = self._options.get(
            'STORAGE_BUCKET_NAME', None)

    # We use S3-compatible variable names while django-storages doesn't.
    _bucket_name = self._options.get('BUCKET_NAME', None)
    _default_acl = self._options.get('DEFAULT_ACL', 'private')
    _bucket_acl = self._options.get('BUCKET_ACL', _default_acl)
    # In case it was not specified in OPTIONS, default to 'private'
    # (via _default_acl above).
    self._options['BUCKET_ACL'] = _bucket_acl

    # Accept either 'LOCATION' or lowercase 'location' from OPTIONS.
    self._location = self._options.get('LOCATION',
                                       self._options.get('location', ''))
    # Sanitize location by removing leading and trailing slashes.
    self._options['LOCATION'] = self._location.strip('/')

    # S3BotoStorage wants lower-case names.  Collect (lowered, value)
    # pairs first; falsy values (e.g. None fallbacks above) are skipped.
    lowercase_options = []
    for name, value in self._options.items():
        if value:  # skip None values
            lowercase_options.append((name.lower(), value))

    # This avoids "RuntimeError: dictionary changed size during iteration"
    # with Python 3 if we assign to the dictionary directly.
    # NOTE(review): the original upper-case keys remain in self._options and
    # are forwarded below alongside the lower-case ones — presumably
    # S3Boto3Storage tolerates/ignores the extras; confirm before changing.
    for _n, _v in lowercase_options:
        self._options[_n] = _v

    self._storage = s3boto3.S3Boto3Storage(acl=_default_acl,
                                           bucket=_bucket_name,
                                           **self._options)
def test_deprecated_bucket(self):
    """Passing ``bucket=`` must emit the documented DeprecationWarning."""
    expected = (
        "The bucket argument of S3Boto3Storage is deprecated. Use argument "
        "bucket_name or setting AWS_STORAGE_BUCKET_NAME instead. The bucket "
        "argument will be removed in version 1.10.")
    with override_settings(AWS_DEFAULT_ACL=None), warnings.catch_warnings(
            record=True) as caught:
        s3boto3.S3Boto3Storage(bucket='django')
        assert len(caught) == 1
        assert issubclass(caught[-1].category, DeprecationWarning)
        assert str(caught[-1].message) == expected
def test_deprecated_acl(self):
    """Passing ``acl=`` must emit the documented DeprecationWarning."""
    expected = (
        "The acl argument of S3Boto3Storage is deprecated. Use argument "
        "default_acl or setting AWS_DEFAULT_ACL instead. The acl argument "
        "will be removed in version 1.10.")
    with override_settings(AWS_DEFAULT_ACL=None), warnings.catch_warnings(
            record=True) as caught:
        s3boto3.S3Boto3Storage(acl='private')
        assert len(caught) == 1
        assert issubclass(caught[-1].category, DeprecationWarning)
        assert str(caught[-1].message) == expected
def test_deprecated_default_acl(self):
    """Instantiating without AWS_DEFAULT_ACL set must warn about the insecure default."""
    expected = (
        "The default behavior of S3Boto3Storage is insecure and will change "
        "in django-storages 1.10. By default files and new buckets are saved "
        "with an ACL of 'public-read' (globally publicly readable). Version 1.10 will "
        "default to using the bucket's ACL. To opt into the new behavior set "
        "AWS_DEFAULT_ACL = None, otherwise to silence this warning explicitly "
        "set AWS_DEFAULT_ACL.")
    with warnings.catch_warnings(record=True) as caught:
        s3boto3.S3Boto3Storage()
        assert len(caught) == 1
        assert str(caught[-1].message) == expected
def test_deprecated_autocreate_bucket(self):
    """``auto_create_bucket=True`` must emit the documented DeprecationWarning."""
    expected = (
        "Automatic bucket creation will be removed in version 1.10. It encourages "
        "using overly broad credentials with this library. Either create it before "
        "manually or use one of a myriad of automatic configuration management tools. "
        "Unset AWS_AUTO_CREATE_BUCKET (it defaults to False) to silence this warning."
    )
    with override_settings(AWS_DEFAULT_ACL=None), warnings.catch_warnings(
            record=True) as caught:
        s3boto3.S3Boto3Storage(auto_create_bucket=True)
        assert len(caught) == 1
        assert issubclass(caught[-1].category, DeprecationWarning)
        assert str(caught[-1].message) == expected
def test_override_init_argument(self):
    """The ``location`` constructor argument should be reflected on the instance."""
    for loc in ('foo1', 'foo2'):
        self.assertEqual(s3boto3.S3Boto3Storage(location=loc).location, loc)
def setUp(self):
    """Create a storage instance whose connection is a MagicMock (no real S3)."""
    storage = s3boto3.S3Boto3Storage()
    storage._connections.connection = mock.MagicMock()
    self.storage = storage
def test_s3_session(self):
    """AWS_S3_SESSION_PROFILE should be passed to boto3.Session as profile_name.

    Fix: the original assigned ``settings.AWS_S3_SESSION_PROFILE`` directly,
    leaking the setting into every test that runs afterwards. Use
    ``override_settings`` (as the other tests in this file do) so the
    setting is restored on exit.
    """
    with override_settings(AWS_S3_SESSION_PROFILE="test_profile"):
        with mock.patch('boto3.Session') as mock_session:
            storage = s3boto3.S3Boto3Storage()
            _ = storage.connection
            mock_session.assert_called_once_with(profile_name="test_profile")