def test_client_receives_extra_args(self):
    """Extra keyword arguments given to S3Storage are forwarded to boto3.

    Patches the boto3 session so no real network call is made, then
    verifies the resource factory received the extra kwargs verbatim.
    """
    with mock.patch('boto3.session.Session.resource') as mocked_resource:
        S3Storage(*self.cred,
                  endpoint_url='http://somehwere.it',
                  region_name='worlwide')
    mocked_resource.assert_called_once_with(
        's3',
        endpoint_url='http://somehwere.it',
        region_name='worlwide')
def test_storage_class(self):
    """Files created with storage_class='STANDARD_IA' keep that class on S3."""
    fs_ia = S3Storage(*self.cred, bucket=self.bucket,
                      storage_class='STANDARD_IA')
    file_id = fs_ia.create(FILE_CONTENT)
    # self.fs points at the same bucket, so the key is visible through it.
    stored_key = self.fs._bucket_driver.get_key(file_id)
    assert stored_key.storage_class == 'STANDARD_IA'
def test_storage_non_ascii_filenames(self):
    """Uploading a file whose name contains non-ASCII characters succeeds."""
    non_ascii_name = u'些公.pdf'
    storage = S3Storage(*self.cred, bucket=self.bucket,
                        storage_class='STANDARD_IA')
    created_id = storage.create(FILE_CONTENT,
                                filename=non_ascii_name,
                                content_type='application/pdf')
    assert created_id is not None
def test_bucket_failure(self):
    """A ClientError raised while listing buckets must reach the caller."""
    from botocore.exceptions import ClientError

    def failing_api_call(_, operation_name, kwarg):
        # Simulate S3 erroring out on the bucket-existence check; all
        # other operations fall through and return None.
        if operation_name == 'ListBuckets':
            raise ClientError(error_response={'Error': {'Code': 500}},
                              operation_name=operation_name)

    try:
        with mock.patch('botocore.client.BaseClient._make_api_call',
                        new=failing_api_call):
            S3Storage(*self.cred)
    except ClientError:
        pass
    else:
        assert False, 'Should have reraised ClientError'
def setup(self):
    """Per-test setup: import the boto3 backend and build a storage instance.

    Skips the test when depot's boto3 backend (and therefore boto3 itself)
    is not importable, or when AWS credentials are not provided through the
    AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY environment variables.

    Fix: removed the unused ``from botocore.exceptions import ClientError``
    import — nothing in this method referenced it, and botocore presence is
    already implied by the depot.io.boto3 import above.
    """
    try:
        global S3Storage
        from depot.io.boto3 import S3Storage
    except ImportError:
        raise SkipTest('Boto not installed')

    env = os.environ
    access_key_id = env.get('AWS_ACCESS_KEY_ID')
    secret_access_key = env.get('AWS_SECRET_ACCESS_KEY')
    if access_key_id is None or secret_access_key is None:
        raise SkipTest('Amazon S3 credentials not available')

    self.default_bucket_name = 'filedepot-%s' % (access_key_id.lower(), )
    self.cred = (access_key_id, secret_access_key)
    # NOTE(review): self.run_id is expected to be set by class-level setup.
    self.fs = S3Storage(access_key_id, secret_access_key,
                        'filedepot-testfs-%s' % self.run_id)
def test_get_key_failure(self):
    """An unexpected ClientError from HeadObject must bubble up from get()."""
    from botocore.exceptions import ClientError
    from botocore.client import BaseClient

    real_api_call = BaseClient._make_api_call

    def failing_head_object(client, operation_name, kwarg):
        # Fail only the object lookup; every other API call goes through
        # to the real implementation so the storage can be constructed.
        if operation_name == 'HeadObject':
            raise ClientError(error_response={'Error': {'Code': 500}},
                              operation_name=operation_name)
        return real_api_call(client, operation_name, kwarg)

    fs = S3Storage(*self.cred)
    try:
        with mock.patch('botocore.client.BaseClient._make_api_call',
                        new=failing_head_object):
            fs.get(uuid.uuid1())
    except ClientError:
        pass
    else:
        assert False, 'Should have reraised ClientError'
def test_creates_bucket_when_missing(self):
    """When no bucket exists yet, S3Storage must create the default one."""
    created_buckets = []

    def fake_api_call(_, operation_name, kwarg):
        # Pretend the account has no buckets, record creations, and
        # report HeadBucket success only for buckets created so far.
        if operation_name == 'ListBuckets':
            return {'Buckets': []}
        elif operation_name == 'CreateBucket':
            created_buckets.append(kwarg['Bucket'])
            return None
        elif operation_name == 'HeadBucket':
            if kwarg['Bucket'] in created_buckets:
                return {'ResponseMetadata': {'HTTPStatusCode': 200}}
            return {}
        else:
            assert False, 'Unexpected Call'

    with mock.patch('botocore.client.BaseClient._make_api_call',
                    new=fake_api_call):
        S3Storage(*self.cred)

    assert created_buckets == [self.default_bucket_name]
def setupClass(self):
    """Class-level setup: generate a unique run id and a shared S3 storage.

    Travis runs multiple tests concurrently on fake machines that might
    collide on pid and hostid, so use an uuid1 which should be fairly
    random thanks to clock_seq.

    Skips the whole class when depot's boto3 backend is not importable or
    when AWS credentials are missing from the environment.

    Fix: removed the unused ``from botocore.exceptions import ClientError``
    import — nothing in this method referenced it, and botocore presence is
    already implied by the depot.io.boto3 import above.
    """
    self.run_id = '%s-%s' % (uuid.uuid1().hex, os.getpid())

    try:
        global S3Storage
        from depot.io.boto3 import S3Storage
    except ImportError:
        raise SkipTest('Boto not installed')

    env = os.environ
    access_key_id = env.get('AWS_ACCESS_KEY_ID')
    secret_access_key = env.get('AWS_SECRET_ACCESS_KEY')
    if access_key_id is None or secret_access_key is None:
        raise SkipTest('Amazon S3 credentials not available')

    self.default_bucket_name = 'filedepot-%s' % (access_key_id.lower(), )
    self.cred = (access_key_id, secret_access_key)
    self.bucket = 'filedepot-testfs-%s' % self.run_id
    self.fs = S3Storage(*self.cred, bucket=self.bucket)