def test_bucket_already_exists(self, conn_class):
    """store_image must surface ImageException when the bucket cannot
    be created because it already exists on S3."""
    conn = Mock(name='connnection')
    conn.create_bucket = Mock(side_effect=S3CreateError(1, None, None))
    conn_class.return_value = conn
    storage = S3Storage(self.config)
    self.assertRaises(
        ImageException, storage.store_image, lambda x: x, 'image_id', None)
    # The failure must have come from the bucket-creation attempt.
    conn.create_bucket.assert_called_with('my_bucket_name')
def create_bucket(self, bucket_name, headers=None, location='', policy=None):
    """
    Creates a new located bucket. By default it's in the USA. You can pass
    Location.EU to create an European bucket.

    :type bucket_name: string
    :param bucket_name: The name of the new bucket

    :type headers: dict
    :param headers: Additional headers to pass along with the request to AWS.
                    NOTE: mutated in place when *policy* is given.

    :type location: :class:`boto.s3.connection.Location`
    :param location: The location of the new bucket

    :type policy: :class:`boto.s3.acl.CannedACLStrings`
    :param policy: A canned ACL policy that will be applied to the
                   new key in S3.

    :raises BotoClientError: if the bucket name contains upper-case letters.
    :raises S3CreateError: on HTTP 409 (bucket already exists).
    """
    # BUGFIX: the previous check was `not bucket_name.islower()`, but
    # str.islower() is False for strings with no cased characters
    # (e.g. "12345"), wrongly rejecting legal all-digit bucket names.
    # Comparing against the lower-cased name only rejects names that
    # actually contain upper-case letters.
    if bucket_name != bucket_name.lower():
        raise BotoClientError("Bucket names must be lower case.")
    if policy:
        if headers:
            headers['x-amz-acl'] = policy
        else:
            headers = {'x-amz-acl': policy}
    if location == '':
        data = ''
    else:
        # Non-default region: send a location-constraint document.
        data = ('<CreateBucketConstraint><LocationConstraint>' +
                location + '</LocationConstraint></CreateBucketConstraint>')
    response = self.make_request('PUT', bucket_name, headers=headers,
                                 data=data)
    body = response.read()
    if response.status == 409:
        # 409 Conflict: the bucket already exists.
        raise S3CreateError(response.status, response.reason, body)
    if response.status == 200:
        return Bucket(self, bucket_name)
    else:
        raise self.provider.storage_response_error(response.status,
                                                   response.reason, body)
def test_archive_file_s3_not_available(self, mock_S3_buckup, mock_archive,
                                       mock_strftime):
    """When S3 bucket creation fails (403), archive_files must return a
    falsy result, archive nothing, and still write the local
    backup-of-backup copy of every source file."""
    mock_strftime.return_value = self.__prefix
    mock_S3_buckup.side_effect = S3CreateError(403, '')
    rtn = archive_files(self.__src_dir, 's3_bucket', 1,
                        prefix=self.__prefix,
                        backup_of_backup=self.__backup_dir)
    self.assertFalse(rtn)
    # The archiver must never have been invoked.
    arg_list = mock_archive.call_args_list
    self.assertEqual(0, len(arg_list))
    self.assertTrue(os.path.exists(self.__backup_dir))
    backup_filenames = []
    for dirpath, dirs, files in os.walk(self.__backup_dir):
        for filename in files:
            fname = os.path.abspath(os.path.join(dirpath, filename))
            # Strip the backup-dir prefix plus the path separator so we
            # compare paths relative to the backup root.
            backup_filenames.append(fname[len(self.__backup_dir) + 1:])
    self.assertEqual(6, len(backup_filenames))
    # FIX: renamed loop variable from `file`, which shadowed the builtin.
    for backup_file in backup_filenames:
        self.assertIn(backup_file, self.__dest_files)
def create_bucket(self, bucket_name, headers=None,
                  location=Location.DEFAULT, policy=None):
    """
    Create a new located bucket; by default it lives in the USA.
    Pass Location.EU to create an European bucket.

    :type bucket_name: string
    :param bucket_name: The name of the new bucket

    :type headers: dict
    :param headers: Additional headers to pass along with the request
                    to AWS.

    :type location: :class:`boto.s3.connection.Location`
    :param location: The location of the new bucket

    :type policy: :class:`boto.s3.acl.CannedACLStrings`
    :param policy: A canned ACL policy that will be applied to the new
                   key in S3.
    """
    if policy:
        acl_key = self.provider_headers.acl_header
        if not headers:
            headers = {acl_key: policy}
        else:
            headers[acl_key] = policy
    data = ''
    if location != Location.DEFAULT:
        # Non-default region: send a location-constraint document.
        data = ('<CreateBucketConstraint><LocationConstraint>' +
                location +
                '</LocationConstraint></CreateBucketConstraint>')
    response = self.make_request('PUT', bucket_name, headers=headers,
                                 data=data)
    body = response.read()
    status = response.status
    if status == 409:
        raise S3CreateError(status, response.reason, body)
    if status == 200:
        return self.bucket_class(self, bucket_name)
    raise S3ResponseError(status, response.reason, body)
def create_bucket(self, bucket_name, location=Location.DEFAULT, policy=None):
    """
    Create a new located bucket; by default it lives in the USA.
    Pass Location.EU to create an European bucket.

    :type bucket_name: string
    :param bucket_name: The name of the new bucket

    :type location: :class:`boto.s3.connection.Location`
    :param location: The location of the new bucket

    :type policy: :class:`boto.s3.acl.CannedACLStrings`
    :param policy: A canned ACL policy that will be applied to the new
                   key in S3.
    """
    extra_headers = {'x-amz-acl': policy} if policy else {}
    data = ''
    if location != Location.DEFAULT:
        # Non-default region: send a location-constraint document.
        data = ('<CreateBucketConstraint><LocationConstraint>' +
                location +
                '</LocationConstraint></CreateBucketConstraint>')
    response = self.make_request('PUT', bucket_name,
                                 extra_headers=extra_headers, data=data)
    body = response.read()
    status = response.status
    if status == 409:
        raise S3CreateError(status, response.reason, body)
    if status == 200:
        return Bucket(self, bucket_name)
    raise S3ResponseError(status, response.reason, body)
def test_message_storage_create_error(self):
    """Every field of the S3 error document must be parsed onto the
    S3CreateError instance."""
    # Sample document taken from
    # https://answers.launchpad.net/duplicity/+question/150801
    xml = """<?xml version="1.0" encoding="UTF-8"?>
<Error>
  <Code>BucketAlreadyOwnedByYou</Code>
  <Message>Your previous request to create the named bucket succeeded and you already own it.</Message>
  <BucketName>cmsbk</BucketName>
  <RequestId>FF8B86A32CC3FE4F</RequestId>
  <HostId>6ENGL3DT9f0n7Tkv4qdKIs/uBNCMMA6QUFapw265WmodFDluP57esOOkecp55qhh</HostId>
</Error>
"""
    err = S3CreateError('409', 'Conflict', body=xml)
    expected_msg = ('Your previous request to create the named bucket '
                    'succeeded and you already own it.')
    self.assertEqual('cmsbk', err.bucket)
    self.assertEqual('BucketAlreadyOwnedByYou', err.error_code)
    self.assertEqual('409', err.status)
    self.assertEqual('Conflict', err.reason)
    self.assertEqual(expected_msg, err.error_message)
    # error_message and message must be aliases of one another.
    self.assertEqual(err.message, err.error_message)
    self.assertEqual('FF8B86A32CC3FE4F', err.request_id)