def test_dlo_referer_on_segment_container(self):
    """A referer-restricted DLO read must be authorized against BOTH the
    manifest's container and the segment container.

    The test walks three states: no referer ACL anywhere (403), referer
    ACL on the manifest container only (still 403, because the segments
    are unreadable), and finally referer ACL on the segment container
    too (read succeeds and returns the concatenated segment data).
    """
    if 'username3' not in tf.config:
        raise tf.SkipTest('Requires user 3')
    # First the account2 (test3) should fail
    config2 = tf.config.copy()
    config2['username'] = tf.config['username3']
    config2['password'] = tf.config['password3']
    conn2 = Connection(config2)
    conn2.authenticate()
    # Referer matches the ACL pattern set below, but no ACL exists yet.
    headers = {'X-Auth-Token': conn2.storage_token,
               'Referer': 'http://blah.example.com'}
    dlo_file = self.env.container.file("mancont2")
    self.assertRaises(ResponseError, dlo_file.read, hdrs=headers)
    self.assert_status(403)
    # Now set the referer on the dlo container only
    referer_metadata = {'X-Container-Read': '.r:*.example.com,.rlistings'}
    self.env.container.update_metadata(referer_metadata)
    # Manifest is readable, but segment fetches are still denied.
    self.assertRaises(ResponseError, dlo_file.read, hdrs=headers)
    self.assert_status(403)
    # Finally set the referer on the segment container
    self.env.container2.update_metadata(referer_metadata)
    # Both containers now allow the referer, so the full DLO streams.
    contents = dlo_file.read(hdrs=headers)
    self.assertEqual(
        contents,
        b"ffffffffffgggggggggghhhhhhhhhhiiiiiiiiiijjjjjjjjjj")
def setUp(self):
    """Skip multipart-upload tests unless the cluster advertises support.

    Also records the cluster's minimum segment size so tests can build
    parts that are exactly large enough.
    """
    super(TestS3ApiMultiUpload, self).setUp()
    if not tf.cluster_info['s3api'].get('allow_multipart_uploads', False):
        # Fix: skip message previously read "enebled".
        raise tf.SkipTest('multipart upload is not enabled')
    # 5242880 (5 MiB) mirrors S3's minimum part size default.
    self.min_segment_size = int(tf.cluster_info['s3api'].get(
        'min_segment_size', 5242880))
def test_bucket_listing_with_staticweb(self):
    """Listing a bucket over S3 keeps working after staticweb listings
    are enabled (and the container made publicly readable)."""
    if 'staticweb' not in tf.cluster_info:
        raise tf.SkipTest('Staticweb not enabled')
    bucket = 'bucket'
    create_resp = self.conn.create_bucket(Bucket=bucket)
    self.assertEqual(
        200, create_resp['ResponseMetadata']['HTTPStatusCode'])
    listing = self.conn.list_objects(Bucket=bucket)
    self.assertEqual(200, listing['ResponseMetadata']['HTTPStatusCode'])
    # enable staticweb listings; make publicly-readable
    swift_conn = Connection(tf.config)
    swift_conn.authenticate()
    staticweb_headers = {
        'X-Container-Read': '.r:*,.rlistings',
        'X-Container-Meta-Web-Listings': 'true',
    }
    post_status = swift_conn.make_request(
        'POST', [bucket], hdrs=staticweb_headers)
    self.assertEqual(post_status, 204)
    # S3 listing must be unaffected by the staticweb configuration.
    listing = self.conn.list_objects(Bucket=bucket)
    self.assertEqual(200, listing['ResponseMetadata']['HTTPStatusCode'])
def setUp(self):
    """Create the bucket used by the versioning tests, skipping when the
    cluster lacks Swift object versioning (required for S3 versioning)."""
    super(TestS3ApiVersioning, self).setUp()
    if 'object_versioning' not in tf.cluster_info:
        # Alternatively, maybe we should assert we get 501s...
        raise tf.SkipTest('S3 versioning requires that Swift object '
                          'versioning be enabled')
    put_status, _headers, _body = self.conn.make_request('PUT', 'bucket')
    self.assertEqual(put_status, 200)
def setUp(self):
    """Build and reset an S3 connection; skip when s3api is disabled."""
    if 's3api' not in tf.cluster_info:
        raise tf.SkipTest('s3api middleware is not enabled')
    try:
        self.conn = Connection()
        self.conn.reset()
    except Exception:
        # TODO: Find a way to make this go to FAIL instead of Error
        self.fail('%s got an error during initialize process.\n\n%s'
                  % (self.method_name, traceback.format_exc()))
def setUp(self):
    """Create the shared bucket plus a second-user connection for the
    ACL tests; skip unless the second user's keys are configured."""
    super(TestS3Acl, self).setUp()
    self.bucket = 'bucket'
    self.obj = 'object'
    if ('s3_access_key2' not in tf.config
            or 's3_secret_key2' not in tf.config):
        raise tf.SkipTest(
            'TestS3Acl requires s3_access_key2 and s3_secret_key2 setting')
    self.conn.make_request('PUT', self.bucket)
    key2 = tf.config['s3_access_key2']
    self.conn2 = Connection(key2, tf.config['s3_secret_key2'], key2)
def setUp(self):
    """Create the shared bucket plus a connection for the reduced-access
    third user; skip unless that user's keys are configured."""
    super(TestS3Acl, self).setUp()
    self.bucket = 'bucket'
    self.obj = 'object'
    if ('s3_access_key3' not in tf.config
            or 's3_secret_key3' not in tf.config):
        raise tf.SkipTest(
            'TestS3Acl requires s3_access_key3 and s3_secret_key3 '
            'configured for reduced-access user')
    self.conn.make_request('PUT', self.bucket)
    key3 = tf.config['s3_access_key3']
    self.conn3 = Connection(key3, tf.config['s3_secret_key3'], key3)
def test_put_bucket_error(self):
    """Exercise the error paths of bucket PUT.

    Covers, in order: an invalid bucket name, a bad signature, a
    duplicate create by the same user (409), a duplicate create by a
    second user of the same account (409), and a create attempt by a
    reduced-access third user (403). The later sections skip when the
    extra users are not configured.
    """
    status, headers, body = \
        self.conn.make_request('PUT', 'bucket+invalid')
    self.assertEqual(get_error_code(body), 'InvalidBucketName')
    # Wrong secret key -> signature mismatch regardless of bucket state.
    auth_error_conn = Connection(aws_secret_key='invalid')
    status, headers, body = auth_error_conn.make_request('PUT', 'bucket')
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
    # Create once, then re-create to trigger the conflict.
    self.conn.make_request('PUT', 'bucket')
    status, headers, body = self.conn.make_request('PUT', 'bucket')
    self.assertEqual(status, 409)
    self.assertEqual(get_error_code(body), 'BucketAlreadyExists')
    if 's3_access_key2' not in tf.config or \
            's3_secret_key2' not in tf.config:
        raise tf.SkipTest(
            'Cannot test for BucketAlreadyExists with second user; need '
            's3_access_key2 and s3_secret_key2 configured')
    # Other users of the same account get the same error
    conn2 = Connection(tf.config['s3_access_key2'],
                       tf.config['s3_secret_key2'],
                       tf.config['s3_access_key2'])
    status, headers, body = conn2.make_request('PUT', 'bucket')
    self.assertEqual(status, 409)
    self.assertEqual(get_error_code(body), 'BucketAlreadyExists')
    if 's3_access_key3' not in tf.config or \
            's3_secret_key3' not in tf.config:
        raise tf.SkipTest('Cannot test for AccessDenied; need '
                          's3_access_key3 and s3_secret_key3 configured')
    # If the user can't create buckets, they shouldn't even know
    # whether the bucket exists.
    conn3 = Connection(tf.config['s3_access_key3'],
                       tf.config['s3_secret_key3'],
                       tf.config['s3_access_key3'])
    status, headers, body = conn3.make_request('PUT', 'bucket')
    self.assertEqual(status, 403)
    self.assertEqual(get_error_code(body), 'AccessDenied')
def setUp(self):
    """Set up a boto3 client against the cluster, record its endpoint
    details, and start from a clean S3 namespace."""
    if 's3api' not in tf.cluster_info:
        raise tf.SkipTest('s3api middleware is not enabled')
    try:
        self.conn = get_boto3_conn()
        # Cache endpoint/credential details some tests need directly.
        self.endpoint_url = self.conn._endpoint.host
        self.access_key = self.conn._request_signer._credentials.access_key
        self.region = self.conn._client_config.region_name
        tear_down_s3(self.conn)
    except Exception:
        # TODO: Find a way to make this go to FAIL instead of Error
        self.fail('%s got an error during initialize process.\n\n%s'
                  % (self.method_name, traceback.format_exc()))
def test_put_bucket_error_key3(self):
    """A reduced-access third user gets AccessDenied on bucket PUT and
    learns nothing about whether the bucket already exists."""
    if 's3_access_key3' not in tf.config or \
            's3_secret_key3' not in tf.config:
        raise tf.SkipTest('Cannot test for AccessDenied; need '
                          's3_access_key3 and s3_secret_key3 configured')
    self.conn.make_request('PUT', 'bucket')
    # If the user can't create buckets, they shouldn't even know
    # whether the bucket exists.
    key3 = tf.config['s3_access_key3']
    conn3 = Connection(key3, tf.config['s3_secret_key3'], key3)
    put_status, _headers, put_body = conn3.make_request('PUT', 'bucket')
    self.assertEqual(put_status, 403)
    self.assertEqual(get_error_code(put_body), 'AccessDenied')
def test_put_bucket_error_key2(self):
    """With s3_acl enabled, a second user of the same account gets the
    same 409 BucketAlreadyExists as the owner on a duplicate PUT."""
    if config_true_value(tf.cluster_info['s3api'].get('s3_acl')):
        if ('s3_access_key2' not in tf.config
                or 's3_secret_key2' not in tf.config):
            raise tf.SkipTest(
                'Cannot test for BucketAlreadyExists with second user; '
                'need s3_access_key2 and s3_secret_key2 configured')
        self.conn.make_request('PUT', 'bucket')
        # Other users of the same account get the same 409 error
        key2 = tf.config['s3_access_key2']
        conn2 = Connection(key2, tf.config['s3_secret_key2'], key2)
        put_status, _headers, put_body = conn2.make_request(
            'PUT', 'bucket')
        self.assertEqual(put_status, 409)
        self.assertEqual(get_error_code(put_body), 'BucketAlreadyExists')
def test_put_bucket_error_key3(self):
    """A reduced-access third user's create_bucket raises ClientError
    with 403/AccessDenied, revealing nothing about bucket existence."""
    if 's3_access_key3' not in tf.config or \
            's3_secret_key3' not in tf.config:
        raise tf.SkipTest('Cannot test for AccessDenied; need '
                          's3_access_key3 and s3_secret_key3 configured')
    self.conn.create_bucket(Bucket='bucket')
    # If the user can't create buckets, they shouldn't even know
    # whether the bucket exists.
    conn3 = get_boto3_conn(tf.config['s3_access_key3'],
                           tf.config['s3_secret_key3'])
    with self.assertRaises(botocore.exceptions.ClientError) as ctx:
        conn3.create_bucket(Bucket='bucket')
    err_resp = ctx.exception.response
    self.assertEqual(403, err_resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual('AccessDenied', err_resp['Error']['Code'])
def setUp(self):
    """Connect with an explicit user id ("account:user" when an account
    is configured) and reset test state; skip when s3api is disabled."""
    if 's3api' not in tf.cluster_info:
        raise tf.SkipTest('s3api middleware is not enabled')
    account = tf.config.get('account')
    user_id = ('%s:%s' % (account, tf.config['username'])
               if account else tf.config['username'])
    try:
        self.conn = Connection(
            tf.config['s3_access_key'],
            tf.config['s3_secret_key'],
            user_id=user_id)
        self.conn.reset()
    except Exception:
        # TODO: Find a way to make this go to FAIL instead of Error
        self.fail('%s got an error during initialize process.\n\n%s'
                  % (self.method_name, traceback.format_exc()))
def test_put_bucket_error_key2(self):
    """With s3_acl enabled, a duplicate create_bucket by a second user
    of the same account raises 409 BucketAlreadyExists."""
    if config_true_value(tf.cluster_info['s3api'].get('s3_acl')):
        if ('s3_access_key2' not in tf.config
                or 's3_secret_key2' not in tf.config):
            raise tf.SkipTest(
                'Cannot test for BucketAlreadyExists with second user; '
                'need s3_access_key2 and s3_secret_key2 configured')
        self.conn.create_bucket(Bucket='bucket')
        # Other users of the same account get the same 409 error
        conn2 = get_boto3_conn(tf.config['s3_access_key2'],
                               tf.config['s3_secret_key2'])
        with self.assertRaises(botocore.exceptions.ClientError) as ctx:
            conn2.create_bucket(Bucket='bucket')
        err_resp = ctx.exception.response
        self.assertEqual(
            409, err_resp['ResponseMetadata']['HTTPStatusCode'])
        self.assertEqual('BucketAlreadyExists', err_resp['Error']['Code'])