def setUp(self):
    """Establish a clean test environment.

    Creates and configures a single-backend S3 store against the shared
    S3_CONF options and registers its URI scheme so location parsing
    resolves to this store.  Order matters: ``config`` must run before
    ``configure`` so the store picks up the test options.
    """
    super(TestStore, self).setUp()
    self.store = s3.Store(self.conf)
    # Apply the canned S3 test options before configuring the store.
    self.config(**S3_CONF)
    self.store.configure()
    # Map the 's3' URI scheme to this store instance for lookups.
    self.register_store_schemes(self.store, 's3')
    # Multihash algorithm used by the add() tests.
    self.hash_algo = 'sha256'
def _option_required(self, key):
    """Check that unsetting *key* disables write access on the store.

    A copy of S3_CONF is taken with *key* forced to None, the store is
    rebuilt and configured, and the result is True only when the store
    reports that WRITE_ACCESS capability has been revoked.  Any error
    while reconfiguring is treated as "option not required" (False).
    """
    overrides = dict(S3_CONF)
    overrides[key] = None
    try:
        self.config(**overrides)
        self.store = s3.Store(self.conf)
        self.store.configure()
        writable = self.store.is_capable(
            capabilities.BitMasks.WRITE_ACCESS)
        return not writable
    except Exception:
        return False
def _option_required(self, key):
    """Check that unsetting *key* disables the store's add() method.

    Copies S3_CONF with *key* set to None, reconfigures the store, and
    returns True when add() has been swapped for the disabled stub.
    Configuration failures are treated as "not required" (False).
    """
    conf = S3_CONF.copy()
    conf[key] = None
    try:
        self.config(**conf)
        self.store = s3.Store(self.conf)
        self.store.configure()
        # configure() replaces add with add_disabled when a required
        # option is missing; equality of the bound methods detects that.
        return self.store.add == self.store.add_disabled
    except Exception:
        return False
    # NOTE: the original ended with an extra unreachable ``return False``
    # after the try/except (both paths already return); removed as dead code.
def test_add_different_backend(self, mock_client):
    """Adding via a non-default backend stores to that backend's endpoint.

    Uses a botocore Stubber so no real S3 traffic occurs; ``mock_client``
    is presumably the patched client factory injected by a decorator on
    this method (decorator not visible here — confirm against the class).
    """
    self.store = s3.Store(self.conf, backend="s3_region2")
    self.store.configure()
    self.register_store_backend_schemes(self.store, 's3', 's3_region2')
    expected_image_id = str(uuid.uuid4())
    expected_s3_size = FIVE_KB
    expected_s3_contents = b"*" * expected_s3_size
    # md5 here is a project wrapper accepting usedforsecurity (FIPS-safe).
    expected_checksum = md5(expected_s3_contents,
                            usedforsecurity=False).hexdigest()
    expected_multihash = hashlib.sha256(expected_s3_contents).hexdigest()
    # Location must carry the region2 endpoint, not the default backend's.
    expected_location = format_s3_location(
        S3_CONF['s3_store_access_key'],
        S3_CONF['s3_store_secret_key'],
        'http://s3-region2.com',
        S3_CONF['s3_store_bucket'],
        expected_image_id)
    image_s3 = six.BytesIO(expected_s3_contents)
    fake_s3_client = botocore.session.get_session().create_client('s3')
    with stub.Stubber(fake_s3_client) as stubber:
        # Responses are consumed in FIFO order; this sequence mirrors the
        # store's calls: bucket exists -> object absent (404) -> put.
        stubber.add_response(
            method='head_bucket',
            service_response={},
            expected_params={'Bucket': S3_CONF['s3_store_bucket']})
        # 404 on head_object tells the store the image is not yet present.
        stubber.add_client_error(
            method='head_object',
            service_error_code='404',
            service_message='',
            expected_params={
                'Bucket': S3_CONF['s3_store_bucket'],
                'Key': expected_image_id
            })
        stubber.add_response(
            method='put_object',
            service_response={},
            expected_params={
                'Bucket': S3_CONF['s3_store_bucket'],
                'Key': expected_image_id,
                'Body': botocore.stub.ANY
            })
        mock_client.return_value = fake_s3_client
        loc, size, checksum, multihash, metadata = \
            self.store.add(expected_image_id, image_s3,
                           expected_s3_size, self.hash_algo)
        # Metadata must record which backend actually served the write.
        self.assertEqual("s3_region2", metadata["store"])
        self.assertEqual(expected_location, loc)
        self.assertEqual(expected_s3_size, size)
        self.assertEqual(expected_checksum, checksum)
        self.assertEqual(expected_multihash, multihash)
def setUp(self):
    """Establish a clean test environment.

    Configures the store from S3_CONF, then patches boto's S3Connection
    constructor and ``get_bucket`` with the fakes returned by ``fakers()``
    so no network access happens.
    """
    super(TestStore, self).setUp()
    self.store = s3.Store(self.conf)
    self.config(**S3_CONF)
    self.store.configure()
    self.register_store_schemes(self.store, 's3')

    fctor, fbucket = fakers()

    # BUG FIX: keep a reference to the *patcher*.  The original registered
    # addCleanup(init.stop) where ``init`` was the MagicMock returned by
    # start(); ``.stop`` on a mock is just an auto-created child mock, so
    # the patch was never undone and leaked into subsequent tests.
    init_patcher = mock.patch.object(boto.s3.connection.S3Connection,
                                     '__init__')
    init = init_patcher.start()
    init.side_effect = fctor
    self.addCleanup(init_patcher.stop)

    bucket_patcher = mock.patch.object(boto.s3.connection.S3Connection,
                                       'get_bucket')
    bucket = bucket_patcher.start()
    bucket.side_effect = fbucket
    self.addCleanup(bucket_patcher.stop)
def test_add_host_variations(self):
    """
    Test that having http(s):// in the s3serviceurl in config
    options works as expected.

    For each host spelling, the image is added, the reported location/
    size/checksum are verified, and the image is read back via get().
    """
    variations = ['http://localhost:80',
                  'http://localhost',
                  'http://localhost/v1',
                  'http://localhost/v1/',
                  'https://localhost',
                  'https://localhost:8080',
                  'https://localhost/v1',
                  'https://localhost/v1/',
                  'localhost',
                  'localhost:8080/v1']
    for variation in variations:
        expected_image_id = str(uuid.uuid4())
        expected_s3_size = FIVE_KB
        expected_s3_contents = "*" * expected_s3_size
        expected_checksum = hashlib.md5(expected_s3_contents).hexdigest()
        new_conf = S3_CONF.copy()
        new_conf['s3_store_host'] = variation
        expected_location = format_s3_location(
            new_conf['s3_store_access_key'],
            new_conf['s3_store_secret_key'],
            new_conf['s3_store_host'],
            new_conf['s3_store_bucket'],
            expected_image_id)
        image_s3 = StringIO.StringIO(expected_s3_contents)

        # Rebuild the store with the host variation applied.
        self.config(**new_conf)
        self.store = s3.Store(self.conf)
        self.store.configure()

        location, size, checksum, _ = self.store.add(expected_image_id,
                                                     image_s3,
                                                     expected_s3_size)

        self.assertEqual(expected_location, location)
        self.assertEqual(expected_s3_size, size)
        self.assertEqual(expected_checksum, checksum)

        loc = get_location_from_uri(expected_location)
        (new_image_s3, new_image_size) = self.store.get(loc)
        new_image_contents = new_image_s3.getvalue()
        # BUG FIX: measure the retrieved *contents*; len() of the
        # file-like object returned by get() is not defined (the original
        # called len(new_image_s3) on the stream itself).
        new_image_s3_size = len(new_image_contents)

        self.assertEqual(expected_s3_contents, new_image_contents)
        self.assertEqual(expected_s3_size, new_image_s3_size)
def setUp(self):
    """Establish a clean test environment.

    Enables two S3 backends (s3_region1 / s3_region2) that share
    credentials and bucket but point at different endpoints, registers
    the multi-store machinery, and configures s3_region1 as the store
    under test.
    """
    super(TestMultiS3Store, self).setUp()
    enabled_backends = {"s3_region1": "s3", "s3_region2": "s3"}
    self.hash_algo = 'sha256'
    self.conf = self._CONF
    self.conf(args=[])
    self.conf.register_opt(cfg.DictOpt('enabled_backends'))
    self.config(enabled_backends=enabled_backends)
    store.register_store_opts(self.conf)
    self.config(default_backend='s3_region1', group='glance_store')

    # Both regions use identical credentials; only the endpoint differs.
    region_hosts = (('s3_region1', 'https://s3-region1.com'),
                    ('s3_region2', 'http://s3-region2.com'))
    for group_name, host in region_hosts:
        self.config(group=group_name,
                    s3_store_access_key='user',
                    s3_store_secret_key='key',
                    s3_store_host=host,
                    s3_store_bucket='glance',
                    s3_store_large_object_size=5,
                    s3_store_large_object_chunk_size=6)

    # Ensure stores + locations cleared
    location.SCHEME_TO_CLS_BACKEND_MAP = {}
    store.create_multi_stores(self.conf)
    self.addCleanup(setattr, location, 'SCHEME_TO_CLS_BACKEND_MAP',
                    dict())
    self.addCleanup(self.conf.reset)

    self.store = s3.Store(self.conf, backend="s3_region1")
    self.store.configure()
    self.register_store_backend_schemes(self.store, 's3', 's3_region1')