def test_get_non_existing(self):
    """
    Test that trying to retrieve a s3 that doesn't exist
    raises an error
    """
    # Both a bad bucket and a missing key must surface as NotFound.
    for bad_uri in ("s3://user:key@auth_address/badbucket/%s" % FAKE_UUID,
                    "s3://user:key@auth_address/tank/noexist"):
        loc = get_location_from_uri(bad_uri)
        self.assertRaises(exception.NotFound, self.store.get, loc)
def test_add(self):
    """Test that we can add an image via the filesystem backend"""
    ChunkedFile.CHUNKSIZE = 1024
    image_id = utils.generate_uuid()
    file_size = 1024 * 5  # 5K
    file_contents = "*" * file_size
    file_checksum = hashlib.md5(file_contents).hexdigest()
    expected_location = "file://%s/%s" % (stubs.FAKE_FILESYSTEM_ROOTDIR,
                                          image_id)

    in_file = StringIO.StringIO(file_contents)
    location, size, checksum = self.store.add(image_id, in_file, file_size)

    self.assertEquals(expected_location, location)
    self.assertEquals(file_size, size)
    self.assertEquals(file_checksum, checksum)

    # Read the image back through the store and verify a byte-for-byte
    # round trip.
    loc = get_location_from_uri("file:///tmp/tank-tests/%s" % image_id)
    (new_image_file, new_image_size) = self.store.get(loc)

    pieces = []
    total_len = 0
    for chunk in new_image_file:
        total_len += len(chunk)
        pieces.append(chunk)
    self.assertEquals(file_contents, "".join(pieces))
    self.assertEquals(file_size, total_len)
def test_add(self):
    """Test that we can add an image via the s3 backend"""
    image_id = utils.generate_uuid()
    contents = "*" * FIVE_KB
    md5sum = hashlib.md5(contents).hexdigest()
    expected_location = format_s3_location(
        S3_CONF['s3_store_access_key'],
        S3_CONF['s3_store_secret_key'],
        S3_CONF['s3_store_host'],
        S3_CONF['s3_store_bucket'],
        image_id)
    image_s3 = StringIO.StringIO(contents)

    location, size, checksum = self.store.add(image_id, image_s3, FIVE_KB)

    self.assertEquals(expected_location, location)
    self.assertEquals(FIVE_KB, size)
    self.assertEquals(md5sum, checksum)

    # Round-trip: fetch what we stored and compare contents and length.
    (new_image_s3, new_image_size) = self.store.get(
        get_location_from_uri(expected_location))
    buf = StringIO.StringIO()
    for chunk in new_image_s3:
        buf.write(chunk)
    self.assertEquals(contents, buf.getvalue())
    self.assertEquals(FIVE_KB, buf.len)
def get_from_backend(uri, **kwargs):
    """
    Fetch image data from the backend identified by the URI's scheme.

    :param uri: store location URI (e.g. file://..., s3://..., http://...)
    :param kwargs: accepted for interface compatibility; unused here
    :returns: the result of the store's get() -- in this codebase that is
              a tuple of (chunk iterator, image size), not a generator of
              chunks as the old docstring implied
    """
    store = get_store_from_uri(uri)
    loc = location.get_location_from_uri(uri)
    return store.get(loc)
def test_get_non_existing(self):
    """
    Test that trying to retrieve a file that doesn't exist
    raises an error
    """
    missing_loc = get_location_from_uri(
        "file:///tmp/tank-tests/non-existing")
    self.assertRaises(exception.NotFound, self.store.get, missing_loc)
def test_add(self):
    """Test that we can add an image via the s3 backend"""
    expected_id = utils.generate_uuid()
    expected_size = FIVE_KB
    expected_contents = "*" * expected_size
    expected_checksum = hashlib.md5(expected_contents).hexdigest()
    conf = S3_CONF
    expected_location = format_s3_location(conf['s3_store_access_key'],
                                           conf['s3_store_secret_key'],
                                           conf['s3_store_host'],
                                           conf['s3_store_bucket'],
                                           expected_id)

    location, size, checksum = self.store.add(
        expected_id, StringIO.StringIO(expected_contents), expected_size)
    self.assertEquals(expected_location, location)
    self.assertEquals(expected_size, size)
    self.assertEquals(expected_checksum, checksum)

    # Fetch the stored image again and verify both payload and length.
    loc = get_location_from_uri(expected_location)
    (new_image_s3, new_image_size) = self.store.get(loc)
    collected = StringIO.StringIO()
    for piece in new_image_s3:
        collected.write(piece)
    self.assertEquals(expected_contents, collected.getvalue())
    self.assertEquals(expected_size, collected.len)
def test_http_delete_raise_error(self):
    """Deletes are unsupported for http(s) stores and must raise."""
    uri = "https://netloc/path/to/file.tar.gz"
    http_loc = get_location_from_uri(uri)
    self.assertRaises(NotImplementedError, self.store.delete, http_loc)

    # The generic backend helper must translate NotImplementedError
    # into the Tank-level StoreDeleteNotSupported exception.
    create_stores(utils.TestConfigOpts({}))
    self.assertRaises(exception.StoreDeleteNotSupported,
                      delete_from_backend, uri)
def test_delete_non_existing(self):
    """
    Test that trying to delete a s3 that doesn't exist
    raises an error
    """
    missing = get_location_from_uri(
        "s3://user:key@auth_address/tank/noexist")
    self.assertRaises(exception.NotFound, self.store.delete, missing)
def test_delete(self):
    """
    Test we can delete an existing image in the s3 store
    """
    loc = get_location_from_uri(
        "s3://user:key@auth_address/tank/%s" % FAKE_UUID)
    self.store.delete(loc)
    # Once deleted, a get on the same location must fail.
    self.assertRaises(exception.NotFound, self.store.get, loc)
def test_delete(self):
    """
    Test we can delete an existing image in the filesystem store
    """
    loc = get_location_from_uri("file:///tmp/tank-tests/2")
    self.store.delete(loc)
    # The image must be gone afterwards.
    self.assertRaises(exception.NotFound, self.store.get, loc)
def delete_from_backend(uri, **kwargs):
    """
    Remove the image data at ``uri`` from its backend store.

    :param uri: store location URI identifying the data to remove
    :param kwargs: accepted for interface compatibility; unused here
    :raises exception.StoreDeleteNotSupported: when the store's driver
        does not implement delete()
    """
    backend = get_store_from_uri(uri)
    loc = location.get_location_from_uri(uri)
    try:
        return backend.delete(loc)
    except NotImplementedError:
        raise exception.StoreDeleteNotSupported
def test_https_get(self):
    """Chunks streamed from an https location match the stubbed body."""
    loc = get_location_from_uri("https://netloc/path/to/file.tar.gz")
    (image_file, image_size) = self.store.get(loc)
    self.assertEqual(image_size, 31)
    expected_returns = ['I ', 'am', ' a', ' t', 'ea', 'po', 't,', ' s',
                        'ho', 'rt', ' a', 'nd', ' s', 'to', 'ut', '\n']
    self.assertEqual(list(image_file), expected_returns)
def get_store_from_location(uri):
    """
    Given a location (assumed to be a URL), attempt to determine
    the store from the location.  We use here a simple guess that
    the scheme of the parsed URL is the store...

    :param uri: Location to check for the store
    :returns: the store name parsed from the URI's scheme
    """
    return location.get_location_from_uri(uri).store_name
def test_https_get(self):
    """The https store yields the teapot fixture in the expected chunks."""
    uri = "https://netloc/path/to/file.tar.gz"
    (image_file, image_size) = self.store.get(get_location_from_uri(uri))
    self.assertEqual(31, image_size)
    teapot_chunks = ['I ', 'am', ' a', ' t', 'ea', 'po', 't,', ' s',
                     'ho', 'rt', ' a', 'nd', ' s', 'to', 'ut', '\n']
    collected = [chunk for chunk in image_file]
    self.assertEqual(collected, teapot_chunks)
def test_get(self):
    """Test a "normal" retrieval of an image in chunks"""
    loc = get_location_from_uri(
        "s3://user:key@auth_address/tank/%s" % FAKE_UUID)
    (image_s3, image_size) = self.store.get(loc)
    self.assertEqual(image_size, FIVE_KB)

    # Collect the streamed chunks and compare against the fixture data.
    pieces = []
    for chunk in image_s3:
        pieces.append(chunk)
    self.assertEqual("*" * FIVE_KB, "".join(pieces))
def test_get(self):
    """Test a "normal" retrieval of an image in chunks"""
    uri = "s3://user:key@auth_address/tank/%s" % FAKE_UUID
    (image_s3, image_size) = self.store.get(get_location_from_uri(uri))
    self.assertEqual(FIVE_KB, image_size)
    self.assertEqual("*" * FIVE_KB, "".join([c for c in image_s3]))
def test_get(self):
    """Test a "normal" retrieval of an image in chunks"""
    loc = get_location_from_uri("file:///tmp/tank-tests/2")
    (image_file, image_size) = self.store.get(loc)

    chunks = [chunk for chunk in image_file]
    self.assertEqual("chunk00000remainder", "".join(chunks))
    # The fixture file is split into exactly two chunks.
    self.assertEqual(2, len(chunks))
def test_add_host_variations(self):
    """
    Test that having http(s):// in the s3serviceurl in config
    options works as expected.
    """
    variations = ['http://localhost:80',
                  'http://localhost',
                  'http://localhost/v1',
                  'http://localhost/v1/',
                  'https://localhost',
                  'https://localhost:8080',
                  'https://localhost/v1',
                  'https://localhost/v1/',
                  'localhost',
                  'localhost:8080/v1']
    for host in variations:
        image_id = utils.generate_uuid()
        contents = "*" * FIVE_KB
        md5sum = hashlib.md5(contents).hexdigest()

        # Rebuild the store against this particular host spelling.
        conf = S3_CONF.copy()
        conf['s3_store_host'] = host
        expected_location = format_s3_location(
            conf['s3_store_access_key'],
            conf['s3_store_secret_key'],
            conf['s3_store_host'],
            conf['s3_store_bucket'],
            image_id)
        image_s3 = StringIO.StringIO(contents)
        self.store = Store(test_utils.TestConfigOpts(conf))

        location, size, checksum = self.store.add(image_id, image_s3,
                                                  FIVE_KB)
        self.assertEquals(expected_location, location)
        self.assertEquals(FIVE_KB, size)
        self.assertEquals(md5sum, checksum)

        # Read back through the freshly-configured store.
        loc = get_location_from_uri(expected_location)
        (new_image_s3, new_image_size) = self.store.get(loc)
        self.assertEquals(contents, new_image_s3.getvalue())
        self.assertEquals(FIVE_KB, new_image_s3.len)
def test_add_host_variations(self):
    """
    Test that having http(s):// in the s3serviceurl in config
    options works as expected.
    """
    host_variants = [
        'http://localhost:80', 'http://localhost',
        'http://localhost/v1', 'http://localhost/v1/',
        'https://localhost', 'https://localhost:8080',
        'https://localhost/v1', 'https://localhost/v1/',
        'localhost', 'localhost:8080/v1',
    ]
    for variant in host_variants:
        expected_id = utils.generate_uuid()
        expected_size = FIVE_KB
        expected_contents = "*" * expected_size
        expected_checksum = hashlib.md5(expected_contents).hexdigest()

        # Each variant gets its own config and therefore its own Store.
        new_conf = S3_CONF.copy()
        new_conf['s3_store_host'] = variant
        expected_location = format_s3_location(
            new_conf['s3_store_access_key'],
            new_conf['s3_store_secret_key'],
            new_conf['s3_store_host'],
            new_conf['s3_store_bucket'],
            expected_id)
        self.store = Store(test_utils.TestConfigOpts(new_conf))

        location, size, checksum = self.store.add(
            expected_id, StringIO.StringIO(expected_contents),
            expected_size)
        self.assertEquals(expected_location, location)
        self.assertEquals(expected_size, size)
        self.assertEquals(expected_checksum, checksum)

        # Verify the stored payload round-trips intact.
        (new_image_s3, new_image_size) = self.store.get(
            get_location_from_uri(expected_location))
        self.assertEquals(expected_contents, new_image_s3.getvalue())
        self.assertEquals(expected_size, new_image_s3.len)
def test_get_location_from_uri_back_to_uri(self):
    """
    Round-trip check: building a Location from each URI and calling
    get_store_uri() must return the original URI unchanged.
    """
    good_store_uris = [
        'https://*****:*****@example.com:80/images/some-id',
        'http://images.oracle.com/123456',
        'chase://*****:*****@authurl.com/container/obj-id',
        'chase+https://account:user:[email protected]/container/obj-id',
        's3://accesskey:[email protected]/bucket/key-id',
        's3://accesskey:secretwith/[email protected]/bucket/key-id',
        's3+http://accesskey:[email protected]/bucket/key-id',
        's3+https://accesskey:[email protected]/bucket/key-id',
        'file:///var/lib/tank/images/1']

    for uri in good_store_uris:
        loc = location.get_location_from_uri(uri)
        self.assertEqual(loc.get_store_uri(), uri)
def test_get_location_from_uri_back_to_uri(self):
    """
    Test that for various URIs, the correct Location object can be
    constructed and then the original URI returned via the
    get_store_uri() method.
    """
    round_trip_uris = (
        'https://*****:*****@example.com:80/images/some-id',
        'http://images.oracle.com/123456',
        'chase://*****:*****@authurl.com/container/obj-id',
        'chase+https://account:user:[email protected]/container/obj-id',
        's3://accesskey:[email protected]/bucket/key-id',
        's3://accesskey:secretwith/[email protected]/bucket/key-id',
        's3+http://accesskey:[email protected]/bucket/key-id',
        's3+https://accesskey:[email protected]/bucket/key-id',
        'file:///var/lib/tank/images/1',
    )
    for original_uri in round_trip_uris:
        # get_store_uri() must reproduce the exact input URI.
        rebuilt = location.get_location_from_uri(original_uri)
        self.assertEqual(rebuilt.get_store_uri(), original_uri)
def test_large_objects(self):
    """
    We test the large object manifest code path in the Chase driver.

    In the case where an image file is bigger than the config variable
    chase_store_large_object_size, then we chunk the image into Chase,
    and add a manifest put_object at the end.

    We test that the delete of the large object cleans up all the
    chunks in Chase, in addition to the manifest file (LP Bug# 833285)
    """
    self.cleanup()
    # Force the chunking path: a 5 MB upload against a 2 MB threshold
    # with 1 MB segments guarantees a multi-segment manifest.
    self.chase_store_large_object_size = 2  # In MB
    self.chase_store_large_object_chunk_size = 1  # In MB
    self.start_servers(**self.__dict__.copy())

    # NOTE(review): these locals are captured but unused -- the requests
    # below read self.api_port / self.registry_port directly.
    api_port = self.api_port
    registry_port = self.registry_port

    # GET /images
    # Verify no public images
    path = "http://%s:%d/v1/images" % ("0.0.0.0", self.api_port)
    http = httplib2.Http()
    response, content = http.request(path, 'GET')
    self.assertEqual(response.status, 200)
    self.assertEqual(content, '{"images": []}')

    # POST /images with public image named Image1
    # attribute and no custom properties. Verify a 200 OK is returned
    image_data = "*" * FIVE_MB
    headers = {'Content-Type': 'application/octet-stream',
               'X-Image-Meta-Name': 'Image1',
               'X-Image-Meta-Is-Public': 'True'}
    path = "http://%s:%d/v1/images" % ("0.0.0.0", self.api_port)
    http = httplib2.Http()
    response, content = http.request(path, 'POST', headers=headers,
                                     body=image_data)
    self.assertEqual(response.status, 201, content)
    data = json.loads(content)
    self.assertEqual(data['image']['checksum'],
                     hashlib.md5(image_data).hexdigest())
    self.assertEqual(data['image']['size'], FIVE_MB)
    self.assertEqual(data['image']['name'], "Image1")
    self.assertEqual(data['image']['is_public'], True)

    image_id = data['image']['id']

    # HEAD image
    # Verify image found now
    path = "http://%s:%d/v1/images/%s" % ("0.0.0.0", self.api_port,
                                          image_id)
    http = httplib2.Http()
    response, content = http.request(path, 'HEAD')
    self.assertEqual(response.status, 200)
    self.assertEqual(response['x-image-meta-name'], "Image1")

    # GET image
    # Verify all information on image we just added is correct
    path = "http://%s:%d/v1/images/%s" % ("0.0.0.0", self.api_port,
                                          image_id)
    http = httplib2.Http()
    response, content = http.request(path, 'GET')
    self.assertEqual(response.status, 200)

    expected_image_headers = {
        'x-image-meta-id': image_id,
        'x-image-meta-name': 'Image1',
        'x-image-meta-is_public': 'True',
        'x-image-meta-status': 'active',
        'x-image-meta-disk_format': '',
        'x-image-meta-container_format': '',
        'x-image-meta-size': str(FIVE_MB)}

    expected_std_headers = {
        'content-length': str(FIVE_MB),
        'content-type': 'application/octet-stream'}

    for expected_key, expected_value in expected_image_headers.items():
        self.assertEqual(response[expected_key], expected_value,
                         "For key '%s' expected header value '%s'. Got '%s'"
                         % (expected_key, expected_value,
                            response[expected_key]))

    for expected_key, expected_value in expected_std_headers.items():
        self.assertEqual(response[expected_key], expected_value,
                         "For key '%s' expected header value '%s'. Got '%s'"
                         % (expected_key, expected_value,
                            response[expected_key]))

    self.assertEqual(content, "*" * FIVE_MB)
    self.assertEqual(hashlib.md5(content).hexdigest(),
                     hashlib.md5("*" * FIVE_MB).hexdigest())

    # We test that the delete of the large object cleans up all the
    # chunks in Chase, in addition to the manifest file (LP Bug# 833285)

    # Grab the actual Chase location and query the object manifest for
    # the chunks/segments. We will check that the segments don't exist
    # after we delete the object through Tank...
    path = "http://%s:%d/images/%s" % ("0.0.0.0", self.registry_port,
                                       image_id)
    http = httplib2.Http()
    response, content = http.request(path, 'GET')
    self.assertEqual(response.status, 200)
    data = json.loads(content)
    image_loc = data['image']['location']
    # The registry may store the location encrypted; pick the key from
    # wherever this test run configured it.
    if hasattr(self, 'metadata_encryption_key'):
        key = self.metadata_encryption_key
    else:
        key = self.api_server.metadata_encryption_key
    image_loc = crypt.urlsafe_decrypt(key, image_loc)
    image_loc = get_location_from_uri(image_loc)
    chase_loc = image_loc.store_location

    # Imported lazily so non-Chase test runs don't need the package.
    from chase.common import client as chase_client
    chase_conn = chase_client.Connection(
        authurl=chase_loc.chase_auth_url,
        user=chase_loc.user, key=chase_loc.key)

    # Verify the object manifest exists
    headers = chase_conn.head_object(chase_loc.container, chase_loc.obj)
    manifest = headers.get('x-object-manifest')
    self.assertTrue(manifest is not None, "Manifest could not be found!")

    # Grab the segment identifiers
    obj_container, obj_prefix = manifest.split('/', 1)
    segments = [segment['name'] for segment in
                chase_conn.get_container(obj_container,
                                         prefix=obj_prefix)[1]]

    # Verify the segments exist
    for segment in segments:
        headers = chase_conn.head_object(obj_container, segment)
        self.assertTrue(headers.get('content-length') is not None,
                        headers)

    # DELETE image
    # Verify image and all chunks are gone...
    path = "http://%s:%d/v1/images/%s" % ("0.0.0.0", self.api_port,
                                          image_id)
    http = httplib2.Http()
    response, content = http.request(path, 'DELETE')
    self.assertEqual(response.status, 200)

    # Verify the segments no longer exist
    for segment in segments:
        self.assertRaises(chase_client.ClientException,
                          chase_conn.head_object,
                          obj_container, segment)

    self.stop_servers()