def test_add(self):
    """Test that we can add an image via the filesystem backend.

    Adds a 5K image, checks the (location, size, checksum) triple
    returned by add(), then reads the image back through the store
    and verifies contents and byte count.
    """
    ChunkedFile.CHUNKSIZE = 1024
    expected_image_id = utils.generate_uuid()
    expected_file_size = 1024 * 5  # 5K
    expected_file_contents = "*" * expected_file_size
    expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
    expected_location = "file://%s/%s" % (stubs.FAKE_FILESYSTEM_ROOTDIR,
                                          expected_image_id)
    image_file = StringIO.StringIO(expected_file_contents)

    location, size, checksum = self.store.add(expected_image_id,
                                              image_file,
                                              expected_file_size)

    self.assertEquals(expected_location, location)
    self.assertEquals(expected_file_size, size)
    self.assertEquals(expected_checksum, checksum)

    # BUG FIX: previously the URI was re-built from a hard-coded
    # "file:///tmp/tank-tests/" prefix, duplicating (and able to drift
    # from) stubs.FAKE_FILESYSTEM_ROOTDIR.  Since we just asserted
    # location == expected_location, fetch through that same URI.
    loc = get_location_from_uri(expected_location)
    (new_image_file, new_image_size) = self.store.get(loc)

    new_image_contents = ""
    new_image_file_size = 0
    # ChunkedFile yields the image back in CHUNKSIZE pieces.
    for chunk in new_image_file:
        new_image_file_size += len(chunk)
        new_image_contents += chunk

    self.assertEquals(expected_file_contents, new_image_contents)
    self.assertEquals(expected_file_size, new_image_file_size)
def test_generate_uuid_format(self):
    """Check the format of a uuid.

    A canonical UUID string is 36 characters: 32 hex digits plus
    4 dashes.
    """
    uuid = utils.generate_uuid()
    self.assertTrue(isinstance(uuid, basestring))
    # BUG FIX: assertTrue(len(uuid), 36) treated 36 as the failure
    # *message* and passed for any non-empty string.  Assert the exact
    # length instead.
    self.assertEqual(len(uuid), 36)
    # Stripping the 4 dashes must leave exactly 32 hex digits
    # (the original asserted 36 here, which was doubly wrong).
    self.assertEqual(len(uuid.replace('-', '')), 32)
def test_add(self):
    """Test that we can add an image via the s3 backend.

    Adds a 5K image, checks the (location, size, checksum) triple
    returned by add(), then reads the image back and verifies the
    contents, the reported size, and the re-assembled size.
    """
    expected_image_id = utils.generate_uuid()
    expected_s3_size = FIVE_KB
    expected_s3_contents = "*" * expected_s3_size
    expected_checksum = hashlib.md5(expected_s3_contents).hexdigest()
    expected_location = format_s3_location(
        S3_CONF['s3_store_access_key'],
        S3_CONF['s3_store_secret_key'],
        S3_CONF['s3_store_host'],
        S3_CONF['s3_store_bucket'],
        expected_image_id)
    image_s3 = StringIO.StringIO(expected_s3_contents)

    location, size, checksum = self.store.add(expected_image_id,
                                              image_s3,
                                              expected_s3_size)

    self.assertEquals(expected_location, location)
    self.assertEquals(expected_s3_size, size)
    self.assertEquals(expected_checksum, checksum)

    loc = get_location_from_uri(expected_location)
    (new_image_s3, new_image_size) = self.store.get(loc)
    # FIX: new_image_size was unpacked but never checked; verify the
    # size reported by get() as well as the bytes actually streamed.
    self.assertEquals(expected_s3_size, new_image_size)

    new_image_contents = StringIO.StringIO()
    for chunk in new_image_s3:
        new_image_contents.write(chunk)
    new_image_s3_size = new_image_contents.len

    self.assertEquals(expected_s3_contents, new_image_contents.getvalue())
    self.assertEquals(expected_s3_size, new_image_s3_size)
def test_add(self):
    """A 5K image added through the s3 backend can be read back intact."""
    image_id = utils.generate_uuid()
    body = "*" * FIVE_KB
    body_md5 = hashlib.md5(body).hexdigest()
    want_location = format_s3_location(S3_CONF['s3_store_access_key'],
                                       S3_CONF['s3_store_secret_key'],
                                       S3_CONF['s3_store_host'],
                                       S3_CONF['s3_store_bucket'],
                                       image_id)

    # Store the image and check everything add() reports about it.
    location, size, checksum = self.store.add(image_id,
                                              StringIO.StringIO(body),
                                              FIVE_KB)
    self.assertEquals(want_location, location)
    self.assertEquals(FIVE_KB, size)
    self.assertEquals(body_md5, checksum)

    # Stream the image back out and re-assemble it chunk by chunk.
    reader, _reported_size = self.store.get(
        get_location_from_uri(want_location))
    assembled = StringIO.StringIO()
    for piece in reader:
        assembled.write(piece)

    self.assertEquals(body, assembled.getvalue())
    self.assertEquals(FIVE_KB, assembled.len)
def test_add_host_variations(self):
    """
    Adding works however the configured s3serviceurl is spelled:
    with or without http(s)://, port, and version path.
    """
    hosts = ('http://localhost:80', 'http://localhost',
             'http://localhost/v1', 'http://localhost/v1/',
             'https://localhost', 'https://localhost:8080',
             'https://localhost/v1', 'https://localhost/v1/',
             'localhost', 'localhost:8080/v1')

    for host in hosts:
        image_id = utils.generate_uuid()
        body = "*" * FIVE_KB
        body_md5 = hashlib.md5(body).hexdigest()

        # Rebuild the store with only the host option varied.
        conf = S3_CONF.copy()
        conf['s3_store_host'] = host
        want_location = format_s3_location(conf['s3_store_access_key'],
                                           conf['s3_store_secret_key'],
                                           conf['s3_store_host'],
                                           conf['s3_store_bucket'],
                                           image_id)
        self.store = Store(test_utils.TestConfigOpts(conf))

        location, size, checksum = self.store.add(
            image_id, StringIO.StringIO(body), FIVE_KB)
        self.assertEquals(want_location, location)
        self.assertEquals(FIVE_KB, size)
        self.assertEquals(body_md5, checksum)

        # Read it back and confirm contents and size survived.
        reader, _reported_size = self.store.get(
            get_location_from_uri(want_location))
        self.assertEquals(body, reader.getvalue())
        self.assertEquals(FIVE_KB, reader.len)
def test_add_host_variations(self):
    """
    Test that having http(s):// in the s3serviceurl in config
    options works as expected.
    """
    service_urls = [
        'http://localhost:80',
        'http://localhost',
        'http://localhost/v1',
        'http://localhost/v1/',
        'https://localhost',
        'https://localhost:8080',
        'https://localhost/v1',
        'https://localhost/v1/',
        'localhost',
        'localhost:8080/v1',
    ]
    for s3_host in service_urls:
        image_id = utils.generate_uuid()
        image_bytes = "*" * FIVE_KB
        expected_checksum = hashlib.md5(image_bytes).hexdigest()

        # Point a fresh Store at this particular URL spelling.
        options = S3_CONF.copy()
        options['s3_store_host'] = s3_host
        expected_location = format_s3_location(
            options['s3_store_access_key'],
            options['s3_store_secret_key'],
            options['s3_store_host'],
            options['s3_store_bucket'],
            image_id)
        self.store = Store(test_utils.TestConfigOpts(options))

        result = self.store.add(image_id,
                                StringIO.StringIO(image_bytes),
                                FIVE_KB)
        location, size, checksum = result
        self.assertEquals(expected_location, location)
        self.assertEquals(FIVE_KB, size)
        self.assertEquals(expected_checksum, checksum)

        # Round-trip: fetch and compare payload and length.
        image, _size = self.store.get(
            get_location_from_uri(expected_location))
        self.assertEquals(image_bytes, image.getvalue())
        self.assertEquals(FIVE_KB, image.len)
def test_remote_image(self):
    """Verify an image added using a 'Location' header can be retrieved.

    End-to-end flow: upload Image1, discover its (encrypted) S3
    location via the registry, register Image2 pointing at that
    location, stream Image2 back, then delete both.
    """
    self.cleanup()
    self.start_servers(**self.__dict__.copy())

    # 1. POST /images with public image named Image1
    image_data = "*" * FIVE_KB
    headers = {'Content-Type': 'application/octet-stream',
               'X-Image-Meta-Name': 'Image1',
               'X-Image-Meta-Is-Public': 'True'}
    path = "http://%s:%d/v1/images" % ("0.0.0.0", self.api_port)
    http = httplib2.Http()
    response, content = http.request(path, 'POST', headers=headers,
                                     body=image_data)
    self.assertEqual(response.status, 201)
    data = json.loads(content)
    self.assertEqual(data['image']['checksum'],
                     hashlib.md5(image_data).hexdigest())
    self.assertEqual(data['image']['size'], FIVE_KB)
    image_id1 = data['image']['id']

    # 2. GET first image
    # Verify all information on image we just added is correct
    path = "http://%s:%d/v1/images/%s"
    args = ("0.0.0.0", self.api_port, image_id1)
    http = httplib2.Http()
    response, content = http.request(path % args, 'GET')
    self.assertEqual(response.status, 200)
    self.assertEqual(response['content-length'], str(FIVE_KB))
    self.assertEqual(content, "*" * FIVE_KB)

    # 3. GET first image from registry in order to find S3 location
    path = "http://%s:%d/images/%s"
    args = ("0.0.0.0", self.registry_port, image_id1)
    http = httplib2.Http()
    response, content = http.request(path % args, 'GET')
    # The location is stored encrypted; the key lives either on the
    # test case or on the api server config.
    if hasattr(self, 'metadata_encryption_key'):
        key = self.metadata_encryption_key
    else:
        key = self.api_server.metadata_encryption_key
    loc = json.loads(content)['image']['location']
    s3_store_location = crypt.urlsafe_decrypt(key, loc)

    # 4. POST /images using location generated by Image1
    image_id2 = utils.generate_uuid()
    image_data = "*" * FIVE_KB
    headers = {'Content-Type': 'application/octet-stream',
               'X-Image-Meta-Id': image_id2,
               'X-Image-Meta-Name': 'Image2',
               'X-Image-Meta-Is-Public': 'True',
               'X-Image-Meta-Location': s3_store_location}
    path = "http://%s:%d/v1/images" % ("0.0.0.0", self.api_port)
    http = httplib2.Http()
    response, content = http.request(path, 'POST', headers=headers)
    self.assertEqual(response.status, 201)
    # BUG FIX: 'data' was never re-parsed here, so the assertions below
    # silently re-checked the stale step-1 response instead of Image2.
    data = json.loads(content)
    self.assertEqual(data['image']['size'], FIVE_KB)
    self.assertEqual(data['image']['checksum'],
                     hashlib.md5(image_data).hexdigest())

    # 5. GET second image and make sure it can stream the image
    path = "http://%s:%d/v1/images/%s"
    args = ("0.0.0.0", self.api_port, image_id2)
    http = httplib2.Http()
    response, content = http.request(path % args, 'GET')
    self.assertEqual(response.status, 200)
    self.assertEqual(response['content-length'], str(FIVE_KB))
    self.assertEqual(content, "*" * FIVE_KB)

    # 6. DELETE first and second images
    path = "http://%s:%d/v1/images/%s"
    args = ("0.0.0.0", self.api_port, image_id1)
    http = httplib2.Http()
    http.request(path % args, 'DELETE')
    path = "http://%s:%d/v1/images/%s"
    args = ("0.0.0.0", self.api_port, image_id2)
    http = httplib2.Http()
    http.request(path % args, 'DELETE')

    self.stop_servers()
def test_remote_image(self):
    """Verify an image added using a 'Location' header can be retrieved.

    Uploads Image1, looks up its encrypted S3 location in the
    registry, registers Image2 against that location, verifies
    Image2 streams correctly, and finally deletes both images.
    """
    self.cleanup()
    self.start_servers(**self.__dict__.copy())

    # 1. POST /images with public image named Image1
    image_data = "*" * FIVE_KB
    headers = {
        'Content-Type': 'application/octet-stream',
        'X-Image-Meta-Name': 'Image1',
        'X-Image-Meta-Is-Public': 'True'
    }
    path = "http://%s:%d/v1/images" % ("0.0.0.0", self.api_port)
    http = httplib2.Http()
    response, content = http.request(path, 'POST', headers=headers,
                                     body=image_data)
    self.assertEqual(response.status, 201)
    data = json.loads(content)
    self.assertEqual(data['image']['checksum'],
                     hashlib.md5(image_data).hexdigest())
    self.assertEqual(data['image']['size'], FIVE_KB)
    image_id1 = data['image']['id']

    # 2. GET first image
    # Verify all information on image we just added is correct
    path = "http://%s:%d/v1/images/%s"
    args = ("0.0.0.0", self.api_port, image_id1)
    http = httplib2.Http()
    response, content = http.request(path % args, 'GET')
    self.assertEqual(response.status, 200)
    self.assertEqual(response['content-length'], str(FIVE_KB))
    self.assertEqual(content, "*" * FIVE_KB)

    # 3. GET first image from registry in order to find S3 location
    path = "http://%s:%d/images/%s"
    args = ("0.0.0.0", self.registry_port, image_id1)
    http = httplib2.Http()
    response, content = http.request(path % args, 'GET')
    # Decrypt the stored location with whichever key is configured.
    if hasattr(self, 'metadata_encryption_key'):
        key = self.metadata_encryption_key
    else:
        key = self.api_server.metadata_encryption_key
    loc = json.loads(content)['image']['location']
    s3_store_location = crypt.urlsafe_decrypt(key, loc)

    # 4. POST /images using location generated by Image1
    image_id2 = utils.generate_uuid()
    image_data = "*" * FIVE_KB
    headers = {
        'Content-Type': 'application/octet-stream',
        'X-Image-Meta-Id': image_id2,
        'X-Image-Meta-Name': 'Image2',
        'X-Image-Meta-Is-Public': 'True',
        'X-Image-Meta-Location': s3_store_location
    }
    path = "http://%s:%d/v1/images" % ("0.0.0.0", self.api_port)
    http = httplib2.Http()
    response, content = http.request(path, 'POST', headers=headers)
    self.assertEqual(response.status, 201)
    # BUG FIX: the original never re-parsed 'content' after this POST,
    # so the next two assertions re-checked stale Image1 data.
    data = json.loads(content)
    self.assertEqual(data['image']['size'], FIVE_KB)
    self.assertEqual(data['image']['checksum'],
                     hashlib.md5(image_data).hexdigest())

    # 5. GET second image and make sure it can stream the image
    path = "http://%s:%d/v1/images/%s"
    args = ("0.0.0.0", self.api_port, image_id2)
    http = httplib2.Http()
    response, content = http.request(path % args, 'GET')
    self.assertEqual(response.status, 200)
    self.assertEqual(response['content-length'], str(FIVE_KB))
    self.assertEqual(content, "*" * FIVE_KB)

    # 6. DELETE first and second images
    path = "http://%s:%d/v1/images/%s"
    args = ("0.0.0.0", self.api_port, image_id1)
    http = httplib2.Http()
    http.request(path % args, 'DELETE')
    path = "http://%s:%d/v1/images/%s"
    args = ("0.0.0.0", self.api_port, image_id2)
    http = httplib2.Http()
    http.request(path % args, 'DELETE')

    self.stop_servers()
def test_generate_uuid_unique(self):
    """Ensure generate_uuid will return unique values."""
    uuids = [utils.generate_uuid() for i in range(5)]
    # Casting to a set drops duplicates, so equal lengths means every
    # generated value was distinct.  (len() works on sets directly;
    # the original's len(list(unique)) wrapper was redundant.)
    unique = set(uuids)
    self.assertEqual(len(uuids), len(unique))
import sys import unittest import urlparse import stubout import boto.s3.connection from tank.common import exception from tank.common import utils from tank.store import BackendException, UnsupportedBackend from tank.store.location import get_location_from_uri from tank.store.s3 import Store from tank.tests import utils as test_utils FAKE_UUID = utils.generate_uuid() FIVE_KB = (5 * 1024) S3_CONF = {'verbose': True, 'debug': True, 's3_store_access_key': 'user', 's3_store_secret_key': 'key', 's3_store_host': 'localhost:8080', 's3_store_bucket': 'tank'} # We stub out as little as possible to ensure that the code paths # between tank.store.s3 and boto.s3.connection are tested # thoroughly def stub_out_s3(stubs):
# Standard library
import httplib
import sys
import unittest
import urlparse

# Third-party
import boto.s3.connection
import stubout

# Project
from tank.common import exception
from tank.common import utils
from tank.store import BackendException, UnsupportedBackend
from tank.store.location import get_location_from_uri
from tank.store.s3 import Store
from tank.tests import utils as test_utils

# A throwaway-but-stable image id shared by the tests below.
FAKE_UUID = utils.generate_uuid()

FIVE_KB = 5 * 1024
S3_CONF = {
    'verbose': True,
    'debug': True,
    's3_store_access_key': 'user',
    's3_store_secret_key': 'key',
    's3_store_host': 'localhost:8080',
    's3_store_bucket': 'tank',
}

# We stub out as little as possible to ensure that the code paths
# between tank.store.s3 and boto.s3.connection are tested
# thoroughly