def teardown_package():
    """Module-level teardown: restore the collate locale, delete test
    containers/objects, and (for in-process runs) shut down test servers
    and restore hash-path/config globals."""
    global orig_collate
    locale.setlocale(locale.LC_COLLATE, orig_collate)
    # clean up containers and objects left behind after running tests
    conn = Connection(config)
    conn.authenticate()
    account = Account(conn, config.get('account', config['username']))
    account.delete_containers()
    global in_process
    if in_process:
        # best-effort: kill the in-process server coroutines
        try:
            for server in _test_coros:
                server.kill()
        except Exception:
            pass
        # best-effort: remove the temporary test directory tree
        try:
            rmtree(os.path.dirname(_testdir))
        except Exception:
            pass
        # restore the hash-path prefix/suffix and swift.conf path that
        # setup overrode, then re-derive constraints from them
        utils.HASH_PATH_PREFIX, utils.HASH_PATH_SUFFIX = \
            orig_hash_path_suff_pref
        utils.SWIFT_CONF_FILE = orig_swift_conf_name
        constraints.reload_constraints()
def teardown_package():
    """Module-level teardown: restore the collate locale, delete test data
    (only when a config was loaded), stop in-process servers, and reset
    module globals."""
    global orig_collate
    locale.setlocale(locale.LC_COLLATE, orig_collate)
    # clean up containers and objects left behind after running tests
    global config
    if config:
        conn = Connection(config)
        conn.authenticate()
        account = Account(conn, config.get('account', config['username']))
        account.delete_containers()
    global in_process
    global _test_socks
    if in_process:
        # best-effort shutdown of the in-process servers
        try:
            for i, server in enumerate(_test_coros):
                server.kill()
                if not server.dead:
                    # kill it from the socket level
                    _test_socks[i].close()
        except Exception:
            pass
        # best-effort removal of the temporary test directory tree
        try:
            rmtree(os.path.dirname(_testdir))
        except Exception:
            pass
    # NOTE(review): reset_globals() placed at function level (not inside
    # the in_process branch) — confirm against the original layout
    reset_globals()
def teardown_package():
    """Module-level teardown: restore locale, delete test data (when a
    config was loaded), stop in-process servers, restore hash-path and
    swift.conf globals, and reset module globals."""
    global orig_collate
    locale.setlocale(locale.LC_COLLATE, orig_collate)
    # clean up containers and objects left behind after running tests
    global config
    conn = Connection(config)
    conn.authenticate()
    account = Account(conn, config.get("account", config["username"]))
    account.delete_containers()
    global in_process
    global _test_socks
    if in_process:
        # best-effort shutdown of the in-process servers
        try:
            for i, server in enumerate(_test_coros):
                server.kill()
                if not server.dead:
                    # kill it from the socket level
                    _test_socks[i].close()
        except Exception:
            pass
        # best-effort removal of the temporary test directory tree
        try:
            rmtree(os.path.dirname(_testdir))
        except Exception:
            pass
        # restore the hash-path prefix/suffix and swift.conf path that
        # setup overrode, then re-derive constraints from them
        utils.HASH_PATH_PREFIX, utils.HASH_PATH_SUFFIX = orig_hash_path_suff_pref
        utils.SWIFT_CONF_FILE = orig_swift_conf_name
        constraints.reload_constraints()
    # NOTE(review): reset_globals() placed at function level (not inside
    # the in_process branch) — confirm against the original layout
    reset_globals()
def setUp(self):
    """Skip unless SLO is available, then build a versioned container plus
    a segments container holding two 1 MiB segments ('a' and 'b')."""
    if 'slo' not in cluster_info:
        raise SkipTest("SLO not enabled")

    self.conn = Connection(tf.config)
    self.conn.authenticate()
    self.account = Account(
        self.conn, tf.config.get('account', tf.config['username']))
    self.account.delete_containers()

    # create a container with versioning
    self.versions_container = self.account.container(Utils.create_name())
    self.container = self.account.container(Utils.create_name())
    self.segments_container = self.account.container(Utils.create_name())

    versioning_hdrs = {'X-Versions-Location': self.versions_container.name}
    if not self.container.create(hdrs=versioning_hdrs):
        raise ResponseError(self.conn.response)
    if 'versions' not in self.container.info():
        raise SkipTest("Object versioning not enabled")

    for aux_container in (self.versions_container,
                          self.segments_container):
        if not aux_container.create():
            raise ResponseError(self.conn.response)

    # upload the segments and remember their metadata for manifests
    self.seg_info = {}
    segment_size = 1024 * 1024
    for seg_name in ('a', 'b'):
        segment = self.segments_container.file(seg_name)
        segment.write(seg_name * segment_size)
        self.seg_info[seg_name] = {
            'size_bytes': segment_size,
            'etag': segment.md5,
            'path': '/%s/%s' % (self.segments_container.name, seg_name),
        }
def setUp(cls):
    """Create the container-tempurl environment: two tempurl keys set as
    container metadata, a second account granted read access (for ACL
    tests), and two objects to fetch."""
    if cls.tempurl_enabled is None:
        cls.tempurl_enabled = 'tempurl' in cluster_info
        if not cls.tempurl_enabled:
            return

    super(TestContainerTempurlEnv, cls).setUp()

    cls.tempurl_key = Utils.create_name()
    cls.tempurl_key2 = Utils.create_name()

    # creating another account and connection
    # for ACL tests
    config2 = deepcopy(tf.config)
    config2['account'] = tf.config['account2']
    config2['username'] = tf.config['username2']
    config2['password'] = tf.config['password2']
    cls.conn2 = Connection(config2)
    cls.conn2.authenticate()
    # fix: the Account(...) construction that used to precede this line
    # was dead code — it was immediately overwritten by get_account()
    cls.account2 = cls.conn2.get_account()

    cls.container = cls.account.container(Utils.create_name())
    if not cls.container.create({
            'x-container-meta-temp-url-key': cls.tempurl_key,
            'x-container-meta-temp-url-key-2': cls.tempurl_key2,
            'x-container-read': cls.account2.name}):
        raise ResponseError(cls.conn.response)

    cls.obj = cls.container.file(Utils.create_name())
    cls.obj.write("obj contents")
    cls.other_obj = cls.container.file(Utils.create_name())
    cls.other_obj.write("other obj contents")
def setUp(self):
    """Create the versioned-container fixture used by SLO tests: one
    container with X-Versions-Location set, a versions container, and a
    segments container holding two 1 MiB segments."""
    if 'slo' not in cluster_info:
        raise SkipTest("SLO not enabled")

    self.conn = Connection(tf.config)
    self.conn.authenticate()
    account_name = tf.config.get('account', tf.config['username'])
    self.account = Account(self.conn, account_name)
    self.account.delete_containers()

    # three fresh, randomly named containers
    self.versions_container = self.account.container(Utils.create_name())
    self.container = self.account.container(Utils.create_name())
    self.segments_container = self.account.container(Utils.create_name())

    # the primary container keeps its versions in versions_container
    created = self.container.create(
        hdrs={'X-Versions-Location': self.versions_container.name})
    if not created:
        raise ResponseError(self.conn.response)
    if 'versions' not in self.container.info():
        raise SkipTest("Object versioning not enabled")

    for cont in (self.versions_container, self.segments_container):
        if not cont.create():
            raise ResponseError(self.conn.response)

    # create some segments
    self.seg_info = {}
    one_mb = 1024 * 1024
    for letter in ('a', 'b'):
        file_item = self.segments_container.file(letter)
        file_item.write(letter * one_mb)
        self.seg_info[letter] = {
            'size_bytes': one_mb,
            'etag': file_item.md5,
            'path': '/%s/%s' % (self.segments_container.name, letter)}
def setUp(cls):
    """Create the account-tempurl environment: two tempurl keys stored as
    account metadata, one container, and two objects to fetch."""
    cls.conn = Connection(tf.config)
    cls.conn.authenticate()

    if cls.tempurl_enabled is None:
        cls.tempurl_enabled = 'tempurl' in cluster_info
        if not cls.tempurl_enabled:
            return

    cls.tempurl_key = Utils.create_name()
    cls.tempurl_key2 = Utils.create_name()

    cls.account = Account(
        cls.conn, tf.config.get('account', tf.config['username']))
    cls.account.delete_containers()
    cls.account.update_metadata({
        'temp-url-key': cls.tempurl_key,
        'temp-url-key-2': cls.tempurl_key2})

    cls.container = cls.account.container(Utils.create_name())
    if not cls.container.create():
        raise ResponseError(cls.conn.response)

    def _write_obj(contents):
        # helper: create a randomly named object holding `contents`
        item = cls.container.file(Utils.create_name())
        item.write(contents)
        return item

    cls.obj = _write_obj("obj contents")
    cls.other_obj = _write_obj("other obj contents")
def setUp(cls):
    """Build the DLO fixture: segments a-e (lower and upper case) in one
    container, f-j in a second container, and four X-Object-Manifest
    objects pointing at the various segment prefixes."""
    cls.conn = Connection(tf.config)
    cls.conn.authenticate()

    # a second user (same account) for cross-user access tests
    config2 = tf.config.copy()
    config2['username'] = tf.config['username3']
    config2['password'] = tf.config['password3']
    cls.conn2 = Connection(config2)
    cls.conn2.authenticate()

    cls.account = Account(cls.conn,
                          tf.config.get('account', tf.config['username']))
    cls.account.delete_containers()

    cls.container = cls.account.container(Utils.create_name())
    cls.container2 = cls.account.container(Utils.create_name())
    for cont in (cls.container, cls.container2):
        if not cont.create():
            raise ResponseError(cls.conn.response)

    # avoid getting a prefix that stops halfway through an encoded
    # character
    prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8")
    cls.segment_prefix = prefix

    # ten segments in the first container: five lower-case, five upper
    for letter in ('a', 'b', 'c', 'd', 'e'):
        file_item = cls.container.file("%s/seg_lower%s" % (prefix, letter))
        file_item.write(letter * 10)
        file_item = cls.container.file("%s/seg_upper%s" % (prefix, letter))
        file_item.write(letter.upper() * 10)

    # five more lower-case segments in the second container
    for letter in ('f', 'g', 'h', 'i', 'j'):
        file_item = cls.container2.file(
            "%s/seg_lower%s" % (prefix, letter))
        file_item.write(letter * 10)

    # manifest over the lower-case segments of container 1
    man1 = cls.container.file("man1")
    man1.write('man1-contents',
               hdrs={"X-Object-Manifest": "%s/%s/seg_lower" %
                     (cls.container.name, prefix)})

    # manifest over the upper-case segments of container 1
    man2 = cls.container.file("man2")
    man2.write('man2-contents',
               hdrs={"X-Object-Manifest": "%s/%s/seg_upper" %
                     (cls.container.name, prefix)})

    # manifest whose prefix matches BOTH seg_lower* and seg_upper*
    manall = cls.container.file("manall")
    manall.write('manall-contents',
                 hdrs={"X-Object-Manifest": "%s/%s/seg" %
                       (cls.container.name, prefix)})

    # manifest stored in container 1 but pointing at container 2's segments
    mancont2 = cls.container.file("mancont2")
    mancont2.write(
        'mancont2-contents',
        hdrs={"X-Object-Manifest": "%s/%s/seg_lower" %
              (cls.container2.name, prefix)})
def setUp(cls):
    """Create the object-versioning environment.

    Sets up the primary connection/account, a versions container, the
    versioned container (detecting clusters that reject the versioning
    header via 412), plus two extra accounts used by ACL tests.
    """
    cls.conn = Connection(tf.config)
    cls.storage_url, cls.storage_token = cls.conn.authenticate()
    cls.account = Account(cls.conn,
                          tf.config.get('account', tf.config['username']))

    # Second account/connection for ACL tests.
    # fix: this connection used to be built and authenticated twice with
    # identical config; one authenticated setup is enough.
    config2 = deepcopy(tf.config)
    config2['account'] = tf.config['account2']
    config2['username'] = tf.config['username2']
    config2['password'] = tf.config['password2']
    cls.conn2 = Connection(config2)
    cls.storage_url2, cls.storage_token2 = cls.conn2.authenticate()
    cls.account2 = cls.conn2.get_account()
    cls.account2.delete_containers()

    # avoid getting a prefix that stops halfway through an encoded
    # character
    prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8")

    cls.versions_container = cls.account.container(prefix + "-versions")
    if not cls.versions_container.create():
        raise ResponseError(cls.conn.response)

    cls.container = cls.account.container(prefix + "-objs")
    container_headers = {
        cls.location_header_key: cls.versions_container.name}
    if not cls.container.create(hdrs=container_headers):
        # a 412 means the cluster rejected the versioning header
        if cls.conn.response.status == 412:
            cls.versioning_enabled = False
            return
        raise ResponseError(cls.conn.response)

    container_info = cls.container.info()
    # if versioning is off, then cls.location_header_key won't persist
    cls.versioning_enabled = 'versions' in container_info

    # setup another account with no access to anything to test ACLs
    config3 = deepcopy(tf.config)
    config3['account'] = tf.config['account']
    config3['username'] = tf.config['username3']
    config3['password'] = tf.config['password3']
    cls.conn3 = Connection(config3)
    cls.storage_url3, cls.storage_token3 = cls.conn3.authenticate()
    cls.account3 = cls.conn3.get_account()
def setUp(cls):
    """Populate one container with a nested tree of directory-marker and
    regular objects, for listing tests against a gluster-backed store."""
    cls.conn = Connection(config)
    cls.conn.authenticate()
    cls.account = Account(cls.conn, config.get('account', config['username']))
    # on-disk root for this account under the gluster mount; the account
    # name is taken from the storage URL's path component after the '_'
    cls.root_dir = os.path.join(
        '/mnt/gluster-object',
        cls.account.conn.storage_url.split('/')[2].split('_')[1])
    cls.account.delete_containers()

    cls.file_size = 8
    cls.container = cls.account.container(Utils.create_name())
    if not cls.container.create():
        raise ResponseError(cls.conn.response)

    # directory markers to create (some names include spaces and
    # reserved characters on purpose)
    cls.dirs = [
        'dir1',
        'dir2',
        'dir1/subdir1',
        'dir1/subdir2',
        'dir1/subdir1/subsubdir1',
        'dir1/subdir1/subsubdir2',
        'dir1/subdir with spaces',
        'dir1/subdir+with{whatever',
    ]

    # regular objects, spread across the directory tree above
    cls.files = [
        'file1',
        'file A',
        'dir1/file2',
        'dir1/subdir1/file2',
        'dir1/subdir1/file3',
        'dir1/subdir1/file4',
        'dir1/subdir1/subsubdir1/file5',
        'dir1/subdir1/subsubdir1/file6',
        'dir1/subdir1/subsubdir1/file7',
        'dir1/subdir1/subsubdir1/file8',
        'dir1/subdir1/subsubdir2/file9',
        'dir1/subdir1/subsubdir2/file0',
        'dir1/subdir with spaces/file B',
        'dir1/subdir+with{whatever/file D',
    ]

    stored_files = set()
    for d in cls.dirs:
        # an object written with the directory content-type acts as a
        # directory marker
        file = cls.container.file(d)
        file.write(hdrs={'Content-Type': 'application/directory'})
    for f in cls.files:
        file = cls.container.file(f)
        file.write_random(
            cls.file_size,
            hdrs={'Content-Type': 'application/octet-stream'})
        stored_files.add(f)
    cls.stored_files = sorted(stored_files)
    cls.sorted_objects = sorted(set(cls.dirs + cls.files))
def setUp(cls):
    """Create an authenticated connection, a fresh container, and the
    internal client / expirer manager used by object-expirer tests."""
    cls.conn = Connection(config)
    cls.conn.authenticate()
    cls.account = Account(cls.conn, config.get('account', config['username']))
    cls.account.delete_containers()

    cls.container = cls.account.container(Utils.create_name())
    if not cls.container.create():
        raise ResponseError(cls.conn.response)
    cls.file_size = 8
    # on-disk root for this account under the gluster mount; the account
    # name is taken from the storage URL's path component after the '_'
    cls.root_dir = os.path.join(
        '/mnt/gluster-object',
        cls.account.conn.storage_url.split('/')[2].split('_')[1])
    # fix: dropped the unused local `devices` that was read from config
    # but never referenced

    cls.client = InternalClient('/etc/swift/object-expirer.conf',
                                'Test Object Expirer', 1)
    cls.expirer = Manager(['object-expirer'])
def setUp(cls):
    """Create an 'object' container (versioning pointed at 'versions')
    and the 'versions' container itself, stored in ``cls.containers``.

    Fix: the original called ``cls.containers.append(cont)`` on a dict,
    which raises AttributeError; the containers are already registered
    by key, so the append is simply dropped.
    """
    cls.conn = Connection(config)
    cls.conn.authenticate()
    cls.account = Account(cls.conn, config.get('account', config['username']))
    cls.account.delete_containers()

    cls.containers = {}
    # create two containers, one for objects, the other for versions of
    # those objects
    for name, hdrs in (('object', {'X-Versions-Location': 'versions'}),
                       ('versions', {})):
        cont = cls.containers[name] = cls.account.container(name)
        if not cont.create(hdrs=hdrs):
            raise ResponseError(cls.conn.response)
def setUp(cls):
    """Publish a small static-web site: a world-readable container with
    index/error/css objects plus a nested 'dir/' tree of objects."""
    cls.conn = Connection(tf.config)
    cls.conn.authenticate()

    if cls.static_web_enabled is None:
        cls.static_web_enabled = 'staticweb' in tf.cluster_info
        if not cls.static_web_enabled:
            return

    cls.account = Account(
        cls.conn, tf.config.get('account', tf.config['username']))
    cls.account.delete_containers()

    cls.container = cls.account.container(Utils.create_name())
    if not cls.container.create(
            hdrs={'X-Container-Read': '.r:*,.rlistings'}):
        raise ResponseError(cls.conn.response)

    # logical names; a trailing '/' marks a directory-type object
    objects = ['index',
               'error',
               'listings_css',
               'dir/',
               'dir/obj',
               'dir/some sub%dir/',
               'dir/some sub%dir/obj']
    cls.objects = {}
    # sorted order guarantees every parent 'x/' exists before 'x/child'
    for key in sorted(objects):
        stripped = key.rstrip('/')
        if '/' in stripped:
            # nested entry: prefix with the already-created parent's name
            parent, _ = stripped.rsplit('/', 1)
            path = '%s/%s' % (cls.objects[parent + '/'].name,
                              Utils.create_name())
        else:
            path = Utils.create_name()
        file_item = cls.container.file(path)
        cls.objects[key] = file_item
        if key.endswith('/'):
            file_item.write(hdrs={'Content-Type': 'application/directory'})
        else:
            file_item.write(('%s contents' % key).encode('utf8'))
def setUp(cls):
    """Create the tempurl+SLO environment: a tempurl key on the account,
    two 1 MiB segments, and a two-entry SLO manifest over them."""
    cls.conn = Connection(tf.config)
    cls.conn.authenticate()

    if cls.enabled is None:
        cls.enabled = 'tempurl' in cluster_info and 'slo' in cluster_info

    cls.tempurl_key = Utils.create_name()

    cls.account = Account(
        cls.conn, tf.config.get('account', tf.config['username']))
    cls.account.delete_containers()
    cls.account.update_metadata({'temp-url-key': cls.tempurl_key})

    cls.manifest_container = cls.account.container(Utils.create_name())
    cls.segments_container = cls.account.container(Utils.create_name())
    for container in (cls.manifest_container, cls.segments_container):
        if not container.create():
            raise ResponseError(cls.conn.response)

    # upload one segment of '1's and one of '2's, recording manifest
    # entries as we go
    seg_size = 1024 * 1024
    cls.manifest_data = []
    for fill in ('1', '2'):
        segment = cls.segments_container.file(Utils.create_name())
        segment.write(fill * seg_size)
        cls.manifest_data.append(
            {'size_bytes': seg_size,
             'etag': segment.md5,
             'path': '/%s/%s' % (cls.segments_container.name,
                                 segment.name)})

    cls.manifest = cls.manifest_container.file(Utils.create_name())
    cls.manifest.write(
        json.dumps(cls.manifest_data),
        parms={'multipart-manifest': 'put'})
def setUp(cls):
    """Create the domain-remap environment: one container holding a
    normally named object and one whose name starts with '/v1'."""
    cls.conn = Connection(tf.config)
    cls.conn.authenticate()

    if cls.domain_remap_enabled is None:
        cls.domain_remap_enabled = 'domain_remap' in cluster_info
        if not cls.domain_remap_enabled:
            return

    cls.account = Account(
        cls.conn, tf.config.get('account', tf.config['username']))
    cls.account.delete_containers()

    cls.container = cls.account.container(Utils.create_name())
    if not cls.container.create():
        raise ResponseError(cls.conn.response)

    # one randomly named object plus one whose name begins with a slash,
    # to exercise path remapping
    for attr, obj_name in (('obj', Utils.create_name()),
                           ('obj_slash', '/v1')):
        file_item = cls.container.file(obj_name)
        file_item.write('obj contents')
        setattr(cls, attr, file_item)
def setUp(cls):
    """Create the cross-policy object-versioning environment.

    Skips (while claiming versioning support) when the cluster lacks
    multiple storage policies; otherwise the versions container and the
    versioned container are placed on different policies. Also sets up
    two extra accounts for ACL tests.
    """
    cls.conn = Connection(tf.config)
    cls.conn.authenticate()

    if cls.multiple_policies_enabled is None:
        try:
            cls.policies = tf.FunctionalStoragePolicyCollection.from_info()
        except AssertionError:
            pass

    if cls.policies and len(cls.policies) > 1:
        cls.multiple_policies_enabled = True
    else:
        cls.multiple_policies_enabled = False
        cls.versioning_enabled = True
        # We don't actually know the state of versioning, but without
        # multiple policies the tests should be skipped anyway. Claiming
        # versioning support lets us report the right reason for skipping.
        return

    policy = cls.policies.select()
    version_policy = cls.policies.exclude(name=policy['name']).select()

    cls.account = Account(cls.conn,
                          tf.config.get('account', tf.config['username']))

    # Second account/connection for ACL tests.
    # fix: this connection used to be built and authenticated twice with
    # identical config; one authenticated setup is enough.
    config2 = deepcopy(tf.config)
    config2['account'] = tf.config['account2']
    config2['username'] = tf.config['username2']
    config2['password'] = tf.config['password2']
    cls.conn2 = Connection(config2)
    cls.storage_url2, cls.storage_token2 = cls.conn2.authenticate()
    cls.account2 = cls.conn2.get_account()
    cls.account2.delete_containers()

    # avoid getting a prefix that stops halfway through an encoded
    # character
    prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8")

    cls.versions_container = cls.account.container(prefix + "-versions")
    if not cls.versions_container.create(
            {'X-Storage-Policy': policy['name']}):
        raise ResponseError(cls.conn.response)

    cls.container = cls.account.container(prefix + "-objs")
    if not cls.container.create(
            hdrs={cls.location_header_key: cls.versions_container.name,
                  'X-Storage-Policy': version_policy['name']}):
        # a 412 means the cluster rejected the versioning header
        if cls.conn.response.status == 412:
            cls.versioning_enabled = False
            return
        raise ResponseError(cls.conn.response)

    container_info = cls.container.info()
    # if versioning is off, then X-Versions-Location won't persist
    cls.versioning_enabled = 'versions' in container_info

    # setup another account with no access to anything to test ACLs
    config3 = deepcopy(tf.config)
    config3['account'] = tf.config['account']
    config3['username'] = tf.config['username3']
    config3['password'] = tf.config['password3']
    cls.conn3 = Connection(config3)
    cls.storage_url3, cls.storage_token3 = cls.conn3.authenticate()
    cls.account3 = cls.conn3.get_account()
class TestSloWithVersioning(unittest2.TestCase):
    """Exercise SLO manifests stored in a container that has
    X-Versions-Location versioning enabled."""

    def setUp(self):
        # skip unless the cluster supports SLO (and, in-process, xattrs)
        if 'slo' not in cluster_info:
            raise SkipTest("SLO not enabled")
        if tf.in_process:
            tf.skip_if_no_xattrs()

        self.conn = Connection(tf.config)
        self.conn.authenticate()
        self.account = Account(
            self.conn, tf.config.get('account', tf.config['username']))
        self.account.delete_containers()

        # create a container with versioning
        self.versions_container = self.account.container(Utils.create_name())
        self.container = self.account.container(Utils.create_name())
        self.segments_container = self.account.container(Utils.create_name())
        if not self.container.create(
                hdrs={'X-Versions-Location': self.versions_container.name}):
            raise ResponseError(self.conn.response)
        if 'versions' not in self.container.info():
            raise SkipTest("Object versioning not enabled")

        for cont in (self.versions_container, self.segments_container):
            if not cont.create():
                raise ResponseError(self.conn.response)

        # create some segments
        self.seg_info = {}
        for letter, size in (('a', 1024 * 1024),
                             ('b', 1024 * 1024)):
            seg_name = letter
            file_item = self.segments_container.file(seg_name)
            file_item.write(letter * size)
            self.seg_info[seg_name] = {
                'size_bytes': size,
                'etag': file_item.md5,
                'path': '/%s/%s' % (self.segments_container.name, seg_name)}

    def _create_manifest(self, seg_name):
        # create a manifest in the versioning container
        file_item = self.container.file("my-slo-manifest")
        file_item.write(
            json.dumps([self.seg_info[seg_name]]),
            parms={'multipart-manifest': 'put'})
        return file_item

    def _assert_is_manifest(self, file_item, seg_name):
        # fetch the raw manifest and check it round-trips the segment info
        manifest_body = file_item.read(parms={'multipart-manifest': 'get'})
        resp_headers = dict(file_item.conn.response.getheaders())
        self.assertIn('x-static-large-object', resp_headers)
        self.assertEqual('application/json; charset=utf-8',
                         file_item.content_type)
        try:
            manifest = json.loads(manifest_body)
        except ValueError:
            self.fail("GET with multipart-manifest=get got invalid json")

        self.assertEqual(1, len(manifest))
        # client-side manifest keys map to these names in the stored form
        key_map = {'etag': 'hash', 'size_bytes': 'bytes', 'path': 'name'}
        for k_client, k_slo in key_map.items():
            self.assertEqual(self.seg_info[seg_name][k_client],
                             manifest[0][k_slo])

    def _assert_is_object(self, file_item, seg_name):
        # the expanded large object is 1 MiB of the segment's letter
        file_contents = file_item.read()
        self.assertEqual(1024 * 1024, len(file_contents))
        self.assertEqual(seg_name, file_contents[0])
        self.assertEqual(seg_name, file_contents[-1])

    def tearDown(self):
        # remove versioning to allow simple container delete
        self.container.update_metadata(hdrs={'X-Versions-Location': ''})
        self.account.delete_containers()

    def test_slo_manifest_version(self):
        file_item = self._create_manifest('a')
        # sanity check: read the manifest, then the large object
        self._assert_is_manifest(file_item, 'a')
        self._assert_is_object(file_item, 'a')

        # upload new manifest
        file_item = self._create_manifest('b')
        # sanity check: read the manifest, then the large object
        self._assert_is_manifest(file_item, 'b')
        self._assert_is_object(file_item, 'b')

        versions_list = self.versions_container.files()
        self.assertEqual(1, len(versions_list))
        version_file = self.versions_container.file(versions_list[0])
        # check the version is still a manifest
        self._assert_is_manifest(version_file, 'a')
        self._assert_is_object(version_file, 'a')

        # delete the newest manifest
        file_item.delete()

        # expect the original manifest file to be restored
        self._assert_is_manifest(file_item, 'a')
        self._assert_is_object(file_item, 'a')
def setUp(cls):
    """Build the SLO fixture: three connections (primary, ACL-test
    account, third user), two containers, segments a-e, and a family of
    manifests — plain, nested (submanifest), ranged, and etag-less."""
    cls.conn = Connection(tf.config)
    cls.conn.authenticate()

    # second account for ACL tests
    config2 = deepcopy(tf.config)
    config2['account'] = tf.config['account2']
    config2['username'] = tf.config['username2']
    config2['password'] = tf.config['password2']
    cls.conn2 = Connection(config2)
    cls.conn2.authenticate()
    cls.account2 = cls.conn2.get_account()
    cls.account2.delete_containers()

    # third user within the primary account
    config3 = tf.config.copy()
    config3['username'] = tf.config['username3']
    config3['password'] = tf.config['password3']
    cls.conn3 = Connection(config3)
    cls.conn3.authenticate()

    if cls.slo_enabled is None:
        cls.slo_enabled = 'slo' in cluster_info
        if not cls.slo_enabled:
            return

    cls.account = Account(cls.conn,
                          tf.config.get('account', tf.config['username']))
    cls.account.delete_containers()

    cls.container = cls.account.container(Utils.create_name())
    cls.container2 = cls.account.container(Utils.create_name())

    for cont in (cls.container, cls.container2):
        if not cont.create():
            raise ResponseError(cls.conn.response)

    # segment objects seg_a..seg_e, created by the class helper
    cls.seg_info = seg_info = cls.create_segments(cls.container)

    # straightforward five-segment manifest
    file_item = cls.container.file("manifest-abcde")
    file_item.write(
        json.dumps([seg_info['seg_a'], seg_info['seg_b'],
                    seg_info['seg_c'], seg_info['seg_d'],
                    seg_info['seg_e']]),
        parms={'multipart-manifest': 'put'})

    # Put the same manifest in the container2
    file_item = cls.container2.file("manifest-abcde")
    file_item.write(
        json.dumps([seg_info['seg_a'], seg_info['seg_b'],
                    seg_info['seg_c'], seg_info['seg_d'],
                    seg_info['seg_e']]),
        parms={'multipart-manifest': 'put'})

    # two-segment manifest, used below as a nested submanifest
    file_item = cls.container.file('manifest-cd')
    cd_json = json.dumps([seg_info['seg_c'], seg_info['seg_d']])
    file_item.write(cd_json, parms={'multipart-manifest': 'put'})
    cd_etag = hashlib.md5(seg_info['seg_c']['etag'] +
                          seg_info['seg_d']['etag']).hexdigest()

    # manifest referencing manifest-cd as a submanifest entry
    file_item = cls.container.file("manifest-bcd-submanifest")
    file_item.write(
        json.dumps([seg_info['seg_b'],
                    {'etag': cd_etag,
                     'size_bytes': (seg_info['seg_c']['size_bytes'] +
                                    seg_info['seg_d']['size_bytes']),
                     'path': '/%s/%s' % (cls.container.name,
                                         'manifest-cd')}]),
        parms={'multipart-manifest': 'put'})
    bcd_submanifest_etag = hashlib.md5(
        seg_info['seg_b']['etag'] + cd_etag).hexdigest()

    # doubly nested manifest: a + (b + (c + d)) + e
    file_item = cls.container.file("manifest-abcde-submanifest")
    file_item.write(
        json.dumps([
            seg_info['seg_a'],
            {'etag': bcd_submanifest_etag,
             'size_bytes': (seg_info['seg_b']['size_bytes'] +
                            seg_info['seg_c']['size_bytes'] +
                            seg_info['seg_d']['size_bytes']),
             'path': '/%s/%s' % (cls.container.name,
                                 'manifest-bcd-submanifest')},
            seg_info['seg_e']]),
        parms={'multipart-manifest': 'put'})
    abcde_submanifest_etag = hashlib.md5(
        seg_info['seg_a']['etag'] + bcd_submanifest_etag +
        seg_info['seg_e']['etag']).hexdigest()
    abcde_submanifest_size = (seg_info['seg_a']['size_bytes'] +
                              seg_info['seg_b']['size_bytes'] +
                              seg_info['seg_c']['size_bytes'] +
                              seg_info['seg_d']['size_bytes'] +
                              seg_info['seg_e']['size_bytes'])

    # manifest whose entries take byte ranges out of the nested manifest
    file_item = cls.container.file("ranged-manifest")
    file_item.write(
        json.dumps([
            {'etag': abcde_submanifest_etag,
             'size_bytes': abcde_submanifest_size,
             'path': '/%s/%s' % (cls.container.name,
                                 'manifest-abcde-submanifest'),
             'range': '-1048578'},  # 'c' + ('d' * 2**20) + 'e'
            {'etag': abcde_submanifest_etag,
             'size_bytes': abcde_submanifest_size,
             'path': '/%s/%s' % (cls.container.name,
                                 'manifest-abcde-submanifest'),
             'range': '524288-1572863'},  # 'a' * 2**19 + 'b' * 2**19
            {'etag': abcde_submanifest_etag,
             'size_bytes': abcde_submanifest_size,
             'path': '/%s/%s' % (cls.container.name,
                                 'manifest-abcde-submanifest'),
             'range': '3145727-3145728'}]),  # 'cd'
        parms={'multipart-manifest': 'put'})
    # etag of a ranged manifest is the md5 of "<etag>:<range>;" parts
    ranged_manifest_etag = hashlib.md5(
        abcde_submanifest_etag + ':3145727-4194304;' +
        abcde_submanifest_etag + ':524288-1572863;' +
        abcde_submanifest_etag + ':3145727-3145728;').hexdigest()
    ranged_manifest_size = 2 * 1024 * 1024 + 4

    # manifest mixing a plain segment with ranges of the ranged manifest
    file_item = cls.container.file("ranged-submanifest")
    file_item.write(
        json.dumps([
            seg_info['seg_c'],
            {'etag': ranged_manifest_etag,
             'size_bytes': ranged_manifest_size,
             'path': '/%s/%s' % (cls.container.name,
                                 'ranged-manifest')},
            {'etag': ranged_manifest_etag,
             'size_bytes': ranged_manifest_size,
             'path': '/%s/%s' % (cls.container.name,
                                 'ranged-manifest'),
             'range': '524289-1572865'},
            {'etag': ranged_manifest_etag,
             'size_bytes': ranged_manifest_size,
             'path': '/%s/%s' % (cls.container.name,
                                 'ranged-manifest'),
             'range': '-3'}]),
        parms={'multipart-manifest': 'put'})

    # manifest with etag/size left for the server to fill in
    file_item = cls.container.file("manifest-db")
    file_item.write(
        json.dumps([
            {'path': seg_info['seg_d']['path'], 'etag': None,
             'size_bytes': None},
            {'path': seg_info['seg_b']['path'], 'etag': None,
             'size_bytes': None},
        ]), parms={'multipart-manifest': 'put'})

    # ranged manifest that references the same segment twice
    file_item = cls.container.file("ranged-manifest-repeated-segment")
    file_item.write(
        json.dumps([
            {'path': seg_info['seg_a']['path'], 'etag': None,
             'size_bytes': None, 'range': '-1048578'},
            {'path': seg_info['seg_a']['path'], 'etag': None,
             'size_bytes': None},
            {'path': seg_info['seg_b']['path'], 'etag': None,
             'size_bytes': None, 'range': '-1048578'},
        ]), parms={'multipart-manifest': 'put'})
class TestSloWithVersioning(Base):
    """Exercise SLO manifests stored in a container that has
    X-Versions-Location versioning enabled."""

    def setUp(self):
        # skip unless the cluster supports SLO
        if 'slo' not in cluster_info:
            raise SkipTest("SLO not enabled")

        self.conn = Connection(tf.config)
        self.conn.authenticate()
        self.account = Account(
            self.conn, tf.config.get('account', tf.config['username']))
        self.account.delete_containers()

        # create a container with versioning
        self.versions_container = self.account.container(Utils.create_name())
        self.container = self.account.container(Utils.create_name())
        self.segments_container = self.account.container(Utils.create_name())
        if not self.container.create(
                hdrs={'X-Versions-Location': self.versions_container.name}):
            raise ResponseError(self.conn.response)
        if 'versions' not in self.container.info():
            raise SkipTest("Object versioning not enabled")

        for cont in (self.versions_container, self.segments_container):
            if not cont.create():
                raise ResponseError(self.conn.response)

        # create some segments
        self.seg_info = {}
        for letter, size in (('a', 1024 * 1024),
                             ('b', 1024 * 1024)):
            seg_name = letter
            file_item = self.segments_container.file(seg_name)
            file_item.write(letter * size)
            self.seg_info[seg_name] = {
                'size_bytes': size,
                'etag': file_item.md5,
                'path': '/%s/%s' % (self.segments_container.name, seg_name)
            }

    def _create_manifest(self, seg_name):
        # create a manifest in the versioning container
        file_item = self.container.file("my-slo-manifest")
        file_item.write(json.dumps([self.seg_info[seg_name]]),
                        parms={'multipart-manifest': 'put'})
        return file_item

    def _assert_is_manifest(self, file_item, seg_name):
        # fetch the raw manifest and check it round-trips the segment info
        manifest_body = file_item.read(parms={'multipart-manifest': 'get'})
        resp_headers = dict(file_item.conn.response.getheaders())
        self.assertIn('x-static-large-object', resp_headers)
        self.assertEqual('application/json; charset=utf-8',
                         file_item.content_type)
        try:
            manifest = json.loads(manifest_body)
        except ValueError:
            self.fail("GET with multipart-manifest=get got invalid json")

        self.assertEqual(1, len(manifest))
        # client-side manifest keys map to these names in the stored form
        key_map = {'etag': 'hash', 'size_bytes': 'bytes', 'path': 'name'}
        for k_client, k_slo in key_map.items():
            self.assertEqual(self.seg_info[seg_name][k_client],
                             manifest[0][k_slo])

    def _assert_is_object(self, file_item, seg_name):
        # the expanded large object is 1 MiB of the segment's letter
        file_contents = file_item.read()
        self.assertEqual(1024 * 1024, len(file_contents))
        self.assertEqual(seg_name, file_contents[0])
        self.assertEqual(seg_name, file_contents[-1])

    def tearDown(self):
        # remove versioning to allow simple container delete
        self.container.update_metadata(hdrs={'X-Versions-Location': ''})
        self.account.delete_containers()

    def test_slo_manifest_version(self):
        file_item = self._create_manifest('a')
        # sanity check: read the manifest, then the large object
        self._assert_is_manifest(file_item, 'a')
        self._assert_is_object(file_item, 'a')

        # upload new manifest
        file_item = self._create_manifest('b')
        # sanity check: read the manifest, then the large object
        self._assert_is_manifest(file_item, 'b')
        self._assert_is_object(file_item, 'b')

        versions_list = self.versions_container.files()
        self.assertEqual(1, len(versions_list))
        version_file = self.versions_container.file(versions_list[0])
        # check the version is still a manifest
        self._assert_is_manifest(version_file, 'a')
        self._assert_is_object(version_file, 'a')

        # delete the newest manifest
        file_item.delete()

        # expect the original manifest file to be restored
        self._assert_is_manifest(file_item, 'a')
        self._assert_is_object(file_item, 'a')