def test_tempurl_object_download(self):
    """Download an object through a GET TempURL and verify the
    response carries the expected Content-Disposition header.

    The initial sleep gives a previously-set
    X-Account-Meta-Temp-URL-Key time to take effect before this test
    sets its own key.  # assumes 61s is enough — TODO confirm
    """
    time.sleep(61)
    container_name = '{0}_{1}'.format(
        self.base_container_name, randomstring.get_random_string())
    self.client.create_container(container_name)
    self.addCleanup(
        self.client.force_delete_containers, [container_name])

    object_name = '{0}_{1}'.format(
        self.base_object_name, randomstring.get_random_string())
    object_data = 'Test file data'
    content_length = str(len(object_data))
    etag = md5hash.get_md5_hash(object_data)
    headers = {'Content-Length': content_length,
               'Content-Type': CONTENT_TYPE_TEXT,
               'Etag': etag}
    self.client.create_object(
        container_name, object_name, headers=headers, data=object_data)

    # Set a fresh TempURL key on the account, then build a signed
    # download URL for the object.
    temp_key = '{0}_{1}'.format(
        'temp_url_dl_test_key', randomstring.get_random_string())
    key = md5hash.get_md5_hash(temp_key)
    headers = {'X-Account-Meta-Temp-URL-Key': key}
    resp = self.client.set_temp_url_key(headers=headers)
    self.assertEqual(resp.status_code, 204)

    tempurl_data = self.client.create_temp_url(
        'GET', container_name, object_name, '86400', key)
    dl_tempurl = '{0}?temp_url_sig={1}&temp_url_expires={2}'.format(
        tempurl_data['target_url'],
        tempurl_data['signature'],
        tempurl_data['expires'])

    resp = self.client.get(dl_tempurl)
    # Use .get() so a missing header yields a clean assertion failure.
    # The original indexed the header directly, which raised KeyError
    # before its "is None" fallback branch could ever run.
    received = resp.headers.get('content-disposition')
    self.assertIsNotNone(received)
    expected = 'attachment; filename="{0}"'.format(object_name)
    self.assertEqual(expected, received)
def test_tempurl_object_upload(self):
    """Upload object data through a PUT TempURL, then download the
    object normally and verify the content round-trips.

    The initial sleep gives a previously-set
    X-Account-Meta-Temp-URL-Key time to take effect before this test
    sets its own key.  # assumes 61s is enough — TODO confirm
    """
    time.sleep(61)
    container_name = '{0}_{1}'.format(
        self.base_container_name, randomstring.get_random_string())
    self.client.create_container(container_name)
    self.addCleanup(
        self.client.force_delete_containers, [container_name])

    # Create an empty placeholder object that the TempURL PUT will
    # overwrite.
    object_name = '{0}_{1}'.format(
        self.base_object_name, randomstring.get_random_string())
    headers = {'Content-Length': '0'}
    self.client.create_object(
        container_name, object_name, headers=headers)

    # Fixed copy-paste label: this is the upload ("ul") test; the
    # original seeded the key with the download test's label.  The key
    # value is random either way, so behavior is unaffected.
    temp_key = '{0}_{1}'.format(
        'temp_url_ul_test_key', randomstring.get_random_string())
    key = md5hash.get_md5_hash(temp_key)
    headers = {'X-Account-Meta-Temp-URL-Key': key}
    resp = self.client.set_temp_url_key(headers=headers)
    self.assertEqual(resp.status_code, 204)

    tempurl_data = self.client.create_temp_url(
        'PUT', container_name, object_name, '86400', key)
    ul_tempurl = '{0}?temp_url_sig={1}&temp_url_expires={2}'.format(
        tempurl_data['target_url'],
        tempurl_data['signature'],
        tempurl_data['expires'])

    object_data = 'Test file data'
    content_length = str(len(object_data))
    etag = md5hash.get_md5_hash(object_data)
    headers = {'Content-Length': content_length,
               'Content-Type': CONTENT_TYPE_TEXT,
               'Etag': etag}
    resp = self.client.put(ul_tempurl, data=object_data, headers=headers)
    self.assertEqual(resp.status_code, 201)

    resp = self.client.get_object(container_name, object_name)
    self.assertEqual(resp.content, object_data)
def test_tempurl_object_download(self):
    """Download an object through a GET TempURL and verify the
    response carries the expected Content-Disposition header.

    The initial sleep gives a previously-set
    X-Account-Meta-Temp-URL-Key time to take effect before this test
    sets its own key.  # assumes 61s is enough — TODO confirm
    """
    time.sleep(61)
    container_name = '{0}_{1}'.format(self.base_container_name,
                                      randomstring.get_random_string())
    self.client.create_container(container_name)
    self.addCleanup(self.client.force_delete_containers,
                    [container_name])

    object_name = '{0}_{1}'.format(self.base_object_name,
                                   randomstring.get_random_string())
    object_data = 'Test file data'
    content_length = str(len(object_data))
    etag = md5hash.get_md5_hash(object_data)
    headers = {
        'Content-Length': content_length,
        'Content-Type': CONTENT_TYPE_TEXT,
        'Etag': etag}
    self.client.create_object(container_name,
                              object_name,
                              headers=headers,
                              data=object_data)

    # Set a fresh TempURL key on the account, then build a signed
    # download URL for the object.
    temp_key = '{0}_{1}'.format('temp_url_dl_test_key',
                                randomstring.get_random_string())
    key = md5hash.get_md5_hash(temp_key)
    headers = {'X-Account-Meta-Temp-URL-Key': key}
    resp = self.client.set_temp_url_key(headers=headers)
    self.assertEqual(resp.status_code, 204)

    tempurl_data = self.client.create_temp_url('GET',
                                               container_name,
                                               object_name,
                                               '86400',
                                               key)
    dl_tempurl = '{0}?temp_url_sig={1}&temp_url_expires={2}'.format(
        tempurl_data['target_url'],
        tempurl_data['signature'],
        tempurl_data['expires'])

    resp = self.client.get(dl_tempurl)
    # Use .get() so a missing header yields a clean assertion failure.
    # The original indexed the header directly, which raised KeyError
    # before its "is None" fallback branch could ever run.
    received = resp.headers.get('content-disposition')
    self.assertIsNotNone(received)
    expected = 'attachment; filename="{0}"'.format(object_name)
    self.assertEqual(expected, received)
def create_archive(self, object_names, compression_type,
                   archive_name=BULK_ARCHIVE_NAME):
    """
    Bulk creates objects in the opencafe's temp directory specified
    in the engine config. Each object's data will be the md5sum of
    the object's name.

    @param object_names: a list of object names
    @type object_names: list of strings
    @param compression_type: file compression to apply to the
        archive; one of None, "gz" or "bz2"
    @type compression_type: string
    @param archive_name: base name for the archive file; a random
        string and the extension are appended to it
    @type archive_name: string

    @rtype: string
    @return: Returns full path of the archive that was created in
        opencafe's temp directory specified in the engine config
    """
    supported = [None, "gz", "bz2"]
    if compression_type not in supported:
        raise NameError("supported compression: {0}".format(supported))

    # Empty compression string means a plain (uncompressed) tar for
    # tarfile's "w:" mode.
    if not compression_type:
        ext = 'tar'
        compression_type = ''
    else:
        ext = 'tar.{0}'.format(compression_type)

    archive_name = '{0}.{1}.{2}'.format(
        archive_name, randstring.get_random_string(), ext)
    archive_dir = self.engine_config.temp_directory
    archive_filename = '{0}/{1}'.format(archive_dir, archive_name)

    archive = tarfile.open(
        archive_filename, 'w:{0}'.format(compression_type))
    for object_name in object_names:
        # Each member's payload is the md5 of its own name, so tests
        # can later verify extracted content without fixtures.
        object_data = get_md5_hash(object_name)
        object_size = len(object_data)
        object_time = int(mktime(datetime.now().timetuple()))
        object_buffer = StringIO(object_data)
        object_buffer.seek(0)

        object_info = tarfile.TarInfo(name=object_name)
        object_info.size = object_size
        object_info.mtime = object_time

        archive.addfile(tarinfo=object_info, fileobj=object_buffer)
    archive.close()

    # archive_filename already is "<temp_directory>/<archive_name>";
    # the original rebuilt the identical path a second time here.
    return archive_filename
def test_object_retrieval_with_if_match_header(self):
    """Create an object, then GET it with an If-Match header carrying
    the object's etag; the retrieval must succeed with a 200.
    """
    container_name = self.setup_container(self.base_container_name)
    object_name = self.base_object_name
    object_data = 'Test file data'

    etag = md5hash.get_md5_hash(object_data)
    create_headers = {
        'Content-Length': str(len(object_data)),
        'Content-Type': CONTENT_TYPE_TEXT,
        'Etag': etag}
    self.client.create_object(
        container_name,
        object_name,
        headers=create_headers,
        data=object_data)

    # Retrieve with a matching etag precondition.
    response = self.client.get_object(
        container_name,
        object_name,
        headers={'If-Match': etag})

    method = 'object retrieval with if match header'
    expected = 200
    received = response.status_code
    self.assertEqual(
        expected, received,
        msg=STATUS_CODE_MSG.format(
            method=method, expected=expected, received=str(received)))
def test_tempurl_object_upload(self):
    """Upload object data through a PUT TempURL, then download the
    object normally and verify the content round-trips.

    The initial sleep gives a previously-set
    X-Account-Meta-Temp-URL-Key time to take effect before this test
    sets its own key.  # assumes 61s is enough — TODO confirm
    """
    time.sleep(61)
    container_name = '{0}_{1}'.format(self.base_container_name,
                                      randomstring.get_random_string())
    self.client.create_container(container_name)
    self.addCleanup(self.client.force_delete_containers,
                    [container_name])

    # Create an empty placeholder object that the TempURL PUT will
    # overwrite.
    object_name = '{0}_{1}'.format(self.base_object_name,
                                   randomstring.get_random_string())
    headers = {'Content-Length': '0'}
    self.client.create_object(container_name,
                              object_name,
                              headers=headers)

    # Fixed copy-paste label: this is the upload ("ul") test; the
    # original seeded the key with the download test's label.  The key
    # value is random either way, so behavior is unaffected.
    temp_key = '{0}_{1}'.format('temp_url_ul_test_key',
                                randomstring.get_random_string())
    key = md5hash.get_md5_hash(temp_key)
    headers = {'X-Account-Meta-Temp-URL-Key': key}
    resp = self.client.set_temp_url_key(headers=headers)
    self.assertEqual(resp.status_code, 204)

    tempurl_data = self.client.create_temp_url('PUT',
                                               container_name,
                                               object_name,
                                               '86400',
                                               key)
    ul_tempurl = '{0}?temp_url_sig={1}&temp_url_expires={2}'.format(
        tempurl_data['target_url'],
        tempurl_data['signature'],
        tempurl_data['expires'])

    object_data = 'Test file data'
    content_length = str(len(object_data))
    etag = md5hash.get_md5_hash(object_data)
    headers = {
        'Content-Length': content_length,
        'Content-Type': CONTENT_TYPE_TEXT,
        'Etag': etag}
    resp = self.client.put(ul_tempurl, data=object_data, headers=headers)
    self.assertEqual(resp.status_code, 201)

    resp = self.client.get_object(container_name, object_name)
    self.assertEqual(resp.content, object_data)
def create_formpost(self, container, files, object_prefix='',
                    redirect='http://example.com/formpost',
                    max_file_size=104857600, max_file_count=10,
                    expires=None, key='', signature="",
                    x_delete_at=None, x_delete_after=None):
    """
    Creates a multipart/form-data body (RFC-2388) that can be POSTed
    to Swift's FormPost middleware.

    @param container: Name of the container to post objects to.
    @type container: string
    @param files: Files to post in the form. The dictionaries
                  representing a file should be formatted as follows:
                      {
                          'name': '<form name>',
                          'filename': '<filename>',
                          'content_type': '<content_type>',
                          'data': '<filedata>'
                      }
                  Where only name is required, defaults to other
                  values will be as follows:
                      filename - the value stored in name.
                      content_type - 'text/plain'
                      data - the md5 hash of the value stored in name.
    @type files: list of dictionaries
    @param object_prefix: prefix to be used in the name of
                          the objects created.
    @type object_prefix: string
    @param redirect: URL to be returned as the 'location' header in
                     the HTTP response.
    @type redirect: string
    @param max_file_size: The maximum file size in bytes which can be
                          uploaded with the form.
    @type max_file_size: int
    @param max_file_count: The maximum number of files allowed to be
                           uploaded with the form.
    @type max_file_count: int
    @param expires: The unix time relating to when the form expires
                    and will no longer allow uploads to the container.
    @type expires: int
    @param key: The account's X-Tempurl-Key used in creating the
                signature which authorizes the form to be POSTed.
    @type key: string
    @param signature: The HMAC-SHA1 signature of the form.
    @type signature: string
    @param x_delete_at: The unix time relating to when the object will
                        be deleted from the container.
    @type x_delete_at: int
    @param x_delete_after: The amount of time, in seconds, after which
                           the object will be deleted from the
                           container.
    @type x_delete_after: int
    @return: Data to be POSTed in the following format:
                 {
                     'target_url': '<url to POST to>',
                     'headers': '<headers to be added to the request>,
                     'body': '<body to be posted to the target url>'
                 }
    @rtype: dictionary
    """
    base_url, path = self.storage_url.split('/v1')
    path = '/v1{0}/{1}'.format(path, container)
    if object_prefix:
        path = '{0}/{1}'.format(path, object_prefix)
    if not expires:
        # Default: the form stays valid for ten minutes.
        expires = int(time() + 600)
    url = ''.join([base_url, path])

    # The HMAC is computed over path, redirect, max_file_size,
    # max_file_count and expires, newline separated, as required by
    # the FormPost middleware.
    hmac_body = '{0}\n{1}\n{2}\n{3}\n{4}'.format(
        path, redirect, max_file_size, max_file_count, expires)
    if not signature:
        signature = hmac.new(key, hmac_body, sha1).hexdigest()

    form = []
    if redirect != '':
        form.append({
            'headers': {
                'Content-Disposition': 'form-data; name="redirect"'},
            'data': redirect})
    form.append({
        'headers': {
            'Content-Disposition': 'form-data; name="max_file_size"'},
        'data': str(max_file_size)})
    form.append({
        'headers': {
            'Content-Disposition': 'form-data; name="max_file_count"'},
        'data': str(max_file_count)})
    form.append({
        'headers': {
            'Content-Disposition': 'form-data; name="expires"'},
        'data': str(expires)})
    if x_delete_at:
        form.append({
            'headers': {
                'Content-Disposition': 'form-data; name="x_delete_at"'},
            'data': str(x_delete_at)})
    if x_delete_after:
        form.append({
            'headers': {
                'Content-Disposition':
                    'form-data; name="x_delete_after"'},
            'data': str(x_delete_after)})
    form.append({
        'headers': {
            'Content-Disposition': 'form-data; name="signature"'},
        'data': signature})

    for data_file in files:
        form_name = data_file.get('name')
        form_filename = data_file.get('filename', form_name)
        form_content_type = data_file.get('content_type', 'text/plain')
        form_data = data_file.get('data', get_md5_hash(form_name))
        form.append({
            'headers': {
                'Content-Disposition':
                    'form-data; name="{0}"; filename="{1}"'.format(
                        form_name, form_filename),
                'Content-Type': form_content_type},
            'data': form_data})

    data = []
    boundary = '----WebKitFormBoundary40Q4WaJHO84PBBIa'
    for section in form:
        data.append('--{0}\r\n'.format(boundary))
        # Renamed the loop variables: the original iterated as
        # "key, value", shadowing the `key` parameter (the TempURL
        # key) inside this method.
        for header_name, header_value in section['headers'].iteritems():
            data.append('{0}: {1}\r\n'.format(header_name, header_value))
        data.append('\r\n')
        data.append(section['data'])
        data.append('\r\n')
    data.append('\r\n--{0}'.format(boundary))

    post_headers = {
        'Cache-Control': 'max-age=0',
        'Accept': '*/*;q=0.8',
        'Content-Type': 'multipart/form-data; '
                        'boundary={0}'.format(boundary)}

    return {'target_url': url, 'headers': post_headers,
            'body': ''.join(data)}
def create_formpost(self, container, files, object_prefix='',
                    redirect='http://example.com/formpost',
                    max_file_size=104857600, max_file_count=10,
                    expires=None, key='', signature="",
                    x_delete_at=None, x_delete_after=None):
    """
    Creates a multipart/form-data body (RFC-2388) that can be used
    for POSTs to Swift.

    @param container: Name of the container to post objects to.
    @type container: string
    @param files: Files to post in the form. The dictionaries
                  representing a file should be formatted as follows:
                      {
                          'name': '<form name>',
                          'filename': '<filename>',
                          'content_type': '<content_type>',
                          'data': '<filedata>'
                      }
                  Where only name is required, defaults to other
                  values will be as follows:
                      filename - the value stored in name.
                      content_type - 'text/plain'
                      data - the md5 hash of the value stored in name.
    @type files: list of dictionaries
    @param object_prefix: prefix to be used in the name of
                          the objects created.
    @type object_prefix: string
    @param redirect: URL to be returned as the 'location' header in
                     the HTTP response.
    @type redirect: string
    @param max_file_size: The maximum file size in bytes which can be
                          uploaded with the form.
    @type max_file_size: int
    @param max_file_count: The maximum number of files allowed to be
                           uploaded with the form.
    @type max_file_count: int
    @param expires: The unix time relating to when the form expires
                    and will no longer allow uploads to the container.
    @type expires: int
    @param key: The account's X-Tempurl-Key used in creating the
                signature which authorizes the form to be POSTed.
    @type key: string
    @param signature: The HMAC-SHA1 signature of the form.
    @type signature: string
    @param x_delete_at: The unix time relating to when the object will
                        be deleted from the container.
    @type x_delete_at: int
    @param x_delete_after: The amount of time, in seconds, after which
                           the object will be deleted from the
                           container.
    @type x_delete_after: int
    @return: Data to be POSTed in the following format:
                 {
                     'target_url': '<url to POST to>',
                     'headers': '<headers to be added to the request>,
                     'body': '<body to be posted to the target url>'
                 }
    @rtype: dictionary
    """
    base_url, account_hash = self.client.storage_url.split('/v1/')
    path = '/v1/{0}/{1}'.format(account_hash, container)
    if object_prefix:
        path = '{0}/{1}'.format(path, object_prefix)
    if not expires:
        # Default: the form stays valid for ten minutes.
        expires = int(time() + 600)
    url = ''.join([base_url, path])

    # The HMAC is computed over path, redirect, max_file_size,
    # max_file_count and expires, newline separated, as required by
    # the FormPost middleware.
    hmac_body = '{0}\n{1}\n{2}\n{3}\n{4}'.format(
        path, redirect, max_file_size, max_file_count, expires)
    if not signature:
        signature = hmac.new(key, hmac_body, sha1).hexdigest()

    form = []
    if redirect:
        form.append({
            'headers':
                {'Content-Disposition': 'form-data; name="redirect"'},
            'data': redirect})
    form.append({
        'headers':
            {'Content-Disposition': 'form-data; name="max_file_size"'},
        'data': str(max_file_size)})
    form.append({
        'headers':
            {'Content-Disposition': 'form-data; name="max_file_count"'},
        'data': str(max_file_count)})
    form.append({
        'headers':
            {'Content-Disposition': 'form-data; name="expires"'},
        'data': str(expires)})
    if x_delete_at:
        form.append({
            'headers':
                {'Content-Disposition': 'form-data; name="x_delete_at"'},
            'data': str(x_delete_at)})
    if x_delete_after:
        form.append({
            'headers':
                {'Content-Disposition':
                    'form-data; name="x_delete_after"'},
            'data': str(x_delete_after)})
    form.append({
        'headers':
            {'Content-Disposition': 'form-data; name="signature"'},
        'data': signature})

    for data_file in files:
        form_name = data_file.get('name')
        form_filename = data_file.get('filename', form_name)
        form_content_type = data_file.get('content_type', 'text/plain')
        form_data = data_file.get('data', get_md5_hash(form_name))
        form.append({
            'headers': {
                'Content-Disposition':
                    'form-data; name="{0}"; filename="{1}"'.format(
                        form_name, form_filename),
                'Content-Type': form_content_type},
            'data': form_data})

    data = []
    boundary = '----WebKitFormBoundary40Q4WaJHO84PBBIa'
    for section in form:
        data.append('--{0}\r\n'.format(boundary))
        # Renamed the loop variables: the original iterated as
        # "key, value", shadowing the `key` parameter (the TempURL
        # key) inside this method.
        for header_name, header_value in section['headers'].iteritems():
            data.append('{0}: {1}\r\n'.format(header_name, header_value))
        data.append('\r\n')
        data.append(section['data'])
        data.append('\r\n')
    data.append('\r\n--{0}'.format(boundary))

    post_headers = {
        'Cache-Control': 'max-age=0',
        'Accept': '*/*;q=0.8',
        'Content-Type': 'multipart/form-data; boundary={0}'.format(
            boundary)}

    return {'target_url': url, 'headers': post_headers,
            'body': ''.join(data)}
def ddtest_extract_archive_to_existing_container(self, archive_format):
    """
    Scenario: upload a archive with the extract-archive query string
    parameter

    Precondition: Container exists

    Expected Results: archive is extracted to objects in an existing
    container

    @param archive_format: Type of archive file to upload.
    @type archive_format: string
    """
    container_name = self.create_temp_container(descriptor=BASE_NAME)
    archive_data = self.read_archive_data(
        self.archive_paths[archive_format])

    response = self.client.create_archive_object(
        archive_data,
        archive_format,
        upload_path=container_name,
        headers={'Accept': 'application/json'})

    status = response.status_code
    self.assertEqual(
        HTTP_OK, status,
        "extract tar archive expected successful status code: {0}"
        " received: {1}".format(HTTP_OK, status))

    # inspect the body of the response
    files_created = int(response.entity.num_files_created)
    self.assertEqual(
        self.num_archive_files, files_created,
        msg="response body 'Number Files Created' expected: {0}"
            " received {1}".format(self.num_archive_files,
                                   files_created))

    body_status = response.entity.status
    self.assertEqual(
        '201 Created', body_status,
        msg="response body 'Response Status' expected: {0}"
            " received {1}".format('201 Created', body_status))

    error_count = len(response.entity.errors)
    self.assertEqual(
        0, error_count,
        msg="response body 'Errors' expected None received {0}".format(
            response.entity.errors))

    # check the actual number of objects and object names
    listed_objects = self.behaviors.list_objects(
        container_name,
        params={'format': 'json'},
        expected_objects=self.obj_names)
    object_count = len(listed_objects)
    self.assertEqual(
        self.num_archive_files, object_count,
        msg="container list expected: {0} extracted objects."
            " received: {1} extracted objects".format(
                self.num_archive_files, object_count))

    # check that all the objects where extracted to the existing
    # container
    listed_names = [obj.name for obj in listed_objects]
    self.assertEqual(sorted(self.obj_names), sorted(listed_names))

    # check that the content of each object is the md5 sum of its name
    for obj_name in listed_names:
        get_response = self.client.get_object(container_name, obj_name)
        expected_content = get_md5_hash(obj_name)
        actual_content = get_response.content
        self.assertEqual(
            expected_content, actual_content,
            msg="obj content expected: {0} received: {1}".format(
                expected_content, actual_content))
def ddtest_extract_archive_without_existing_container(
        self, archive_format):
    """
    Scenario: upload a archived file with the extract-archive query
    string parameter

    Precondition: Container does not exist

    Expected Results: archive with object names containing slashes
    are extracted to objects. names without slashes are ignored

    @param archive_format: Type of archive file to upload.
    @type archive_format: string
    """
    # Map each "container/object" archive member to the container
    # listing it should produce after extraction.
    expected_listings = {}
    for name in self.obj_names_with_slashes:
        container_name, object_name = name.split('/', 1)
        if container_name not in expected_listings:
            expected_listings[container_name] = []
        expected_listings[container_name].append(object_name)
    expected_containers = list(expected_listings.iterkeys())

    # Reuse the already-computed container list; the original built a
    # second, identical list for the cleanup call.
    self.addCleanup(
        self.behaviors.force_delete_containers, expected_containers)

    data = self.read_archive_data(self.archive_paths[archive_format])
    headers = {'Accept': 'application/json'}
    response = self.client.create_archive_object(
        data, archive_format, headers=headers)

    expected = HTTP_OK
    received = response.status_code
    self.assertEqual(
        expected, received,
        "extract tar archive expected successful status code: {0}"
        " received: {1}".format(expected, received))

    # inspect the body of the response
    expected = len(self.obj_names_with_slashes)
    received = int(response.entity.num_files_created)
    self.assertEqual(
        expected, received,
        msg="response body 'Number Files Created' expected: {0}"
        " received {1}".format(expected, received))

    expected = '201 Created'
    received = response.entity.status
    self.assertEqual(
        expected, received,
        msg="response body 'Response Status' expected: {0}"
        " received {1}".format(expected, received))

    expected = 0
    received = len(response.entity.errors)
    self.assertEqual(
        expected, received,
        msg="response body 'Errors' expected None received {0}".format(
            response.entity.errors))

    # check the actual number of objects and object names
    params = {'format': 'json', 'marker': BASE_NAME}
    response = self.behaviors.list_containers(
        params=params, expected_containers=expected_containers)

    # Plain assignment; the original's slice-assignment to [:-1] of an
    # empty list was an obscure way of doing exactly this.
    resp_container_names = [container.name for container in response]

    # archive object names without slashes are ignored
    for name in self.obj_names_without_slashes:
        self.assertNotIn(name, resp_container_names)

    # names with slashes should create containers with objects in them
    for container_name in expected_containers:
        # an archive named foo/bar will create a container named 'foo'
        # with an object named 'bar' in it.
        expected_objects = expected_listings.get(container_name)

        # check to see if the expected container name is in the
        # container list response
        self.assertTrue(container_name in resp_container_names)

        # check to see if the expected number of objects and obj name
        # are in the obj list response
        params = {'format': 'json'}
        response_objects = self.behaviors.list_objects(
            container_name, params=params,
            expected_objects=expected_objects)
        resp_obj_names = [obj.name for obj in response_objects]

        expected = len(expected_objects)
        received = len(resp_obj_names)
        self.assertEqual(
            expected, received,
            msg="container list expected: {0} extracted objects."
            " received: {1} extracted objects".format(
                expected, received))

        for object_name in expected_objects:
            self.assertIn(object_name, resp_obj_names)

            # the content of the obj should be the md5 sum of the obj
            # name
            response = self.client.get_object(
                container_name, object_name)
            expected = get_md5_hash('{}/{}'.format(
                container_name, object_name))
            received = response.content
            self.assertEqual(
                expected, received,
                msg="obj content expected: {0} received: {1}".format(
                    expected, received))
def generate_bandwidth_from_server_to_client(self, public_ip_address,
                                             gb_file_size,
                                             server_filepath,
                                             client_filepath):
    """
    @summary: Creates and transfers a file from server to client
    @param public_ip_address: The eth0 address of the instance
    @type public_ip_address: String
    @param gb_file_size: The size of the file to be generated in
        Gigabytes
    @type gb_file_size: Float
    @param server_filepath: The path name including file name on server
    @type server_filepath: String
    @param client_filepath: The path name including file name on client
    @type client_filepath: String
    @return: On successful bandwidth generation, return tx_bytes
    @rtype: int
    @todo: Use json bridge to poll global db for bw_usage_cache update
        instead of sleeping on ssh_timeout
    """
    # Wait before sampling counters (see @todo above — this stands in
    # for polling the usage database).
    time.sleep(self.connection_timeout)
    # delete same filename locally if it existed in a prior run
    if os.path.exists(client_filepath):
        os.remove(client_filepath)
    # get the initial values from the network interface
    rx_bytes, tx_bytes = self.get_network_bytes_for_interface('eth0')
    # Create the payload file remotely; bail out loudly on any failure
    # so a broken setup is not mistaken for zero bandwidth.
    if not self.create_large_file(server_filepath, gb_file_size):
        raise Exception("File was not created on server: {0}, {1}"
                        .format(public_ip_address, server_filepath))
    server_filelocation, server_filename = os.path.split(server_filepath)
    md5sum_server = self.get_md5sum_for_remote_file(server_filelocation,
                                                    server_filename)
    if not md5sum_server:
        raise Exception("No md5sum from file on server: {0}, {1}"
                        .format(public_ip_address, server_filepath))
    # Pull the file down over SSH — this is the transfer whose bytes
    # we are measuring.
    if not self.ssh_client.retrieve_file_from(client_filepath,
                                              server_filepath):
        raise Exception("The file {0} was not downloaded from "
                        "the server {1}, {2}".format(client_filepath,
                                                     public_ip_address,
                                                     server_filepath))
    # NOTE(review): get_md5_hash receives the client file *path* as
    # `data`; presumably the helper treats it as a filename and hashes
    # the file contents in chunks (block_size_multiplier suggests
    # chunked reads) — confirm against the helper's definition.
    md5sum_client = get_md5_hash(data=client_filepath,
                                 block_size_multiplier=16)
    if md5sum_server != md5sum_client:
        raise Exception("The md5sums did not match: {0}, {1} != {2}, {3}"
                        .format(md5sum_server, public_ip_address,
                                md5sum_client, "localhost"))
    # clean up and delete the local file we just downloaded
    if os.path.exists(client_filepath):
        os.remove(client_filepath)
    # get the byte values after generating bandwidth and subtract
    rx_bytes_after, tx_bytes_after = (
        self.get_network_bytes_for_interface('eth0'))
    tx_bytes = int(tx_bytes_after) - int(tx_bytes)
    # Wait again so counters settle before the caller reads them.
    time.sleep(self.connection_timeout)
    return tx_bytes
def ddtest_extract_archive_without_existing_container(
        self, archive_format, **kwargs):
    """
    Scenario: upload a archived file with the extract-archive query
    string parameter

    Precondition: Container does not exist

    Expected Results: archive with object names containing slashes
    are extracted to objects. names without slashes are ignored

    @param archive_format: Type of archive file to upload.
    @type archive_format: string
    """
    # Map each "container/object" archive member to the container
    # listing it should produce after extraction.
    expected_listings = {}
    for name in self.obj_names_with_slashes:
        container_name, object_name = name.split('/', 1)
        if container_name not in expected_listings:
            expected_listings[container_name] = []
        expected_listings[container_name].append(object_name)
    expected_containers = list(expected_listings.iterkeys())

    # Reuse the already-computed container list; the original built a
    # second, identical list for the cleanup call.
    self.addCleanup(self.behaviors.force_delete_containers,
                    expected_containers)

    data = self.read_archive_data(self.archive_paths[archive_format])
    headers = {'Accept': 'application/json'}
    response = self.client.create_archive_object(data, archive_format,
                                                 headers=headers)

    expected = HTTP_OK
    received = response.status_code
    self.assertEqual(
        expected, received,
        "extract tar archive expected successful status code: {0}"
        " received: {1}".format(expected, received))

    # inspect the body of the response
    expected = len(self.obj_names_with_slashes)
    received = int(response.entity.num_files_created)
    self.assertEqual(
        expected, received,
        msg="response body 'Number Files Created' expected: {0}"
        " received {1}".format(expected, received))

    expected = '201 Created'
    received = response.entity.status
    self.assertEqual(
        expected, received,
        msg="response body 'Response Status' expected: {0}"
        " received {1}".format(expected, received))

    expected = 0
    received = len(response.entity.errors)
    self.assertEqual(
        expected, received,
        msg="response body 'Errors' expected None received {0}".format(
            response.entity.errors))

    # check the actual number of objects and object names
    params = {'format': 'json', 'marker': BASE_NAME}
    response = self.behaviors.list_containers(
        params=params, expected_containers=expected_containers)

    # Plain assignment; the original's slice-assignment to [:-1] of an
    # empty list was an obscure way of doing exactly this.
    resp_container_names = [container.name for container in response]

    # archive object names without slashes are ignored
    for name in self.obj_names_without_slashes:
        self.assertNotIn(name, resp_container_names)

    # names with slashes should create containers with objects in them
    for container_name in expected_containers:
        # an archive named foo/bar will create a container named 'foo'
        # with an object named 'bar' in it.
        expected_objects = expected_listings.get(container_name)

        # check to see if the expected container name is in the
        # container list response
        self.assertTrue(container_name in resp_container_names)

        # check to see if the expected number of objects and obj name
        # are in the obj list response
        params = {'format': 'json'}
        response_objects = self.behaviors.list_objects(
            container_name, params=params,
            expected_objects=expected_objects)
        resp_obj_names = [obj.name for obj in response_objects]

        expected = len(expected_objects)
        received = len(resp_obj_names)
        self.assertEqual(
            expected, received,
            msg="container list expected: {0} extracted objects."
            " received: {1} extracted objects".format(
                expected, received))

        for object_name in expected_objects:
            self.assertIn(object_name, resp_obj_names)

            # the content of the obj should be the md5 sum of the obj
            # name
            response = self.client.get_object(
                container_name, object_name)
            expected = get_md5_hash('{}/{}'.format(
                container_name, object_name))
            received = response.content
            self.assertEqual(
                expected, received,
                msg="obj content expected: {0} received: {1}".format(
                    expected, received))
def ddtest_extract_archive_to_existing_container(self, archive_format,
                                                 **kwargs):
    """
    Scenario: upload a archive with the extract-archive query string
    parameter

    Precondition: Container exists

    Expected Results: archive is extracted to objects in an existing
    container

    @param archive_format: Type of archive file to upload.
    @type archive_format: string
    """
    container_name = self.create_temp_container(descriptor=BASE_NAME)
    archive_data = self.read_archive_data(
        self.archive_paths[archive_format])

    response = self.client.create_archive_object(
        archive_data,
        archive_format,
        upload_path=container_name,
        headers={'Accept': 'application/json'})

    status = response.status_code
    self.assertEqual(
        HTTP_OK, status,
        "extract tar archive expected successful status code: {0}"
        " received: {1}".format(HTTP_OK, status))

    # inspect the body of the response
    files_created = int(response.entity.num_files_created)
    self.assertEqual(
        self.num_archive_files, files_created,
        msg="response body 'Number Files Created' expected: {0}"
            " received {1}".format(self.num_archive_files,
                                   files_created))

    body_status = response.entity.status
    self.assertEqual(
        '201 Created', body_status,
        msg="response body 'Response Status' expected: {0}"
            " received {1}".format('201 Created', body_status))

    error_count = len(response.entity.errors)
    self.assertEqual(
        0, error_count,
        msg="response body 'Errors' expected None received {0}".format(
            response.entity.errors))

    # check the actual number of objects and object names
    listed_objects = self.behaviors.list_objects(
        container_name,
        params={'format': 'json'},
        expected_objects=self.obj_names)
    object_count = len(listed_objects)
    self.assertEqual(
        self.num_archive_files, object_count,
        msg="container list expected: {0} extracted objects."
            " received: {1} extracted objects".format(
                self.num_archive_files, object_count))

    # check that all the objects where extracted to the existing
    # container
    listed_names = [obj.name for obj in listed_objects]
    self.assertEqual(sorted(self.obj_names), sorted(listed_names))

    # check that the content of each object is the md5 sum of its name
    for obj_name in listed_names:
        get_response = self.client.get_object(container_name, obj_name)
        expected_content = get_md5_hash(obj_name)
        actual_content = get_response.content
        self.assertEqual(
            expected_content, actual_content,
            msg="obj content expected: {0} received: {1}".format(
                expected_content, actual_content))