Example #1
    @property
    def timestamp(self):
        """
        S3Timestamp from the Date header. If the X-Amz-Date header is
        specified, it takes precedence over the Date header.

        :return: S3Timestamp instance
        """
        if not self._timestamp:
            try:
                if self._is_query_auth and 'Timestamp' in self.params:
                    # A Timestamp given in the query string takes precedence
                    # over any Date header
                    timestamp = mktime(
                        self.params['Timestamp'], SIGV2_TIMESTAMP_FORMAT)
                else:
                    timestamp = mktime(
                        self.headers.get('X-Amz-Date',
                                         self.headers.get('Date')))
            except (ValueError, TypeError):
                raise AccessDenied('AWS authentication requires a valid Date '
                                   'or x-amz-date header')

            if timestamp < 0:
                raise AccessDenied('AWS authentication requires a valid Date '
                                   'or x-amz-date header')
            try:
                self._timestamp = S3Timestamp(timestamp)
            except ValueError:
                # Must be far-future; blame clock skew
                raise RequestTimeTooSkewed()

        return self._timestamp
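
A minimal sketch of the precedence the docstring describes, assuming the import path and that SIGV2_TIMESTAMP_FORMAT is the ISO-8601 pattern used for query-string auth (both are assumptions, not taken from the example above):

from swift.common.middleware.s3api.utils import mktime  # import path assumed

SIGV2_TIMESTAMP_FORMAT = '%Y-%m-%dT%H:%M:%S'  # assumed value

# Query auth: the Timestamp parameter wins over any Date header.
print(mktime('1970-01-01T00:00:00', SIGV2_TIMESTAMP_FORMAT))  # -> 0 (epoch seconds)
# Header auth: X-Amz-Date wins over Date; both are RFC 1123 dates here.
print(mktime('Thu, 01 Jan 1970 00:00:00 GMT'))                # -> 0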
Example #2
    def _test_request_timestamp_sigv4(self, date_header):
        # signature v4 here
        environ = {
            'REQUEST_METHOD': 'GET'}

        if 'X-Amz-Date' in date_header:
            included_header = 'x-amz-date'
        elif 'Date' in date_header:
            included_header = 'date'
        else:
            self.fail('Invalid date header specified as test')

        headers = {
            'Authorization':
                'AWS4-HMAC-SHA256 '
                'Credential=test/20130524/US/s3/aws4_request, '
                'SignedHeaders=host;%s,'
                'Signature=X' % included_header,
            'X-Amz-Content-SHA256': '0123456789'}

        headers.update(date_header)
        req = Request.blank('/', environ=environ, headers=headers)
        sigv4_req = SigV4Request(req.environ)

        if 'X-Amz-Date' in date_header:
            timestamp = mktime(
                date_header['X-Amz-Date'], SIGV4_X_AMZ_DATE_FORMAT)
        elif 'Date' in date_header:
            timestamp = mktime(date_header['Date'])

        self.assertEqual(timestamp, int(sigv4_req.timestamp))
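
The helper above is parameterized by whichever date header the caller supplies. A hypothetical driver could exercise both branches; the method name and the header-producing helpers (get_v4_amz_date_header, get_date_header) are assumptions here, standing in for whatever the real test class provides:

    def test_request_timestamp_sigv4(self):
        # Assumed helpers returning the current time as '%Y%m%dT%H%M%SZ'
        # and as an RFC 1123 Date string, respectively.
        self._test_request_timestamp_sigv4(
            {'X-Amz-Date': self.get_v4_amz_date_header()})
        self._test_request_timestamp_sigv4(
            {'Date': self.get_date_header()})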
Example #3
    @property
    def timestamp(self):
        """
        Return the request timestamp according to the auth type.

        The difference from v2 is that v4 reads 'X-Amz-Date' even for the
        query auth type.
        """
        if not self._timestamp:
            try:
                if self._is_query_auth and 'X-Amz-Date' in self.params:
                    # NOTE(andrey-mp): Date in Signature V4 has different
                    # format
                    timestamp = mktime(
                        self.params['X-Amz-Date'], SIGV4_X_AMZ_DATE_FORMAT)
                else:
                    if self.headers.get('X-Amz-Date'):
                        timestamp = mktime(
                            self.headers.get('X-Amz-Date'),
                            SIGV4_X_AMZ_DATE_FORMAT)
                    else:
                        timestamp = mktime(self.headers.get('Date'))
            except (ValueError, TypeError):
                raise AccessDenied('AWS authentication requires a valid Date '
                                   'or x-amz-date header')

            if timestamp < 0:
                raise AccessDenied('AWS authentication requires a valid Date '
                                   'or x-amz-date header')

            try:
                self._timestamp = S3Timestamp(timestamp)
            except ValueError:
                # Must be far-future; blame clock skew
                raise RequestTimeTooSkewed()

        return self._timestamp
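
The v4 variant differs from v2 in that query-string auth carries X-Amz-Date rather than Timestamp, and the value uses the compact UTC form instead of RFC 1123. A one-line check of that form, assuming SIGV4_X_AMZ_DATE_FORMAT is '%Y%m%dT%H%M%SZ' (consistent with the test_mktime example further down) and the same assumed import path:

from swift.common.middleware.s3api.utils import mktime  # import path assumed

SIGV4_X_AMZ_DATE_FORMAT = '%Y%m%dT%H%M%SZ'  # assumed value
print(mktime('19700101T000000Z', SIGV4_X_AMZ_DATE_FORMAT))  # -> 0 (epoch seconds)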
Example #4
    def _test_request_timestamp_sigv2(self, date_header):
        # signature v2 here
        environ = {
            'REQUEST_METHOD': 'GET'}

        headers = {'Authorization': 'AWS test:tester:hmac'}
        headers.update(date_header)
        req = Request.blank('/', environ=environ, headers=headers)
        sigv2_req = S3_Request(req.environ)

        if 'X-Amz-Date' in date_header:
            timestamp = mktime(req.headers.get('X-Amz-Date'))
        elif 'Date' in date_header:
            timestamp = mktime(req.headers.get('Date'))
        else:
            self.fail('Invalid date header specified as test')
        self.assertEqual(timestamp, int(sigv2_req.timestamp))
Example #5
    def test_mktime(self):
        date_headers = [
            'Thu, 01 Jan 1970 00:00:00 -0000',
            'Thu, 01 Jan 1970 00:00:00 GMT',
            'Thu, 01 Jan 1970 00:00:00 UTC',
            'Thu, 01 Jan 1970 08:00:00 +0800',
            'Wed, 31 Dec 1969 16:00:00 -0800',
            'Wed, 31 Dec 1969 16:00:00 PST',
        ]
        for header in date_headers:
            ts = utils.mktime(header)
            self.assertEqual(0, ts, 'Got %r for header %s' % (ts, header))

        # Last-Modified response style
        self.assertEqual(0, utils.mktime('1970-01-01T00:00:00'))

        # X-Amz-Date style
        self.assertEqual(0, utils.mktime('19700101T000000Z',
                                         request.SIGV4_X_AMZ_DATE_FORMAT))
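
Taken together, these assertions pin down mktime's contract: it accepts RFC 1123 dates with numeric or named zones, an ISO-8601 string in the default pattern, or any explicit strptime pattern, and always returns epoch seconds interpreted in UTC. The following standalone sketch satisfies the assertions above and is shown only to illustrate that contract; the real implementation lives in the middleware's utils module and may differ:

import calendar
from datetime import datetime
from email.utils import mktime_tz, parsedate_tz

def mktime_sketch(timestamp_str, time_format='%Y-%m-%dT%H:%M:%S'):
    """Convert an RFC 1123 date or a strptime-formatted string to epoch seconds."""
    parsed = parsedate_tz(timestamp_str)
    if parsed is not None:
        return mktime_tz(parsed)  # honours the numeric/named zone offset
    # Fall back to the explicit pattern; naive values are treated as UTC.
    return calendar.timegm(
        datetime.strptime(timestamp_str, time_format).timetuple())

assert mktime_sketch('Thu, 01 Jan 1970 00:00:00 GMT') == 0
assert mktime_sketch('1970-01-01T00:00:00') == 0
assert mktime_sketch('19700101T000000Z', '%Y%m%dT%H%M%SZ') == 0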
Example #6
    def test_object_PUT_copy_self_metadata_replace(self):
        date_header = self.get_date_header()
        timestamp = mktime(date_header)
        last_modified = S3Timestamp(timestamp).s3xmlformat
        header = {'x-amz-metadata-directive': 'REPLACE',
                  'Date': date_header}
        status, headers, body = self._test_object_PUT_copy_self(
            swob.HTTPOk, header, timestamp=timestamp)
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(headers['Content-Type'], 'application/xml')
        self.assertTrue(headers.get('etag') is None)
        elem = fromstring(body, 'CopyObjectResult')
        self.assertEqual(elem.find('LastModified').text, last_modified)
        self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)

        _, _, headers = self.swift.calls_with_headers[-1]
        self.assertEqual(headers['X-Copy-From'], '/bucket/object')
        self.assertEqual(headers['Content-Length'], '0')
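
The expected LastModified value above is derived from the request's own Date header: mktime converts it to epoch seconds and S3Timestamp renders it in the S3 XML form. A small illustration with a fixed date; the exact s3xmlformat rendering (ISO-8601 UTC with the fractional part pinned to .000Z) and the import path are assumptions:

from swift.common.middleware.s3api.utils import S3Timestamp, mktime  # path assumed

date_header = 'Sat, 24 May 2014 12:00:00 GMT'       # stand-in for get_date_header()
timestamp = mktime(date_header)                     # 1400932800
last_modified = S3Timestamp(timestamp).s3xmlformat  # e.g. '2014-05-24T12:00:00.000Z'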
Example #7
        def do_test(src_path=None):
            date_header = self.get_date_header()
            timestamp = mktime(date_header)
            last_modified = S3Timestamp(timestamp).s3xmlformat
            status, headers, body = self._test_object_PUT_copy(
                swob.HTTPOk, put_header={'Date': date_header},
                timestamp=timestamp, src_path=src_path)
            self.assertEqual(status.split()[0], '200')
            self.assertEqual(headers['Content-Type'], 'application/xml')

            self.assertTrue(headers.get('etag') is None)
            self.assertTrue(headers.get('x-amz-meta-something') is None)
            elem = fromstring(body, 'CopyObjectResult')
            self.assertEqual(elem.find('LastModified').text, last_modified)
            self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)

            _, _, headers = self.swift.calls_with_headers[-1]
            self.assertEqual(headers['X-Copy-From'], '/some/source')
            self.assertEqual(headers['Content-Length'], '0')
Example #8
    def test_object_PUT_copy_no_slash(self):
        date_header = self.get_date_header()
        timestamp = mktime(date_header)
        last_modified = S3Timestamp(timestamp).s3xmlformat
        # Some clients (like Boto) don't include the leading slash;
        # AWS seems to tolerate this so we should, too
        status, headers, body = self._test_object_PUT_copy(
            swob.HTTPOk, src_path='some/source',
            put_header={'Date': date_header}, timestamp=timestamp)
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(headers['Content-Type'], 'application/xml')
        self.assertTrue(headers.get('etag') is None)
        self.assertTrue(headers.get('x-amz-meta-something') is None)
        elem = fromstring(body, 'CopyObjectResult')
        self.assertEqual(elem.find('LastModified').text, last_modified)
        self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)

        _, _, headers = self.swift.calls_with_headers[-1]
        self.assertEqual(headers['X-Copy-From'], '/some/source')
        self.assertEqual(headers['Content-Length'], '0')
Example #9
    def test_object_multi_upload(self):
        bucket = 'bucket'
        keys = ['obj1', 'obj2']
        uploads = []

        results_generator = self._initiate_multi_uploads_result_generator(
            bucket, keys)

        # Initiate Multipart Upload
        for expected_key, (status, headers, body) in \
                izip(keys, results_generator):
            self.assertEqual(status, 200)
            self.assertCommonResponseHeaders(headers)
            self.assertTrue('content-type' in headers)
            self.assertEqual(headers['content-type'], 'application/xml')
            self.assertTrue('content-length' in headers)
            self.assertEqual(headers['content-length'], str(len(body)))
            elem = fromstring(body, 'InitiateMultipartUploadResult')
            self.assertEqual(elem.find('Bucket').text, bucket)
            key = elem.find('Key').text
            self.assertEqual(expected_key, key)
            upload_id = elem.find('UploadId').text
            self.assertTrue(upload_id is not None)
            self.assertTrue((key, upload_id) not in uploads)
            uploads.append((key, upload_id))

        self.assertEqual(len(uploads), len(keys))  # sanity

        # List Multipart Uploads
        query = 'uploads'
        status, headers, body = \
            self.conn.make_request('GET', bucket, query=query)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], str(len(body)))
        elem = fromstring(body, 'ListMultipartUploadsResult')
        self.assertEqual(elem.find('Bucket').text, bucket)
        self.assertEqual(elem.find('KeyMarker').text, None)
        self.assertEqual(elem.find('NextKeyMarker').text, uploads[-1][0])
        self.assertEqual(elem.find('UploadIdMarker').text, None)
        self.assertEqual(elem.find('NextUploadIdMarker').text, uploads[-1][1])
        self.assertEqual(elem.find('MaxUploads').text, '1000')
        self.assertTrue(elem.find('EncodingType') is None)
        self.assertEqual(elem.find('IsTruncated').text, 'false')
        self.assertEqual(len(elem.findall('Upload')), 2)
        for (expected_key, expected_upload_id), u in \
                izip(uploads, elem.findall('Upload')):
            key = u.find('Key').text
            upload_id = u.find('UploadId').text
            self.assertEqual(expected_key, key)
            self.assertEqual(expected_upload_id, upload_id)
            self.assertEqual(u.find('Initiator/ID').text,
                             self.conn.user_id)
            self.assertEqual(u.find('Initiator/DisplayName').text,
                             self.conn.user_id)
            self.assertEqual(u.find('Owner/ID').text, self.conn.user_id)
            self.assertEqual(u.find('Owner/DisplayName').text,
                             self.conn.user_id)
            self.assertEqual(u.find('StorageClass').text, 'STANDARD')
            self.assertTrue(u.find('Initiated').text is not None)

        # Upload Part
        key, upload_id = uploads[0]
        content = 'a' * MIN_SEGMENT_SIZE
        etag = md5(content).hexdigest()
        status, headers, body = \
            self._upload_part(bucket, key, upload_id, content)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers, etag)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'text/html; charset=UTF-8')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], '0')
        expected_parts_list = [(headers['etag'], mktime(headers['date']))]

        # Upload Part Copy
        key, upload_id = uploads[1]
        src_bucket = 'bucket2'
        src_obj = 'obj3'
        src_content = 'b' * MIN_SEGMENT_SIZE
        etag = md5(src_content).hexdigest()

        # prepare src obj
        self.conn.make_request('PUT', src_bucket)
        self.conn.make_request('PUT', src_bucket, src_obj, body=src_content)
        _, headers, _ = self.conn.make_request('HEAD', src_bucket, src_obj)
        self.assertCommonResponseHeaders(headers)

        status, headers, body, resp_etag = \
            self._upload_part_copy(src_bucket, src_obj, bucket,
                                   key, upload_id)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], str(len(body)))
        self.assertTrue('etag' not in headers)
        elem = fromstring(body, 'CopyPartResult')

        last_modified = elem.find('LastModified').text
        self.assertTrue(last_modified is not None)

        self.assertEqual(resp_etag, etag)

        # Check last-modified timestamp
        key, upload_id = uploads[1]
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('GET', bucket, key, query=query)

        elem = fromstring(body, 'ListPartsResult')

        # FIXME: COPY result drops milli/microseconds but GET doesn't
        last_modified_gets = [p.find('LastModified').text
                              for p in elem.iterfind('Part')]
        self.assertEqual(
            last_modified_gets[0].rsplit('.', 1)[0],
            last_modified.rsplit('.', 1)[0],
            '%r != %r' % (last_modified_gets[0], last_modified))
        # There should be *exactly* one part in the result
        self.assertEqual(1, len(last_modified_gets))

        # List Parts
        key, upload_id = uploads[0]
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('GET', bucket, key, query=query)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], str(len(body)))
        elem = fromstring(body, 'ListPartsResult')
        self.assertEqual(elem.find('Bucket').text, bucket)
        self.assertEqual(elem.find('Key').text, key)
        self.assertEqual(elem.find('UploadId').text, upload_id)
        self.assertEqual(elem.find('Initiator/ID').text, self.conn.user_id)
        self.assertEqual(elem.find('Initiator/DisplayName').text,
                         self.conn.user_id)
        self.assertEqual(elem.find('Owner/ID').text, self.conn.user_id)
        self.assertEqual(elem.find('Owner/DisplayName').text,
                         self.conn.user_id)
        self.assertEqual(elem.find('StorageClass').text, 'STANDARD')
        self.assertEqual(elem.find('PartNumberMarker').text, '0')
        self.assertEqual(elem.find('NextPartNumberMarker').text, '1')
        self.assertEqual(elem.find('MaxParts').text, '1000')
        self.assertEqual(elem.find('IsTruncated').text, 'false')
        self.assertEqual(len(elem.findall('Part')), 1)

        # etags will be used to generate xml for Complete Multipart Upload
        etags = []
        for (expected_etag, expected_date), p in \
                izip(expected_parts_list, elem.findall('Part')):
            last_modified = p.find('LastModified').text
            self.assertTrue(last_modified is not None)
            # TODO: sanity-check the timestamp itself.
            #       (kota_) The Last-Modified value drops the sub-second
            #       part because of its format, so for now we could only
            #       do a format check or a rounded comparison, e.g.:
            # last_modified_from_xml = mktime(last_modified)
            # self.assertEqual(expected_date, last_modified_from_xml)
            self.assertEqual(expected_etag, p.find('ETag').text)
            self.assertEqual(MIN_SEGMENT_SIZE, int(p.find('Size').text))
            etags.append(p.find('ETag').text)

        # Abort Multipart Upload
        key, upload_id = uploads[1]
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('DELETE', bucket, key, query=query)
        self.assertEqual(status, 204)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'text/html; charset=UTF-8')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], '0')

        # Complete Multipart Upload
        key, upload_id = uploads[0]
        xml = self._gen_comp_xml(etags)
        status, headers, body = \
            self._complete_multi_upload(bucket, key, upload_id, xml)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], str(len(body)))
        elem = fromstring(body, 'CompleteMultipartUploadResult')
        self.assertEqual('http://localhost:8080/bucket/obj1',
                         elem.find('Location').text)
        self.assertEqual(elem.find('Bucket').text, bucket)
        self.assertEqual(elem.find('Key').text, key)
        # TODO: confirm completed etag value
        self.assertTrue(elem.find('ETag').text is not None)
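
The completion step posts an XML body built from the ETags collected while listing parts. _gen_comp_xml is a test helper whose implementation is not shown here, but the payload it has to produce is the standard S3 CompleteMultipartUpload document; a hedged sketch of an equivalent generator:

def gen_comp_xml_sketch(etags):
    # One <Part> element per uploaded part, numbered from 1, each carrying
    # the ETag returned by the corresponding Upload Part request.
    parts = ''.join(
        '<Part><PartNumber>%d</PartNumber><ETag>%s</ETag></Part>'
        % (i + 1, etag) for i, etag in enumerate(etags))
    return '<CompleteMultipartUpload>%s</CompleteMultipartUpload>' % parts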
Example #10
    def test_object_multi_upload(self):
        bucket = 'bucket'
        keys = ['obj1', 'obj2']
        uploads = []

        results_generator = self._initiate_multi_uploads_result_generator(
            bucket, keys)

        # Initiate Multipart Upload
        for expected_key, (status, headers, body) in \
                izip(keys, results_generator):
            self.assertEqual(status, 200)
            self.assertCommonResponseHeaders(headers)
            self.assertTrue('content-type' in headers)
            self.assertEqual(headers['content-type'], 'application/xml')
            self.assertTrue('content-length' in headers)
            self.assertEqual(headers['content-length'], str(len(body)))
            elem = fromstring(body, 'InitiateMultipartUploadResult')
            self.assertEqual(elem.find('Bucket').text, bucket)
            key = elem.find('Key').text
            self.assertEqual(expected_key, key)
            upload_id = elem.find('UploadId').text
            self.assertTrue(upload_id is not None)
            self.assertTrue((key, upload_id) not in uploads)
            uploads.append((key, upload_id))

        self.assertEqual(len(uploads), len(keys))  # sanity

        # List Multipart Uploads
        query = 'uploads'
        status, headers, body = \
            self.conn.make_request('GET', bucket, query=query)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], str(len(body)))
        elem = fromstring(body, 'ListMultipartUploadsResult')
        self.assertEqual(elem.find('Bucket').text, bucket)
        self.assertIsNone(elem.find('KeyMarker').text)
        self.assertEqual(elem.find('NextKeyMarker').text, uploads[-1][0])
        self.assertIsNone(elem.find('UploadIdMarker').text)
        self.assertEqual(elem.find('NextUploadIdMarker').text, uploads[-1][1])
        self.assertEqual(elem.find('MaxUploads').text, '1000')
        self.assertTrue(elem.find('EncodingType') is None)
        self.assertEqual(elem.find('IsTruncated').text, 'false')
        self.assertEqual(len(elem.findall('Upload')), 2)
        for (expected_key, expected_upload_id), u in \
                izip(uploads, elem.findall('Upload')):
            key = u.find('Key').text
            upload_id = u.find('UploadId').text
            self.assertEqual(expected_key, key)
            self.assertEqual(expected_upload_id, upload_id)
            self.assertEqual(u.find('Initiator/ID').text, self.conn.user_id)
            self.assertEqual(
                u.find('Initiator/DisplayName').text, self.conn.user_id)
            self.assertEqual(u.find('Owner/ID').text, self.conn.user_id)
            self.assertEqual(
                u.find('Owner/DisplayName').text, self.conn.user_id)
            self.assertEqual(u.find('StorageClass').text, 'STANDARD')
            self.assertTrue(u.find('Initiated').text is not None)

        # Upload Part
        key, upload_id = uploads[0]
        content = 'a' * MIN_SEGMENT_SIZE
        etag = md5(content).hexdigest()
        status, headers, body = \
            self._upload_part(bucket, key, upload_id, content)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers, etag)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'text/html; charset=UTF-8')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], '0')
        expected_parts_list = [(headers['etag'], mktime(headers['date']))]

        # Upload Part Copy
        key, upload_id = uploads[1]
        src_bucket = 'bucket2'
        src_obj = 'obj3'
        src_content = 'b' * MIN_SEGMENT_SIZE
        etag = md5(src_content).hexdigest()

        # prepare src obj
        self.conn.make_request('PUT', src_bucket)
        self.conn.make_request('PUT', src_bucket, src_obj, body=src_content)
        _, headers, _ = self.conn.make_request('HEAD', src_bucket, src_obj)
        self.assertCommonResponseHeaders(headers)

        status, headers, body, resp_etag = \
            self._upload_part_copy(src_bucket, src_obj, bucket,
                                   key, upload_id)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], str(len(body)))
        self.assertTrue('etag' not in headers)
        elem = fromstring(body, 'CopyPartResult')

        last_modified = elem.find('LastModified').text
        self.assertTrue(last_modified is not None)

        self.assertEqual(resp_etag, etag)

        # Check last-modified timestamp
        key, upload_id = uploads[1]
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('GET', bucket, key, query=query)

        elem = fromstring(body, 'ListPartsResult')

        # FIXME: COPY result drops milli/microseconds but GET doesn't
        last_modified_gets = [
            p.find('LastModified').text for p in elem.iterfind('Part')
        ]
        self.assertEqual(last_modified_gets[0].rsplit('.', 1)[0],
                         last_modified.rsplit('.', 1)[0],
                         '%r != %r' % (last_modified_gets[0], last_modified))
        # There should be *exactly* one part in the result
        self.assertEqual(1, len(last_modified_gets))

        # List Parts
        key, upload_id = uploads[0]
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('GET', bucket, key, query=query)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], str(len(body)))
        elem = fromstring(body, 'ListPartsResult')
        self.assertEqual(elem.find('Bucket').text, bucket)
        self.assertEqual(elem.find('Key').text, key)
        self.assertEqual(elem.find('UploadId').text, upload_id)
        self.assertEqual(elem.find('Initiator/ID').text, self.conn.user_id)
        self.assertEqual(
            elem.find('Initiator/DisplayName').text, self.conn.user_id)
        self.assertEqual(elem.find('Owner/ID').text, self.conn.user_id)
        self.assertEqual(
            elem.find('Owner/DisplayName').text, self.conn.user_id)
        self.assertEqual(elem.find('StorageClass').text, 'STANDARD')
        self.assertEqual(elem.find('PartNumberMarker').text, '0')
        self.assertEqual(elem.find('NextPartNumberMarker').text, '1')
        self.assertEqual(elem.find('MaxParts').text, '1000')
        self.assertEqual(elem.find('IsTruncated').text, 'false')
        self.assertEqual(len(elem.findall('Part')), 1)

        # etags will be used to generate xml for Complete Multipart Upload
        etags = []
        for (expected_etag, expected_date), p in \
                izip(expected_parts_list, elem.findall('Part')):
            last_modified = p.find('LastModified').text
            self.assertTrue(last_modified is not None)
            # TODO: sanity-check the timestamp itself.
            #       (kota_) The Last-Modified value drops the sub-second
            #       part because of its format, so for now we could only
            #       do a format check or a rounded comparison, e.g.:
            # last_modified_from_xml = mktime(last_modified)
            # self.assertEqual(expected_date, last_modified_from_xml)
            self.assertEqual(expected_etag, p.find('ETag').text)
            self.assertEqual(MIN_SEGMENT_SIZE, int(p.find('Size').text))
            etags.append(p.find('ETag').text)

        # Abort Multipart Upload
        key, upload_id = uploads[1]
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('DELETE', bucket, key, query=query)
        self.assertEqual(status, 204)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'text/html; charset=UTF-8')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], '0')

        # Complete Multipart Upload
        key, upload_id = uploads[0]
        xml = self._gen_comp_xml(etags)
        status, headers, body = \
            self._complete_multi_upload(bucket, key, upload_id, xml)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], str(len(body)))
        elem = fromstring(body, 'CompleteMultipartUploadResult')
        self.assertEqual('http://localhost:8080/bucket/obj1',
                         elem.find('Location').text)
        self.assertEqual(elem.find('Bucket').text, bucket)
        self.assertEqual(elem.find('Key').text, key)
        # TODO: confirm completed etag value
        self.assertTrue(elem.find('ETag').text is not None)