def test_get_chunk_source_resume_404(self):
    """On a 404 the handler resumes on the next chunk and records the failure."""
    handler = self.handler
    with set_http_connect(404, 200, body='foobar'):
        src = handler._get_chunk_source()
        self.assertEqual('foobar', src.read())
        self.assertEqual(handler.failed_chunks, [self.chunks[0]])
def test_put_stream_connect_exception(self):
    """Connection failures on two targets leave only the first chunk in the result."""
    api = self.api
    name = utils.random_string()
    chunks = {
        0: [
            {"url": "http://1.2.3.4:6000/AAAA", "pos": "0", "size": 32},
            {"url": "http://1.2.3.4:6000/BBBB", "pos": "0", "size": 32},
            {"url": "http://1.2.3.4:6000/CCCC", "pos": "0", "size": 32},
        ]
    }
    src = empty_stream()
    sysmeta = {
        'content_length': 0,
        'id': utils.random_string(),
        'version': utils.random_string(),
        'mime_type': utils.random_string(),
        'chunk_method': utils.random_string(),
        'policy': utils.random_string(),
    }
    with set_http_connect(201, Exception(), Exception()):
        chunks, bytes_transferred, content_checksum = api._put_stream(
            self.account, self.container, name, src, sysmeta, chunks)
        self.assertEqual(len(chunks), 1)
        # only the successful target remains; hash is the MD5 of empty data
        expected = {"url": "http://1.2.3.4:6000/AAAA", "pos": "0",
                    "size": 0, "hash": "d41d8cd98f00b204e9800998ecf8427e"}
        self.assertEqual(expected, chunks[0])
def test_write_transfer(self):
    """Replicated write: every connection must receive the full payload."""
    digest = self.checksum()
    payload = ('1234' * 1024)[:-10]
    size = len(payload)
    targets = self.meta_chunk()
    total = len(targets)
    src = StringIO(payload)
    put_reqs = defaultdict(lambda: {'parts': []})

    def cb_body(conn_id, part):
        put_reqs[conn_id]['parts'].append(part)

    with set_http_connect(*([201] * total), cb_body=cb_body):
        writer = ReplicatedChunkWriteHandler(
            self.sysmeta, targets, digest, self.storage_method)
        transferred, digest, chunks = writer.stream(src, size)
        expected_digest = self.checksum(payload).hexdigest()
        self.assertEqual(len(payload), transferred)
        self.assertEqual(expected_digest, digest)
        # TODO check trailers?
        bodies = [decode_chunked_body(''.join(info['parts']))[0]
                  for info in put_reqs.values()]
        self.assertEqual(len(bodies), total)
        for body in bodies:
            self.assertEqual(len(payload), len(body))
            self.assertEqual(self.checksum(body).hexdigest(),
                             expected_digest)
def test_write_transfer(self):
    # End-to-end EC write: stream test data through the handler, then
    # rebuild the original payload from the fragments each connection
    # received and check it matches.
    checksum = self.checksum()
    segment_size = self.storage_method.ec_segment_size
    test_data = ('1234' * segment_size)[:-10]  # deliberately not segment-aligned
    size = len(test_data)
    test_data_checksum = self.checksum(test_data).hexdigest()
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    resps = [201] * nb
    source = StringIO(test_data)
    put_reqs = defaultdict(lambda: {'parts': []})

    def cb_body(conn_id, part):
        # capture every chunked-transfer part sent on each connection
        put_reqs[conn_id]['parts'].append(part)

    # TODO test headers
    with set_http_connect(*resps, cb_body=cb_body):
        handler = ECChunkWriteHandler(self.sysmeta, self.meta_chunk(),
                                      checksum, self.storage_method)
        bytes_transferred, checksum, chunks = handler.stream(source, size)
        self.assertEqual(len(test_data), bytes_transferred)
        self.assertEqual(checksum, self.checksum(test_data).hexdigest())
        fragments = []
        for conn_id, info in put_reqs.items():
            body, trailers = decode_chunked_body(''.join(info['parts']))
            fragments.append(body)
            # every PUT must carry metachunk size/hash trailers
            metachunk_size = int(trailers[chunk_headers['metachunk_size']])
            metachunk_hash = trailers[chunk_headers['metachunk_hash']]
            self.assertEqual(metachunk_size, size)
            self.assertEqual(metachunk_hash, test_data_checksum)
        self.assertEqual(len(fragments), nb)
        fragment_size = self.storage_method.ec_fragment_size
        # retrieve segments: split each connection's body into fragments
        frags = []
        for frag in fragments:
            data = [frag[x:x + fragment_size]
                    for x in range(0, len(frag), fragment_size)]
            frags.append(data)
        # regroup fragment i of every connection, decode segment by segment
        fragments = zip(*frags)
        final_data = ''
        for frag in fragments:
            self.assertEqual(len(frag), nb)
            frag = list(frag)
            final_data += self.storage_method.driver.decode(frag)
        self.assertEqual(len(test_data), len(final_data))
        self.assertEqual(
            test_data_checksum, self.checksum(final_data).hexdigest())
def test_write_exception(self):
    """Every EC target replies 500: the stream must raise OioException."""
    digest = self.checksum()
    src = empty_stream()
    total = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    size = CHUNK_SIZE * self.storage_method.ec_nb_data
    with set_http_connect(*([500] * total)):
        writer = ECChunkWriteHandler(self.sysmeta, self.meta_chunk(),
                                     digest, self.storage_method)
        self.assertRaises(exc.OioException, writer.stream, src, size)
def test_write_exception(self):
    """Every replica target replies 500: the stream must raise OioException."""
    digest = self.checksum()
    src = empty_stream()
    targets = self.meta_chunk()
    size = CHUNK_SIZE
    with set_http_connect(*([500] * len(targets))):
        writer = ReplicatedChunkWriteHandler(
            self.sysmeta, targets, digest, self.storage_method)
        self.assertRaises(exc.OioException, writer.stream, src, size)
def test_write_quorum_error(self):
    """Quorum-many 500s must make the replicated write fail."""
    digest = self.checksum()
    src = empty_stream()
    size = CHUNK_SIZE
    targets = self.meta_chunk()
    quorum = self.storage_method.quorum
    statuses = [500] * quorum + [201] * (len(targets) - quorum)
    with set_http_connect(*statuses):
        writer = ReplicatedChunkWriteHandler(
            self.sysmeta, targets, digest, self.storage_method)
        self.assertRaises(exc.OioException, writer.stream, src, size)
def test_write_quorum_error(self):
    """Quorum-many 500s must make the EC write fail."""
    digest = self.checksum()
    src = empty_stream()
    nb_data = self.storage_method.ec_nb_data
    total = nb_data + self.storage_method.ec_nb_parity
    quorum = self.storage_method.quorum
    size = CHUNK_SIZE * nb_data
    statuses = [500] * quorum + [201] * (total - quorum)
    with set_http_connect(*statuses):
        writer = ECChunkWriteHandler(self.sysmeta, self.meta_chunk(),
                                     digest, self.storage_method)
        # TODO use specialized Exception
        self.assertRaises(exc.OioException, writer.stream, src, size)
def test_write_simple(self):
    """Empty source: all EC chunks are created, zero bytes are moved."""
    digest = self.checksum()
    src = empty_stream()
    total = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    size = CHUNK_SIZE * self.storage_method.ec_nb_data
    with set_http_connect(*([201] * total)):
        writer = ECChunkWriteHandler(self.sysmeta, self.meta_chunk(),
                                     digest, self.storage_method)
        transferred, digest, chunks = writer.stream(src, size)
        self.assertEqual(len(chunks), total)
        self.assertEqual(transferred, 0)
        self.assertEqual(digest, EMPTY_CHECKSUM)
def test_write_simple(self):
    """Empty source: every replica chunk is created, zero bytes are moved."""
    digest = self.checksum()
    src = empty_stream()
    targets = self.meta_chunk()
    size = CHUNK_SIZE
    with set_http_connect(*([201] * len(targets))):
        writer = ReplicatedChunkWriteHandler(
            self.sysmeta, targets, digest, self.storage_method)
        transferred, digest, chunks = writer.stream(src, size)
        self.assertEqual(len(chunks), len(targets))
        self.assertEqual(transferred, 0)
        self.assertEqual(digest, EMPTY_CHECKSUM)
def test_write_timeout_source(self):
    """A source whose read() times out must propagate the Timeout."""
    class TimeoutReader(object):
        def read(self, size):
            raise Timeout(1.0)

    digest = self.checksum()
    total = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    size = CHUNK_SIZE * self.storage_method.ec_nb_data
    with set_http_connect(*([201] * total)):
        writer = ECChunkWriteHandler(self.sysmeta, self.meta_chunk(),
                                     digest, self.storage_method)
        self.assertRaises(Timeout, writer.stream, TimeoutReader(), size)
def test_write_exception_source(self):
    """A source whose read() raises must propagate the exception."""
    class FailingReader(object):
        def read(self, size):
            raise Exception('failure')

    digest = self.checksum()
    total = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    size = CHUNK_SIZE * self.storage_method.ec_nb_data
    with set_http_connect(*([201] * total)):
        writer = ECChunkWriteHandler(self.sysmeta, self.meta_chunk(),
                                     digest, self.storage_method)
        # TODO specialize exception
        self.assertRaises(Exception, writer.stream, FailingReader(), size)
def test_write_timeout_source(self):
    """A source whose read() times out must propagate the Timeout."""
    class TimeoutReader(object):
        def read(self, size):
            raise Timeout(1.0)

    digest = self.checksum()
    size = CHUNK_SIZE
    targets = self.meta_chunk()
    with set_http_connect(*([201] * len(targets))):
        writer = ReplicatedChunkWriteHandler(
            self.sysmeta, targets, digest, self.storage_method)
        self.assertRaises(Timeout, writer.stream, TimeoutReader(), size)
def test_read(self):
    # EC read round-trip: encode test data with the driver, serve the
    # data fragments over mocked HTTP, and check the download handler
    # reassembles the original payload.
    segment_size = self.storage_method.ec_segment_size
    data = ('1234' * segment_size)[:-10]  # deliberately not segment-aligned
    # split payload into segments, then encode each segment into fragments
    d = [data[x:x + segment_size]
         for x in range(0, len(data), segment_size)]
    fragmented_data = []
    for c in d:
        fragments = self.storage_method.driver.encode(c)
        if not fragments:
            break
        fragmented_data.append(fragments)
    # sanity check: decoding the fragments gives back the original data
    result = ''
    for fragment_data in fragmented_data:
        result += self.storage_method.driver.decode(
            fragment_data)
    self.assertEqual(len(data), len(result))
    self.assertEqual(data, result)
    # regroup per connection: one fragment stream per storage node
    chunk_fragments = list(zip(*fragmented_data))
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    self.assertEqual(len(chunk_fragments), nb)
    # only the data fragments are served; parity should not be needed
    chunks_resps = [(200, ''.join(chunk_fragments[i]))
                    for i in range(self.storage_method.ec_nb_data)]
    resps, body_iter = zip(*chunks_resps)
    meta_start = None
    meta_end = None
    headers = {}
    meta_chunk = self.meta_chunk()
    meta_chunk[0]['size'] = len(data)
    with set_http_connect(*resps, body_iter=body_iter):
        handler = ECChunkDownloadHandler(self.storage_method, meta_chunk,
                                         meta_start, meta_end, headers)
        stream = handler.get_stream()
        body = ''
        for part in stream:
            for body_chunk in part['iter']:
                body += body_chunk
        self.assertEqual(len(data), len(body))
        self.assertEqual(data, body)
def test_write_exception_source(self):
    """A source whose read() raises must propagate the exception."""
    class FailingReader(object):
        def read(self, size):
            raise Exception('failure')

    digest = self.checksum()
    size = CHUNK_SIZE
    targets = self.meta_chunk()
    with set_http_connect(*([201] * len(targets))):
        writer = ReplicatedChunkWriteHandler(
            self.sysmeta, targets, digest, self.storage_method)
        # TODO specialize exception
        self.assertRaises(Exception, writer.stream, FailingReader(), size)
def test_object_store(self):
    """object_create with mocked prepare/create PUTs one chunk per target."""
    api = self.api
    name = utils.random_string()
    raw_chunks = [
        {"url": "http://1.2.3.4:6000/AAAA", "pos": "0", "size": 32},
        {"url": "http://1.2.3.4:6000/BBBB", "pos": "1", "size": 32},
        {"url": "http://1.2.3.4:6000/CCCC", "pos": "2", "size": 32},
    ]
    meta = {
        object_headers['id']: utils.random_string(),
        object_headers['policy']: self.policy,
        object_headers['mime_type']: "octet/stream",
        object_headers['chunk_method']: "bytes",
        object_headers['version']: utils.random_string(),
    }
    api._content_prepare = Mock(return_value=(meta, raw_chunks))
    api._content_create = Mock(return_value=({}, {}))
    with set_http_connect(201, 201, 201):
        api.object_create(
            self.account, self.container, obj_name=name, data="x",
            headers=self.headers)
def test_put_stream_connect_timeout(self):
    """A slow connect must not make _put_stream blow up."""
    api = self.api
    name = utils.random_string()
    chunks = {
        0: [{"url": "http://1.2.3.4:6000/AAAA", "pos": "0", "size": 32}],
    }
    src = empty_stream()
    sysmeta = {
        'content_length': 0,
        'id': utils.random_string(),
        'version': utils.random_string(),
        'mime_type': utils.random_string(),
        'chunk_method': utils.random_string(),
        'policy': utils.random_string(),
    }
    with set_http_connect(200, slow_connect=True):
        chunks, bytes_transferred, content_checksum = api._put_stream(
            self.account, self.container, name, src, sysmeta, chunks)
def test_write_partial_exception(self):
    """A single failing connection is recorded without failing the write."""
    digest = self.checksum()
    src = empty_stream()
    total = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    size = CHUNK_SIZE * self.storage_method.ec_nb_data
    statuses = [201] * (total - 1) + [Exception("failure")]
    with set_http_connect(*statuses):
        writer = ECChunkWriteHandler(self.sysmeta, self.meta_chunk(),
                                     digest, self.storage_method)
        transferred, digest, chunks = writer.stream(src, size)
        self.assertEqual(len(chunks), total)
        for i in range(total - 1):
            self.assertEqual(chunks[i].get('error'), None)
        self.assertEqual(chunks[total - 1].get('error'), 'failure')
        self.assertEqual(transferred, 0)
        self.assertEqual(digest, EMPTY_CHECKSUM)
def test_write_quorum_success(self):
    """The EC write succeeds as long as a quorum of chunks is stored.

    The first `quorum` connections reply 201, the rest reply 500; the
    stream must complete and record 'HTTP 500' on the failed chunks.
    """
    checksum = self.checksum()
    source = empty_stream()
    size = CHUNK_SIZE * self.storage_method.ec_nb_data
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    quorum_size = self.storage_method.quorum
    resps = [201] * quorum_size
    resps += [500] * (nb - quorum_size)
    with set_http_connect(*resps):
        handler = ECChunkWriteHandler(self.sysmeta, self.meta_chunk(),
                                      checksum, self.storage_method)
        bytes_transferred, checksum, chunks = handler.stream(source, size)
        self.assertEqual(len(chunks), nb)
        for i in range(quorum_size):
            self.assertEqual(chunks[i].get('error'), None)
        # was `xrange` (Python 2-only) while the rest of this method used
        # `range`; normalized to `range` for consistency/forward-compat
        for i in range(quorum_size, nb):
            self.assertEqual(chunks[i].get('error'), 'HTTP 500')
        self.assertEqual(bytes_transferred, 0)
        self.assertEqual(checksum, EMPTY_CHECKSUM)
def test_put_stream_client_timeout(self):
    """A read timeout on the client source raises ClientReadTimeout."""
    api = self.api
    name = utils.random_string()
    chunks = {
        0: [{"url": "http://1.2.3.4:6000/AAAA", "pos": "0", "size": 32}],
    }
    src = fakes.FakeTimeoutStream(5)
    sysmeta = {
        'content_length': 0,
        'id': utils.random_string(),
        'version': utils.random_string(),
        'mime_type': utils.random_string(),
        'chunk_method': utils.random_string(),
        'policy': utils.random_string(),
    }
    with set_http_connect(200):
        self.assertRaises(
            exceptions.ClientReadTimeout, api._put_stream,
            self.account, self.container, name, src, sysmeta, chunks)
def test_write_timeout(self):
    """A Timeout on the last connection is recorded in its chunk entry."""
    digest = self.checksum()
    src = empty_stream()
    size = CHUNK_SIZE
    targets = self.meta_chunk()
    last = len(targets) - 1
    statuses = [201] * last + [Timeout(1.0)]
    with set_http_connect(*statuses):
        writer = ReplicatedChunkWriteHandler(
            self.sysmeta, targets, digest, self.storage_method)
        transferred, digest, chunks = writer.stream(src, size)
        self.assertEqual(len(chunks), len(targets))
        for i in range(last):
            self.assertEqual(chunks[i].get('error'), None)
        self.assertEqual(chunks[last].get('error'), '1.0 second')
        self.assertEqual(transferred, 0)
        self.assertEqual(digest, EMPTY_CHECKSUM)
def test_get_chunk_source(self):
    """Nominal case: the first chunk answers and its body is readable."""
    with set_http_connect(200, body='foobar'):
        src = self.handler._get_chunk_source()
        self.assertEqual('foobar', src.read())