def get_stream(self):
    range_infos = self._get_range_infos()
    chunk_iter = iter(self.chunks)

    # we use eventlet GreenPool to manage readers
    with utils.ContextPool(self.storage_method.ec_nb_data) as pool:
        pile = GreenPile(pool)
        # we use eventlet GreenPile to spawn readers
        for _j in range(self.storage_method.ec_nb_data):
            pile.spawn(self._get_fragment, chunk_iter,
                       self.storage_method)
        readers = []
        for reader, parts_iter in pile:
            if reader.status in (200, 206):
                readers.append((reader, parts_iter))
            # TODO log failures?

    # with EC we need at least ec_nb_data valid readers
    if len(readers) >= self.storage_method.ec_nb_data:
        # all readers should return the same Content-Length,
        # so just take the headers from one of them
        resp_headers = HeadersDict(readers[0][0].headers)
        fragment_length = int(resp_headers.get('Content-Length'))
        read_iterators = [it for _, it in readers]
        stream = ECStream(self.storage_method, read_iterators,
                          range_infos, self.meta_length,
                          fragment_length)
        # start the stream
        stream.start()
        return stream
    else:
        raise exc.OioException("Not enough valid sources to read")

def get_stream(self):
    range_infos = self._get_range_infos()
    chunk_iter = iter(self.chunks)

    # we use eventlet GreenPool to manage readers
    with green.ContextPool(self.storage_method.ec_nb_data) as pool:
        pile = GreenPile(pool)
        # we use eventlet GreenPile to spawn readers
        for _j in range(self.storage_method.ec_nb_data):
            pile.spawn(self._get_fragment, chunk_iter, range_infos,
                       self.storage_method)
        readers = []
        for reader, parts_iter in pile:
            if reader.status in (200, 206):
                readers.append((reader, parts_iter))
            # TODO log failures?

    # with EC we need at least ec_nb_data valid readers
    if len(readers) >= self.storage_method.ec_nb_data:
        # all readers should return the same Content-Length,
        # so just take the headers from one of them
        resp_headers = HeadersDict(readers[0][0].headers)
        fragment_length = int(resp_headers.get('Content-Length'))
        read_iterators = [it for _, it in readers]
        stream = ECStream(self.storage_method, read_iterators,
                          range_infos, self.meta_length,
                          fragment_length)
        # start the stream
        stream.start()
        return stream
    else:
        raise exceptions.ServiceUnavailable(
            'Not enough valid sources to read (%d/%d)' % (
                len(readers), self.storage_method.ec_nb_data))

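# The "at least ec_nb_data valid readers" check above is the k-of-n
# property of erasure coding: any k of the k + m fragments are enough to
# rebuild the data. A minimal, self-contained sketch of that property
# using pyeclib; the driver and its parameters are illustrative
# assumptions, not the storage method actually configured:
from pyeclib.ec_iface import ECDriver

driver = ECDriver(k=6, m=3, ec_type='liberasurecode_rs_vand')
fragments = driver.encode(b'x' * 1024)  # produces k + m = 9 fragments
assert len(fragments) == 9
assert driver.decode(fragments[:6]) == b'x' * 1024  # any 6 suffice
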
class FakeResponse(object):
    def __init__(self, status, body='', headers=None, slow=0):
        self.status = status
        self.body = body
        self.headers = HeadersDict(headers)
        self.stream = BytesIO(body)
        self.slow = slow

    def getheader(self, name, default=None):
        return self.headers.get(name, default)

    def getheaders(self):
        if 'Content-Length' not in self.headers:
            self.headers['Content-Length'] = len(self.body)
        return self.headers.items()

    def _slow(self):
        sleep(self.slow)

    def read(self, amt=0):
        if self.slow:
            self._slow()
        return self.stream.read(amt)

    def __repr__(self):
        return 'FakeResponse(status=%s)' % self.status

    def reason(self):
        return str(self.status)

def get_stream(self):
    range_infos = self._get_range_infos()
    # the meta chunk length
    # (the amount of actual data stored into the meta chunk)
    meta_length = self.chunks[0]['size']
    chunk_iter = iter(self.chunks)

    # we use eventlet GreenPool to manage readers
    with utils.ContextPool(self.storage_method.ec_nb_data) as pool:
        pile = GreenPile(pool)
        # we use eventlet GreenPile to spawn readers
        for _j in range(self.storage_method.ec_nb_data):
            pile.spawn(self._get_fragment, chunk_iter,
                       self.storage_method)
        readers = []
        for reader, parts_iter in pile:
            if reader.status in (200, 206):
                readers.append((reader, parts_iter))
            # TODO log failures?

    # with EC we need at least ec_nb_data valid readers
    if len(readers) >= self.storage_method.ec_nb_data:
        # all readers should return the same Content-Length,
        # so just take the headers from one of them
        resp_headers = HeadersDict(readers[0][0].headers)
        fragment_length = int(resp_headers.get('Content-Length'))
        read_iterators = [it for _, it in readers]
        stream = ECStream(self.storage_method, read_iterators,
                          range_infos, meta_length, fragment_length)
        # start the stream
        stream.start()
        return stream
    else:
        raise exc.OioException("Not enough valid sources to read")

class FakeResponse(object):
    def __init__(self, status, body='', headers=None, slow=0):
        self.status = status
        self.body = body
        self.headers = HeadersDict(headers)
        self.stream = BytesIO(body)
        self.slow = slow

    def getheader(self, name, default=None):
        return self.headers.get(name, default)

    def getheaders(self):
        if 'Content-Length' not in self.headers:
            self.headers['Content-Length'] = len(self.body)
        return self.headers.items()

    def _slow(self):
        sleep(self.slow)

    def read(self, amt=0):
        if self.slow:
            self._slow()
        return self.stream.read(amt)

    def __repr__(self):
        return 'FakeResponse(status=%s)' % self.status

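# A minimal sketch of FakeResponse standing in for an HTTP response in a
# unit test. The case-insensitive header lookup assumes HeadersDict
# normalizes key case, which the surrounding snippets rely on:
resp = FakeResponse(200, body=b'fragment',
                    headers={'Content-Type': 'text/plain'})
assert resp.status == 200
assert resp.getheader('content-type') == 'text/plain'
assert resp.read(4) == b'frag'
assert resp.read(4) == b'ment'
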
def _next(self):
    fragment_iterators = []
    for iterator in self.readers:
        part_info = next(iterator)
        fragment_iterators.append(part_info['iter'])
        # all readers are expected to carry the same part headers,
        # so keeping the last ones seen is enough
        headers = HeadersDict(part_info['headers'])
    return headers, fragment_iterators

def getheader(self, name, default=None):
    return HeadersDict(self.getheaders()).get(name, default)

def getheaders(self):
    headers = HeadersDict({
        'content-length': len(self.body),
    })
    headers.update(self.headers)
    return headers.items()

def __init__(self, status, body='', headers=None, slow=0):
    self.status = status
    self.body = body
    self.headers = HeadersDict(headers)
    self.stream = BytesIO(body)
    self.slow = slow

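# Every snippet here leans on HeadersDict behaving as a header mapping
# with case-insensitive keys. The real class ships with the project's
# common HTTP helpers; this stand-in is a sketch of the assumed
# behaviour only:
class HeadersDict(dict):
    """dict whose keys are lower-cased so header lookups ignore case."""

    def __init__(self, headers=None, **kwargs):
        super(HeadersDict, self).__init__()
        if headers:
            self.update(headers)
        self.update(kwargs)

    def update(self, data):
        if hasattr(data, 'keys'):
            for key in data.keys():
                self[key] = data[key]
        else:
            for key, value in data:
                self[key] = value

    def __getitem__(self, key):
        return dict.__getitem__(self, key.lower())

    def __setitem__(self, key, value):
        dict.__setitem__(self, key.lower(), value)

    def __delitem__(self, key):
        dict.__delitem__(self, key.lower())

    def __contains__(self, key):
        return dict.__contains__(self, key.lower())

    def get(self, key, default=None):
        return dict.get(self, key.lower(), default)
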
def test_read_old_chunk(self):
    metachunk_hash = md5().hexdigest()
    trailers = {'x-oio-chunk-meta-metachunk-size': '1',
                'x-oio-chunk-meta-metachunk-hash': metachunk_hash}

    chunkid = random_chunk_id()
    chunkdata = random_buffer(string.printable, 1).encode('utf-8')
    chunkurl = self._rawx_url(chunkid)
    chunkpath = self._chunk_path(chunkid)
    headers = self._chunk_attr(chunkid, chunkdata)
    self._check_not_present(chunkurl)

    resp, _ = self._http_request(chunkurl, 'PUT', chunkdata, headers,
                                 trailers)
    self.assertEqual(201, resp.status)

    resp1, data1 = self._http_request(chunkurl, 'GET', '', {})
    self.assertEqual(200, resp1.status)
    headers1 = HeadersDict(resp1.getheaders())
    with open(chunkpath, 'r') as fd:
        meta1, _ = read_chunk_metadata(fd, chunkid)

    convert_to_old_chunk(
        chunkpath, self.account, self.container, self.content_path,
        self.content_version, self.content_id)

    resp2, data2 = self._http_request(chunkurl, 'GET', '', {})
    self.assertEqual(200, resp2.status)
    headers2 = HeadersDict(resp2.getheaders())
    with open(chunkpath, 'r') as fd:
        meta2, _ = read_chunk_metadata(fd, chunkid)
    self.assertEqual(data1, data2)
    del headers1[CHUNK_HEADERS['full_path']]
    del headers1[CHUNK_HEADERS['oio_version']]
    del headers2[CHUNK_HEADERS['oio_version']]
    del headers1["date"]
    del headers2["date"]
    self.assertDictEqual(headers1, headers2)
    del meta1['full_path']
    del meta1['oio_version']
    del meta2['oio_version']
    self.assertDictEqual(meta1, meta2)

    # Copy old chunk
    copyid = random_chunk_id()
    copyid = chunkid[:-60] + copyid[-60:]
    copyurl = self._rawx_url(copyid)
    copypath = self._chunk_path(copyid)
    copycontentid = random_id(32)
    copyheaders = {}
    copyheaders["Destination"] = copyurl
    copyheaders['x-oio-chunk-meta-full-path'] = encode_fullpath(
        "account-snapshot", "container-snapshot",
        self.content_path + "-snapshot", 1456938361143741,
        copycontentid)
    resp, _ = self._http_request(chunkurl, 'COPY', '', copyheaders)
    self.assertEqual(201, resp.status)

    resp2, data2 = self._http_request(chunkurl, 'GET', '', {})
    self.assertEqual(200, resp2.status)
    headers2 = HeadersDict(resp2.getheaders())
    with open(chunkpath, 'r') as fd:
        meta2, _ = read_chunk_metadata(fd, chunkid)
    self.assertEqual(1, len(meta2['links']))
    self.assertEqual(copyheaders['x-oio-chunk-meta-full-path'],
                     meta2['links'][copyid])
    meta2['links'] = dict()
    self.assertEqual(data1, data2)
    del headers2[CHUNK_HEADERS['oio_version']]
    del headers2["date"]
    self.assertDictEqual(headers1, headers2)
    del meta2['oio_version']
    self.assertDictEqual(meta1, meta2)

    resp3, data3 = self._http_request(copyurl, 'GET', '', {})
    self.assertEqual(200, resp3.status)
    headers3 = HeadersDict(resp3.getheaders())
    with open(copypath, 'r') as fd:
        meta3, _ = read_chunk_metadata(fd, copyid)
    self.assertEqual(
        copyheaders['x-oio-chunk-meta-full-path'],
        headers3['x-oio-chunk-meta-full-path'])
    del headers3['x-oio-chunk-meta-full-path']
    self.assertEqual(
        cid_from_name("account-snapshot", "container-snapshot"),
        headers3['x-oio-chunk-meta-container-id'])
    del headers1['x-oio-chunk-meta-container-id']
    del headers3['x-oio-chunk-meta-container-id']
    self.assertEqual(
        self.content_path + "-snapshot",
        unquote(headers3['x-oio-chunk-meta-content-path']))
    del headers1['x-oio-chunk-meta-content-path']
    del headers3['x-oio-chunk-meta-content-path']
    self.assertEqual(
        '1456938361143741',
        headers3['x-oio-chunk-meta-content-version'])
    del headers1['x-oio-chunk-meta-content-version']
    del headers3['x-oio-chunk-meta-content-version']
    self.assertEqual(
        copycontentid, headers3['x-oio-chunk-meta-content-id'])
    del headers1['x-oio-chunk-meta-content-id']
    del headers3['x-oio-chunk-meta-content-id']
    self.assertEqual(copyid, headers3['x-oio-chunk-meta-chunk-id'])
    del headers1['x-oio-chunk-meta-chunk-id']
    del headers3['x-oio-chunk-meta-chunk-id']
    self.assertEqual(
        copyheaders['x-oio-chunk-meta-full-path'], meta3['full_path'])
    del meta3['full_path']
    self.assertEqual(
        cid_from_name("account-snapshot", "container-snapshot"),
        meta3['container_id'])
    del meta1['container_id']
    del meta3['container_id']
    self.assertEqual(self.content_path + "-snapshot",
                     meta3['content_path'])
    del meta1['content_path']
    del meta3['content_path']
    self.assertEqual('1456938361143741', meta3['content_version'])
    del meta1['content_version']
    del meta3['content_version']
    self.assertEqual(copycontentid, meta3['content_id'])
    del meta1['content_id']
    del meta3['content_id']
    self.assertEqual(copyid, meta3['chunk_id'])
    del meta1['chunk_id']
    del meta3['chunk_id']
    # FIXME the old chunk is invisible
    self.assertEqual(0, len(meta3['links']))
    self.assertEqual(data1, data3)
    del headers3[CHUNK_HEADERS['oio_version']]
    del headers3["date"]
    self.assertDictEqual(headers1, headers3)
    del meta3['oio_version']
    self.assertDictEqual(meta1, meta3)
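# For reference, the full-path value asserted above is a single string
# assembled from account, container, content path, version and content
# id. A hedged sketch of its shape (exact quoting rules belong to the
# real encode_fullpath):
#   encode_fullpath("account-snapshot", "container-snapshot",
#                   "content-snapshot", 1456938361143741, copycontentid)
#   -> "account-snapshot/container-snapshot/content-snapshot/"
#      "1456938361143741/<copycontentid>"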