class Content(object):
    """Handle an object (content) and its chunks.

    Wraps the meta2 (container) and rawx (blob) clients to implement
    chunk-level maintenance operations on a stored content.
    """

    def __init__(self, conf, container_id, metadata, chunks, storage_method):
        self.conf = conf
        self.container_id = container_id
        self.metadata = metadata
        self.chunks = ChunksHelper(chunks)
        self.storage_method = storage_method
        self.logger = get_logger(self.conf)
        self.cs_client = ConscienceClient(conf)
        self.blob_client = BlobClient()
        self.container_client = ContainerClient(self.conf)
        # Shortcut attributes extracted from the content metadata
        self.content_id = self.metadata["id"]
        self.stgpol = self.metadata["policy"]
        self.path = self.metadata["name"]
        self.length = int(self.metadata["length"])
        self.version = self.metadata["version"]
        self.checksum = self.metadata["hash"]
        self.mime_type = self.metadata["mime_type"]
        self.chunk_method = self.metadata["chunk_method"]

    def _get_spare_chunk(self, chunks_notin, chunks_broken):
        """Ask meta2 for spare chunk locations.

        :param chunks_notin: chunks whose rawx services must be avoided
        :param chunks_broken: chunks being replaced
        :returns: a list of spare chunk URLs
        :raises exc.SpareChunkException: when no spare chunk is available
        """
        spare_data = {
            "notin": ChunksHelper(chunks_notin, False).raw(),
            "broken": ChunksHelper(chunks_broken, False).raw()
        }
        try:
            spare_resp = self.container_client.content_spare(
                cid=self.container_id, content=self.content_id,
                data=spare_data, stgpol=self.stgpol)
        except ClientException as e:
            raise exc.SpareChunkException("No spare chunk (%s)" % e.message)
        return [c["id"] for c in spare_resp["chunks"]]

    def _update_spare_chunk(self, current_chunk, new_url):
        """Replace a chunk reference with a new URL in the meta2 database."""
        old = [{'type': 'chunk',
                'id': current_chunk.url,
                'hash': current_chunk.checksum,
                'size': current_chunk.size,
                'pos': current_chunk.pos,
                'content': self.content_id}]
        new = [{'type': 'chunk',
                'id': new_url,
                'hash': current_chunk.checksum,
                'size': current_chunk.size,
                'pos': current_chunk.pos,
                'content': self.content_id}]
        update_data = {'old': old, 'new': new}
        self.container_client.container_raw_update(
            cid=self.container_id, data=update_data)

    def _create_object(self):
        """Register the content and all its chunks in the meta2 database."""
        self.container_client.content_create(
            cid=self.container_id, path=self.path,
            content_id=self.content_id, stgpol=self.stgpol,
            size=self.length, checksum=self.checksum,
            version=self.version, chunk_method=self.chunk_method,
            mime_type=self.mime_type, data=self.chunks.raw())

    def rebuild_chunk(self, chunk_id, allow_same_rawx=False):
        """Rebuild a chunk. Implemented by subclasses."""
        raise NotImplementedError()

    def create(self, stream):
        """Create the content from a data stream. Implemented by subclasses."""
        raise NotImplementedError()

    def fetch(self):
        """Fetch the content data. Implemented by subclasses."""
        raise NotImplementedError()

    def delete(self):
        """Delete the content from its container."""
        self.container_client.content_delete(
            cid=self.container_id, path=self.path)

    def move_chunk(self, chunk_id):
        """Move one chunk of this content to a spare rawx service.

        :raises OrphanChunk: when the chunk is not part of this content
        :returns: the raw description of the moved chunk
        """
        current_chunk = self.chunks.filter(id=chunk_id).one()
        if current_chunk is None:
            raise OrphanChunk("Chunk not found in content")

        other_chunks = self.chunks.filter(
            metapos=current_chunk.metapos).exclude(id=chunk_id).all()

        spare_urls = self._get_spare_chunk(other_chunks, [current_chunk])

        self.logger.debug("copy chunk from %s to %s",
                          current_chunk.url, spare_urls[0])
        self.blob_client.chunk_copy(current_chunk.url, spare_urls[0])

        self._update_spare_chunk(current_chunk, spare_urls[0])

        try:
            self.blob_client.chunk_delete(current_chunk.url)
        except Exception:
            # Best effort: a leftover chunk is preferable to a failed move
            self.logger.warn("Failed to delete chunk %s" % current_chunk.url)

        current_chunk.url = spare_urls[0]

        return current_chunk.raw()
class BlobMoverWorker(object):
    """Move chunks off a rawx volume until its usage target is reached."""

    def __init__(self, conf, logger, volume):
        self.conf = conf
        self.logger = logger or get_logger(conf)
        self.volume = volume
        self.run_time = 0
        self.passes = 0
        self.errors = 0
        self.last_reported = 0
        self.last_usage_check = 0
        self.chunks_run_time = 0
        self.bytes_running_time = 0
        self.bytes_processed = 0
        self.total_bytes_processed = 0
        self.total_chunks_processed = 0
        # Stop moving once the volume usage drops to this percentage
        self.usage_target = int_value(conf.get('usage_target'), 0)
        self.usage_check_interval = int_value(
            conf.get('usage_check_interval'), 3600)
        self.report_interval = int_value(conf.get('report_interval'), 3600)
        self.max_chunks_per_second = int_value(
            conf.get('chunks_per_second'), 30)
        self.max_bytes_per_second = int_value(
            conf.get('bytes_per_second'), 10000000)
        self.blob_client = BlobClient()
        self.container_client = ContainerClient(conf)

    def mover_pass(self):
        """Walk the volume and move chunks until the usage target is met."""
        self.namespace, self.address = check_volume(self.volume)

        start_time = report_time = time.time()

        total_errors = 0
        mover_time = 0

        paths = paths_gen(self.volume)

        for path in paths:
            loop_time = time.time()

            now = time.time()
            if now - self.last_usage_check >= self.usage_check_interval:
                used, total = statfs(self.volume)
                usage = (float(used) / total) * 100
                if usage <= self.usage_target:
                    self.logger.info(
                        'current usage %.2f%%: target reached (%.2f%%)',
                        usage, self.usage_target)
                    self.last_usage_check = now
                    break

            self.safe_chunk_move(path)
            self.chunks_run_time = ratelimit(
                self.chunks_run_time, self.max_chunks_per_second)
            self.total_chunks_processed += 1
            now = time.time()
            if now - self.last_reported >= self.report_interval:
                self.logger.info(
                    '%(start_time)s '
                    '%(passes)d '
                    '%(errors)d '
                    '%(c_rate).2f '
                    '%(b_rate).2f '
                    '%(total).2f '
                    '%(mover_time).2f '
                    '%(mover_rate).2f' % {
                        'start_time': time.ctime(report_time),
                        'passes': self.passes,
                        'errors': self.errors,
                        'c_rate': self.passes / (now - report_time),
                        'b_rate': self.bytes_processed / (now - report_time),
                        'total': (now - start_time),
                        'mover_time': mover_time,
                        'mover_rate': mover_time / (now - start_time)
                    })
                report_time = now
                total_errors += self.errors
                self.passes = 0
                self.bytes_processed = 0
                self.last_reported = now
            mover_time += (now - loop_time)
        elapsed = (time.time() - start_time) or 0.000001
        self.logger.info(
            '%(elapsed).02f '
            '%(errors)d '
            '%(chunk_rate).2f '
            '%(bytes_rate).2f '
            '%(mover_time).2f '
            '%(mover_rate).2f' % {
                'elapsed': elapsed,
                'errors': total_errors + self.errors,
                'chunk_rate': self.total_chunks_processed / elapsed,
                'bytes_rate': self.total_bytes_processed / elapsed,
                'mover_time': mover_time,
                'mover_rate': mover_time / elapsed
            })

    def safe_chunk_move(self, path):
        """Move one chunk, counting (instead of propagating) any failure."""
        try:
            self.chunk_move(path)
        except Exception as e:
            self.errors += 1
            self.logger.error('ERROR while moving chunk %s: %s', path, e)
        self.passes += 1

    def load_chunk_metadata(self, path):
        """Read the extended attributes of a chunk file on disk."""
        with open(path) as f:
            return read_chunk_metadata(f)

    def chunk_move(self, path):
        """Move the chunk stored at ``path`` to a spare rawx service.

        :raises exc.OrphanChunk: when the content or chunk is unknown to meta2
        """
        meta = self.load_chunk_metadata(path)
        content_cid = meta['content_cid']
        content_path = meta['content_path']

        chunk_url = 'http://%s/%s' % (self.address, meta['chunk_id'])

        try:
            _, data = self.container_client.content_show(
                cid=content_cid, path=content_path)
        except exc.NotFound:
            raise exc.OrphanChunk('Content not found')
        # Collect the chunks at the same position; the spare request must
        # avoid their rawx services
        notin = [c for c in data if c['pos'] == meta['chunk_pos']]
        current_chunk = None
        for c in notin:
            if c['url'] == chunk_url:
                current_chunk = c
                break
        if not current_chunk:
            raise exc.OrphanChunk('Chunk not found in content')
        # Remove after the scan: never mutate a list while iterating it
        notin.remove(current_chunk)
        spare_data = {'notin': notin, 'broken': [current_chunk], 'size': 0}
        spare_resp = self.container_client.content_spare(
            cid=content_cid, path=content_path, data=spare_data)

        new_chunk = spare_resp['chunks'][0]
        self.blob_client.chunk_copy(current_chunk['url'], new_chunk['id'])

        old = [{'type': 'chunk',
                'id': current_chunk['url'],
                'hash': meta['chunk_hash'],
                'size': int(meta['chunk_size'])}]
        new = [{'type': 'chunk',
                'id': new_chunk['id'],
                'hash': meta['chunk_hash'],
                'size': int(meta['chunk_size'])}]
        update_data = {'old': old, 'new': new}

        self.container_client.container_raw_update(
            cid=content_cid, data=update_data)

        self.blob_client.chunk_delete(current_chunk['url'])

        self.logger.info(
            'moved chunk %s to %s', current_chunk['url'], new_chunk['id'])
class BlobMoverWorker(object):
    """Move chunks off a rawx volume until its usage target is reached."""

    def __init__(self, conf, logger, volume):
        self.conf = conf
        self.logger = logger or get_logger(conf)
        self.volume = volume
        self.run_time = 0
        self.passes = 0
        self.errors = 0
        self.last_reported = 0
        self.last_usage_check = 0
        self.chunks_run_time = 0
        self.bytes_running_time = 0
        self.bytes_processed = 0
        self.total_bytes_processed = 0
        self.total_chunks_processed = 0
        # Stop moving once the volume usage drops to this percentage
        self.usage_target = int_value(
            conf.get('usage_target'), 0)
        self.usage_check_interval = int_value(
            conf.get('usage_check_interval'), 3600)
        self.report_interval = int_value(
            conf.get('report_interval'), 3600)
        self.max_chunks_per_second = int_value(
            conf.get('chunks_per_second'), 30)
        self.max_bytes_per_second = int_value(
            conf.get('bytes_per_second'), 10000000)
        self.blob_client = BlobClient()
        self.container_client = ContainerClient(conf)

    def mover_pass(self):
        """Walk the volume and move chunks until the usage target is met."""
        self.namespace, self.address = check_volume(self.volume)

        start_time = report_time = time.time()

        total_errors = 0
        mover_time = 0

        paths = paths_gen(self.volume)

        for path in paths:
            loop_time = time.time()

            now = time.time()
            if now - self.last_usage_check >= self.usage_check_interval:
                used, total = statfs(self.volume)
                usage = (float(used) / total) * 100
                if usage <= self.usage_target:
                    self.logger.info(
                        'current usage %.2f%%: target reached (%.2f%%)',
                        usage, self.usage_target)
                    self.last_usage_check = now
                    break

            self.safe_chunk_move(path)
            self.chunks_run_time = ratelimit(
                self.chunks_run_time,
                self.max_chunks_per_second
            )
            self.total_chunks_processed += 1
            now = time.time()
            if now - self.last_reported >= self.report_interval:
                self.logger.info(
                    '%(start_time)s '
                    '%(passes)d '
                    '%(errors)d '
                    '%(c_rate).2f '
                    '%(b_rate).2f '
                    '%(total).2f '
                    '%(mover_time).2f '
                    '%(mover_rate).2f' % {
                        'start_time': time.ctime(report_time),
                        'passes': self.passes,
                        'errors': self.errors,
                        'c_rate': self.passes / (now - report_time),
                        'b_rate': self.bytes_processed / (now - report_time),
                        'total': (now - start_time),
                        'mover_time': mover_time,
                        'mover_rate': mover_time / (now - start_time)
                    }
                )
                report_time = now
                total_errors += self.errors
                self.passes = 0
                self.bytes_processed = 0
                self.last_reported = now
            mover_time += (now - loop_time)
        elapsed = (time.time() - start_time) or 0.000001
        self.logger.info(
            '%(elapsed).02f '
            '%(errors)d '
            '%(chunk_rate).2f '
            '%(bytes_rate).2f '
            '%(mover_time).2f '
            '%(mover_rate).2f' % {
                'elapsed': elapsed,
                'errors': total_errors + self.errors,
                'chunk_rate': self.total_chunks_processed / elapsed,
                'bytes_rate': self.total_bytes_processed / elapsed,
                'mover_time': mover_time,
                'mover_rate': mover_time / elapsed
            }
        )

    def safe_chunk_move(self, path):
        """Move one chunk, counting (instead of propagating) any failure."""
        try:
            self.chunk_move(path)
        except Exception as e:
            self.errors += 1
            self.logger.error('ERROR while moving chunk %s: %s', path, e)
        self.passes += 1

    def load_chunk_metadata(self, path):
        """Read the extended attributes of a chunk file on disk."""
        with open(path) as f:
            return read_chunk_metadata(f)

    def chunk_move(self, path):
        """Move the chunk stored at ``path`` to a spare rawx service.

        :raises exc.OrphanChunk: when the content or chunk is unknown to meta2
        """
        meta = self.load_chunk_metadata(path)
        content_cid = meta['content_cid']
        content_path = meta['content_path']

        chunk_url = 'http://%s/%s' % (self.address, meta['chunk_id'])

        try:
            data = self.container_client.content_show(
                cid=content_cid, path=content_path)
        except exc.NotFound:
            raise exc.OrphanChunk('Content not found')
        # Collect the chunks at the same position; the spare request must
        # avoid their rawx services
        notin = [c for c in data if c['pos'] == meta['chunk_pos']]
        current_chunk = None
        for c in notin:
            if c['url'] == chunk_url:
                current_chunk = c
                break
        if not current_chunk:
            raise exc.OrphanChunk('Chunk not found in content')
        # Remove after the scan: never mutate a list while iterating it
        notin.remove(current_chunk)
        spare_data = {'notin': notin, 'broken': [current_chunk], 'size': 0}
        spare_resp = self.container_client.content_spare(
            cid=content_cid, path=content_path, data=spare_data)

        new_chunk = spare_resp['chunks'][0]
        self.blob_client.chunk_copy(
            current_chunk['url'], new_chunk['id'])

        old = [{'type': 'chunk',
                'id': current_chunk['url'],
                'hash': meta['chunk_hash'],
                'size': int(meta['chunk_size'])}]
        new = [{'type': 'chunk',
                'id': new_chunk['id'],
                'hash': meta['chunk_hash'],
                'size': int(meta['chunk_size'])}]
        update_data = {'old': old, 'new': new}

        self.container_client.container_raw_update(
            cid=content_cid, data=update_data)

        self.blob_client.chunk_delete(current_chunk['url'])

        self.logger.info(
            'moved chunk %s to %s',
            current_chunk['url'], new_chunk['id'])
class Content(object): def __init__(self, conf, container_id, metadata, chunks, stgpol_args): self.conf = conf self.container_id = container_id self.metadata = metadata self.chunks = ChunksHelper(chunks) self.stgpol_args = stgpol_args self.logger = get_logger(self.conf) self.cs_client = ConscienceClient(conf) self.container_client = ContainerClient(self.conf) self.blob_client = BlobClient() self.session = requests.Session() self.content_id = self.metadata["id"] self.stgpol_name = self.metadata["policy"] self.path = self.metadata["name"] self.length = int(self.metadata["length"]) self.version = self.metadata["version"] self.hash = self.metadata["hash"] self.mime_type = self.metadata["mime-type"] self.chunk_method = self.metadata["chunk-method"] def _meta2_get_spare_chunk(self, chunks_notin, chunks_broken): spare_data = { "notin": ChunksHelper(chunks_notin, False).raw(), "broken": ChunksHelper(chunks_broken, False).raw() } try: spare_resp = self.container_client.content_spare( cid=self.container_id, content=self.content_id, data=spare_data, stgpol=self.stgpol_name) except ClientException as e: raise exc.SpareChunkException("No spare chunk (%s)" % e.message) url_list = [] for c in spare_resp["chunks"]: url_list.append(c["id"]) return url_list def _meta2_update_spare_chunk(self, current_chunk, new_url): old = [{'type': 'chunk', 'id': current_chunk.url, 'hash': current_chunk.hash, 'size': current_chunk.size, 'pos': current_chunk.pos, 'content': self.content_id}] new = [{'type': 'chunk', 'id': new_url, 'hash': current_chunk.hash, 'size': current_chunk.size, 'pos': current_chunk.pos, 'content': self.content_id}] update_data = {'old': old, 'new': new} self.container_client.container_raw_update( cid=self.container_id, data=update_data) def _meta2_create_object(self): self.container_client.content_create(cid=self.container_id, path=self.path, content_id=self.content_id, stgpol=self.stgpol_name, size=self.length, checksum=self.hash, version=self.version, 
chunk_method=self.chunk_method, mime_type=self.mime_type, data=self.chunks.raw()) def rebuild_chunk(self, chunk_id): raise NotImplementedError() def upload(self, stream): try: self._upload(stream) except: # Keep the stack trace exc_info = sys.exc_info() for chunk in self.chunks: try: self.blob_client.chunk_delete(chunk.url) except: self.logger.warn("Failed to delete %s", chunk.url) # Raise with the original stack trace raise exc_info[0], exc_info[1], exc_info[2] def _upload(self, stream): raise NotImplementedError() def download(self): raise NotImplementedError() def delete(self): self.container_client.content_delete(cid=self.container_id, path=self.path) def move_chunk(self, chunk_id): current_chunk = self.chunks.filter(id=chunk_id).one() if current_chunk is None: raise OrphanChunk("Chunk not found in content") other_chunks = self.chunks.filter( metapos=current_chunk.metapos).exclude(id=chunk_id).all() spare_urls = self._meta2_get_spare_chunk(other_chunks, [current_chunk]) self.logger.debug("copy chunk from %s to %s", current_chunk.url, spare_urls[0]) self.blob_client.chunk_copy(current_chunk.url, spare_urls[0]) self._meta2_update_spare_chunk(current_chunk, spare_urls[0]) try: self.blob_client.chunk_delete(current_chunk.url) except: self.logger.warn("Failed to delete chunk %s" % current_chunk.url) current_chunk.url = spare_urls[0] return current_chunk.raw()
class Content(object):
    """Handle an object (content) and its chunks.

    Variant exposing ``mime_type``, ``policy`` and ``properties`` as
    properties backed by the metadata dict, and carrying the account and
    container names to build chunk "full path" attributes.
    """

    def __init__(self, conf, container_id, metadata, chunks, storage_method,
                 account, container_name, container_client=None):
        self.conf = conf
        self.container_id = container_id
        self.metadata = metadata
        self.chunks = ChunksHelper(chunks)
        self.storage_method = storage_method
        self.logger = get_logger(self.conf)
        self.blob_client = BlobClient()
        self.container_client = (container_client
                                 or ContainerClient(self.conf,
                                                    logger=self.logger))

        # FIXME: all these may be properties
        self.content_id = self.metadata["id"]
        self.path = self.metadata["name"]
        self.length = int(self.metadata["length"])
        self.version = self.metadata["version"]
        self.checksum = self.metadata["hash"]
        self.chunk_method = self.metadata["chunk_method"]
        self.account = account
        self.container_name = container_name
        if 'full_path' in self.metadata:
            self.full_path = metadata['full_path']
        else:
            # account/container/path/version, URL-quoted component by component
            self.full_path = [
                '{0}/{1}/{2}/{3}'.format(quote_plus(self.account),
                                         quote_plus(self.container_name),
                                         quote_plus(self.path),
                                         self.version)
            ]

    @property
    def mime_type(self):
        return self.metadata["mime_type"]

    @mime_type.setter
    def mime_type(self, value):
        self.metadata["mime_type"] = value

    @property
    def policy(self):
        return self.metadata["policy"]

    @policy.setter
    def policy(self, value):
        self.metadata["policy"] = value

    @property
    def properties(self):
        return self.metadata.get('properties')

    @properties.setter
    def properties(self, value):
        if not isinstance(value, dict):
            raise ValueError("'value' must be a dict")
        self.metadata['properties'] = value

    def _get_spare_chunk(self, chunks_notin, chunks_broken):
        """Ask meta2 for spare chunk locations.

        :returns: a list of spare chunk URLs
        :raises exc.SpareChunkException: when no spare chunk is available
        """
        spare_data = {
            "notin": ChunksHelper(chunks_notin, False).raw(),
            "broken": ChunksHelper(chunks_broken, False).raw()
        }
        try:
            spare_resp = self.container_client.content_spare(
                cid=self.container_id, path=self.content_id,
                data=spare_data, stgpol=self.policy)
        except ClientException as e:
            raise exc.SpareChunkException("No spare chunk (%s)" % e.message)
        return [c["id"] for c in spare_resp["chunks"]]

    def _add_raw_chunk(self, current_chunk, url):
        """Insert a new chunk reference in the meta2 database."""
        data = {'type': 'chunk',
                'id': url,
                'hash': current_chunk.checksum,
                'size': current_chunk.size,
                'pos': current_chunk.pos,
                'content': self.content_id}

        self.container_client.container_raw_insert(
            data, cid=self.container_id)

    def _update_spare_chunk(self, current_chunk, new_url):
        """Replace a chunk reference with a new URL in the meta2 database."""
        old = {'type': 'chunk',
               'id': current_chunk.url,
               'hash': current_chunk.checksum,
               'size': current_chunk.size,
               'pos': current_chunk.pos,
               'content': self.content_id}
        new = {'type': 'chunk',
               'id': new_url,
               'hash': current_chunk.checksum,
               'size': current_chunk.size,
               'pos': current_chunk.pos,
               'content': self.content_id}
        self.container_client.container_raw_update(
            old, new, cid=self.container_id)

    def _generate_sysmeta(self):
        """Build the system-metadata dict sent along with chunk uploads."""
        sysmeta = dict()
        sysmeta['id'] = self.content_id
        sysmeta['version'] = self.version
        sysmeta['policy'] = self.policy
        sysmeta['mime_type'] = self.mime_type
        sysmeta['chunk_method'] = self.chunk_method
        sysmeta['chunk_size'] = self.metadata['chunk_size']
        sysmeta['oio_version'] = OIO_VERSION
        sysmeta['full_path'] = self.full_path
        sysmeta['content_path'] = self.path
        sysmeta['container_id'] = self.container_id
        return sysmeta

    def _create_object(self, **kwargs):
        """Register the content, its chunks and properties in meta2."""
        data = {'chunks': self.chunks.raw(),
                'properties': self.properties}
        self.container_client.content_create(
            cid=self.container_id, path=self.path,
            content_id=self.content_id, stgpol=self.policy,
            size=self.length, checksum=self.checksum,
            version=self.version, chunk_method=self.chunk_method,
            mime_type=self.mime_type, data=data,
            **kwargs)

    def rebuild_chunk(self, chunk_id, allow_same_rawx=False, chunk_pos=None):
        """Rebuild a chunk. Implemented by subclasses."""
        raise NotImplementedError()

    def create(self, stream, **kwargs):
        """Create the content from a data stream. Implemented by subclasses."""
        raise NotImplementedError()

    def fetch(self):
        """Fetch the content data. Implemented by subclasses."""
        raise NotImplementedError()

    def delete(self, **kwargs):
        """Delete the content from its container."""
        self.container_client.content_delete(
            cid=self.container_id, path=self.path, **kwargs)

    def move_chunk(self, chunk_id):
        """Move one chunk of this content to a spare rawx service.

        :raises OrphanChunk: when the chunk is not part of this content
        :returns: the raw description of the moved chunk
        """
        current_chunk = self.chunks.filter(id=chunk_id).one()
        if current_chunk is None:
            raise OrphanChunk("Chunk not found in content")

        other_chunks = self.chunks.filter(
            metapos=current_chunk.metapos).exclude(id=chunk_id).all()

        spare_urls = self._get_spare_chunk(other_chunks, [current_chunk])

        self.logger.debug("copy chunk from %s to %s",
                          current_chunk.url, spare_urls[0])
        self.blob_client.chunk_copy(current_chunk.url, spare_urls[0])

        self._update_spare_chunk(current_chunk, spare_urls[0])

        try:
            self.blob_client.chunk_delete(current_chunk.url)
        except Exception:
            # Best effort: a leftover chunk is preferable to a failed move
            self.logger.warn("Failed to delete chunk %s" % current_chunk.url)

        current_chunk.url = spare_urls[0]

        return current_chunk.raw()
class BlobRebuilderWorker(object):
    """Rebuild the chunks a lost rawx volume used to host.

    Lost chunks are listed by the rdir service; each one is recreated on a
    spare rawx from a surviving copy at the same position.
    """

    def __init__(self, conf, logger, volume):
        self.conf = conf
        self.logger = logger or get_logger(conf)
        self.volume = volume
        self.run_time = 0
        self.passes = 0
        self.errors = 0
        self.last_reported = 0
        self.chunks_run_time = 0
        self.bytes_running_time = 0
        self.bytes_processed = 0
        self.total_bytes_processed = 0
        self.total_chunks_processed = 0
        self.dry_run = true_value(
            conf.get('dry_run', False))
        self.report_interval = int_value(
            conf.get('report_interval'), 3600)
        self.max_chunks_per_second = int_value(
            conf.get('chunks_per_second'), 30)
        self.max_bytes_per_second = int_value(
            conf.get('bytes_per_second'), 10000000)
        self.rdir_fetch_limit = int_value(
            conf.get('rdir_fetch_limit'), 100)
        self.blob_client = BlobClient()
        self.container_client = ContainerClient(conf)
        self.rdir_client = RdirClient(conf)

    def rebuilder_pass_with_lock(self):
        """Run a rebuild pass while holding the rdir admin lock."""
        self.rdir_client.admin_lock(self.volume,
                                    "rebuilder on %s" % gethostname())
        try:
            self.rebuilder_pass()
        finally:
            self.rdir_client.admin_unlock(self.volume)

    def rebuilder_pass(self):
        """Rebuild every chunk flagged for rebuild on this volume."""
        start_time = report_time = time.time()

        total_errors = 0
        rebuilder_time = 0

        chunks = self.rdir_client.chunk_fetch(self.volume,
                                              limit=self.rdir_fetch_limit,
                                              rebuild=True)
        for container_id, content_id, chunk_id, data in chunks:
            loop_time = time.time()

            if self.dry_run:
                self.dryrun_chunk_rebuild(container_id, content_id, chunk_id)
            else:
                self.safe_chunk_rebuild(container_id, content_id, chunk_id)

            self.chunks_run_time = ratelimit(
                self.chunks_run_time,
                self.max_chunks_per_second
            )
            self.total_chunks_processed += 1
            now = time.time()

            if now - self.last_reported >= self.report_interval:
                self.logger.info(
                    '%(start_time)s '
                    '%(passes)d '
                    '%(errors)d '
                    '%(c_rate).2f '
                    '%(b_rate).2f '
                    '%(total).2f '
                    '%(rebuilder_time).2f '
                    '%(rebuilder_rate).2f' % {
                        'start_time': time.ctime(report_time),
                        'passes': self.passes,
                        'errors': self.errors,
                        'c_rate': self.passes / (now - report_time),
                        'b_rate': self.bytes_processed / (now - report_time),
                        'total': (now - start_time),
                        'rebuilder_time': rebuilder_time,
                        'rebuilder_rate': rebuilder_time / (now - start_time)
                    }
                )
                report_time = now
                total_errors += self.errors
                self.passes = 0
                self.bytes_processed = 0
                self.last_reported = now
            rebuilder_time += (now - loop_time)
        elapsed = (time.time() - start_time) or 0.000001
        self.logger.info(
            '%(elapsed).02f '
            '%(errors)d '
            '%(chunk_rate).2f '
            '%(bytes_rate).2f '
            '%(rebuilder_time).2f '
            '%(rebuilder_rate).2f' % {
                'elapsed': elapsed,
                'errors': total_errors + self.errors,
                'chunk_rate': self.total_chunks_processed / elapsed,
                'bytes_rate': self.total_bytes_processed / elapsed,
                'rebuilder_time': rebuilder_time,
                'rebuilder_rate': rebuilder_time / elapsed
            }
        )

    def dryrun_chunk_rebuild(self, container_id, content_id, chunk_id):
        """Log what would be rebuilt, without touching anything."""
        self.logger.info("[dryrun] Rebuilding "
                         "container %s, content %s, chunk %s"
                         % (container_id, content_id, chunk_id))
        self.passes += 1

    def safe_chunk_rebuild(self, container_id, content_id, chunk_id):
        """Rebuild one chunk, counting (instead of propagating) failures."""
        self.logger.info('Rebuilding (container %s, content %s, chunk %s)'
                         % (container_id, content_id, chunk_id))
        try:
            self.chunk_rebuild(container_id, content_id, chunk_id)
        except Exception as e:
            self.errors += 1
            self.logger.error('ERROR while rebuilding chunk %s|%s|%s) : %s',
                              container_id, content_id, chunk_id, e)

        self.passes += 1

    def _meta2_get_chunks_at_pos(self, container_id, content_id, chunk_id):
        """Locate the lost chunk and its surviving copies in meta2.

        :returns: (lost chunk description, list of duplicates at same pos)
        :raises exc.OrphanChunk: content or chunk unknown to meta2
        :raises exc.UnrecoverableContent: no surviving copy exists
        """
        current_chunk_url = 'http://%s/%s' % (self.volume, chunk_id)

        try:
            data = self.container_client.content_show(
                cid=container_id, content=content_id)
        except exc.NotFound:
            raise exc.OrphanChunk('Content not found')

        current_chunk = None
        for c in data:
            if c['url'] == current_chunk_url:
                current_chunk = c
                break
        if not current_chunk:
            raise exc.OrphanChunk('Chunk not found in content')

        duplicate_chunks = []
        for c in data:
            if c['pos'] == current_chunk['pos'] \
                    and c['url'] != current_chunk['url']:
                duplicate_chunks.append(c)
        if len(duplicate_chunks) == 0:
            raise exc.UnrecoverableContent('No copy of missing chunk')

        return current_chunk, duplicate_chunks

    def _meta2_get_spare_chunk(self, container_id, content_id, notin, broken):
        """Ask meta2 for one spare chunk location.

        :raises exc.SpareChunkException: when no spare chunk is available
        """
        spare_data = {'notin': notin,
                      'broken': [broken],
                      'size': 0}
        try:
            spare_resp = self.container_client.content_spare(
                cid=container_id, content=content_id, data=spare_data)
        except ClientException as e:
            raise exc.SpareChunkException('No spare chunk (%s)' % e.message)

        return spare_resp['chunks'][0]

    def _meta2_replace_chunk(self, container_id, content_id,
                             current_chunk, new_chunk):
        """Point the meta2 chunk reference at the rebuilt copy."""
        old = [{'type': 'chunk',
                'id': current_chunk['url'],
                'hash': current_chunk['hash'],
                'size': current_chunk['size'],
                'pos': current_chunk['pos'],
                'content': content_id}]
        new = [{'type': 'chunk',
                'id': new_chunk['id'],
                'hash': current_chunk['hash'],
                'size': current_chunk['size'],
                'pos': current_chunk['pos'],
                'content': content_id}]
        update_data = {'old': old, 'new': new}

        self.container_client.container_raw_update(
            cid=container_id, data=update_data)

    # TODO rain support
    def chunk_rebuild(self, container_id, content_id, chunk_id):
        """Recreate a lost chunk on a spare rawx from a surviving copy.

        :raises exc.UnrecoverableContent: when no copy could be uploaded
        """
        current_chunk, duplicate_chunks = self._meta2_get_chunks_at_pos(
            container_id, content_id, chunk_id)

        spare_chunk = self._meta2_get_spare_chunk(
            container_id, content_id, duplicate_chunks, current_chunk)

        # Try each surviving copy in turn until one upload succeeds
        uploaded = False
        for src in duplicate_chunks:
            try:
                self.blob_client.chunk_copy(src['url'], spare_chunk['id'])
                self.logger.debug('copy chunk from %s to %s',
                                  src['url'], spare_chunk['id'])
                uploaded = True
                break
            except Exception as e:
                self.logger.debug('Failed to copy chunk from %s to %s: %s',
                                  src['url'], spare_chunk['id'], type(e))
        if not uploaded:
            raise exc.UnrecoverableContent('No copy available '
                                           'of missing chunk')

        self._meta2_replace_chunk(container_id, content_id,
                                  current_chunk, spare_chunk)

        self.rdir_client.chunk_push(self.volume, container_id, content_id,
                                    chunk_id, rtime=int(time.time()))

        self.bytes_processed += current_chunk['size']
        self.total_bytes_processed += current_chunk['size']