def create_quantize_tasks(src_layer, dest_layer, shape, mip=0, fill_missing=False, chunk_size=(128, 128, 64), encoding='raw', bounds=None):
  """Generate QuantizeTask partials covering the destination volume.

  Creates and commits the quantized-affinity info for ``dest_layer``,
  prepares its downsample scales, then returns an iterator producing one
  task per ``shape``-sized gridpoint. Provenance is recorded when the
  iterator finishes.
  """
  shape = Vec(*shape)

  info = create_quantized_affinity_info(
    src_layer, dest_layer, shape, mip, chunk_size, encoding
  )
  destvol = CloudVolume(dest_layer, info=info, mip=mip)
  destvol.commit_info()

  downsample_scales.create_downsample_scales(
    dest_layer, mip=mip, ds_shape=shape,
    chunk_size=chunk_size, encoding=encoding
  )

  if bounds is not None:
    # Caller-provided bounds are interpreted at mip 0, then aligned to
    # the destination chunk grid at the working mip.
    bounds = destvol.bbox_to_mip(bounds, mip=0, to_mip=mip)
    bounds = bounds.expand_to_chunk_size(
      destvol.mip_chunk_size(mip), destvol.mip_voxel_offset(mip)
    )
  else:
    bounds = destvol.mip_bounds(mip)

  class QuantizeTasksIterator(FinelyDividedTaskIterator):
    def task(self, shape, offset):
      return partial(
        QuantizeTask,
        source_layer_path=src_layer,
        dest_layer_path=dest_layer,
        shape=shape.tolist(),
        offset=offset.tolist(),
        fill_missing=fill_missing,
        mip=mip,
      )

    def on_finish(self):
      provenance_record = {
        'method': {
          'task': 'QuantizeTask',
          'source_layer_path': src_layer,
          'dest_layer_path': dest_layer,
          'shape': shape.tolist(),
          'fill_missing': fill_missing,
          'mip': mip,
        },
        'by': operator_contact(),
        'date': strftime('%Y-%m-%d %H:%M %Z'),
      }
      destvol.provenance.sources = [src_layer]
      destvol.provenance.processing.append(provenance_record)
      destvol.commit_provenance()

  return QuantizeTasksIterator(bounds, shape)
def create_meshing_tasks( layer_path, mip, shape=(448, 448, 448), simplification=True, max_simplification_error=40, mesh_dir=None, cdn_cache=False, dust_threshold=None, object_ids=None, progress=False, fill_missing=False, encoding='precomputed' ):
  """Generate MeshTasks tiling the volume at the requested mip.

  Registers the mesh directory in the layer info file when absent, and
  appends a provenance record once the task iterator is exhausted.
  """
  shape = Vec(*shape)
  vol = CloudVolume(layer_path, mip)

  if mesh_dir is None:
    mesh_dir = 'mesh_mip_{}_err_{}'.format(mip, max_simplification_error)

  # Advertise the mesh directory in the info file exactly once.
  if 'mesh' not in vol.info:
    vol.info['mesh'] = mesh_dir
    vol.commit_info()

  # Per-task invariants, hoisted out of the task factory.
  simplification_factor = 100 if simplification else 0
  cache_control = '' if cdn_cache else 'no-cache'

  class MeshTaskIterator(FinelyDividedTaskIterator):
    def task(self, shape, offset):
      return MeshTask(
        shape=shape.clone(),
        offset=offset.clone(),
        layer_path=layer_path,
        mip=vol.mip,
        simplification_factor=simplification_factor,
        max_simplification_error=max_simplification_error,
        mesh_dir=mesh_dir,
        cache_control=cache_control,
        dust_threshold=dust_threshold,
        progress=progress,
        object_ids=object_ids,
        fill_missing=fill_missing,
        encoding=encoding,
      )

    def on_finish(self):
      vol.provenance.processing.append({
        'method': {
          'task': 'MeshTask',
          'layer_path': layer_path,
          'mip': vol.mip,
          'shape': shape.tolist(),
          'max_simplification_error': max_simplification_error,
          'mesh_dir': mesh_dir,
          'cdn_cache': cdn_cache,
          'dust_threshold': dust_threshold,
          'encoding': encoding,
        },
        'by': OPERATOR_CONTACT,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
      })
      vol.commit_provenance()

  return MeshTaskIterator(vol.mip_bounds(mip), shape)
def create_blackout_tasks(cloudpath: str, bounds: Bbox, mip: int = 0, shape: ShapeType = (2048, 2048, 64), value: int = 0, non_aligned_writes: bool = False):
  """Generate BlackoutTask partials that overwrite a region with ``value``.

  Args:
    cloudpath: layer to black out.
    bounds: region to black out, specified in mip 0 coordinates.
    mip: mip level to operate on.
    shape: per-task size in voxels at the target mip.
    value: the voxel value to write.
    non_aligned_writes: when False, bounds are expanded to chunk
      alignment before clamping to the volume.
  """
  vol = CloudVolume(cloudpath, mip=mip)
  shape = Vec(*shape)

  bounds = Bbox.create(bounds)
  bounds = vol.bbox_to_mip(bounds, mip=0, to_mip=mip)

  if not non_aligned_writes:
    bounds = bounds.expand_to_chunk_size(vol.chunk_size, vol.voxel_offset)

  bounds = Bbox.clamp(bounds, vol.mip_bounds(mip))

  class BlackoutTaskIterator(FinelyDividedTaskIterator):
    def task(self, shape, offset):
      # Fix: the original computed a clipped `bounded_shape` here but
      # never used it; the full task shape is what gets passed.
      return partial(
        igneous.tasks.BlackoutTask,
        cloudpath=cloudpath,
        mip=mip,
        shape=shape.clone(),
        offset=offset.clone(),
        value=value,
        non_aligned_writes=non_aligned_writes,
      )

    def on_finish(self):
      vol.provenance.processing.append({
        'method': {
          'task': 'BlackoutTask',
          'cloudpath': cloudpath,
          'mip': mip,
          'non_aligned_writes': non_aligned_writes,
          'value': value,
          'shape': shape.tolist(),
          'bounds': [
            bounds.minpt.tolist(),
            bounds.maxpt.tolist(),
          ],
        },
        'by': operator_contact(),
        'date': strftime('%Y-%m-%d %H:%M %Z'),
      })
      # Fix: the record was previously appended but never persisted;
      # every other task creator in this file commits after appending.
      vol.commit_provenance()

  return BlackoutTaskIterator(bounds, shape)
def create_blackout_tasks(cloudpath, bounds, mip=0, shape=(2048, 2048, 64), value=0, non_aligned_writes=False):
  """Generate BlackoutTasks that overwrite a region of a volume with ``value``.

  ``bounds`` is specified in mip 0 coordinates and is clamped to the
  volume at the working mip. Provenance is recorded at creation time.
  """
  vol = CloudVolume(cloudpath, mip=mip)
  shape = Vec(*shape)
  bounds = Bbox.create(bounds)
  bounds = vol.bbox_to_mip(bounds, mip=0, to_mip=mip)
  bounds = Bbox.clamp(bounds, vol.mip_bounds(mip))

  class BlackoutTaskIterator():
    def __len__(self):
      return num_tasks(bounds, shape)

    def __iter__(self):
      for startpt in xyzrange(bounds.minpt, bounds.maxpt, shape):
        # Fix: the original computed a clipped `bounded_shape` here but
        # never used it; the full task shape is what gets passed.
        yield igneous.tasks.BlackoutTask(
          cloudpath=cloudpath,
          mip=mip,
          shape=shape.clone(),
          offset=startpt.clone(),
          value=value,
          non_aligned_writes=non_aligned_writes,
        )

  vol.provenance.processing.append({
    'method': {
      'task': 'BlackoutTask',
      'cloudpath': cloudpath,
      'mip': mip,
      'non_aligned_writes': non_aligned_writes,
      'value': value,
      'shape': shape.tolist(),
      'bounds': [
        bounds.minpt.tolist(),
        bounds.maxpt.tolist(),
      ],
    },
    'by': OPERATOR_CONTACT,
    'date': strftime('%Y-%m-%d %H:%M %Z'),
  })
  # Fix: the record was previously appended but never persisted; the
  # structurally identical create_touch_tasks commits at this point.
  vol.commit_provenance()

  return BlackoutTaskIterator()
def create_touch_tasks(self, cloudpath, mip=0, shape=(2048, 2048, 64), bounds=None):
  """Generate TouchTasks that read every task-sized region of a volume.

  NOTE(review): ``self`` is accepted but never referenced in the body —
  presumably this was once a method; kept for call compatibility.
  """
  vol = CloudVolume(cloudpath, mip=mip)
  shape = Vec(*shape)

  if bounds is None:
    bounds = vol.bounds.clone()

  bounds = Bbox.create(bounds)
  bounds = vol.bbox_to_mip(bounds, mip=0, to_mip=mip)
  bounds = Bbox.clamp(bounds, vol.mip_bounds(mip))

  class TouchTaskIterator():
    def __len__(self):
      return num_tasks(bounds, shape)

    def __iter__(self):
      for start in xyzrange(bounds.minpt, bounds.maxpt, shape):
        # Clip the trailing tasks on each axis to the volume boundary.
        task_shape = min2(shape, vol.bounds.maxpt - start)
        yield igneous.tasks.TouchTask(
          cloudpath=cloudpath,
          shape=task_shape.clone(),
          offset=start.clone(),
          mip=mip,
        )

  vol.provenance.processing.append({
    'method': {
      'task': 'TouchTask',
      'mip': mip,
      'shape': shape.tolist(),
      'bounds': [
        bounds.minpt.tolist(),
        bounds.maxpt.tolist(),
      ],
    },
    'by': OPERATOR_CONTACT,
    'date': strftime('%Y-%m-%d %H:%M %Z'),
  })
  vol.commit_provenance()

  return TouchTaskIterator()
def create_deletion_tasks(layer_path, mip=0, num_mips=5, shape=None, bounds=None):
  """Generate DeleteTask partials that remove chunks across ``num_mips`` mips.

  When ``shape`` is omitted, it defaults to the underlying chunk size at
  ``mip`` with x and y scaled by 2**num_mips — presumably so each task
  covers an aligned region in every downsample being deleted (confirm
  against the downsampling scheme).
  """
  vol = CloudVolume(layer_path, max_redirects=0)

  if shape is None:
    shape = vol.mip_underlying(mip)[:3]
    shape.x *= 2 ** num_mips
    shape.y *= 2 ** num_mips
  else:
    shape = Vec(*shape)

  if not bounds:
    bounds = vol.mip_bounds(mip).clone()

  class DeleteTaskIterator(FinelyDividedTaskIterator):
    def task(self, shape, offset):
      # Clip the trailing tasks on each axis to the bounds.
      clipped_shape = min2(shape, bounds.maxpt - offset)
      return partial(
        DeleteTask,
        layer_path=layer_path,
        shape=clipped_shape.clone(),
        offset=offset.clone(),
        mip=mip,
        num_mips=num_mips,
      )

    def on_finish(self):
      # Re-open with max_redirects=0, matching the volume opened above.
      vol = CloudVolume(layer_path, max_redirects=0)
      vol.provenance.processing.append({
        'method': {
          'task': 'DeleteTask',
          'mip': mip,
          'num_mips': num_mips,
          'shape': shape.tolist(),
        },
        'by': operator_contact(),
        'date': strftime('%Y-%m-%d %H:%M %Z'),
      })
      vol.commit_provenance()

  return DeleteTaskIterator(bounds, shape)
def create_touch_tasks( self, cloudpath, mip=0, shape=(2048, 2048, 64), bounds=None ):
  """Generate TouchTasks that read every task-sized region of a volume.

  NOTE(review): ``self`` is accepted but never referenced in the body —
  presumably this was once a method; kept for call compatibility.
  """
  vol = CloudVolume(cloudpath, mip=mip)
  shape = Vec(*shape)

  bounds = vol.bounds.clone() if bounds is None else bounds
  bounds = Bbox.create(bounds)
  bounds = vol.bbox_to_mip(bounds, mip=0, to_mip=mip)
  bounds = Bbox.clamp(bounds, vol.mip_bounds(mip))

  class TouchTaskIterator(FinelyDividedTaskIterator):
    def task(self, shape, offset):
      # Clip the trailing tasks on each axis to the volume boundary.
      clipped_shape = min2(shape, vol.bounds.maxpt - offset)
      return igneous.tasks.TouchTask(
        cloudpath=cloudpath,
        shape=clipped_shape.clone(),
        offset=offset.clone(),
        mip=mip,
      )

    def on_finish(self):
      vol.provenance.processing.append({
        'method': {
          'task': 'TouchTask',
          'mip': mip,
          'shape': shape.tolist(),
          'bounds': [
            bounds.minpt.tolist(),
            bounds.maxpt.tolist(),
          ],
        },
        'by': OPERATOR_CONTACT,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
      })
      vol.commit_provenance()

  return TouchTaskIterator(bounds, shape)
def create_transfer_tasks(src_layer_path, dest_layer_path, chunk_size=None, shape=Vec(2048, 2048, 64), fill_missing=False, translate=(0, 0, 0), bounds=None, mip=0, preserve_chunk_size=True, encoding=None):
  """
  Transfer data from one data layer to another. It's possible
  to transfer from a lower resolution mip level within a given
  bounding box. The bounding box should be specified in terms of
  the highest resolution.
  """
  shape = Vec(*shape)
  vol = CloudVolume(src_layer_path, mip=mip)
  translate = Vec(*translate) // vol.downsample_ratio

  if not chunk_size:
    chunk_size = vol.info['scales'][mip]['chunk_sizes'][0]
  chunk_size = Vec(*chunk_size)

  try:
    dvol = CloudVolume(dest_layer_path, mip=mip)
  except Exception:  # no info file
    # Destination doesn't exist yet; seed it with a copy of the source info.
    info = copy.deepcopy(vol.info)
    dvol = CloudVolume(dest_layer_path, info=info)
    dvol.commit_info()

  if encoding is not None:
    dvol.info['scales'][mip]['encoding'] = encoding
  # Truncate the destination scales to the transferred mip and force
  # the requested chunk size.
  dvol.info['scales'] = dvol.info['scales'][:mip + 1]
  dvol.info['scales'][mip]['chunk_sizes'] = [chunk_size.tolist()]
  dvol.commit_info()

  create_downsample_scales(
    dest_layer_path, mip=mip, ds_shape=shape,
    preserve_chunk_size=preserve_chunk_size, encoding=encoding
  )

  if bounds is None:
    bounds = vol.bounds.clone()
  else:
    bounds = vol.bbox_to_mip(bounds, mip=0, to_mip=mip)
    bounds = Bbox.clamp(bounds, dvol.bounds)

  dvol_bounds = dvol.mip_bounds(mip).clone()

  class TransferTaskIterator(object):
    def __len__(self):
      return int(reduce(operator.mul, np.ceil(bounds.size3() / shape)))

    def __iter__(self):
      for start in xyzrange(bounds.minpt, bounds.maxpt, shape):
        # Clip the trailing tasks to the destination boundary.
        task_shape = min2(shape.clone(), dvol_bounds.maxpt - start)
        yield TransferTask(
          src_path=src_layer_path,
          dest_path=dest_layer_path,
          shape=task_shape,
          offset=start.clone(),
          fill_missing=fill_missing,
          translate=translate,
          mip=mip,
        )

  job_details = {
    'method': {
      'task': 'TransferTask',
      'src': src_layer_path,
      'dest': dest_layer_path,
      'shape': list(map(int, shape)),
      'fill_missing': fill_missing,
      'translate': list(map(int, translate)),
      'bounds': [bounds.minpt.tolist(), bounds.maxpt.tolist()],
      'mip': mip,
    },
    'by': OPERATOR_CONTACT,
    'date': strftime('%Y-%m-%d %H:%M %Z'),
  }

  # Record the same job details on both ends of the transfer.
  dvol = CloudVolume(dest_layer_path)
  dvol.provenance.sources = [src_layer_path]
  dvol.provenance.processing.append(job_details)
  dvol.commit_provenance()

  # NOTE(review): 'boss' sources are skipped — presumably their
  # provenance is not writable; confirm before changing.
  if vol.path.protocol != 'boss':
    vol.provenance.processing.append(job_details)
    vol.commit_provenance()

  return TransferTaskIterator()
def create_meshing_tasks( layer_path, mip, shape=(448, 448, 448), simplification=True, max_simplification_error=40, mesh_dir=None, cdn_cache=False, dust_threshold=None, object_ids=None, progress=False, fill_missing=False, encoding='precomputed', spatial_index=True, sharded=False, compress='gzip' ):
  """Generate MeshTasks tiling the volume at the requested mip.

  Registers the mesh directory in the layer info file when absent,
  writes the mesh metadata file (optionally with a spatial index
  configuration), and records provenance once the iterator finishes.
  """
  shape = Vec(*shape)
  vol = CloudVolume(layer_path, mip)

  if mesh_dir is None:
    mesh_dir = 'mesh_mip_{}_err_{}'.format(mip, max_simplification_error)

  # Advertise the mesh directory in the info file exactly once.
  if 'mesh' not in vol.info:
    vol.info['mesh'] = mesh_dir
    vol.commit_info()

  # Merge our settings into any existing mesh metadata file.
  cf = CloudFiles(layer_path)
  info_filename = '{}/info'.format(mesh_dir)
  mesh_info = cf.get_json(info_filename) or {}
  mesh_info.update({
    '@type': 'neuroglancer_legacy_mesh',
    'mip': int(vol.mip),
    'chunk_size': shape.tolist(),
  })
  if spatial_index:
    mesh_info['spatial_index'] = {
      'resolution': vol.resolution.tolist(),
      'chunk_size': (shape * vol.resolution).tolist(),
    }
  cf.put_json(info_filename, mesh_info)

  # Per-task invariants, hoisted out of the task factory.
  simplification_factor = 100 if simplification else 0
  cache_control = '' if cdn_cache else 'no-cache'

  class MeshTaskIterator(FinelyDividedTaskIterator):
    def task(self, shape, offset):
      return MeshTask(
        shape=shape.clone(),
        offset=offset.clone(),
        layer_path=layer_path,
        mip=vol.mip,
        simplification_factor=simplification_factor,
        max_simplification_error=max_simplification_error,
        mesh_dir=mesh_dir,
        cache_control=cache_control,
        dust_threshold=dust_threshold,
        progress=progress,
        object_ids=object_ids,
        fill_missing=fill_missing,
        encoding=encoding,
        spatial_index=spatial_index,
        sharded=sharded,
        compress=compress,
      )

    def on_finish(self):
      vol.provenance.processing.append({
        'method': {
          'task': 'MeshTask',
          'layer_path': layer_path,
          'mip': vol.mip,
          'shape': shape.tolist(),
          'simplification': simplification,
          'max_simplification_error': max_simplification_error,
          'mesh_dir': mesh_dir,
          'fill_missing': fill_missing,
          'cdn_cache': cdn_cache,
          'dust_threshold': dust_threshold,
          'encoding': encoding,
          'object_ids': object_ids,
          'spatial_index': spatial_index,
          'sharded': sharded,
          'compress': compress,
        },
        'by': operator_contact(),
        'date': strftime('%Y-%m-%d %H:%M %Z'),
      })
      vol.commit_provenance()

  return MeshTaskIterator(vol.mip_bounds(mip), shape)