def create_downsample_scales(layer_path, mip, ds_shape, axis='z', preserve_chunk_size=False, chunk_size=None, encoding=None):
    """Add downsample scales to a layer's info file and commit it.

    Computes the plane-downsampling factors reachable from `ds_shape`,
    registers each as a new scale on the volume, then normalizes the
    `chunk_sizes` entry of every newly added scale.

    Args:
      layer_path: cloud path of the layer to modify.
      mip: source mip level the downsamples are derived from.
      ds_shape: shape of the downsample task; clamped to the volume size.
      axis: axis preserved during planar downsampling ('x', 'y', or 'z').
      preserve_chunk_size: if True (and no explicit chunk_size), new scales
        reuse the source mip's chunk size instead of mip+1's.
      chunk_size: explicit chunk size for the new scales; overrides the
        underlying-chunk heuristic below.
      encoding: encoding for the new scales; defaults to the source mip's
        encoding (passed through to add_scale; note encoding is resolved
        *after* the add_scale loop, so the default applies inside add_scale —
        NOTE(review): confirm add_scale treats encoding=None as "inherit").

    Returns:
      The result of vol.commit_info().
    """
    vol = CloudVolume(layer_path, mip)
    shape = min2(vol.volume_size, ds_shape)

    # sometimes we downsample a base layer of 512x512
    # into underlying chunks of 64x64 which permits more scales
    underlying_mip = (mip + 1) if (mip + 1) in vol.available_mips else mip
    underlying_shape = vol.mip_underlying(underlying_mip).astype(np.float32)

    # An explicit chunk_size overrides the underlying-chunk heuristic.
    if chunk_size:
        underlying_shape = Vec(*chunk_size).astype(np.float32)

    # The preserved axis places no limit on how far we can downsample.
    toidx = {'x': 0, 'y': 1, 'z': 2}
    preserved_idx = toidx[axis]
    underlying_shape[preserved_idx] = float('inf')

    scales = downsample_scales.compute_plane_downsampling_scales(
        size=shape,
        preserve_axis=axis,
        max_downsampled_size=int(min(*underlying_shape)),
    )
    scales = scales[1:]  # omit (1,1,1)
    # Convert per-step factors into absolute resolutions relative to mip 0.
    scales = [list(map(int, vol.downsample_ratio * Vec(*factor3))) for factor3 in scales]

    if len(scales) == 0:
        print("WARNING: No scales generated.")

    for scale in scales:
        vol.add_scale(scale, encoding=encoding, chunk_size=chunk_size)

    # Resolve the chunk size that every newly added scale should carry.
    if chunk_size is None:
        if preserve_chunk_size or len(scales) == 0:
            chunk_size = vol.scales[mip]['chunk_sizes']
        else:
            chunk_size = vol.scales[mip + 1]['chunk_sizes']
    else:
        # info files store chunk_sizes as a list of sizes.
        chunk_size = [chunk_size]

    if encoding is None:
        encoding = vol.scales[mip]['encoding']

    # Overwrite chunk_sizes on just the scales added above (mip+1 .. mip+len).
    for i in range(mip + 1, mip + len(scales) + 1):
        vol.scales[i]['chunk_sizes'] = chunk_size

    return vol.commit_info()
def create_deletion_tasks(layer_path, mip=0, num_mips=5, shape=None, bounds=None):
    """Create an iterator of DeleteTasks covering `bounds` of a layer.

    Args:
      layer_path: cloud path of the layer to delete from.
      mip: the lowest mip level to delete.
      num_mips: how many mip levels above `mip` each task covers.
      shape: task shape; defaults to the underlying chunk size at `mip`
        scaled up by 2**num_mips in x and y so higher mips align to chunks.
      bounds: region to delete; defaults to the full bounds at `mip`.

    Returns:
      A FinelyDividedTaskIterator subclass instance that yields
      partial(DeleteTask, ...) callables and records provenance on finish.
    """
    vol = CloudVolume(layer_path, max_redirects=0)

    if shape is None:
        shape = vol.mip_underlying(mip)[:3]
        # Scale the task shape so deletions at mips mip..mip+num_mips-1
        # stay chunk-aligned.
        shape.x *= 2**num_mips
        shape.y *= 2**num_mips
    else:
        shape = Vec(*shape)

    if not bounds:
        bounds = vol.mip_bounds(mip).clone()

    class DeleteTaskIterator(FinelyDividedTaskIterator):
        def task(self, shape, offset):
            # Clamp the final tasks in each row/column to the bounds edge.
            bounded_shape = min2(shape, bounds.maxpt - offset)
            return partial(DeleteTask,
                layer_path=layer_path,
                shape=bounded_shape.clone(),
                offset=offset.clone(),
                mip=mip,
                num_mips=num_mips,
            )

        def on_finish(self):
            # Record what was deleted in the layer's provenance file.
            vol = CloudVolume(layer_path, max_redirects=0)
            vol.provenance.processing.append({
                'method': {
                    'task': 'DeleteTask',
                    'mip': mip,
                    'num_mips': num_mips,
                    'shape': shape.tolist(),
                },
                'by': operator_contact(),
                'date': strftime('%Y-%m-%d %H:%M %Z'),
            })
            vol.commit_provenance()

    return DeleteTaskIterator(bounds, shape)
def create_deletion_tasks(layer_path, mip=0, num_mips=5):
    """Create an iterable of DeleteTasks covering the full bounds of a layer.

    Args:
      layer_path: cloud path of the layer to delete from.
      mip: the lowest mip level to delete.
      num_mips: how many mip levels above `mip` each task covers.

    Returns:
      An iterable object yielding DeleteTask instances; records provenance
      as a side effect of calling this function.
    """
    vol = CloudVolume(layer_path)
    shape = vol.mip_underlying(mip)[:3]
    # Scale the task shape so deletions at mips mip..mip+num_mips-1 stay
    # chunk-aligned.
    shape.x *= 2**num_mips
    shape.y *= 2**num_mips

    class DeleteTaskIterator():
        def __len__(self):
            # BUG FIX: reduce over np.ceil(...) produces a numpy float, and
            # __len__ must return a Python int — len() would otherwise raise
            # TypeError. Cast the product explicitly.
            return int(reduce(operator.mul, np.ceil(vol.bounds.size3() / shape)))

        def __iter__(self):
            for startpt in xyzrange(vol.bounds.minpt, vol.bounds.maxpt, shape):
                # Clamp the final tasks in each row/column to the bounds edge.
                bounded_shape = min2(shape, vol.bounds.maxpt - startpt)
                yield DeleteTask(
                    layer_path=layer_path,
                    shape=bounded_shape.clone(),
                    offset=startpt.clone(),
                    mip=mip,
                    num_mips=num_mips,
                )

    # Record what was deleted in the layer's provenance file.
    vol = CloudVolume(layer_path)
    vol.provenance.processing.append({
        'method': {
            'task': 'DeleteTask',
            'mip': mip,
            'num_mips': num_mips,
            'shape': shape.tolist(),
        },
        'by': OPERATOR_CONTACT,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    vol.commit_provenance()

    return DeleteTaskIterator()
def create_downsample_scales(layer_path, mip, ds_shape, axis='z', preserve_chunk_size=False):
    """Add downsample scales to a layer's info file and commit it.

    Computes the plane-downsampling factors reachable from `ds_shape` and
    registers each as a new scale on the volume.

    Args:
      layer_path: cloud path of the layer to modify.
      mip: source mip level the downsamples are derived from.
      ds_shape: shape of the downsample task; clamped to the volume size.
      axis: axis preserved during planar downsampling ('x', 'y', or 'z').
      preserve_chunk_size: if True, newly added scales reuse the source
        mip's chunk size instead of the default assigned by add_scale.

    Returns:
      The result of vol.commit_info().
    """
    vol = CloudVolume(layer_path, mip)
    shape = min2(vol.volume_size, ds_shape)

    # sometimes we downsample a base layer of 512x512
    # into underlying chunks of 64x64 which permits more scales
    underlying_mip = (mip + 1) if (mip + 1) in vol.available_mips else mip
    underlying_shape = vol.mip_underlying(underlying_mip).astype(np.float32)

    # The preserved axis places no limit on how far we can downsample.
    toidx = {'x': 0, 'y': 1, 'z': 2}
    preserved_idx = toidx[axis]
    underlying_shape[preserved_idx] = float('inf')

    scales = downsample_scales.compute_plane_downsampling_scales(
        size=shape,
        preserve_axis=axis,
        max_downsampled_size=int(min(*underlying_shape)),
    )
    scales = scales[1:]  # omit (1,1,1)
    # Convert per-step factors into absolute resolutions relative to mip 0.
    scales = [list(map(int, vol.downsample_ratio * Vec(*factor3))) for factor3 in scales]

    for scale in scales:
        vol.add_scale(scale)

    if preserve_chunk_size:
        # BUG FIX: previously this copied vol.scales[0]'s chunk size onto
        # every scale starting at index 1, which clobbered pre-existing mips
        # below `mip` whenever mip > 0. Only the scales above the source mip
        # should inherit the source mip's chunk size. Behavior is unchanged
        # for mip == 0.
        for i in range(mip + 1, len(vol.scales)):
            vol.scales[i]['chunk_sizes'] = vol.scales[mip]['chunk_sizes']

    return vol.commit_info()