def create_meshing_tasks(task_queue, layer_path, mip, shape=Vec(512, 512, 512)):
    shape = Vec(*shape)
    max_simplification_error = 40

    vol = CloudVolume(layer_path, mip)
    if 'mesh' not in vol.info:
        vol.info['mesh'] = 'mesh_mip_{}_err_{}'.format(mip, max_simplification_error)
        vol.commit_info()

    for startpt in tqdm(xyzrange(vol.bounds.minpt, vol.bounds.maxpt, shape), desc="Inserting Mesh Tasks"):
        task = MeshTask(
            layer_path=layer_path,
            mip=vol.mip,
            shape=shape.clone(),
            offset=startpt.clone(),
            max_simplification_error=max_simplification_error,
        )
        task_queue.insert(task)
    task_queue.wait('Uploading MeshTasks')

    vol.provenance.processing.append({
        'method': {
            'task': 'MeshTask',
            'layer_path': layer_path,
            'mip': vol.mip,
            'shape': shape.tolist(),
        },
        'by': USER_EMAIL,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    vol.commit_provenance()
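# Example usage (a minimal sketch, not from this file; assumes a queue object
# exposing insert()/wait(), such as the MockTaskQueue used in the tests below,
# and an existing segmentation layer at the illustrative path):
#
#   tq = MockTaskQueue()
#   create_meshing_tasks(tq, 'file:///tmp/dataset/segmentation', mip=0)
#
# Each MeshTask covers one (512, 512, 512) block of the volume at the chosen mip.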
def create_watershed_remap_tasks(task_queue, map_path, src_layer_path, dest_layer_path, shape=Vec(2048, 2048, 64)):
    shape = Vec(*shape)
    vol = CloudVolume(src_layer_path)

    create_downsample_scales(dest_layer_path, mip=0, ds_shape=shape)

    for startpt in tqdm(xyzrange(vol.bounds.minpt, vol.bounds.maxpt, shape), desc="Inserting Remap Tasks"):
        task = WatershedRemapTask(
            map_path=map_path,
            src_path=src_layer_path,
            dest_path=dest_layer_path,
            shape=shape.clone(),
            offset=startpt.clone(),
        )
        task_queue.insert(task)
    task_queue.wait('Uploading Remap Tasks')

    dvol = CloudVolume(dest_layer_path)
    dvol.provenance.processing.append({
        'method': {
            'task': 'WatershedRemapTask',
            'src': src_layer_path,
            'dest': dest_layer_path,
            'remap_file': map_path,
            'shape': list(shape),
        },
        'by': USER_EMAIL,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    dvol.commit_provenance()
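# Example usage (hedged sketch; the paths and the remap file below are
# illustrative assumptions, not taken from this file):
#
#   tq = MockTaskQueue()
#   create_watershed_remap_tasks(
#       tq, 'gs://bucket/remap.npy',
#       'gs://bucket/watershed', 'gs://bucket/segmentation',
#   )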
def create_quantized_affinity_tasks(task_queue, src_layer, dest_layer, shape, fill_missing=False):
    shape = Vec(*shape)

    info = create_quantized_affinity_info(src_layer, dest_layer, shape)
    destvol = CloudVolume(dest_layer, info=info)
    destvol.commit_info()

    create_downsample_scales(dest_layer, mip=0, ds_shape=shape)

    for startpt in tqdm(xyzrange(destvol.bounds.minpt, destvol.bounds.maxpt, shape), desc="Inserting QuantizeAffinities Tasks"):
        task = QuantizeAffinitiesTask(
            source_layer_path=src_layer,
            dest_layer_path=dest_layer,
            shape=list(shape.clone()),
            offset=list(startpt.clone()),
            fill_missing=fill_missing,
        )
        task_queue.insert(task)
    task_queue.wait('Uploading')
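# Example usage (sketch; assumes create_quantized_affinity_info builds the
# destination info dict -- the paths are illustrative):
#
#   tq = MockTaskQueue()
#   create_quantized_affinity_tasks(
#       tq, 'gs://bucket/affinities', 'gs://bucket/quantized',
#       shape=(2048, 2048, 64), fill_missing=True,
#   )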
def __iter__(self):
    for startpt in xyzrange(bounds.minpt, bounds.maxpt, shape):
        # NOTE: bounded_shape is computed but unused; the full shape is passed
        # to the task along with the non_aligned_writes flag.
        bounded_shape = min2(shape, vol.bounds.maxpt - startpt)
        yield igneous.tasks.BlackoutTask(
            cloudpath=cloudpath,
            mip=mip,
            shape=shape.clone(),
            offset=startpt.clone(),
            value=value,
            non_aligned_writes=non_aligned_writes,
        )

    vol.provenance.processing.append({
        'method': {
            'task': 'BlackoutTask',
            'cloudpath': cloudpath,
            'mip': mip,
            'non_aligned_writes': non_aligned_writes,
            'value': value,
            'shape': shape.tolist(),
            'bounds': [
                bounds.minpt.tolist(),
                bounds.maxpt.tolist(),
            ],
        },
        'by': OPERATOR_CONTACT,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    vol.commit_provenance()
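# The __iter__ methods in this section share one pattern: they close over
# variables (bounds, shape, vol, ...) bound in an enclosing create_*_tasks
# function, lazily yield one task per grid block, and only append provenance
# after the loop finishes, i.e. once a consumer has drained the generator.
# A hedged sketch of how such an iterator is consumed (the class name is
# hypothetical; insert_all is the batch method used in the tests below):
#
#   iterator = BlackoutTaskIterator()   # hypothetical enclosing class
#   tq = MockTaskQueue()
#   tq.insert_all(iterator)             # runs tasks, then provenance commits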
def __iter__(self):
    for x, y, z in xyzrange(grid_size):
        # Output blocks are laid out (z, y, x) relative to the (x, y, z) grid
        # loop; the zipped index is named `i` to avoid shadowing the loop's x.
        output_bounds = Bbox.from_slices(tuple(
            slice(s + i * b, s + i * b + b)
            for (s, i, b) in zip(output_block_start, (z, y, x), output_block_size)
        ))
        yield MaskAffinitymapTask(
            aff_input_layer_path=aff_input_layer_path,
            aff_output_layer_path=aff_output_layer_path,
            aff_mip=aff_mip,
            mask_layer_path=mask_layer_path,
            mask_mip=mask_mip,
            output_bounds=output_bounds,
        )

    vol = CloudVolume(aff_output_layer_path, mip=aff_mip)
    vol.provenance.processing.append({
        'method': {
            'task': 'MaskAffinitymapTask',
            'aff_input_layer_path': aff_input_layer_path,
            'aff_output_layer_path': aff_output_layer_path,
            'aff_mip': aff_mip,
            'mask_layer_path': mask_layer_path,
            'mask_mip': mask_mip,
            'output_block_start': output_block_start,
            'output_block_size': output_block_size,
            'grid_size': grid_size,
        },
        'by': OPERATOR_CONTACT,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    vol.commit_provenance()
def __iter__(self):
    for startpt in xyzrange(vol.bounds.minpt, vol.bounds.maxpt, shape):
        bounded_shape = min2(shape, vol.bounds.maxpt - startpt)
        yield DeleteTask(
            layer_path=layer_path,
            shape=bounded_shape.clone(),
            offset=startpt.clone(),
            mip=mip,
            num_mips=num_mips,
        )

    vol = CloudVolume(layer_path)
    vol.provenance.processing.append({
        'method': {
            'task': 'DeleteTask',
            'mip': mip,
            'num_mips': num_mips,
            'shape': shape.tolist(),
        },
        'by': OPERATOR_CONTACT,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    vol.commit_provenance()
def __iter__(self):
    for startpt in xyzrange(bounds.minpt, bounds.maxpt, shape):
        bounded_shape = min2(shape, vol.bounds.maxpt - startpt)
        yield igneous.tasks.TouchTask(
            cloudpath=cloudpath,
            shape=bounded_shape.clone(),
            offset=startpt.clone(),
            mip=mip,
        )

    vol.provenance.processing.append({
        'method': {
            'task': 'TouchTask',
            'mip': mip,
            'shape': shape.tolist(),
            'bounds': [
                bounds.minpt.tolist(),
                bounds.maxpt.tolist(),
            ],
        },
        'by': OPERATOR_CONTACT,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    vol.commit_provenance()
def __iter__(self):
    for startpt in xyzrange(vol.bounds.minpt, vol.bounds.maxpt, shape):
        yield WatershedRemapTask(
            map_path=map_path,
            src_path=src_layer_path,
            dest_path=dest_layer_path,
            shape=shape.clone(),
            offset=startpt.clone(),
        )

    dvol = CloudVolume(dest_layer_path)
    dvol.provenance.processing.append({
        'method': {
            'task': 'WatershedRemapTask',
            'src': src_layer_path,
            'dest': dest_layer_path,
            'remap_file': map_path,
            'shape': list(shape),
        },
        'by': OPERATOR_CONTACT,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    dvol.commit_provenance()
def __iter__(self):
    for startpt in tqdm(xyzrange(vol.bounds.minpt, vol.bounds.maxpt, shape), desc="Inserting Mesh Tasks"):
        yield MeshTask(
            shape.clone(),
            startpt.clone(),
            layer_path,
            mip=vol.mip,
            max_simplification_error=max_simplification_error,
            mesh_dir=mesh_dir,
            cache_control=('' if cdn_cache else 'no-cache'),
        )

    vol.provenance.processing.append({
        'method': {
            'task': 'MeshTask',
            'layer_path': layer_path,
            'mip': vol.mip,
            'shape': shape.tolist(),
            'max_simplification_error': max_simplification_error,
            'mesh_dir': mesh_dir,
            'cdn_cache': cdn_cache,
        },
        'by': OPERATOR_CONTACT,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    vol.commit_provenance()
def __iter__(self):
    for startpt in xyzrange(destvol.bounds.minpt, destvol.bounds.maxpt, shape):
        yield QuantizeTask(
            source_layer_path=src_layer,
            dest_layer_path=dest_layer,
            shape=shape.tolist(),
            offset=startpt.tolist(),
            fill_missing=fill_missing,
            mip=mip,
        )

    destvol.provenance.sources = [src_layer]
    destvol.provenance.processing.append({
        'method': {
            'task': 'QuantizeTask',
            'source_layer_path': src_layer,
            'dest_layer_path': dest_layer,
            'shape': shape.tolist(),
            'fill_missing': fill_missing,
            'mip': mip,
        },
        'by': OPERATOR_CONTACT,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    destvol.commit_provenance()
def __iter__(self):
    for startpt in xyzrange(img_offset, img_end, meta.chunk_size(mip)):
        startpt = startpt.clone()
        endpt = min2(startpt + meta.chunk_size(mip), shape)
        spt = (startpt + bounds.minpt).astype(int)
        ept = (endpt + bounds.minpt).astype(int)
        yield (startpt, endpt, spt, ept)
def test_luminance_levels_task():
    directory = '/tmp/removeme/luminance_levels/'
    layer_path = 'file://' + directory
    delete_layer(layer_path)

    storage, imgd = create_layer(
        size=(256, 256, 128, 1), offset=(0, 0, 0),
        layer_type="image", layer_name='luminance_levels',
    )

    tq = MockTaskQueue()
    tasks = tc.create_luminance_levels_tasks(
        layer_path=layer_path, coverage_factor=0.01,
        shape=None, offset=(0, 0, 0), mip=0,
    )
    tq.insert_all(tasks)

    gt = [0] * 256
    for x, y, z in lib.xyzrange((0, 0, 0), list(imgd.shape[:2]) + [1]):
        gt[imgd[x, y, 0, 0]] += 1

    with open('/tmp/removeme/luminance_levels/levels/0/0', 'rt') as f:
        levels = f.read()
    levels = json.loads(levels)

    assert levels['coverage_ratio'] == 1.0
    assert levels['levels'] == gt
def generate_chunks(meta, img, offset, mip):
    shape = Vec(*img.shape)[:3]
    offset = Vec(*offset)[:3]

    bounds = Bbox(offset, shape + offset)

    alignment_check = bounds.round_to_chunk_size(meta.chunk_size(mip), meta.voxel_offset(mip))

    if not np.all(alignment_check.minpt == bounds.minpt):
        raise AlignmentError("""
            Only chunk aligned writes are supported by this function.

            Got: {}
            Volume Offset: {}
            Nearest Aligned: {}
        """.format(bounds, meta.voxel_offset(mip), alignment_check))

    bounds = Bbox.clamp(bounds, meta.bounds(mip))

    img_offset = bounds.minpt - offset
    img_end = Vec.clamp(bounds.size3() + img_offset, Vec(0, 0, 0), shape)

    for startpt in xyzrange(img_offset, img_end, meta.chunk_size(mip)):
        startpt = startpt.clone()
        endpt = min2(startpt + meta.chunk_size(mip), shape)
        spt = (startpt + bounds.minpt).astype(int)
        ept = (endpt + bounds.minpt).astype(int)
        yield (startpt, endpt, spt, ept)
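# Worked example (illustrative numbers, not from this file): writing a
# (100, 100, 10) image at offset (0, 0, 0) into a layer whose chunk size is
# (64, 64, 64) yields pieces such as
#
#   (startpt, endpt, spt, ept) = (Vec(0, 0, 0), Vec(64, 64, 10), Vec(0, 0, 0), Vec(64, 64, 10))
#
# where (startpt, endpt) index into `img` and (spt, ept) are the absolute
# voxel coordinates used to address the destination volume. A write whose
# minpt is not chunk aligned raises AlignmentError instead.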
def create_transfer_tasks(task_queue, src_layer_path, dest_layer_path,
                          chunk_size=None, shape=Vec(2048, 2048, 64),
                          fill_missing=False, translate=(0, 0, 0)):
    shape = Vec(*shape)
    translate = Vec(*translate)
    vol = CloudVolume(src_layer_path)

    if not chunk_size:
        chunk_size = vol.info['scales'][0]['chunk_sizes'][0]
    chunk_size = Vec(*chunk_size)

    try:
        dvol = CloudVolume(dest_layer_path)
    except Exception:  # no info file
        info = copy.deepcopy(vol.info)
        dvol = CloudVolume(dest_layer_path, info=info)

    dvol.info['scales'] = dvol.info['scales'][:1]
    dvol.info['scales'][0]['chunk_sizes'] = [chunk_size.tolist()]
    dvol.commit_info()

    create_downsample_scales(dest_layer_path, mip=0, ds_shape=shape, preserve_chunk_size=True)

    bounds = vol.bounds.clone()
    for startpt in tqdm(xyzrange(bounds.minpt, bounds.maxpt, shape), desc="Inserting Transfer Tasks"):
        task = TransferTask(
            src_path=src_layer_path,
            dest_path=dest_layer_path,
            shape=shape.clone(),
            offset=startpt.clone(),
            fill_missing=fill_missing,
            translate=translate,
        )
        task_queue.insert(task)
    task_queue.wait('Uploading Transfer Tasks')

    dvol = CloudVolume(dest_layer_path)
    dvol.provenance.processing.append({
        'method': {
            'task': 'TransferTask',
            'src': src_layer_path,
            'dest': dest_layer_path,
            'shape': list(map(int, shape)),
            'fill_missing': fill_missing,
            'translate': list(map(int, translate)),
        },
        'by': USER_EMAIL,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    dvol.commit_provenance()
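# Example usage (a sketch; paths are illustrative and MockTaskQueue, defined
# in the tests below, stands in for a real queue): transfer a layer while
# rechunking it to 128^3 voxels.
#
#   tq = MockTaskQueue()
#   create_transfer_tasks(
#       tq, 'gs://bucket/src/image', 'gs://bucket/dest/image',
#       chunk_size=(128, 128, 128), fill_missing=True,
#   )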
def create_downsampling_tasks(task_queue, layer_path, mip=-1, fill_missing=False,
                              axis='z', num_mips=5, preserve_chunk_size=True):
    def ds_shape(mip):
        shape = vol.mip_underlying(mip)[:3]
        shape.x *= 2 ** num_mips
        shape.y *= 2 ** num_mips
        return shape

    vol = CloudVolume(layer_path, mip=mip)
    shape = ds_shape(vol.mip)
    vol = create_downsample_scales(layer_path, mip, shape, preserve_chunk_size=preserve_chunk_size)

    if not preserve_chunk_size:
        shape = ds_shape(vol.mip + 1)

    bounds = vol.bounds.clone()
    for startpt in tqdm(xyzrange(bounds.minpt, bounds.maxpt, shape), desc="Inserting Downsample Tasks"):
        task = DownsampleTask(
            layer_path=layer_path,
            mip=vol.mip,
            shape=shape.clone(),
            offset=startpt.clone(),
            axis=axis,
            fill_missing=fill_missing,
        )
        task_queue.insert(task)
    task_queue.wait('Uploading')

    vol.provenance.processing.append({
        'method': {
            'task': 'DownsampleTask',
            'mip': mip,
            'shape': shape.tolist(),
            'axis': axis,
            'method': ('downsample_with_averaging' if vol.layer_type == 'image'
                       else 'downsample_segmentation'),
        },
        'by': USER_EMAIL,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    vol.commit_provenance()
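# Example usage (sketch, illustrative path): build five mip levels above the
# base resolution of an image layer.
#
#   tq = MockTaskQueue()
#   create_downsampling_tasks(tq, 'file:///tmp/dataset/image', mip=0, num_mips=5)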
def create_deletion_tasks(task_queue, layer_path):
    vol = CloudVolume(layer_path)
    shape = vol.underlying * 4

    for startpt in tqdm(xyzrange(vol.bounds.minpt, vol.bounds.maxpt, shape), desc="Inserting Deletion Tasks"):
        task = DeleteTask(
            layer_path=layer_path,
            shape=shape.clone(),
            offset=startpt.clone(),
        )
        task_queue.insert(task)
    task_queue.wait('Uploading DeleteTasks')
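# Example usage (sketch). DeleteTask removes data permanently, so the path
# here is deliberately a throwaway local one:
#
#   tq = MockTaskQueue()
#   create_deletion_tasks(tq, 'file:///tmp/removeme/layer')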
def upload_build_chunks(storage, volume, offset=(0, 0, 0), build_chunk_size=(1024, 1024, 128)):
    offset = Vec(*offset)
    shape = Vec(*volume.shape[:3])
    build_chunk_size = Vec(*build_chunk_size)

    for spt in xyzrange((0, 0, 0), shape, build_chunk_size):
        ept = min2(spt + build_chunk_size, shape)
        bbox = Bbox(spt, ept)
        chunk = volume[bbox.to_slices()]
        bbox += offset
        filename = 'build/{}'.format(bbox.to_filename())
        storage.put_file(filename, chunks.encode_npz(chunk))
    storage.wait()
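# Example usage (a sketch; `storage` is assumed to be a cloud-volume style
# object exposing put_file()/wait(), and the array below is illustrative):
#
#   volume = np.zeros((2048, 2048, 256), dtype=np.uint8)
#   upload_build_chunks(storage, volume)
#   # -> writes npz-encoded files named by bounding box, e.g. build/0-1024_0-1024_0-128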
def __iter__(self):
    for x, y, z in xyzrange(grid_size):
        # As above, output blocks are laid out (z, y, x); the zipped index is
        # named `i` to avoid shadowing the loop's x.
        output_offset = tuple(
            s + i * b
            for (s, i, b) in zip(output_block_start, (z, y, x), output_block_size)
        )
        yield InferenceTask(
            image_layer_path=image_layer_path,
            convnet_path=convnet_path,
            mask_layer_path=mask_layer_path,
            output_layer_path=output_layer_path,
            output_offset=output_offset,
            output_shape=output_block_size,
            patch_size=patch_size,
            patch_overlap=patch_overlap,
            cropping_margin_size=cropping_margin_size,
            output_key=output_key,
            num_output_channels=num_output_channels,
            image_mip=image_mip,
            output_mip=output_mip,
            mask_mip=mask_mip,
        )

    vol = CloudVolume(output_layer_path, mip=output_mip)
    vol.provenance.processing.append({
        'method': {
            'task': 'InferenceTask',
            'image_layer_path': image_layer_path,
            'convnet_path': convnet_path,
            'mask_layer_path': mask_layer_path,
            'output_layer_path': output_layer_path,
            'output_offset': output_offset,
            'output_shape': output_block_size,
            'patch_size': patch_size,
            'patch_overlap': patch_overlap,
            'cropping_margin_size': cropping_margin_size,
            'output_key': output_key,
            'num_output_channels': num_output_channels,
            'image_mip': image_mip,
            'output_mip': output_mip,
            'mask_mip': mask_mip,
        },
        'by': OPERATOR_CONTACT,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    vol.commit_provenance()
def create_transfer_tasks(task_queue, src_layer_path, dest_layer_path,
                          shape=Vec(2048, 2048, 64), fill_missing=False,
                          translate=(0, 0, 0)):
    shape = Vec(*shape)
    translate = Vec(*translate)
    vol = CloudVolume(src_layer_path)

    create_downsample_scales(dest_layer_path, mip=0, ds_shape=shape, preserve_chunk_size=True)

    bounds = vol.bounds.clone()
    for startpt in tqdm(xyzrange(bounds.minpt, bounds.maxpt, shape), desc="Inserting Transfer Tasks"):
        task = TransferTask(
            src_path=src_layer_path,
            dest_path=dest_layer_path,
            shape=shape.clone(),
            offset=startpt.clone(),
            fill_missing=fill_missing,
            translate=translate,
        )
        task_queue.insert(task)
    task_queue.wait('Uploading Transfer Tasks')

    dvol = CloudVolume(dest_layer_path)
    dvol.provenance.processing.append({
        'method': {
            'task': 'TransferTask',
            'src': src_layer_path,
            'dest': dest_layer_path,
            'shape': list(map(int, shape)),
        },
        'by': USER_EMAIL,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    dvol.commit_provenance()
def __iter__(self):
    self.bounds = bounds.clone()
    self.bounds.minpt.z = bounds.minpt.z + self.level_start * shape.z
    self.bounds.maxpt.z = bounds.minpt.z + self.level_end * shape.z

    for start in xyzrange(self.bounds.minpt, self.bounds.maxpt, shape):
        task_shape = min2(shape.clone(), self.bounds.maxpt - start)
        task_bounds = Bbox(start, start + task_shape)
        if task_bounds.volume() < 1:
            continue

        chunk_begin = tup2str(task_bounds.minpt)
        chunk_end = tup2str(task_bounds.maxpt)
        res_str = tup2str(resolution)

        cmd = (f"remap_ids {cleftpath} {cleftoutpath} {storagestr}"
               f" --chunk_begin {chunk_begin} --chunk_end {chunk_end}"
               f" --dup_map_storagestr {dupstoragestr} --mip {res_str}")
        yield SynaptorTask(cmd)
def __iter__(self):
    self.bounds = bounds.clone()
    self.bounds.minpt.z = bounds.minpt.z + self.level_start * shape.z
    self.bounds.maxpt.z = bounds.minpt.z + self.level_end * shape.z

    for startpt in xyzrange(self.bounds.minpt, self.bounds.maxpt, shape):
        task_shape = min2(shape.clone(), self.bounds.maxpt - startpt)
        task_bounds = Bbox(startpt, startpt + task_shape)
        if task_bounds.volume() < 1:
            continue

        chunk_begin = tup2str(task_bounds.minpt)
        chunk_end = tup2str(task_bounds.maxpt)
        mip_str = tup2str(mip)

        cmd = (f"chunk_overlaps {segpath} {base_segpath} {storagestr}"
               f" --chunk_begin {chunk_begin} --chunk_end {chunk_end}"
               f" --parallel {parallel} --mip {mip_str}")
        yield SynaptorTask(cmd)
def create_boss_transfer_tasks(task_queue, src_layer_path, dest_layer_path, shape=Vec(1024, 1024, 64)):
    # Note: weird errors with the datatype changing to float64 when requesting
    # (2048, 2048, 64); (1024, 1024, 64) worked nicely though.
    shape = Vec(*shape)
    vol = CloudVolume(dest_layer_path)

    create_downsample_scales(dest_layer_path, mip=0, ds_shape=shape)

    for startpt in tqdm(xyzrange(vol.bounds.minpt, vol.bounds.maxpt, shape), desc="Inserting Boss Transfer Tasks"):
        task = BossTransferTask(
            src_path=src_layer_path,
            dest_path=dest_layer_path,
            shape=shape.clone(),
            offset=startpt.clone(),
        )
        task_queue.insert(task)
    task_queue.wait('Uploading Boss Transfer Tasks')
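# Example usage (sketch; the boss:// source path is illustrative). The default
# (1024, 1024, 64) shape sidesteps the float64 dtype issue noted above:
#
#   tq = MockTaskQueue()
#   create_boss_transfer_tasks(
#       tq, 'boss://collection/experiment/channel', 'gs://bucket/dest/image',
#   )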
def __iter__(self):
    for startpt in xyzrange(bounds.minpt, bounds.maxpt, shape):
        yield DownsampleTask(
            layer_path=layer_path,
            mip=vol.mip,
            shape=shape.clone(),
            offset=startpt.clone(),
            axis=axis,
            fill_missing=fill_missing,
            sparse=sparse,
        )

    vol.provenance.processing.append({
        'method': {
            'task': 'DownsampleTask',
            'mip': mip,
            'shape': shape.tolist(),
            'axis': axis,
            'method': ('downsample_with_averaging' if vol.layer_type == 'image'
                       else 'downsample_segmentation'),
            'sparse': sparse,
            'bounds': str(bounds),
            'chunk_size': (list(chunk_size) if chunk_size else None),
            'preserve_chunk_size': preserve_chunk_size,
        },
        'by': OPERATOR_CONTACT,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    vol.commit_provenance()
def __iter__(self):
    self.bounds = bounds.clone()
    self.bounds.minpt.z = bounds.minpt.z + self.level_start * shape.z
    self.bounds.maxpt.z = bounds.minpt.z + self.level_end * shape.z

    for start in xyzrange(self.bounds.minpt, self.bounds.maxpt, shape):
        task_shape = min2(shape.clone(), self.bounds.maxpt - start)
        task_bounds = Bbox(start, start + task_shape)
        if task_bounds.volume() < 1:
            continue

        chunk_begin = tup2str(task_bounds.minpt)
        chunk_end = tup2str(task_bounds.maxpt)
        patchsz_str = tup2str(patchsz)
        res_str = tup2str(resolution)

        cmd = (f"chunk_edges {imgpath} {cleftpath} {segpath}"
               f" {storagestr} {hashmax} --storagedir {storagedir}"
               f" --chunk_begin {chunk_begin} --chunk_end {chunk_end}"
               f" --patchsz {patchsz_str} --resolution {res_str}")
        yield SynaptorTask(cmd)
def __iter__(self):
    for startpt in xyzrange(bounds.minpt, bounds.maxpt, shape):
        task_shape = min2(shape.clone(), srcvol.bounds.maxpt - startpt)
        yield ContrastNormalizationTask(
            src_path=src_path,
            dest_path=dest_path,
            levels_path=levels_path,
            shape=task_shape,
            offset=startpt.clone(),
            clip_fraction=clip_fraction,
            mip=mip,
            fill_missing=fill_missing,
            translate=translate,
            minval=minval,
            maxval=maxval,
        )

    dvol.provenance.processing.append({
        'method': {
            'task': 'ContrastNormalizationTask',
            'src_path': src_path,
            'dest_path': dest_path,
            'shape': Vec(*shape).tolist(),
            'clip_fraction': clip_fraction,
            'mip': mip,
            'translate': Vec(*translate).tolist(),
            'minval': minval,
            'maxval': maxval,
            'bounds': [bounds.minpt.tolist(), bounds.maxpt.tolist()],
        },
        'by': OPERATOR_CONTACT,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    dvol.commit_provenance()
def __iter__(self):
    for startpt in xyzrange(bounds.minpt, bounds.maxpt, shape):
        task_shape = min2(shape.clone(), dvol_bounds.maxpt - startpt)
        yield TransferTask(
            src_path=src_layer_path,
            dest_path=dest_layer_path,
            shape=task_shape,
            offset=startpt.clone(),
            fill_missing=fill_missing,
            translate=translate,
            mip=mip,
        )

    job_details = {
        'method': {
            'task': 'TransferTask',
            'src': src_layer_path,
            'dest': dest_layer_path,
            'shape': list(map(int, shape)),
            'fill_missing': fill_missing,
            'translate': list(map(int, translate)),
            'bounds': [bounds.minpt.tolist(), bounds.maxpt.tolist()],
            'mip': mip,
        },
        'by': OPERATOR_CONTACT,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    }

    dvol = CloudVolume(dest_layer_path)
    dvol.provenance.sources = [src_layer_path]
    dvol.provenance.processing.append(job_details)
    dvol.commit_provenance()

    if vol.path.protocol != 'boss':
        vol.provenance.processing.append(job_details)
        vol.commit_provenance()
def test_xyzrange():
    def xyz(*args):
        return np.array(list(xyzrange(*args)))

    assert list(xyzrange((0, 0, 0))) == []
    assert list(xyzrange((1, 0, 0))) == []
    assert np.all(xyz((1, 1, 1)) == [Vec(0, 0, 0)])
    assert np.all(xyz((2, 1, 1)) == [Vec(0, 0, 0), Vec(1, 0, 0)])
    assert np.all(xyz((2, 2, 2)) == [
        Vec(0, 0, 0), Vec(1, 0, 0), Vec(0, 1, 0), Vec(1, 1, 0),
        Vec(0, 0, 1), Vec(1, 0, 1), Vec(0, 1, 1), Vec(1, 1, 1),
    ])
    assert np.all(xyz((2, 1, 1), (3, 2, 2)) == [Vec(2, 1, 1)])
    assert np.all(xyz((2, 1, 1), (5, 2, 2), (2, 1, 1)) == [Vec(2, 1, 1), Vec(4, 1, 1)])

    # Fractional steps are supported as well.
    assert np.all(xyz((0, 0, 0), (2, 2, 1), (0.5, 0.5, 0.5)) == [
        Vec(0.0, 0.0, 0.0), Vec(0.5, 0.0, 0.0), Vec(1.0, 0.0, 0.0), Vec(1.5, 0.0, 0.0),
        Vec(0.0, 0.5, 0.0), Vec(0.5, 0.5, 0.0), Vec(1.0, 0.5, 0.0), Vec(1.5, 0.5, 0.0),
        Vec(0.0, 1.0, 0.0), Vec(0.5, 1.0, 0.0), Vec(1.0, 1.0, 0.0), Vec(1.5, 1.0, 0.0),
        Vec(0.0, 1.5, 0.0), Vec(0.5, 1.5, 0.0), Vec(1.0, 1.5, 0.0), Vec(1.5, 1.5, 0.0),
        Vec(0.0, 0.0, 0.5), Vec(0.5, 0.0, 0.5), Vec(1.0, 0.0, 0.5), Vec(1.5, 0.0, 0.5),
        Vec(0.0, 0.5, 0.5), Vec(0.5, 0.5, 0.5), Vec(1.0, 0.5, 0.5), Vec(1.5, 0.5, 0.5),
        Vec(0.0, 1.0, 0.5), Vec(0.5, 1.0, 0.5), Vec(1.0, 1.0, 0.5), Vec(1.5, 1.0, 0.5),
        Vec(0.0, 1.5, 0.5), Vec(0.5, 1.5, 0.5), Vec(1.0, 1.5, 0.5), Vec(1.5, 1.5, 0.5),
    ])
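# A hedged, pure-Python sketch of the semantics the test above pins down:
# xyzrange walks the grid x-fastest / z-slowest and accepts (end),
# (start, end), or (start, end, step) call forms, including fractional steps.
# This stand-in is for illustration only, not the library implementation.
def xyzrange_sketch(start, end=None, step=(1, 1, 1)):
    if end is None:
        start, end = (0, 0, 0), start  # single-argument form: iterate from the origin
    x0, y0, z0 = start
    sx, sy, sz = step
    z = z0
    while z < end[2]:
        y = y0
        while y < end[1]:
            x = x0
            while x < end[0]:
                yield (x, y, z)  # x varies fastest, matching the test above
                x += sx
            y += sy
        z += sz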
    encoding='raw',
    resolution=[pysical_x, pysical_y, pysical_z],
    voxel_offset=[0, 0, 0],
    # volume_size = [1024, 10240, 14592],
    volume_size=[h5_class.shape_z, h5_class.shape_y, h5_class.shape_x],
    # chunk_size = [512, 512, 64],
)

vol = CloudVolume("file://" + h5_class.Destination_path,
                  compress=False, info=info, non_aligned_writes=True)
vol.commit_info()

h5_data = h5_file[Dataset_name]

for x, y, z in tqdm(xyzrange(bounds.minpt, bounds.maxpt, shape)):
    pt = Vec(x, y, z)
    bounded_shape = min2(shape, bounds.maxpt - Vec(x, y, z))
    bbx = Bbox(pt, pt + bounded_shape)
    if bbx.subvoxel():
        continue
    # The HDF5 dataset is stored z, y, x: reverse the slices and transpose.
    vol[bbx] = (h5_data[bbx.to_slices()[::-1]].T).astype(np_type)

print("KNeuroViz pre-processing DONE!")

if h5_class.ImageType == 'segmentation':
    seg_mesh_path = "file://" + h5_class.Destination_path
    with LocalTaskQueue(parallel=8) as tq:
        tasks = tc.create_meshing_tasks(seg_mesh_path,