def chunk_mesh_task(cg, chunk_id, cv_path, cv_mesh_dir=None, mip=3, max_err=40):
    """ Computes the meshes for a single chunk

    :param cg: ChunkedGraph instance
    :param chunk_id: int
    :param cv_path: str
    :param cv_mesh_dir: str or None
    :param mip: int
    :param max_err: float
    """
    layer = cg.get_chunk_layer(chunk_id)
    cx, cy, cz = cg.get_chunk_coordinates(chunk_id)
    mesh_dir = cv_mesh_dir or cg._mesh_dir

    if layer <= 2:
        # Level 1 or 2 chunk - fetch supervoxel mapping from ChunkedGraph, and
        # generate an igneous MeshTask, which will:
        # 1) Relabel the segmentation based on the sv_to_node_mapping
        # 2) Run Marching Cubes
        # 3) Simplify each mesh using error quadrics to control the edge collapse
        # 4) Upload a single mesh file for each segment of this chunk
        # 5) Upload a manifest file for each segment of this chunk,
        #    pointing to the mesh for the segment

        print("Retrieving remap table for chunk %s -- (%s, %s, %s, %s)" %
              (chunk_id, layer, cx, cy, cz))
        sv_to_node_mapping = get_sv_to_node_mapping(cg, chunk_id)
        print("Remapped %s segments to %s agglomerations. Start meshing..." %
              (len(sv_to_node_mapping),
               len(np.unique(list(sv_to_node_mapping.values())))))

        if len(sv_to_node_mapping) == 0:
            print("Nothing to do", cx, cy, cz)
            return

        mesh_block_shape = meshgen_utils.get_mesh_block_shape(cg, layer, mip)
        chunk_offset = (cx, cy, cz) * mesh_block_shape

        task = MeshTask(
            mesh_block_shape,
            chunk_offset,
            cv_path,
            mip=mip,
            simplification_factor=999999,      # Simplify as much as possible ...
            max_simplification_error=max_err,  # ... staying below max error.
            remap_table=sv_to_node_mapping,
            generate_manifests=False,
            low_padding=0,                     # One voxel overlap to exactly line up
            high_padding=1,                    # vertex boundaries.
            mesh_dir=mesh_dir,
            cache_control='no-cache')
        task.execute()

        print("Layer %d -- finished:" % layer, cx, cy, cz)
    else:
        # For each node with more than one child, create a new fragment by
        # merging the mesh fragments of the children.

        print("Retrieving children for chunk %s -- (%s, %s, %s, %s)" %
              (chunk_id, layer, cx, cy, cz))
        node_ids = cg.range_read_chunk(layer, cx, cy, cz,
                                       columns=column_keys.Hierarchy.Child)

        print("Collecting only nodes with more than one child: ", end="")
        # Only keep nodes with more than one child
        multi_child_nodes = {}
        for node_id, data in node_ids.items():
            children = data[0].value

            if len(children) > 1:
                multi_child_descendant = [
                    meshgen_utils.get_downstream_multi_child_node(cg, child, 2)
                    for child in children
                ]

                multi_child_nodes[
                    f'{node_id}:0:{meshgen_utils.get_chunk_bbox_str(cg, node_id, mip)}'
                ] = [
                    f'{c}:0:{meshgen_utils.get_chunk_bbox_str(cg, c, mip)}'
                    for c in multi_child_descendant
                ]
        print("%d out of %d" % (len(multi_child_nodes), len(node_ids)))

        if not multi_child_nodes:
            print("Nothing to do", cx, cy, cz)
            return

        with Storage(os.path.join(cv_path, mesh_dir)) as storage:
            i = 0
            for new_fragment_id, fragment_ids_to_fetch in multi_child_nodes.items():
                i += 1
                if i % max(1, len(multi_child_nodes) // 10) == 0:
                    print(f"{i}/{len(multi_child_nodes)}")

                fragment_contents = storage.get_files(fragment_ids_to_fetch)
                fragment_contents = {
                    x['filename']: decode_mesh_buffer(x['filename'], x['content'])
                    for x in fragment_contents
                    if x['content'] is not None and x['error'] is None
                }

                old_fragments = list(fragment_contents.values())
                if not old_fragments:
                    continue

                new_fragment = merge_meshes(old_fragments)
                new_fragment_b = b''.join([
                    new_fragment['num_vertices'].tobytes(),
                    new_fragment['vertices'].tobytes(),
                    new_fragment['faces'].tobytes()
                ])
                storage.put_file(new_fragment_id, new_fragment_b,
                                 content_type='application/octet-stream',
                                 compress=True,
                                 cache_control='no-cache')
def create_storage(layer_name='layer'):
    # `layer_path` is expected to be defined in the enclosing scope/module.
    stor_path = os.path.join(layer_path, layer_name)
    return Storage('file://' + stor_path, n_threads=0)
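# Illustrative usage sketch (assumptions: `layer_path` points at a writable
# local directory, and cloud-volume's Storage exposes put_file/get_file and
# works as a context manager, as it is used elsewhere in this section).
def _example_storage_roundtrip():
    with create_storage('build') as storage:
        # Write a small object and read it back through the same handle.
        storage.put_file('hello.txt', b'hello world')
        return storage.get_file('hello.txt')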
def _download_input_chunk(self, bounds):
    # Fetch the raw "build" chunk covering `bounds` from this layer's storage.
    storage = Storage(self.layer_path, n_threads=0)
    relpath = 'build/{}'.format(bounds.to_filename())
    return storage.get_file(relpath)
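# Illustrative sketch (assumption: `bounds` is a cloud-volume Bbox, whose
# to_filename() yields strings like '0-64_0-64_0-64'). It only shows how the
# relative key used by _download_input_chunk is composed; the helper name is
# hypothetical.
def _example_build_key():
    from cloudvolume import Bbox
    bounds = Bbox((0, 0, 0), (64, 64, 64))
    # _download_input_chunk would fetch the object stored under this path.
    return 'build/{}'.format(bounds.to_filename())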