def make(self, key):
    """
    Pseudocode:
    1) Compute all of the soma meshes for the segment
    2) Save each mesh as an h5py file
    3) Store the saved path as the decomposition part of the
       dictionary and erase the vertices and faces
    4) Insert
    """
    # get the mesh data
    print(f"\n\n\n---- Working on {key['segment_id']} ----")
    print(key)
    new_mesh = (minnie.Decimation() & key).fetch1("mesh")
    current_mesh_verts, current_mesh_faces = new_mesh.vertices, new_mesh.faces

    segment_id = key["segment_id"]
    (total_soma_list,
     run_time,
     total_soma_list_sdf) = extract_soma_center(
        segment_id,
        current_mesh_verts,
        current_mesh_faces,
    )

    print(f"Run time was {run_time} \n total_soma_list = {total_soma_list}"
          f"\n with sdf values = {total_soma_list_sdf}")

    # check if the soma list is empty (no soma was found)
    if len(total_soma_list) <= 0:
        print("There were no somas found for this mesh so just writing empty data")
        returned_file_path = tu.write_h5_file(
            vertices=np.array([]),
            faces=np.array([]),
            segment_id=segment_id,
            filename=f'{segment_id}_0.h5',
            filepath=str(du.get_somas_path()))

        insert_dict = dict(
            key,
            soma_index=0,
            centroid_x=None,
            centroid_y=None,
            centroid_z=None,
            #distance_from_prediction=None,
            #prediction_matching_index=None,
            n_vertices=0,
            n_faces=0,
            mesh=returned_file_path,
            multiplicity=0,
            sdf=None,
            max_side_ratio=None,
            bbox_volume_ratio=None,
            max_hole_length=None,
            run_time=run_time)

        #raise Exception("to prevent writing because none were found")
        self.insert1(insert_dict, skip_duplicates=True)
        return

    # if one or more somas were found, compute the volume and side-length checks
    max_side_ratio = [np.max(side_length_ratios(m)) for m in total_soma_list]
    bbox_volume_ratio = [soma_volume_ratio(m) for m in total_soma_list]

    dicts_to_insert = []
    for i, (current_soma, soma_sdf, sz_ratio, vol_ratio) in enumerate(
            zip(total_soma_list, total_soma_list_sdf,
                max_side_ratio, bbox_volume_ratio)):
        print("Trying to write off file")
        """
        Currently don't need to export the meshes
        current_soma.export(f"{key['segment_id']}/{key['segment_id']}_soma_{i}.off")
        """
        # nm -> voxel coordinates (4 x 4 x 40 nm voxels)
        auto_prediction_center = np.mean(current_soma.vertices, axis=0) / np.array([4, 4, 40])
        auto_prediction_center = auto_prediction_center.astype("int")
        print(f"Predicted Coordinates are {auto_prediction_center}")
        max_hole_length = tu.largest_hole_length(current_soma)

        returned_file_path = tu.write_h5_file(
            vertices=current_soma.vertices,
            faces=current_soma.faces,
            segment_id=segment_id,
            filename=f'{segment_id}_{i}.h5',
            filepath=str(du.get_somas_path()))

        insert_dict = dict(key,
                           soma_index=i + 1,
                           centroid_x=auto_prediction_center[0],
                           centroid_y=auto_prediction_center[1],
                           centroid_z=auto_prediction_center[2],
                           n_vertices=len(current_soma.vertices),
                           n_faces=len(current_soma.faces),
                           mesh=returned_file_path,
                           multiplicity=len(total_soma_list),
                           sdf=np.round(soma_sdf, 3),
                           max_side_ratio=np.round(sz_ratio, 3),
                           bbox_volume_ratio=np.round(vol_ratio, 3),
                           max_hole_length=np.round(max_hole_length, 3),
                           run_time=np.round(run_time, 4))
        dicts_to_insert.append(insert_dict)

    self.insert(dicts_to_insert, skip_duplicates=True)
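# -------- Hedged sketch (illustration only, not the pipeline's helper) -------- #
# The make() above delegates mesh serialization to tu.write_h5_file. A minimal
# version of that step could look like the following, assuming h5py is available;
# the function name, signature, and dataset layout here are assumptions.
import os
import h5py
import numpy as np

def write_h5_file_sketch(vertices, faces, segment_id, filename, filepath):
    """Write a mesh's vertices/faces as HDF5 datasets and return the saved path."""
    full_path = os.path.join(filepath, filename)
    with h5py.File(full_path, "w") as f:
        f.create_dataset("vertices", data=np.asarray(vertices, dtype="float64"))
        f.create_dataset("faces", data=np.asarray(faces, dtype="int64"))
        f.attrs["segment_id"] = segment_id  # keep the provenance with the file
    return full_path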
def make(self, key):
    """
    Pseudocode:
    1) Compute all of the soma meshes for the segment
    2) Save each mesh as an h5py file
    3) Store the saved path as the decomposition part of the
       dictionary and erase the vertices and faces
    4) Insert
    """
    # get the mesh data
    print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----")
    print(key)
    new_mesh = (minnie.Decimation() & key).fetch1("mesh")
    current_mesh_verts, current_mesh_faces = new_mesh.vertices, new_mesh.faces

    segment_id = key["segment_id"]
    (total_soma_list,
     run_time,
     total_soma_list_sdf,
     glia_pieces,
     nuclei_pieces) = sm.extract_soma_center(
        segment_id,
        current_mesh_verts,
        current_mesh_faces,
        return_glia_nuclei_pieces=True,
    )

    # -------- 1/9 Addition: save off the glia and nuclei pieces ----------- #
    """
    Pseudocode: for both the glia and nuclei pieces
    1) If the array is non-empty --> combine the meshes and map the
       face indices back to the original mesh
    2) Otherwise store None
    """
    orig_mesh = trimesh.Trimesh(vertices=current_mesh_verts,
                                faces=current_mesh_faces)

    if len(glia_pieces) > 0:
        glia_faces = tu.original_mesh_faces_map(orig_mesh, tu.combine_meshes(glia_pieces))
        n_glia_faces = len(glia_faces)
    else:
        glia_faces = None
        n_glia_faces = 0

    if len(nuclei_pieces) > 0:
        nuclei_faces = tu.original_mesh_faces_map(orig_mesh, tu.combine_meshes(nuclei_pieces))
        n_nuclei_faces = len(nuclei_faces)
    else:
        nuclei_faces = None
        n_nuclei_faces = 0

    # --------- saving the nuclei and glia files
    glia_path, nuclei_path = du.save_glia_nuclei_files(
        glia_faces=glia_faces,
        nuclei_faces=nuclei_faces,
        segment_id=segment_id)

    print(f" glia_path = {glia_path} \n nuclei_path = {nuclei_path}")

    glia_nuclei_key = dict(key,
                           ver=current_version,
                           n_glia_faces=n_glia_faces,
                           #glia_faces=glia_faces,
                           glia_faces=glia_path,
                           n_nuclei_faces=n_nuclei_faces,
                           #nuclei_faces=nuclei_faces,
                           nuclei_faces=nuclei_path,
                           )

    NeuronGliaNuclei.insert1(glia_nuclei_key, replace=True)
    print(f"Finished saving off glia and nuclei information : {glia_nuclei_key}")
    # ---------------- End of 1/9 Addition --------------------------------- #

    print(f"Run time was {run_time} \n total_soma_list = {total_soma_list}"
          f"\n with sdf values = {total_soma_list_sdf}")

    # check if the soma list is empty (no soma was found)
    if len(total_soma_list) <= 0:
        print("There were no somas found for this mesh so just writing empty data")
        returned_file_path = tu.write_h5_file(
            vertices=np.array([]),
            faces=np.array([]),
            segment_id=segment_id,
            filename=f'{segment_id}_0.h5',
            filepath=str(du.get_somas_path())
        )

        insert_dict = dict(key,
                           soma_index=0,
                           ver=current_version,
                           centroid_x=None,
                           centroid_y=None,
                           centroid_z=None,
                           #distance_from_prediction=None,
                           #prediction_matching_index=None,
                           n_vertices=0,
                           n_faces=0,
                           mesh=returned_file_path,
                           multiplicity=0,
                           sdf=None,
                           volume=None,
                           max_side_ratio=None,
                           bbox_volume_ratio=None,
                           max_hole_length=None,
                           run_time=run_time
                           )

        #raise Exception("to prevent writing because none were found")
        self.insert1(insert_dict, skip_duplicates=True)
        return

    # if one or more somas were found, compute the volume and side-length checks
    max_side_ratio = [np.max(sm.side_length_ratios(m)) for m in total_soma_list]
    bbox_volume_ratio = [sm.soma_volume_ratio(m) for m in total_soma_list]

    dicts_to_insert = []
    for i, (current_soma, soma_sdf, sz_ratio, vol_ratio) in enumerate(
            zip(total_soma_list, total_soma_list_sdf,
                max_side_ratio, bbox_volume_ratio)):
        print("Trying to write off file")
        """
        Currently don't need to export the meshes
        current_soma.export(f"{key['segment_id']}/{key['segment_id']}_soma_{i}.off")
        """
        # nm -> voxel coordinates (4 x 4 x 40 nm voxels)
        auto_prediction_center = np.mean(current_soma.vertices, axis=0) / np.array([4, 4, 40])
        auto_prediction_center = auto_prediction_center.astype("int")
        print(f"Predicted Coordinates are {auto_prediction_center}")
        max_hole_length = tu.largest_hole_length(current_soma)

        returned_file_path = tu.write_h5_file(
            vertices=current_soma.vertices,
            faces=current_soma.faces,
            segment_id=segment_id,
            filename=f'{segment_id}_{i}.h5',
            filepath=str(du.get_somas_path())
        )

        insert_dict = dict(key,
                           soma_index=i + 1,
                           ver=current_version,
                           centroid_x=auto_prediction_center[0],
                           centroid_y=auto_prediction_center[1],
                           centroid_z=auto_prediction_center[2],
                           n_vertices=len(current_soma.vertices),
                           n_faces=len(current_soma.faces),
                           mesh=returned_file_path,
                           multiplicity=len(total_soma_list),
                           sdf=np.round(soma_sdf, 3),
                           # nm^3 -> um^3
                           volume=current_soma.convex_hull.volume / 1_000_000_000,
                           max_side_ratio=np.round(sz_ratio, 3),
                           bbox_volume_ratio=np.round(vol_ratio, 3),
                           max_hole_length=np.round(max_hole_length, 3),
                           run_time=np.round(run_time, 4)
                           )
        dicts_to_insert.append(insert_dict)

    self.insert(dicts_to_insert, skip_duplicates=True)
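# -------- Hedged sketch (illustration only, not the pipeline's helper) -------- #
# The glia/nuclei branch above maps each combined piece back to face indices of
# the original mesh via tu.original_mesh_faces_map. One way to implement such a
# mapping is to match face centroids with a KD-tree, as sketched below; the real
# helper may use exact matching instead. Inputs are assumed to be trimesh.Trimesh.
from scipy.spatial import cKDTree

def original_mesh_faces_map_sketch(original_mesh, sub_mesh):
    # face centroids of the full mesh index the search structure
    kdtree = cKDTree(original_mesh.triangles_center)
    # nearest original face for each face of the submesh
    _, face_indices = kdtree.query(sub_mesh.triangles_center)
    return face_indices  # indices into original_mesh.faces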
def make(self, key):
    """
    Purpose: To extract the soma (and glia/nuclei) meshes from a
    decimated mesh, using the parameters prescribed by the
    mesh-fragment method set

    Pseudocode:
    1) Get the current decimated mesh
    2) Run the soma extraction with the method-set parameters
    3) Save the resulting meshes as h5py files and insert the statistics
    """
    segment_id = key["segment_id"]
    decimation_method = key["decimation_method"]
    mesh_fragment_method_set = key["mesh_fragment_method_set"]
    ver = key["ver"]

    if verbose:
        print(f"\n\n--Working on {segment_id}: (decimation_method = {decimation_method})")

    # resolve the per-type parameter dicts from the method-set hash
    param_tables = MeshFragmentMethodSet.restrict_one_part_with_hash(mesh_fragment_method_set)
    soma_kwargs = MeshFragmentMethod.restrict_one_part_with_hash(
        (param_tables & "table_type = 'Soma'").fetch1("mesh_fragment_method")).fetch1()
    glia_kwargs = MeshFragmentMethod.restrict_one_part_with_hash(
        (param_tables & "table_type = 'Glia'").fetch1("mesh_fragment_method")).fetch1()
    nuclei_kwargs = MeshFragmentMethod.restrict_one_part_with_hash(
        (param_tables & "table_type = 'Nuclei'").fetch1("mesh_fragment_method")).fetch1()
    all_kwargs = gu.merge_dicts([glia_kwargs, nuclei_kwargs, soma_kwargs])

    mesh_fragment_method_soma = (param_tables & dict(table_type='Soma')).fetch1("mesh_fragment_method")
    mesh_fragment_method_nuclei = (param_tables & dict(table_type='Nuclei')).fetch1("mesh_fragment_method")
    mesh_fragment_method_glia = (param_tables & dict(table_type='Glia')).fetch1("mesh_fragment_method")

    st = time.time()
    #1) Get the current mesh
    new_mesh = (h01mor.Decimation.Object() & key).fetch1("mesh")
    if verbose:
        print(f"Fetching Decimation time: {time.time()-st}")

    current_mesh_verts, current_mesh_faces = new_mesh.vertices, new_mesh.faces

    (total_soma_list,
     run_time,
     total_soma_list_sdf,
     glia_pieces,
     nuclei_pieces) = sm.extract_soma_center(
        segment_id,
        current_mesh_verts,
        current_mesh_faces,
        return_glia_nuclei_pieces=True,
        verbose=True,
        **all_kwargs
    )

    # -------- 1/9 Addition: save off the glia and nuclei pieces ----------- #
    """
    Pseudocode: for both the glia and nuclei pieces
    1) If the array is non-empty --> combine the meshes and map the
       face indices back to the original mesh
    2) Otherwise store None
    """
    orig_mesh = new_mesh

    if len(glia_pieces) > 0:
        glia_faces = tu.original_mesh_faces_map(orig_mesh, tu.combine_meshes(glia_pieces))
        n_glia_faces = len(glia_faces)
    else:
        glia_faces = None
        n_glia_faces = 0

    if len(nuclei_pieces) > 0:
        nuclei_faces = tu.original_mesh_faces_map(orig_mesh, tu.combine_meshes(nuclei_pieces))
        n_nuclei_faces = len(nuclei_faces)
    else:
        nuclei_faces = None
        n_nuclei_faces = 0

    # --------- saving the nuclei and glia files
    glia_path, nuclei_path = hdju.save_glia_nuclei_files(
        glia_faces=glia_faces,
        nuclei_faces=nuclei_faces,
        segment_id=segment_id,
        filename_append=filename_append)

    print(f" glia_path = {glia_path} \n nuclei_path = {nuclei_path}")

    glia_key = dict(key,
                    mesh_fragment_method=mesh_fragment_method_glia,
                    n_faces=n_glia_faces,
                    faces=glia_path)
    nuclei_key = dict(key,
                      mesh_fragment_method=mesh_fragment_method_nuclei,
                      n_faces=n_nuclei_faces,
                      faces=nuclei_path)
    # ---------------- End of 1/9 Addition --------------------------------- #

    # insert the master row before the part tables
    self.insert1(key,
                 skip_duplicates=True,
                 ignore_extra_fields=True,
                 allow_direct_insert=True)

    print(f"Run time was {run_time} \n total_soma_list = {total_soma_list}"
          f"\n with sdf values = {total_soma_list_sdf}")

    # check if the soma list is empty (no soma was found)
    if len(total_soma_list) <= 0:
        print("There were no somas found for this mesh so just writing empty data")
        returned_file_path = tu.write_h5_file(
            vertices=np.array([]),
            faces=np.array([]),
            segment_id=segment_id,
            filename=f'{segment_id}_soma_0.h5',
            filepath=str(hdju.external_path_mesh)
        )

        insert_dict = dict(key,
                           soma_index=0,
                           centroid_x=None,
                           centroid_y=None,
                           centroid_z=None,
                           centroid_x_nm=None,
                           centroid_y_nm=None,
                           centroid_z_nm=None,
                           #distance_from_prediction=None,
                           #prediction_matching_index=None,
                           n_vertices=0,
                           n_faces=0,
                           mesh=returned_file_path,
                           multiplicity=0,
                           sdf=None,
                           volume=None,
                           surface_area=None,
                           max_side_ratio=None,
                           bbox_volume_ratio=None,
                           max_hole_length=None,
                           run_time=np.round(run_time, 4),
                           mesh_fragment_method=mesh_fragment_method_soma,
                           )

        #raise Exception("to prevent writing because none were found")
        self.Soma.insert1(insert_dict,
                          allow_direct_insert=True,
                          ignore_extra_fields=True,
                          skip_duplicates=True)
        self.SomaObject.insert1(insert_dict,
                                allow_direct_insert=True,
                                ignore_extra_fields=True,
                                skip_duplicates=True)
        return

    # if one or more somas were found, compute the volume and side-length checks
    max_side_ratio = [np.max(sm.side_length_ratios(m)) for m in total_soma_list]
    bbox_volume_ratio = [sm.soma_volume_ratio(m) for m in total_soma_list]

    dicts_to_insert = []
    for i, (current_soma, soma_sdf, sz_ratio, vol_ratio) in enumerate(
            zip(total_soma_list, total_soma_list_sdf,
                max_side_ratio, bbox_volume_ratio)):
        print("Trying to write off file")
        """
        Currently don't need to export the meshes
        current_soma.export(f"{key['segment_id']}/{key['segment_id']}_soma_{i}.off")
        """
        auto_prediction_center_nm = np.mean(current_soma.vertices, axis=0)
        auto_prediction_center = auto_prediction_center_nm / hu.voxel_to_nm_scaling
        auto_prediction_center = auto_prediction_center.astype("int")
        print(f"Predicted Coordinates are {auto_prediction_center}")
        max_hole_length = tu.largest_hole_length(current_soma)
        if max_hole_length is not None:
            max_hole_length = np.round(max_hole_length, 3)

        returned_file_path = tu.write_h5_file(
            vertices=current_soma.vertices,
            faces=current_soma.faces,
            segment_id=segment_id,
            filename=f'{segment_id}_soma_{i+1}.h5',
            filepath=str(hdju.external_path_mesh)
        )

        divisor = 1_000_000  # nm^2 -> um^2 for surface area
        insert_dict = dict(key,
                           soma_index=i + 1,
                           centroid_x=auto_prediction_center[0],
                           centroid_y=auto_prediction_center[1],
                           centroid_z=auto_prediction_center[2],
                           centroid_x_nm=auto_prediction_center_nm[0],
                           centroid_y_nm=auto_prediction_center_nm[1],
                           centroid_z_nm=auto_prediction_center_nm[2],
                           n_vertices=len(current_soma.vertices),
                           n_faces=len(current_soma.faces),
                           mesh=returned_file_path,
                           multiplicity=len(total_soma_list),
                           sdf=np.round(soma_sdf, 3),
                           # nm^3 -> um^3
                           volume=np.round(current_soma.convex_hull.volume / 1_000_000_000, 3),
                           surface_area=np.round(current_soma.area / divisor, 3),
                           max_side_ratio=np.round(sz_ratio, 3),
                           bbox_volume_ratio=np.round(vol_ratio, 3),
                           max_hole_length=max_hole_length,
                           run_time=np.round(run_time, 4),
                           mesh_fragment_method=mesh_fragment_method_soma,
                           )
        dicts_to_insert.append(insert_dict)

    self.Glia.insert1(glia_key,
                      allow_direct_insert=True,
                      ignore_extra_fields=True,
                      skip_duplicates=True)
    self.GliaObject.insert1(glia_key,
                            allow_direct_insert=True,
                            ignore_extra_fields=True,
                            skip_duplicates=True)
    self.Nuclei.insert1(nuclei_key,
                        allow_direct_insert=True,
                        ignore_extra_fields=True,
                        skip_duplicates=True)
    self.NucleiObject.insert1(nuclei_key,
                              allow_direct_insert=True,
                              ignore_extra_fields=True,
                              skip_duplicates=True)
    self.Soma.insert(dicts_to_insert,
                     allow_direct_insert=True,
                     ignore_extra_fields=True,
                     skip_duplicates=True)
    self.SomaObject.insert(dicts_to_insert,
                           allow_direct_insert=True,
                           ignore_extra_fields=True,
                           skip_duplicates=True)
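# -------- Hedged sketches (illustration only, not the pipeline's helpers) -------- #
# The quality checks above (max_side_ratio, bbox_volume_ratio) come from
# sm.side_length_ratios and sm.soma_volume_ratio. Plausible definitions are
# sketched below for a trimesh.Trimesh input; the actual implementations (and
# the direction of the volume ratio) may differ.

def side_length_ratios_sketch(mesh):
    # pairwise ratios of the axis-aligned bounding-box side lengths;
    # an elongated, non-soma-like fragment yields a large maximum ratio
    sides = mesh.extents
    return [max(a, b) / min(a, b)
            for i, a in enumerate(sides)
            for b in sides[i + 1:]]

def soma_volume_ratio_sketch(mesh):
    # how much of its bounding box the mesh fills; a round soma fills a
    # larger fraction of its box than a thin sliver does
    return mesh.convex_hull.volume / mesh.bounding_box.volume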
def make(self, key):
    """
    Purpose: To decimate a mesh by a prescribed decimation
    ratio and algorithm

    Pseudocode:
    1) Get the current mesh
    2) Decimate the current mesh with the corresponding decimation method
    3) Get n_verts, n_faces and save as an h5py file
    """
    segment_id = key["segment_id"]
    decimation_hash = key["decimation_method"]
    ver = key["ver"]

    if verbose:
        print(f"\n\n--Working on {segment_id}: (decimation_hash = {decimation_hash})")

    filename_append = f"{dataset}_decimation_{decimation_hash}"

    # skip segments that have already been processed
    if len(self & dict(ver=ver,
                       segment_id=segment_id,
                       decimation_method=decimation_hash)) > 0:
        if verbose:
            print(f"Already processed {segment_id} (decimation_hash = {decimation_hash})")
        return

    if filename_append is not None:
        filename = f"{segment_id}_{filename_append}.h5"
    else:
        filename = f"{segment_id}.h5"

    #1) Get the current mesh
    current_mesh = (h01mat.Mesh.Object() & dict(segment_id=segment_id,
                                                ver=ver)).fetch1("mesh")

    #2) Decimate the current mesh with the corresponding decimation method
    st = time.time()
    dec_mesh = DecimationMethod.restrict_one_part_with_hash(
        decimation_hash).method(current_mesh)
    if verbose:
        print(f"Decimation time: {time.time() - st}")

    #3) Get n_verts, n_faces and save as an h5py file
    st = time.time()
    n_vertices, n_faces = len(dec_mesh.vertices), len(dec_mesh.faces)
    mesh_path = tu.write_h5_file(dec_mesh,
                                 filepath=target_dir,
                                 filename=filename,
                                 return_file_path=True)
    if verbose:
        print(f"Writing h5 time: {time.time() - st}")

    write_dict = dict(
        segment_id=segment_id,
        ver=ver,
        n_vertices=n_vertices,
        n_faces=n_faces,
        mesh=mesh_path,
        decimation_method=decimation_hash,
    )

    self.insert1(write_dict,
                 allow_direct_insert=True,
                 ignore_extra_fields=True,
                 skip_duplicates=True)
    self.Object.insert1(write_dict,
                        allow_direct_insert=True,
                        ignore_extra_fields=True,
                        skip_duplicates=True)
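# -------- Hedged sketch (illustration only, not the pipeline's method) -------- #
# Step 2 above calls the decimation method stored for the given hash. A typical
# method() could wrap trimesh's quadric decimation as sketched below; the backend
# (e.g. fast_simplification or open3d) and the 0.25 ratio are assumptions.
import trimesh

def decimate_sketch(mesh, decimation_ratio=0.25):
    # keep a fixed fraction of the original face count
    target_faces = int(len(mesh.faces) * decimation_ratio)
    return mesh.simplify_quadric_decimation(face_count=target_faces)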