Example 1
side_length_threshold=5000,
filter_out_border_spines=False,  # this seemed to cause a lot of misses
skeleton_endpoint_nullification=True,
skeleton_endpoint_nullification_distance=2000,
soma_vertex_nullification=True,
border_percentage_threshold=0.3,
check_spine_border_perc=0.4,

# ------- 1/20 Addition --------
filter_by_volume=True,
filter_by_volume_threshold=19835293,  # calculated from experiments
)

import general_utils as gu
kwargs_dict = gu.merge_dicts([combined_dict,
               preprocess_neuron_kwargs,
               spines_kwargs])
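
# `gu.merge_dicts` folds the kwargs dictionaries above into a single dict;
# its implementation is not shown here. A minimal sketch, assuming later
# dicts override earlier keys on conflict (hypothetical stand-in, not the
# actual general_utils code):
def merge_dicts_sketch(dicts):
    merged = {}
    for d in dicts:
        merged.update(d)  # keys in later dicts overwrite earlier ones
    return merged

assert merge_dicts_sketch([{"a": 1, "b": 2}, {"b": 3}]) == {"a": 1, "b": 3}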


# In[ ]:


# for k,v in kwargs_dict.items():
#     print(f"{k}: int unsigned")


# In[ ]:


# for k,v in combined_dict.items():
#     print(f"{k}={v},")
    def make(self, key):
        """
        Purpose: To extract the soma (plus glia and nuclei)
        submeshes from a decimated mesh

        Pseudocode:
        1) Fetch the current decimated mesh
        2) Run soma extraction (returning the glia/nuclei pieces as well)
        3) Save the resulting meshes as h5py files and insert the rows

        """
        
        segment_id = key["segment_id"]
        decimation_method = key["decimation_method"]
        mesh_fragment_method_set = key["mesh_fragment_method_set"]
        ver = key["ver"]
        
        if verbose:  # module-level verbosity flag
            print(f"\n\n--Working on {segment_id}: (decimation_method = {decimation_method})")

        param_tables = MeshFragmentMethodSet.restrict_one_part_with_hash(mesh_fragment_method_set)
        soma_kwargs = MeshFragmentMethod.restrict_one_part_with_hash(
            (param_tables & "table_type = 'Soma'").fetch1("mesh_fragment_method")).fetch1()
        glia_kwargs = MeshFragmentMethod.restrict_one_part_with_hash(
            (param_tables & "table_type = 'Glia'").fetch1("mesh_fragment_method")).fetch1()
        nuclei_kwargs = MeshFragmentMethod.restrict_one_part_with_hash(
            (param_tables & "table_type = 'Nuclei'").fetch1("mesh_fragment_method")).fetch1()
        all_kwargs = gu.merge_dicts([glia_kwargs,nuclei_kwargs,soma_kwargs])
        
        mesh_fragment_method_soma = (param_tables & dict(table_type='Soma')).fetch1("mesh_fragment_method")
        mesh_fragment_method_nuclei = (param_tables & dict(table_type='Nuclei')).fetch1("mesh_fragment_method")
        mesh_fragment_method_glia = (param_tables & dict(table_type='Glia')).fetch1("mesh_fragment_method")
            
        
        st = time.time()
        #1) Get the current mesh
        new_mesh = (h01mor.Decimation.Object() & key).fetch1("mesh")
        
        if verbose:
            print(f"Fetching Decimation time: {time.time()-st}")

        
        current_mesh_verts, current_mesh_faces = new_mesh.vertices, new_mesh.faces

        (total_soma_list,
         run_time,
         total_soma_list_sdf,
         glia_pieces,
         nuclei_pieces) = sm.extract_soma_center(
            segment_id,
            current_mesh_verts,
            current_mesh_faces,
            return_glia_nuclei_pieces=True,
            verbose=True,
            **all_kwargs
        )
        
        # -------- 1/9 Addition: Going to save off the glia and nuclei pieces ----------- #
        """
        Psuedocode:
        For both glia and nuclie pieces
        1) If the length of array is greater than 0 --> combine the mesh and map the indices to original mesh
        2) If not then just put None     
        """
        orig_mesh = new_mesh
        
        if len(glia_pieces) > 0:
            glia_faces = tu.original_mesh_faces_map(orig_mesh, tu.combine_meshes(glia_pieces))
            n_glia_faces = len(glia_faces)
        else:
            glia_faces = None
            n_glia_faces = 0

        if len(nuclei_pieces) > 0:
            nuclei_faces = tu.original_mesh_faces_map(orig_mesh, tu.combine_meshes(nuclei_pieces))
            n_nuclei_faces = len(nuclei_faces)
        else:
            nuclei_faces = None
            n_nuclei_faces = 0
            
        # --------- saving off the glia and nuclei files ---------
        glia_path, nuclei_path = hdju.save_glia_nuclei_files(
            glia_faces=glia_faces,
            nuclei_faces=nuclei_faces,
            segment_id=segment_id,
            filename_append=filename_append)  # filename_append: module-level setting

        print(f"glia_path = {glia_path} \nnuclei_path = {nuclei_path}")

        glia_key = dict(key,
                        mesh_fragment_method=mesh_fragment_method_glia,
                        n_faces=n_glia_faces,
                        faces=glia_path)

        nuclei_key = dict(key,
                          mesh_fragment_method=mesh_fragment_method_nuclei,
                          n_faces=n_nuclei_faces,
                          faces=nuclei_path)
        
        # ---------------- End of 1/9 Addition --------------------------------- #
        
        
        self.insert1(key,
                     skip_duplicates=True,
                     ignore_extra_fields=True,
                     allow_direct_insert=True)

        print(f"Run time was {run_time} \n    total_soma_list = {total_soma_list}"
             f"\n    with sdf values = {total_soma_list_sdf}")
        
        # Check whether the soma list is empty (i.e. no soma was found)
        if len(total_soma_list) == 0:
            print("There were no somas found for this mesh so just writing empty data")

            returned_file_path = tu.write_h5_file(
                vertices=np.array([]),
                faces=np.array([]),
                segment_id=segment_id,
                filename=f'{segment_id}_soma_0.h5',
                filepath=str(hdju.external_path_mesh)
            )

            insert_dict = dict(key,
                               soma_index=0,
                               centroid_x=None,
                               centroid_y=None,
                               centroid_z=None,
                               centroid_x_nm=None,
                               centroid_y_nm=None,
                               centroid_z_nm=None,
                               #distance_from_prediction=None,
                               #prediction_matching_index=None,
                               n_vertices=0,
                               n_faces=0,
                               mesh=returned_file_path,
                               multiplicity=0,
                               sdf=None,
                               volume=None,
                               surface_area=None,
                               max_side_ratio=None,
                               bbox_volume_ratio=None,
                               max_hole_length=None,
                               run_time=np.round(run_time, 4),
                               mesh_fragment_method=mesh_fragment_method_soma,
                               )
            
            #raise Exception("to prevent writing because none were found")
            self.Soma.insert1(insert_dict,
                              allow_direct_insert=True,
                              ignore_extra_fields=True,
                              skip_duplicates=True)
            self.SomaObject.insert1(insert_dict,
                                    allow_direct_insert=True,
                                    ignore_extra_fields=True,
                                    skip_duplicates=True)
            
            return
        
        # if one or more somas were found, get the volume and side length checks
        max_side_ratio = [np.max(sm.side_length_ratios(m)) for m in total_soma_list]
        bbox_volume_ratio = [sm.soma_volume_ratio(m) for m in total_soma_list]
        dicts_to_insert = []


        for i, (current_soma, soma_sdf, sz_ratio, vol_ratio) in enumerate(
                zip(total_soma_list, total_soma_list_sdf, max_side_ratio, bbox_volume_ratio)):
            print("Trying to write off file")
            """ Currently don't need to export the meshes
            current_soma.export(f"{key['segment_id']}/{key['segment_id']}_soma_{i}.off")
            """
            # centroid in nm; converted to voxel coordinates for the centroid_{x,y,z} columns
            auto_prediction_center_nm = np.mean(current_soma.vertices, axis=0)
            auto_prediction_center = (auto_prediction_center_nm / hu.voxel_to_nm_scaling).astype("int")
            print(f"Predicted Coordinates are {auto_prediction_center}")
            max_hole_length = tu.largest_hole_length(current_soma)
            if max_hole_length is not None:
                max_hole_length = np.round(max_hole_length,3)
            
            returned_file_path = tu.write_h5_file(
                vertices=current_soma.vertices,
                faces=current_soma.faces,
                segment_id=segment_id,
                filename=f'{segment_id}_soma_{i+1}.h5',
                filepath=str(hdju.external_path_mesh)
            )


            divisor = 1_000_000  # nm^2 -> um^2 for the surface area (mesh vertices are in nm)
            insert_dict = dict(key,
                               soma_index=i+1,
                               centroid_x=auto_prediction_center[0],
                               centroid_y=auto_prediction_center[1],
                               centroid_z=auto_prediction_center[2],
                               centroid_x_nm=auto_prediction_center_nm[0],
                               centroid_y_nm=auto_prediction_center_nm[1],
                               centroid_z_nm=auto_prediction_center_nm[2],
                               n_vertices=len(current_soma.vertices),
                               n_faces=len(current_soma.faces),
                               mesh=returned_file_path,
                               multiplicity=len(total_soma_list),
                               sdf=np.round(soma_sdf, 3),
                               volume=np.round(current_soma.convex_hull.volume / 1_000_000_000, 3),  # nm^3 -> um^3
                               surface_area=np.round(current_soma.area / divisor, 3),
                               max_side_ratio=np.round(sz_ratio, 3),
                               bbox_volume_ratio=np.round(vol_ratio, 3),
                               max_hole_length=max_hole_length,
                               run_time=np.round(run_time, 4),
                               mesh_fragment_method=mesh_fragment_method_soma,
                               )



            dicts_to_insert.append(insert_dict)
            
            
        
        self.Glia.insert1(glia_key,
                          allow_direct_insert=True,
                          ignore_extra_fields=True,
                          skip_duplicates=True)
        self.GliaObject.insert1(glia_key,
                                allow_direct_insert=True,
                                ignore_extra_fields=True,
                                skip_duplicates=True)

        self.Nuclei.insert1(nuclei_key,
                            allow_direct_insert=True,
                            ignore_extra_fields=True,
                            skip_duplicates=True)
        self.NucleiObject.insert1(nuclei_key,
                                  allow_direct_insert=True,
                                  ignore_extra_fields=True,
                                  skip_duplicates=True)

        self.Soma.insert(dicts_to_insert,
                         allow_direct_insert=True,
                         ignore_extra_fields=True,
                         skip_duplicates=True)
        self.SomaObject.insert(dicts_to_insert,
                               allow_direct_insert=True,
                               ignore_extra_fields=True,
                               skip_duplicates=True)
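
# The insert flags used above are standard DataJoint options:
# `ignore_extra_fields` drops dict keys that are not columns of the table,
# `skip_duplicates` skips rows whose primary key already exists, and
# `allow_direct_insert` permits inserting into an auto-populated table
# outside of populate(). A minimal sketch with a hypothetical manual table
# (schema name is made up; requires a configured DataJoint connection):
import datajoint as dj

sandbox_schema = dj.schema("sandbox")  # hypothetical schema name

@sandbox_schema
class InsertFlagDemo(dj.Manual):
    definition = """
    segment_id : bigint
    ---
    n_faces : int
    """

row = dict(segment_id=1, n_faces=10, not_a_column="ignored")
InsertFlagDemo.insert1(row, ignore_extra_fields=True)  # extra key dropped
InsertFlagDemo.insert1(row, ignore_extra_fields=True,
                       skip_duplicates=True)           # duplicate skipped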

def collect_global_parameters_and_attributes_by_data_type(
        module,
        data_type,
        include_default=True,
        algorithms=None,
        output_types=None,
        algorithms_only=None,
        verbose=False):
    """
    PUrpose: To compile the dictionary to either
    set or output
    
    """
    if algorithms is not None:
        algorithms = nu.convert_to_array_like(algorithms)
    else:
        algorithms = []

    if output_types is None:
        output_types = output_types_global
    else:
        output_types = nu.convert_to_array_like(output_types)

    p_list = dict()

    if include_default and data_type != "default":
        total_data_types = ["default", data_type]
    else:
        total_data_types = [data_type]

    for dict_type in output_types:
        p_list[dict_type] = []
        for data_type in total_data_types:

            dict_name = f"{dict_type}_dict_{data_type}"

            if not algorithms_only:
                try:
                    curr_dict = getattr(module, dict_name).copy()
                except AttributeError:
                    if verbose:
                        print(f"Unknown dict_name = {dict_name}")
                else:
                    if verbose:
                        print(f"Collecting {dict_name}")
                        print(f"curr_dict = {curr_dict}")
                    p_list[dict_type].append(curr_dict)

            for alg in algorithms:
                #                 if data_type == "default":
                #                     break
                dict_name = f"{dict_type}_dict_{data_type}_{alg}"
                try:
                    curr_dict = getattr(module, dict_name).copy()
                except AttributeError:
                    if verbose:
                        print(f"Unknown dict_name = {dict_name}")
                else:
                    if verbose:
                        print(f"Collecting {dict_name}")
                        print(f"curr_dict = {curr_dict}")
                    p_list[dict_type].append(curr_dict)

    #compiling all the dicts
    if "global_parameters" in p_list:
        global_parameters_dict = gu.merge_dicts(p_list["global_parameters"])
    else:
        global_parameters_dict = {}

    if "attributes" in p_list:
        attributes_dict = gu.merge_dicts(p_list["attributes"])
    else:
        attributes_dict = {}

    return global_parameters_dict, attributes_dict
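
# For reference, the collector above discovers module-level dictionaries by
# the naming convention {output_type}_dict_{data_type}, with an optional
# _{algorithm} suffix. A minimal usage sketch with a hypothetical module and
# made-up parameter names (later dicts win on merge):
import types

fake_module = types.SimpleNamespace(
    global_parameters_dict_default={"spine_threshold": 100},
    global_parameters_dict_h01={"spine_threshold": 250},
    global_parameters_dict_h01_head_neck_shaft={"head_width": 80},
    attributes_dict_default={"voxel_to_nm_scaling": (8, 8, 33)},
)

params, attrs = collect_global_parameters_and_attributes_by_data_type(
    fake_module,
    data_type="h01",
    algorithms=["head_neck_shaft"],
    output_types=["global_parameters", "attributes"],
)
# params -> {"spine_threshold": 250, "head_width": 80}
# attrs  -> {"voxel_to_nm_scaling": (8, 8, 33)}
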
def output_global_parameters_and_attributes_from_current_data_type(
        module,
        algorithms=None,
        verbose=True,
        lowercase=True,
        output_types=("global_parameters"),
        include_default=True,
        algorithms_only=False,
        abbreviate_keywords=False,
        **kwargs):
    if output_types is None:
        output_types = output_types_global

    module_list = nu.convert_to_array_like(module)
    total_dict_list = []
    for module in module_list:

        module, algorithms_local, algorithms_only_local = modu.extract_module_algorithm(
            module,
            return_parameters=True,
        )

        if algorithms_local is None:
            algorithms_local = algorithms

        if algorithms_only_local is None:
            algorithms_only_local = algorithms_only

        data_type = module.data_type

        if algorithms_local is None:
            algorithms_local = module.algorithms

        if verbose:
            print(
                f"module: {module.__name__} data_type set to {data_type}, algorithms = {algorithms_local}"
            )

        (global_parameters_dict, attributes_dict
         ) = modu.collect_global_parameters_and_attributes_by_data_type(
             module=module,
             data_type=data_type,
             algorithms=algorithms_local,
             include_default=include_default,
             output_types=output_types,
             algorithms_only=algorithms_only_local,
             verbose=verbose)

        total_dict = gu.merge_dicts([global_parameters_dict, attributes_dict])

        if lowercase:
            if isinstance(total_dict, dsu.DictType):
                total_dict = total_dict.lowercase()
            else:
                total_dict = {k.lower(): v for k, v in total_dict.items()}

        total_dict_list.append(total_dict)

    final_dict = gu.merge_dicts(total_dict_list)

    return final_dict
Example 5
nst.set_global_parameters_and_attributes_by_data_type(data_type)
ctu.set_global_parameters_and_attributes_by_data_type(data_type)
#ctu.output_global_parameters_and_attributes_from_current_data_type()
au.set_global_parameters_and_attributes_by_data_type(data_type)
syu.set_global_parameters_and_attributes_by_data_type(data_type)
spu.set_global_parameters_and_attributes_by_data_type(
    data_type,
    ["head_neck_shaft"],
)

kwargs_dict = gu.merge_dicts([
    ctu.output_global_parameters_and_attributes_from_current_data_type(),
    au.output_global_parameters_and_attributes_from_current_data_type(),
    spu.output_global_parameters_and_attributes_from_current_data_type(
        algorithms=["head_neck_shaft"],
        include_default=True,
        algorithms_only=True),
    syu.output_global_parameters_and_attributes_from_current_data_type()
])

import dict_utils as dictu
kwargs_dict, key_mapping = dictu.abbreviate_str_keys(kwargs_dict,
                                                     verbose=True,
                                                     verbose_key_change=False,
                                                     max_phrase_len=25,
                                                     return_key_mapping=True)
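
# The returned key_mapping makes the abbreviation reversible. A minimal
# sketch, assuming the mapping goes original key -> abbreviated key
# (hypothetical entries):
example_mapping = {"skeleton_endpoint_nullification_distance": "skel_endpt_null_dist"}

inverse_mapping = {abbrev: orig for orig, abbrev in example_mapping.items()}
restored = {inverse_mapping.get(k, k): v for k, v in kwargs_dict.items()}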

# In[15]:

# h01auto.DecompositionCellTypeMethod.drop()