def make(self,key):
        """
        Purpose: To decimate a mesh by a perscribed
        decimation ratio and algorithm
        
        Pseudocode: 
        1) Get the current mesh
        2) Decimationt he current mesh with the corresponding decimate method
        3) Get n_verts,n_faces and save as h5py file
        
        """
        
        segment_id = key["segment_id"]
        decimation_method = key["decimation_method"]
        mesh_fragment_method_set = key["mesh_fragment_method_set"]

        ver = key["ver"]
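        # NOTE: `verbose` (used just below) and `filename_append` (used when
        # saving the glia/nuclei files) are not defined in this snippet; they
        # are assumed to be module-level configuration values in the original
        # source.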
        
        if verbose:
            print(f"\n\n--Working on {segment_id}: (decimation_method = {decimation_method})")
        
        
        
        
        param_tables = MeshFragmentMethodSet.restrict_one_part_with_hash(mesh_fragment_method_set)
        soma_kwargs = MeshFragmentMethod.restrict_one_part_with_hash(
            (param_tables & "table_type = 'Soma'").fetch1("mesh_fragment_method")).fetch1()
        glia_kwargs = MeshFragmentMethod.restrict_one_part_with_hash(
            (param_tables & "table_type = 'Glia'").fetch1("mesh_fragment_method")).fetch1()
        nuclei_kwargs = MeshFragmentMethod.restrict_one_part_with_hash(
            (param_tables & "table_type = 'Nuclei'").fetch1("mesh_fragment_method")).fetch1()
        all_kwargs = gu.merge_dicts([glia_kwargs,nuclei_kwargs,soma_kwargs])
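        # merge_dicts is assumed to merge left-to-right, so any keys shared
        # across the glia/nuclei/soma parameter dicts would be overridden by
        # the later (soma) entries.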
        
        mesh_fragment_method_soma = (param_tables & dict(table_type='Soma')).fetch1("mesh_fragment_method")
        mesh_fragment_method_nuclei = (param_tables & dict(table_type='Nuclei')).fetch1("mesh_fragment_method")
        mesh_fragment_method_glia = (param_tables & dict(table_type='Glia')).fetch1("mesh_fragment_method")
            
        
        st = time.time()
        #1) Get the current mesh
        new_mesh = (h01mor.Decimation.Object() & key).fetch1("mesh")
        
        if verbose:
            print(f"Fetching Decimation time: {time.time()-st}")

        
        current_mesh_verts,current_mesh_faces = new_mesh.vertices,new_mesh.faces


        (total_soma_list, 
         run_time, 
         total_soma_list_sdf,
         glia_pieces,
         nuclei_pieces) = sm.extract_soma_center(
                            segment_id,
                            current_mesh_verts,
                            current_mesh_faces,
            return_glia_nuclei_pieces=True,
            verbose = True,
            **all_kwargs
        )
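        # Based on how the outputs are used below: total_soma_list holds the
        # extracted soma meshes, total_soma_list_sdf their sdf values, and
        # glia_pieces/nuclei_pieces the mesh fragments filtered out as glia
        # and nuclei.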
        
        # -------- 1/9 Addition: Going to save off the glia and nuclei pieces ----------- #
        """
        Psuedocode:
        For both glia and nuclie pieces
        1) If the length of array is greater than 0 --> combine the mesh and map the indices to original mesh
        2) If not then just put None     
        """
        orig_mesh = new_mesh
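        # tu.original_mesh_faces_map is assumed to return the indices of
        # orig_mesh faces covered by the combined submesh, so only face
        # indices (not full meshes) need to be written to disk.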
        
        if len(glia_pieces)>0:
            glia_faces = tu.original_mesh_faces_map(orig_mesh,tu.combine_meshes(glia_pieces))
            n_glia_faces = len(glia_faces)
        else:
            glia_faces = None
            n_glia_faces = 0
            
        if len(nuclei_pieces)>0:
            nuclei_faces = tu.original_mesh_faces_map(orig_mesh,tu.combine_meshes(nuclei_pieces))
            n_nuclei_faces = len(nuclei_faces)
        else:
            nuclei_faces = None
            n_nuclei_faces = 0
            
        # --------- saving the nuclei and glia files ---------
        glia_path,nuclei_path = hdju.save_glia_nuclei_files(
            glia_faces=glia_faces,
            nuclei_faces=nuclei_faces,
            segment_id=segment_id,
            filename_append = filename_append)
        
        print(f" glia_path = {glia_path} \n nuclei_path = {nuclei_path}")
            
            
            
        
        glia_key = dict(key,
                        mesh_fragment_method = mesh_fragment_method_glia,
                        n_faces = n_glia_faces,
                        faces=glia_path)
        
        nuclei_key = dict(key,
                        mesh_fragment_method = mesh_fragment_method_nuclei,
                        n_faces = n_nuclei_faces,
                        faces=nuclei_path)
                        
        
        # ---------------- End of 1/9 Addition --------------------------------- #
        
        
        self.insert1(key,
                     skip_duplicates=True,
                     ignore_extra_fields=True,
                     allow_direct_insert=True)
        
        
        
        print(f"Run time was {run_time} \n    total_soma_list = {total_soma_list}"
             f"\n    with sdf values = {total_soma_list_sdf}")
        
        # check whether the soma list is empty (no soma was found)
        if len(total_soma_list) <= 0:
            print("There were no somas found for this mesh so just writing empty data")
            

            returned_file_path = tu.write_h5_file(
                vertices=np.array([]),
                faces=np.array([]),
                segment_id=segment_id,
                filename=f'{segment_id}_soma_0.h5',
                filepath=str(hdju.external_path_mesh),
            )

            
            
            insert_dict = dict(key,
                              soma_index=0,
                              centroid_x=None,
                               centroid_y=None,
                               centroid_z=None,
                               centroid_x_nm=None,
                               centroid_y_nm=None,
                               centroid_z_nm=None,
                               #distance_from_prediction=None,
                               #prediction_matching_index = None,
                               n_vertices=0,
                               n_faces=0,
                               mesh=returned_file_path,
                               multiplicity=0,
                               sdf = None,
                               volume = None,
                               surface_area = None,
                               max_side_ratio = None,
                               bbox_volume_ratio = None,
                               max_hole_length=None,
                               run_time=np.round(run_time,4),
                               mesh_fragment_method = mesh_fragment_method_soma,
                              )
            
            #raise Exception("to prevent writing because none were found")
            self.Soma.insert1(insert_dict,
                              allow_direct_insert=True,
                              ignore_extra_fields=True,
                              skip_duplicates=True)
            self.SomaObject.insert1(insert_dict,
                                    allow_direct_insert=True,
                                    ignore_extra_fields=True,
                                    skip_duplicates=True)
            
            return
        
        # if one or more somas were found, compute the volume and side-length checks
        max_side_ratio = [np.max(sm.side_length_ratios(m)) for m in total_soma_list]
        bbox_volume_ratio = [sm.soma_volume_ratio(m) for m in total_soma_list]
        dicts_to_insert = []


        for i,(current_soma,soma_sdf,sz_ratio,vol_ratio) in enumerate(zip(total_soma_list,total_soma_list_sdf,max_side_ratio,bbox_volume_ratio)):
            print("Trying to write off file")
            """ Currently don't need to export the meshes
            current_soma.export(f"{key['segment_id']}/{key['segment_id']}_soma_{i}.off")
            """
            auto_prediction_center_nm = np.mean(current_soma.vertices,axis=0) 
            auto_prediction_center = auto_prediction_center_nm / hu.voxel_to_nm_scaling
            auto_prediction_center = auto_prediction_center.astype("int")
            print(f"Predicted Coordinates are {auto_prediction_center}")
            max_hole_length = tu.largest_hole_length(current_soma)
            if max_hole_length is not None:
                max_hole_length = np.round(max_hole_length,3)
            
            returned_file_path = tu.write_h5_file(
                vertices=current_soma.vertices,
                faces=current_soma.faces,
                segment_id=segment_id,
                filename=f'{segment_id}_soma_{i+1}.h5',
                filepath=str(hdju.external_path_mesh),
            )


            divisor=1000000
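            # Assuming the mesh coordinates are in nanometers (consistent with
            # the *_nm centroid fields above): volume / 1e9 converts
            # nm^3 -> um^3, and area / divisor (1e6) converts nm^2 -> um^2.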
            insert_dict = dict(key,
                              soma_index=i+1,
                              centroid_x=auto_prediction_center[0],
                               centroid_y=auto_prediction_center[1],
                               centroid_z=auto_prediction_center[2],
                               centroid_x_nm=auto_prediction_center_nm[0],
                               centroid_y_nm=auto_prediction_center_nm[1],
                               centroid_z_nm=auto_prediction_center_nm[2],
                               n_vertices = len(current_soma.vertices),
                               n_faces = len(current_soma.faces),
                               mesh=returned_file_path,
                               multiplicity=len(total_soma_list),
                               sdf = np.round(soma_sdf,3),
                               volume = np.round(current_soma.convex_hull.volume/1000000000,3),
                               surface_area = np.round(current_soma.area/divisor,3),
                               max_side_ratio = np.round(sz_ratio,3),
                               bbox_volume_ratio = np.round(vol_ratio,3),
                               max_hole_length = max_hole_length,
                               run_time=np.round(run_time,4),
                               mesh_fragment_method = mesh_fragment_method_soma,
                              )



            dicts_to_insert.append(insert_dict)
            
            
        
        self.Glia.insert1(glia_key,
                          allow_direct_insert=True,
                          ignore_extra_fields=True,
                          skip_duplicates=True)
        self.GliaObject.insert1(glia_key,
                                allow_direct_insert=True,
                                ignore_extra_fields=True,
                                skip_duplicates=True)

        self.Nuclei.insert1(nuclei_key,
                            allow_direct_insert=True,
                            ignore_extra_fields=True,
                            skip_duplicates=True)
        self.NucleiObject.insert1(nuclei_key,
                                  allow_direct_insert=True,
                                  ignore_extra_fields=True,
                                  skip_duplicates=True)

        self.Soma.insert(dicts_to_insert,
                         allow_direct_insert=True,
                         ignore_extra_fields=True,
                         skip_duplicates=True)
        self.SomaObject.insert(dicts_to_insert,
                               allow_direct_insert=True,
                               ignore_extra_fields=True,
                               skip_duplicates=True)
Example 2
def combine_branches(
    branch_upstream,
    branch_downstream,
    add_skeleton=True,
    add_labels=False,
    verbose=True,
    common_endpoint=None,
    return_jitter_segment=False,
):
    """
    Purpose: To combine two branch objects together
    WHERE IT IS ASSUMED THEY SHARE ONE COMMON ENDPOINT
    
    
    Ex: 
    import branch_utils as bu

    branch_upstream = copy.deepcopy(neuron_obj[0][upstream_branch])
    branch_downstream= copy.deepcopy(neuron_obj[0][downstream_branch])

    branch_upstream.labels = ["hellow"]
    branch_downstream.labels = ["my","new","labels"]

    b_out = bu.combine_branches(
        branch_upstream,
        branch_downstream,
        verbose = True,
        add_skeleton = False,
        add_labels = False
    )
    """

    debug_time = False
    st = time.time()

    b_up = branch_upstream
    b_d = branch_downstream

    b_obj = copy.deepcopy(b_up)
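    # Start from a deep copy of the upstream branch so the merged object can
    # be mutated without side effects on the caller's branch.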

    if debug_time:
        print(f"Copying branch: {time.time() - st}")
        st = time.time()

    # -- working on the non-optional features
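    # tu.combine_meshes appears to append b_d's faces after b_up's, so any
    # face index referring to the downstream mesh must be shifted by
    # len(b_up.mesh.faces).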
    face_offset = len(b_up.mesh.faces)
    b_obj.mesh = tu.combine_meshes([b_up.mesh, b_d.mesh])
    if verbose:
        print(f"b_up.mesh.faces = {b_up.mesh.faces.shape}")
        print(f"b_d.mesh.faces = {b_d.mesh.faces.shape}")
        print(f"b_obj.mesh  = {b_obj.mesh.faces.shape}")

    if debug_time:
        print(f"Combining meshes: {time.time() - st}")
        st = time.time()

    b_obj.mesh_face_idx = np.concatenate(
        [b_up.mesh_face_idx, b_d.mesh_face_idx])

    # if verbose:
    #     print(f"b_up.mesh_face_idx = {b_up.mesh_face_idx}")
    #     print(f"b_d.mesh_face_idx = {b_d.mesh_face_idx}")
    #     print(f"b_obj.mesh_face_idx= {b_obj.mesh_face_idx}")

    # --------- setting the spine info -----------
    if b_d.spines is not None:
        if b_d.spines_obj is not None:
            d_spines_obj = [
                spu.adjust_obj_with_face_offset(k, face_offset=face_offset)
                for k in b_d.spines_obj
            ]
        else:
            d_spines_obj = []

        for sp_attr in ["spines", "spines_obj", "spines_volume"]:
            if getattr(b_obj, sp_attr) is None:
                setattr(b_obj, sp_attr, [])

            if sp_attr == "spines_obj":
                curr_value = d_spines_obj
            else:
                curr_value = getattr(b_d, sp_attr)

            setattr(b_obj, sp_attr, getattr(b_obj, sp_attr) + curr_value)

    if debug_time:
        print(f"Spine adjustment: {time.time() - st}")
        st = time.time()

    if verbose:
        print(f"Total number of spines = {b_obj.n_spines}")

    if debug_time:
        print(f"computing number of spines : {time.time() - st}")
        st = time.time()

    # --------- setting the bouton and spines info -----------
    for k in [
            'boutons',
            'boutons_cdfs',
            'boutons_volume',
            "head_neck_shaft_idx",
    ]:
        if debug_time:
            print(f"combining {k}: {time.time() - st}")
            st = time.time()
        setattr(b_obj, k, combine_attr_lists(getattr(b_up, k), getattr(b_d, k)))

    if debug_time:
        print(f"combining lists: {time.time() - st}")
        st = time.time()

    #------ adjusting the synapses ------------------
    synapses_to_add = [
        syu.adjust_obj_with_face_offset(k, face_offset) for k in b_d.synapses
    ]

    if debug_time:
        print(f"synapses offset: {time.time() - st}")
        st = time.time()

    # ---------------- PARTS TO RECALCULATE
    b_obj._mesh_volume = None
    b_obj.mesh_volume
    if verbose:
        print(f"b_obj.mesh_volume = {b_obj.mesh_volume}")

    # ---------------- Do skeleton combination ------------------------------
    try:
        match_idx_1, match_idx_2 = sk.matching_endpoint_singular(
            b_obj.endpoints, b_d.endpoints, return_indices=True, verbose=False)
        jitter_segment = None
    except Exception as e:
        if common_endpoint is None:
            raise Exception(f"{e}")

        match_idx_1 = nu.matching_row_index(b_obj.endpoints, common_endpoint)
        match_idx_2 = nu.matching_row_index(b_d.endpoints, common_endpoint)
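        # Fallback: no exactly-matching endpoint was found, so the common
        # endpoint supplied by the caller is used instead, and the far
        # (non-common) endpoint of b_d is presumably perturbed by
        # add_jitter_to_endpoint.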

        jitter_segment = bu.add_jitter_to_endpoint(
            b_d, b_d.endpoints[1 - match_idx_2], verbose=verbose)

        print(f"b_d.endpoints = {b_d.endpoints}")

    if add_skeleton:
        if verbose:
            print(f"Adding skeleton of downstream branch")

        sk_len_weights = [b_up.skeletal_length, b_d.skeletal_length]
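        # If b_d's matching endpoint is its endpoint 1, flip the downstream
        # skeleton (and its per-segment width arrays) so the two skeletons
        # join head-to-tail before stacking.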

        if match_idx_2 == 1:
            skeleton_add = sk.flip_skeleton(b_d.skeleton)
            width_array_add = {
                k: np.flip(v)
                for k, v in b_d.width_array.items()
            }
        else:
            skeleton_add = b_d.skeleton
            width_array_add = b_d.width_array

        b_obj.skeleton = sk.stack_skeletons([b_up.skeleton, skeleton_add])
        b_obj.endpoints[match_idx_1] = b_d.endpoints[1 - match_idx_2]

        if verbose:
            print(f"Original lengths = {sk_len_weights}")
            print(f"New skeleton length = {b_obj.skeletal_length}")
            print(
                f"New endpoints calculated = {sk.find_skeleton_endpoint_coordinates(b_obj.skeleton)}"
            )
            print(f"Adjusted endpoints = {b_obj.endpoints}")

        b_obj.width_array = {
            k: np.concatenate([v, width_array_add[k]])
            for k, v in b_obj.width_array.items()
        }

        b_obj.width = nu.weighted_average([b_up.width, b_d.width],
                                          sk_len_weights)
        b_obj.width_new = {
            k: nu.weighted_average([v, b_d.width_new[k]], sk_len_weights)
            for k, v in b_obj.width_new.items()
        }

    else:
        if verbose:
            print(f"Not adding skeleton")
        # adjust the closest skeleton coordinates of all the synapses to add
        for sy in synapses_to_add:
            sy.closest_sk_coordinate = sk.closest_skeleton_coordinate(
                b_obj.skeleton, sy.closest_face_coordinate)

    if debug_time:
        print(f"Skeleton adjustment: {time.time() - st}")
        st = time.time()

    # ------------ Parts to recalculate ----------
    b_obj._skeleton_graph = None
    b_obj._endpoints_nodes = None
    b_obj.skeleton_graph
    b_obj.endpoints_nodes

    if debug_time:
        print(f"Ccalculating skeleton graph and endpoints: {time.time() - st}")
        st = time.time()

    # ----- adjusting the synapses --------
    if verbose:
        print(
            f"--- Adjusting the synapse features based on new skeleton additions ------"
        )

    b_obj.synapses = combine_attr_lists(b_obj.synapses, synapses_to_add)
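    # Recompute per-synapse distances on the merged branch; down_idx marks
    # which endpoint of the combined skeleton is the downstream side.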

    for syn in b_obj.synapses:
        syu.calculate_endpoints_dist(b_obj, syn)
        syu.calculate_upstream_downstream_dist_from_down_idx(
            syn, down_idx=match_idx_1)

    if debug_time:
        print(f"synapse distances: {time.time() - st}")
        st = time.time()

    # --------- adding labels -------
    if add_labels:
        b_obj.labels += b_d.labels

    if return_jitter_segment:
        return b_obj, jitter_segment
    else:
        return b_obj
Example 3
def make(self,key):
        """
        Pseudocode:
        1) Compute all of the soma information from the mesh
        2) Save the mesh as an h5 file
        3) Store the saved path as the decomposition part of the dictionary and erase the vertices and faces
        4) Insert

        """
        
        #get the mesh data
        print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----")
        print(key)
        new_mesh = (minnie.Decimation() & key).fetch1("mesh")
        current_mesh_verts,current_mesh_faces = new_mesh.vertices,new_mesh.faces

        segment_id = key["segment_id"]

        (total_soma_list, 
         run_time, 
         total_soma_list_sdf,
         glia_pieces,
         nuclei_pieces) = sm.extract_soma_center(
                            segment_id,
                            current_mesh_verts,
                            current_mesh_faces,
            return_glia_nuclei_pieces=True,
        )
        
        # -------- 1/9 Addition: Going to save off the glia and nuclei pieces ----------- #
        """
        Psuedocode:
        For both glia and nuclie pieces
        1) If the length of array is greater than 0 --> combine the mesh and map the indices to original mesh
        2) If not then just put None     
        """
        orig_mesh = trimesh.Trimesh(vertices=current_mesh_verts,
                                   faces=current_mesh_faces)
        
        if len(glia_pieces)>0:
            glia_faces = tu.original_mesh_faces_map(orig_mesh,tu.combine_meshes(glia_pieces))
            n_glia_faces = len(glia_faces)
        else:
            glia_faces = None
            n_glia_faces = 0
            
        if len(nuclei_pieces)>0:
            nuclei_faces = tu.original_mesh_faces_map(orig_mesh,tu.combine_meshes(nuclei_pieces))
            n_nuclei_faces = len(nuclei_faces)
        else:
            nuclei_faces = None
            n_nuclei_faces = 0
            
        # --------- saving the nuclei and glia files ---------
        glia_path, nuclei_path = du.save_glia_nuclei_files(
            glia_faces=glia_faces,
            nuclei_faces=nuclei_faces,
            segment_id=segment_id)
        
        print(f" glia_path = {glia_path} \n nuclei_path = {nuclei_path}")
            
        glia_nuclei_key = dict(key,
                               ver=current_version,
                               n_glia_faces=n_glia_faces,
                               #glia_faces = glia_faces,
                               glia_faces = glia_path,
                               n_nuclei_faces = n_nuclei_faces,
                               #nuclei_faces = nuclei_faces
                               nuclei_faces = nuclei_path,
                              )
        
        NeuronGliaNuclei.insert1(glia_nuclei_key,replace=True)
        print(f"Finished saving off glia and nuclei information : {glia_nuclei_key}")
        
        # ---------------- End of 1/9 Addition --------------------------------- #
        
        
        
        print(f"Run time was {run_time} \n    total_soma_list = {total_soma_list}"
             f"\n    with sdf values = {total_soma_list_sdf}")
        
        # check whether the soma list is empty (no soma was found)
        if len(total_soma_list) <= 0:
            print("There were no somas found for this mesh so just writing empty data")
            

            returned_file_path = tu.write_h5_file(
                vertices=np.array([]),
                faces=np.array([]),
                segment_id=segment_id,
                filename=f'{segment_id}_0.h5',
                filepath=str(du.get_somas_path()),
            )

            
            
            insert_dict = dict(key,
                              soma_index=0,
                               ver=current_version,
                              centroid_x=None,
                               centroid_y=None,
                               centroid_z=None,
                               #distance_from_prediction=None,
                               #prediction_matching_index = None,
                               n_vertices=0,
                               n_faces=0,
                               mesh=returned_file_path,
                               multiplicity=0,
                               sdf = None,
                               volume = None,
                               max_side_ratio = None,
                               bbox_volume_ratio = None,
                               max_hole_length=None,
                               run_time=run_time
                              )
            
            #raise Exception("to prevent writing because none were found")
            self.insert1(insert_dict,skip_duplicates=True)
            return
        
        # if one or more somas were found, compute the volume and side-length checks
        max_side_ratio = [np.max(sm.side_length_ratios(m)) for m in total_soma_list]
        bbox_volume_ratio = [sm.soma_volume_ratio(m) for m in total_soma_list]
        dicts_to_insert = []


        for i,(current_soma,soma_sdf,sz_ratio,vol_ratio) in enumerate(zip(total_soma_list,total_soma_list_sdf,max_side_ratio,bbox_volume_ratio)):
            print("Trying to write off file")
            """ Currently don't need to export the meshes
            current_soma.export(f"{key['segment_id']}/{key['segment_id']}_soma_{i}.off")
            """
            auto_prediction_center = np.mean(current_soma.vertices,axis=0) / np.array([4,4,40])
            auto_prediction_center = auto_prediction_center.astype("int")
            print(f"Predicted Coordinates are {auto_prediction_center}")
            max_hole_length = tu.largest_hole_length(current_soma)
            # Guard against a None hole length before rounding (matching the
            # first example); np.round(None) would raise a TypeError.
            if max_hole_length is not None:
                max_hole_length = np.round(max_hole_length, 3)

            returned_file_path = tu.write_h5_file(
                vertices=current_soma.vertices,
                faces=current_soma.faces,
                segment_id=segment_id,
                filename=f'{segment_id}_{i}.h5',
                filepath=str(du.get_somas_path()),
            )



            insert_dict = dict(key,
                              soma_index=i+1,
                               ver=current_version,
                              centroid_x=auto_prediction_center[0],
                               centroid_y=auto_prediction_center[1],
                               centroid_z=auto_prediction_center[2],
                               n_vertices = len(current_soma.vertices),
                               n_faces = len(current_soma.faces),
                               mesh=returned_file_path,
                               multiplicity=len(total_soma_list),
                               sdf = np.round(soma_sdf,3),
                               volume = current_soma.convex_hull.volume/1000000000,
                               max_side_ratio = np.round(sz_ratio,3),
                               bbox_volume_ratio = np.round(vol_ratio,3),
                               max_hole_length = max_hole_length,
                               run_time=np.round(run_time,4)
                              )



            dicts_to_insert.append(insert_dict)
        self.insert(dicts_to_insert,skip_duplicates=True)
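
# Minimal usage sketch, assuming these `make` methods live on DataJoint
# computed tables (`SomaExtraction` is a hypothetical table name; DataJoint's
# `populate` invokes `make` once per pending key):
#
#     SomaExtraction.populate(reserve_jobs=True, suppress_errors=True)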