def make(self, key):
        """Segment one whole-neuron mesh with CGAL and insert the results.

        For every (smoothness, clusters) combination in the sweep lists below:
        write the neuron's full mesh to a temporary .off file, run the external
        CGAL surface-mesh segmentation on it, parse the label/SDF csv files the
        segmentation writes, insert one summary row into this table, and clean
        up the intermediate files.  Runs where CGAL produced no output are
        logged and skipped.
        """

        # NOTE(review): this imported name is never used below — the actual
        # call later in the loop goes through `csm.cgal_segmentation`
        # (presumably a module alias imported at file level; confirm).
        from cgal_Segmentation_Module import cgal_segmentation
        #key passed to function is just dictionary with the following attributes
        """segmentation
        segment_id
        decimation_ratio
        """

        # Earlier, larger parameter sweeps kept for reference:
        #clusters_default = 18
        #smoothness_list = [0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8]
        #cluster_list = [2,3,4,5,6]

        # Current sweep is a single configuration.
        smoothness_list = [0.2]
        cluster_list = [3]

        # Fetch this neuron's cleansed-mesh row (restricted by the key).
        entire_neuron = (ta3p100.CleansedMesh35 & key).fetch1()
        neuron_ID = key["segment_id"]
        component_size = int(entire_neuron["n_triangles"])

        print("inside make function with " + str(neuron_ID))

        # NOTE(review): despite the name, this is a *list* accumulating the
        # per-run row dicts (only used locally; rows are inserted one by one).
        total_dict = list()

        for smoothness in smoothness_list:

            for clusters in cluster_list:
                start_time = time.time()

                #print(str(entire_neuron["segment_id"]) + " cluster:" + str(clusters)
                #      + " smoothness:" + str(smoothness))

                #generate the off file for each component
                #what need to send them:
                """----From cleansed Mesh---
                vertices
                triangles
                ----From component table--
                n_vertex_indices
                n_triangle_indices
                vertex_indices
                triangle_indices"""

                # Cache the whole-neuron vertices/triangles on the instance so
                # repeated calls for the same segment do not refetch from the DB.
                if key['segment_id'] not in self.whole_neuron_dicts:
                    self.whole_neuron_dicts[key['segment_id']] = (
                        ta3p100.CleansedMesh35 & 'decimation_ratio=0.35'
                        & dict(segment_id=key['segment_id'])).fetch1()

                # Write the mesh to disk as an .off file for the CGAL binary;
                # returns the path (without extension) and the bare file name.
                path_and_filename, off_file_name = write_Whole_Neuron_Off_file(
                    neuron_ID,
                    self.whole_neuron_dicts[key['segment_id']]["vertices"],
                    self.whole_neuron_dicts[key['segment_id']]["triangles"])

                #print("About to start segmentation")

                #will have generated the component file by now so now need to run the segmentation
                csm.cgal_segmentation(path_and_filename, clusters, smoothness)

                #generate the name of the files
                # Pad e.g. "0.2" -> "0.20" — presumably to match the smoothness
                # formatting in the file names the CGAL step writes (confirm).
                smoothness_str = str(smoothness)
                if (len(smoothness_str) < 4):
                    smoothness_str = smoothness_str + "0"

                # CGAL output naming: <path>-cgal_<clusters>_<smoothness>.csv
                # holds per-face group labels; the *_sdf.csv holds SDF values.
                cgal_file_name = path_and_filename + "-cgal_" + str(
                    clusters) + "_" + str(smoothness_str)
                group_csv_cgal_file = cgal_file_name + ".csv"
                sdf_csv_file_name = cgal_file_name + "_sdf.csv"

                #check if file actually exists
                import os
                exists = os.path.isfile(group_csv_cgal_file)

                if (not exists):
                    # CGAL produced no output: log the failure and remove the
                    # temporary .off file so it does not accumulate on disk.
                    print("Segmentation not created for " + str(off_file_name))
                    print("################## " + str(neuron_ID) +
                          " ##################")

                    #delete the off file if it exists:
                    #off_exists = os.path.isfile(path_and_filename)
                    print(path_and_filename + ".off")
                    if os.path.isfile(path_and_filename + ".off"):
                        os.remove(path_and_filename + ".off")
                else:

                    # Parse the one-column group-label csv into ints.
                    with open(group_csv_cgal_file) as f:
                        reader = csv.reader(f)
                        your_list = list(reader)
                    group_list = []
                    for item in your_list:
                        group_list.append(int(item[0]))

                    # Parse the one-column SDF csv into floats.
                    with open(sdf_csv_file_name) as f:
                        reader = csv.reader(f)
                        your_list = list(reader)
                    sdf_list = []
                    for item in your_list:
                        sdf_list.append(float(item[0]))

                    #print(group_list)
                    #print(sdf_list)

                    #now write them to the datajoint table
                    #table columns for ComponentAutoSegmentation: segmentation, segment_id, decimation_ratio, compartment_type, component_index, seg_group, sdf
                    # Row = key + run parameters + raw labels/SDF + SDF summary
                    # statistics; timestamp truncated to "YYYY-MM-DD HH:MM:SS".
                    comp_dict = dict(key,
                                     clusters=clusters,
                                     smoothness=smoothness,
                                     n_triangles=component_size,
                                     seg_group=group_list,
                                     sdf=sdf_list,
                                     median_sdf=np.median(sdf_list),
                                     mean_sdf=np.mean(sdf_list),
                                     third_q=np.percentile(sdf_list, 75),
                                     ninety_perc=np.percentile(sdf_list, 90),
                                     time_updated=str(
                                         datetime.datetime.now())[0:19])

                    total_dict.append(comp_dict)
                    self.insert1(comp_dict, skip_duplicates=True
                                 )  #--> only inserting one at a time

                    #then go and erase all of the files used: the sdf files,
                    real_off_file_name = path_and_filename + ".off"

                    # Clean up all intermediate files for this run.
                    files_to_delete = [
                        group_csv_cgal_file, sdf_csv_file_name,
                        real_off_file_name
                    ]
                    for fl in files_to_delete:
                        if os.path.exists(fl):
                            os.remove(fl)
                        else:
                            print(fl + " file does not exist")

                    print("finished")
                    print("--- %s seconds ---" % (time.time() - start_time))
# --- Code example #2 ---
    def make(self, key):
        """Run CGAL segmentation on one compartment component and insert the row.

        `key` is a DataJoint key dict with: segmentation, segment_id,
        decimation_ratio, compartment_type, component_index.

        Side effects: writes the component to a temporary .off file, runs the
        external CGAL segmentation (csm.cgal_segmentation), inserts one row per
        cluster setting into this table, then deletes the intermediate files.
        Returns early (inserting nothing further) if CGAL produced no output.
        """
        print("key = " + str(key))
        start_time = time.time()

        # CGAL smoothness parameter; the cluster count depends on compartment
        # type/size below (big basal compartments get more clusters).
        smoothness = 0.04
        Apical_Basal_Oblique_default = [12]
        basal_big = [16]

        neuron_ID = key["segment_id"]
        component = (pinky.CompartmentFinal.ComponentFinal & key).fetch1()

        component_id = component["component_index"]
        compartment_type = component["compartment_type"]
        component_size = int(component["n_triangle_indices"])

        print("component_size = " + str(component_size))

        if (compartment_type == "Basal") & (component_size > 160000):
            cluster_list = basal_big
        else:
            cluster_list = Apical_Basal_Oblique_default

        def _read_column(csv_path, cast):
            # Read a one-column csv file, casting the first field of each row.
            with open(csv_path) as f:
                return [cast(row[0]) for row in csv.reader(f)]

        for clusters in cluster_list:
            print(
                str(component["segment_id"]) + " type:" +
                str(component["compartment_type"]) + " index:" +
                str(component["component_index"]) + " cluster:" +
                str(clusters) + " smoothness:" + str(smoothness))

            # Cache the whole-neuron mesh on the instance so multiple
            # components of the same neuron do not refetch it from the DB.
            if key['segment_id'] not in self.whole_neuron_dicts:
                self.whole_neuron_dicts[key['segment_id']] = (
                    pinky.PymeshfixDecimatedExcitatoryStitchedMesh
                    & 'decimation_ratio=0.35'
                    & dict(segment_id=key['segment_id'])).fetch1()

            # Write this component out as an .off file for the CGAL binary;
            # returns the path (without extension) and the bare file name.
            path_and_filename, off_file_name = generate_component_off_file(
                neuron_ID, compartment_type, component_id,
                component["n_vertex_indices"], component["n_triangle_indices"],
                component["vertex_indices"], component["triangle_indices"],
                self.whole_neuron_dicts[key['segment_id']]["vertices"],
                self.whole_neuron_dicts[key['segment_id']]["triangles"])

            print(len(component['vertex_indices']),
                  len(component['triangle_indices']))

            # Run the CGAL surface-mesh segmentation on the generated file.
            csm.cgal_segmentation(path_and_filename, clusters, smoothness)

            # CGAL writes <name>-cgal_<clusters>_<smoothness>.csv (face group
            # labels) and <name>-cgal_..._sdf.csv (shape-diameter values).
            cgal_file_name = path_and_filename + "-cgal_" + str(
                clusters) + "_" + str(smoothness)
            group_csv_cgal_file = cgal_file_name + ".csv"
            sdf_csv_file_name = cgal_file_name + "_sdf.csv"

            try:
                group_list = _read_column(group_csv_cgal_file, int)
                sdf_list = _read_column(sdf_csv_file_name, float)
            except (OSError, ValueError, IndexError):
                # Was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit
                # still propagate.  Missing or malformed output means the CGAL
                # run failed for this component — abort this key entirely.
                print("no CGAL segmentation for " + str(off_file_name))
                return

            # Row = key + run parameters + raw labels/SDF + summary statistics;
            # timestamp truncated to "YYYY-MM-DD HH:MM:SS".
            comp_dict = dict(key,
                             clusters=clusters,
                             smoothness=smoothness,
                             n_triangles=component["n_triangle_indices"],
                             seg_group=group_list,
                             sdf=sdf_list,
                             median_sdf=np.median(sdf_list),
                             mean_sdf=np.mean(sdf_list),
                             third_q=np.percentile(sdf_list, 75),
                             ninety_perc=np.percentile(sdf_list, 90),
                             time_updated=str(datetime.datetime.now())[0:19])

            self.insert1(comp_dict)

            # Clean up all intermediate files for this run.
            real_off_file_name = path_and_filename + ".off"
            files_to_delete = [
                group_csv_cgal_file, sdf_csv_file_name, real_off_file_name
            ]
            for fl in files_to_delete:
                if os.path.exists(fl):
                    os.remove(fl)
                else:
                    print(fl + " file does not exist")

        print("finished")
        print("--- %s seconds ---" % (time.time() - start_time))
# --- Code example #3 ---
import cgal_Segmentation_Module
import time

# Timestamp taken before kicking off the segmentation run.
t0 = time.time()
print(t0)

print("about to start")

# Directory and (extension-less) name of the mesh to segment.
mesh_dir = "/Users/brendancelii/Documents/"
mesh_name = "neuron_28571618_Basal_1"

# Run the CGAL segmentation with 16 clusters and smoothness 0.04.
cgal_Segmentation_Module.cgal_segmentation(mesh_dir, mesh_name, 16, 0.04)
print("finished")
print("--- %s seconds ---" % (time.time() - t0))
# --- Code example #4 ---
def complete_spine_extraction(mesh_file_location, file_name, **kwargs):
    """
    Extracts the spine meshes from a given dendritic mesh and returns either
    just the spine meshes or the spine meshes and the dendritic shaft with the
    spines removed.

    Parameters:
    mesh_file_location (str): location of the dendritic mesh on computer
    file_name (str): file name of dendritic mesh on computer (must end in "off")

    Optional Parameters:
    ---configuring cgal segmentation ---
    clusters (int) : number of clusters for CGAL surface mesh segmentation (default = 12)
    smoothness (float) : smoothness parameter for CGAL surface mesh segmentation (default = 0.04)

    ---configuring output---
    split_up_spines (bool): if True will return array of trimesh objects representing each spine
                         if False will return all spines as one mesh (default = True)
    shaft_mesh (bool) : if True then also returns the shaft mesh with the spines stripped out (default=False)

    --- configuring spine extraction ---
    stub_threshold (int) : number of faces (size) that a spine mesh must include
                           in order to be considered a spine (default=50)

    smooth_backbone_parameters (dict) : parameters for backbone extraction after
        cgal segmentation; may contain:
        max_backbone_threshold (int) : absolute size above which a label is a possible backbone (default = 200)
        backbone_threshold (int) : size requirement (given width requirements) for possible backbone (default = 40)
        shared_vert_threshold (int) : raw number of shared backbone verts needed for backbone status (default = 10)
        shared_vert_threshold_new (int) : same as above but for phase 2 (default = 5)
        backbone_width_threshold (float) : median sdf/width a segment needs for possible backbone (default = 0.1)
        backbone_neighbor_min (int) : number of backbones in chain to keep backbone status (default = 10)

    Returns:
    if split_up_spines == True (default):
        list of trimesh.Mesh, one per spine
    else:
        trimesh.Mesh representing all spines
    additionally, if shaft_mesh == True:
        trimesh.Mesh representing the shaft with the spines filtered away

    Raises:
    TypeError: for unexpected keyword arguments, a non-.off file name, or a
        file that cannot be found (TypeError kept for caller compatibility).

    Examples:
    list_of_spine_meshes = complete_spine_extraction(file_location,file_name)
    list_of_spine_meshes,shaft_mesh = complete_spine_extraction(file_location,file_name,shaft_mesh=True)
    merged_spine_meshes = complete_spine_extraction(file_location,file_name,split_up_spines=False)
    merged_spine_meshes,shaft_mesh = complete_spine_extraction(file_location,file_name,split_up_spines=False,shaft_mesh=True)
    """

    clusters = kwargs.pop('clusters', 12)
    smoothness = kwargs.pop('smoothness', 0.04)
    smooth_backbone_parameters = kwargs.pop('smooth_backbone_parameters',
                                            dict())
    stub_threshold = kwargs.pop('stub_threshold', 50)
    split_up_spines = kwargs.pop('split_up_spines', True)
    shaft_mesh = kwargs.pop('shaft_mesh', False)

    # Reject any keyword arguments that were not consumed above.
    if kwargs:
        raise TypeError('Unexpected **kwargs: %r' % kwargs)

    # Validate the input: must be an existing .off file.  (The original had
    # unreachable `return None` statements after these raises — removed.)
    full_path = str(Path(mesh_file_location) / Path(file_name))
    if file_name[-3:] != "off":
        raise TypeError("input file must be a .off ")
    if not os.path.isfile(full_path):
        raise TypeError(full_path + " cannot be found")

    total_time = time.time()
    print(
        f"Starting spine extraction for {file_name} with clusters={clusters} and smoothness={smoothness}"
    )

    # Step 1: load the mesh into the trimesh-backed classifier.
    start_time = time.time()
    myClassifier = ClassifyMesh(mesh_file_location, file_name)
    print(
        f"Step 1: Trimesh mesh build total time ---- {np.round(time.time() - start_time,5)} seconds"
    )

    # Step 2: run the CGAL surface-mesh segmentation.  The path handed to CGAL
    # excludes the ".off" suffix; CGAL writes its label/sdf csv files beside it.
    start_time = time.time()
    print("\nStarting CGAL segmentation")
    full_file_path = full_path[:-4]
    csm.cgal_segmentation(full_file_path, clusters, smoothness)
    print(
        f"Step 2: CGAL segmentation total time ---- {np.round(time.time() - start_time,5)} seconds"
    )

    # Step 3: extract the spines from the segmentation output.
    start_time = time.time()
    print("\nStarting Spine Extraction")
    individual_spines = myClassifier.extract_spines(
        mesh_file_location,
        file_name,
        clusters,
        smoothness,
        split_up_spines,
        shaft_mesh,
        smooth_backbone_parameters=smooth_backbone_parameters,
        stub_threshold=stub_threshold)
    print(
        f"Step 3: Spine extraction total time ---- {np.round(time.time() - start_time,5)} seconds"
    )

    print(f"Total time ---- {np.round(time.time() - total_time,5)} seconds")
    return individual_spines