class DecompositionSplit(dj.Computed):
    definition = """
    -> minnie.Decomposition()
    split_index: tinyint unsigned  #the index of the neuron object that resulted AFTER THE SPLITTING ALGORITHM
    split_version: tinyint unsigned  #the version of the splitting algorithm used
    ---
    multiplicity=null    : tinyint unsigned             # the number of somas found for this base segment
    n_splits             : int unsigned                 # the number of cuts required to help split the neuron
    split_success        : tinyint unsigned             # the successfulness of the splitting
    
    n_error_limbs_cancelled : tinyint unsigned     # number of limbs that couldn't be resolved and cancelled out        
    n_same_soma_limbs_cancelled : tinyint unsigned     # number of same soma touching limbs that couldn't be resolved and cancelled out
    n_multi_soma_limbs_cancelled : tinyint unsigned     # number of multi soma touching limbs that couldn't be resolved and cancelled out        
    
    error_imbs_cancelled_area=NULL : double            # the total area (in microns^2) of the limbs that were cancelled out because they touched the same soma multiple times or multiple somas
    error_imbs_cancelled_skeletal_length = NULL : double #the total skeletal length (in microns) of the limbs that were cancelled out because they could not be resolved
    
    split_results: longblob #will store the results of how to split the limbs of neuron objects from original neuron
    decomposition: <decomposition>
    
    
    n_vertices           : int unsigned                 # number of vertices
    n_faces              : int unsigned                 # number of faces
    n_not_processed_soma_containing_meshes : int unsigned  #the number of meshes with somas that were not processed
    n_error_limbs: int #the number of limbs that are touching multiple somas or 1 soma in multiple places
    n_same_soma_multi_touching_limbs: int # number of limbs that touch the same soma multiple times
    n_multi_soma_touching_limbs: int # number of limbs that touch multiple somas
    n_somas: int #number of soma meshes detected
    max_soma_n_faces:  int unsigned                 # The largest number of faces of the somas
    max_soma_volume:  int unsigned                 # The largest soma volume (in billions of nm^3, i.e. 10^9 nm^3)
    n_limbs: int
    n_branches: int
    max_limb_n_branches=NULL:int
    
    skeletal_length=NULL: double
    max_limb_skeletal_length=NULL:double
    median_branch_length=NULL:double # median branch skeletal length (typical distance to the next branch point)
    
    
    width_median=NULL: double #median width from mesh center without spines removed
    width_no_spine_median=NULL: double #median width from mesh center with spines removed
    width_90_perc=NULL: double # 90th percentile for width without spines removed
    width_no_spine_90_perc=NULL: double  # 90th percentile for width with spines removed
    
    
    n_spines: bigint

    spine_density=NULL: double # n_spines/ skeletal_length
    spines_per_branch=NULL: double
    
    skeletal_length_eligible=NULL: double # the skeletal length for all branches searched for spines
    n_spine_eligible_branches=NULL: int # the number of branches that were checked for spines because they passed the width threshold
    
    spine_density_eligible=NULL:double # n_spines/skeletal_length_eligible
    spines_per_branch_eligible=NULL:double # n_spines/n_spine_eligible_branches
    
    total_spine_volume=NULL: double # the sum of all spine volume
    spine_volume_median=NULL: double # median spine volume across spines for which a volume could be calculated
    spine_volume_density=NULL: double #total_spine_volume/skeletal_length
    spine_volume_density_eligible=NULL: double #total_spine_volume/skeletal_length_eligible
    spine_volume_per_branch_eligible=NULL: double #total_spine_volume/n_spine_eligible_branches
    
    run_time=NULL : double                   # the amount of time to run (seconds)

    """
    ''' Old keysource used for inhibitory excitatory check
    classified_table = (minnie.BaylorManualCellType() &
                        'nucleus_version=3' & 
                        "(cell_type = 'excitatory') or  (cell_type = 'inhibitory')")
    
    key_source = ((minnie.Decomposition & 
                (minnie.NeuronSplitSuggestions.proj()) & 
                (classified_table.proj()) 
                & f"n_somas<{max_n_somas}" & "n_error_limbs>0"))'''

    # This keysource accounts for segments that could have more than 1 possible soma
    # but no significant limb connecting them (i.e. no error limbs)
    key_source = (minnie.Decomposition()
                  & "n_somas>1 OR n_error_limbs>0"
                  & du.proofreading_segment_id_restriction())

    def make(self, key):
        """
        Pseudocode for process:

        1) Get the segment id from the key
        2) Get the decomposed neuron object from Decomposition table
        3) Run the multi_soma split suggestions algorithm
        4) Get the number of splits required for this neuron
        5) Split the neuron into a list of neuron objects
        6) For each neuron object in the list:
        - get the number of errored limbs (to indicate the success type)
        - Change the description to include the multiplicity
        - Compute the information on the largest soma faces and volume
        - Save the neuron object to the external
        - Add the new write key to a list to commit 
        7) Write all of the keys 
        """

        whole_pass_time = time.time()

        # 1) Get the segment id from the key
        segment_id = key["segment_id"]
        print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----")

        # 2) Get the decomposed neuron object from Decomposition table and the split suggestions
        neuron_obj_path = (minnie.Decomposition & key).fetch1("decomposition")
        neuron_obj = du.filepath_to_neuron_obj(neuron_obj_path)
        """ Old way that downloaded from another table
        # 3) Retrieve the multi soma suggestions
        split_results = (minnie.NeuronSplitSuggestions & key).fetch1("split_results")
        """
        #3) Calculate the split results
        split_results = pru.multi_soma_split_suggestions(
            neuron_obj, plot_intermediates=False)

        # 4) Get the number of splits required for this neuron
        n_paths_cut = pru.get_n_paths_cut(split_results)

        if verbose:
            print(f"n_paths_cut = {n_paths_cut}")

        # 5) Split the neuron into a list of neuron objects
        (neuron_list, neuron_list_errored_limbs_area,
         neuron_list_errored_limbs_skeletal_length,
         neuron_list_n_multi_soma_errors,
         neuron_list_n_same_soma_errors) = pru.split_neuron(
             neuron_obj,
             limb_results=split_results,
             verbose=verbose,
             return_error_info=True)

        print(f"neuron_list = {neuron_list}")
        print(
            f"neuron_list_errored_limbs_area = {neuron_list_errored_limbs_area}"
        )
        print(
            f"neuron_list_n_multi_soma_errors = {neuron_list_n_multi_soma_errors}"
        )
        print(
            f"neuron_list_n_same_soma_errors = {neuron_list_n_same_soma_errors}"
        )

        if verbose:
            print(f"Number of neurons: {len(neuron_list)}")

        neuron_entries = []
        for neuron_idx in range(len(neuron_list)):
            """
            # 6) For each neuron object in the list:
            # - get the number of errored limbs (to indicate the success type)
            # - Compute the information on the largest soma faces and volume
            # - Save the neuron object to the external
            # - Add the new write key to a list to commit 
            """
            n = neuron_list[neuron_idx]

            error_imbs_cancelled_area = neuron_list_errored_limbs_area[
                neuron_idx]
            error_imbs_cancelled_skeletal_length = neuron_list_errored_limbs_skeletal_length[
                neuron_idx]
            n_multi_soma_limbs_cancelled = neuron_list_n_multi_soma_errors[
                neuron_idx]
            n_same_soma_limbs_cancelled = neuron_list_n_same_soma_errors[
                neuron_idx]

            #for n in neuron_list:
            #     nviz.visualize_neuron(n,
            #                          limb_branch_dict="all")

            # - get the number of errored limbs (to indicate the success type)
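            # split_success encoding (based on the checks below): 0 = no error limbs remain,
            # 1 = only same-soma multi-touching limbs remain, 2 = only multi-soma touching
            # limbs remain, 3 = both kinds of error limbs remain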
            if n.n_error_limbs == 0:
                split_success = 0
            elif n.multi_soma_touching_limbs == 0:
                split_success = 1
            elif n.same_soma_multi_touching_limbs == 0:
                split_success = 2
            else:
                split_success = 3

            if verbose:
                print(f"split_success = {split_success}")

            # - Compute the information on the largest soma faces and volume
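            # (soma volumes are divided by 1e9 so they are stored in units of 10^9 nm^3,
            #  matching the max_soma_volume column comment in the table definition)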
            soma_volumes = [
                n[k].volume / 1000000000 for k in n.get_soma_node_names()
            ]
            soma_n_faces = [
                len(n[k].mesh.faces) for k in n.get_soma_node_names()
            ]

            largest_n_faces = np.max(soma_n_faces)
            largest_volume = np.max(soma_volumes)

            if verbose:
                print(f"largest_n_faces = {largest_n_faces}")
                print(f"largest_volume = {largest_volume}")

            if "split" not in n.description:
                n.description += "_soma_0_split"

            #6) Save the file in a certain location
            save_time = time.time()
            ret_file_path = n.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"
            print(f"Save time = {time.time() - save_time}")

            #7) Pass stats and file location to insert
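            # Note: the cancelled skeletal length is divided by 1000 below, presumably
            # converting nm to the microns stated in the table definition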
            new_key = dict(
                key,
                split_index=neuron_idx,
                split_version=split_version,
                multiplicity=len(neuron_list),
                n_splits=n_paths_cut,
                split_success=split_success,
                n_error_limbs_cancelled=len(error_imbs_cancelled_area),
                n_multi_soma_limbs_cancelled=n_multi_soma_limbs_cancelled,
                n_same_soma_limbs_cancelled=n_same_soma_limbs_cancelled,
                error_imbs_cancelled_area=np.round(
                    np.sum(error_imbs_cancelled_area), 4),
                error_imbs_cancelled_skeletal_length=np.round(
                    np.sum(error_imbs_cancelled_skeletal_length) / 1000, 4),
                split_results=split_results,
                max_soma_n_faces=largest_n_faces,
                max_soma_volume=largest_volume,
                decomposition=ret_file_path_str,
                n_vertices=len(n.mesh.vertices),
                n_faces=len(n.mesh.faces),
                run_time=np.round(time.time() - whole_pass_time, 4))

            stats_dict = n.neuron_stats()
            new_key.update(stats_dict)

            attributes_to_remove = ["axon_length", "axon_area", "n_boutons"]

            for k in attributes_to_remove:
                del new_key[k]

            neuron_entries.append(new_key)

        self.insert(neuron_entries,
                    allow_direct_insert=True,
                    skip_duplicates=True)

        print(
            f"\n\n ------ Total time for {segment_id} = {time.time() - whole_pass_time} ------"
        )
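
# Hedged usage sketch (assumes an active DataJoint connection and that this class is bound
# to the schema elsewhere): the table would typically be filled with the standard
# Computed-table populate call, and the split statistics inspected afterwards, e.g.
#   DecompositionSplit.populate(reserve_jobs=True, display_progress=True)
#   (DecompositionSplit() & "split_success > 0").proj("n_splits", "multiplicity")
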
import meshlab
meshlab.set_meshlab_port(current_port=None)

# In[8]:

decimation_version = 0
decimation_ratio = 0.25
# key_source = (minnie.Decimation().proj(decimation_version='version')  &
#                   dict(decimation_version=decimation_version,decimation_ratio=decimation_ratio)
#                   & minnie.MultiSomaProofread2() & (dj.U("segment_id") & (minnie.BaylorSegmentCentroid()).proj()))

key_source = ((minnie.Decimation).proj(decimation_version='version')
              & "decimation_version=" + str(decimation_version)
              & f"decimation_ratio={decimation_ratio}" &
              (minnie.BaylorSegmentCentroid() & "multiplicity>0")
              & du.proofreading_segment_id_restriction())

key_source

# In[11]:

# minnie.DecompositionVersions.insert1(dict(process_version=9,
#                                         description="uses meshafterparty for decomposition type and stitch distance 2000"),
#                                    skip_duplicates=True)
# minnie.DecompositionVersions()

# In[12]:

import numpy as np
import time
decimation_version = 0
class Decomposition(dj.Computed):
    definition = """
    -> minnie.Decimation.proj(decimation_version='version')
    ver : decimal(6,2) #the version number of the materialization
    process_version : int unsigned #the version of the preprocessing pipeline run
    index : tinyint unsigned  #the index of the neuron object that resulted from that mesh (indexed starting at 0)
    ---
    multiplicity=null    : tinyint unsigned             # the number of somas found for this base segment
    decomposition: <decomposition>
    n_vertices           : int unsigned                 # number of vertices
    n_faces              : int unsigned                 # number of faces
    n_not_processed_soma_containing_meshes : int unsigned  #the number of meshes with somas that were not processed
    n_error_limbs: int #the number of limbs that are touching multiple somas or 1 soma in multiple places
    n_same_soma_multi_touching_limbs: int # number of limbs that touch the same soma multiple times
    n_multi_soma_touching_limbs: int # number of limbs that touch multiple somas
    n_somas: int #number of soma meshes detected
    n_limbs: int
    n_branches: int
    max_limb_n_branches=NULL:int
    
    skeletal_length=NULL: double
    max_limb_skeletal_length=NULL:double
    median_branch_length=NULL:double # median branch skeletal length (typical distance to the next branch point)
    
    
    width_median=NULL: double #median width from mesh center without spines removed
    width_no_spine_median=NULL: double #median width from mesh center with spines removed
    width_90_perc=NULL: double # 90th percentile for width without spines removed
    width_no_spine_90_perc=NULL: double  # 90th percentile for width with spines removed
    
    
    n_spines: bigint

    spine_density=NULL: double # n_spines/ skeletal_length
    spines_per_branch=NULL: double
    
    skeletal_length_eligible=NULL: double # the skeletal length for all branches searched for spines
    n_spine_eligible_branches=NULL: int # the number of branches that were checked for spines because they passed the width threshold
    
    spine_density_eligible=NULL:double # n_spines/skeletal_length_eligible
    spines_per_branch_eligible=NULL:double # n_spines/n_spine_eligible_branches
    
    total_spine_volume=NULL: double # the sum of all spine volume
    spine_volume_median=NULL: double # median spine volume across spines for which a volume could be calculated
    spine_volume_density=NULL: double #total_spine_volume/skeletal_length
    spine_volume_density_eligible=NULL: double #total_spine_volume/skeletal_length_eligible
    spine_volume_per_branch_eligible=NULL: double #total_spine_volume/n_spine_eligible_branches
    
    run_time=NULL : double                   # the amount of time to run (seconds)

    
    """

    key_source = ((minnie.Decimation).proj(decimation_version='version')
                  & "decimation_version=" + str(decimation_version)
                  & f"decimation_ratio={decimation_ratio}" &
                  (minnie.BaylorSegmentCentroid() & "multiplicity>0")
                  & du.proofreading_segment_id_restriction())

    def make(self, key):
        """
        Pseudocode for process:

        1) Get the segment id from the key
        2) Get the decimated mesh
        3) Get the somas info
        4) Run the preprocessing
        5) Calculate all starter stats
        6) Save the file in a certain location
        7) Pass stats and file location to insert
        """
        whole_pass_time = time.time()
        #1) Get the segment id from the key
        segment_id = key["segment_id"]
        description = str(key['decimation_version']) + "_25"
        print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----")
        global_start = time.time()

        #2) Get the decimated mesh
        current_neuron_mesh = du.fetch_segment_id_mesh(segment_id)

        #3) Get the somas info
        somas = du.get_soma_mesh_list(segment_id)
        soma_ver = du.get_soma_mesh_list_ver(segment_id)

        print(f"somas = {somas}")

        #3b) Get the glia and nuclei information
        glia_faces, nuclei_faces = du.get_segment_glia_nuclei_faces(
            segment_id, return_empty_list=True)

        #4) Run the preprocessing

        total_neuron_process_time = time.time()

        print(f"\n--- Beginning preprocessing of {segment_id}---")
        recovered_neuron = neuron.Neuron(
            mesh=current_neuron_mesh,
            somas=somas,
            segment_id=segment_id,
            description=description,
            suppress_preprocessing_print=False,
            suppress_output=False,
            calculate_spines=True,
            widths_to_calculate=["no_spine_median_mesh_center"],
            glia_faces=glia_faces,
            nuclei_faces=nuclei_faces,
            decomposition_type="meshafterparty",
        )

        print(
            f"\n\n\n---- Total preprocessing time = {time.time() - total_neuron_process_time}"
        )

        #5) Stats are computed directly from the neuron object (no separate processing needed)
        stats_dict = recovered_neuron.neuron_stats(stats_to_ignore=[
            "n_boutons",
            "axon_length",
            "axon_area",
            "max_soma_volume",
            "max_soma_n_faces",
        ])

        #6) Save the file in a certain location
        save_time = time.time()
        ret_file_path = recovered_neuron.save_compressed_neuron(
            output_folder=str(du.get_decomposition_path()),
            return_file_path=True,
            export_mesh=False,
            suppress_output=True)

        ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"
        print(f"Save time = {time.time() - save_time}")

        #7) Pass stats and file location to insert
        new_key = dict(key,
                       ver=soma_ver,
                       process_version=process_version,
                       index=0,
                       multiplicity=1,
                       decomposition=ret_file_path_str,
                       n_vertices=len(current_neuron_mesh.vertices),
                       n_faces=len(current_neuron_mesh.faces),
                       run_time=np.round(time.time() - whole_pass_time, 4))
        new_key.update(stats_dict)

        self.insert1(new_key, allow_direct_insert=True, skip_duplicates=True)

        print(
            f"\n\n ------ Total time for {segment_id} = {time.time() - global_start} ------"
        )
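
# Hedged progress check (standard DataJoint Computed-table API, assuming the class is bound
# to the schema): remaining work for this table can be inspected with, e.g.
#   Decomposition.progress()                          # counts of remaining vs. total keys
#   Decomposition.key_source - Decomposition.proj()   # the pending keys themselves
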
class DecompositionCellType(dj.Computed):
    definition = """
    -> minnie.Decomposition()
    split_index          : tinyint unsigned             # the index of the neuron object that resulted AFTER THE SPLITTING ALGORITHM
    -> minnie.DecompositonAxonVersion()             # the version of code used for this cell typing classification
    ---

    # -- attributes for the cell type classification ---
    decomposition        : <decomposition> # saved neuron object with high fidelity axon


    # ----- Nucleus Information ----#
    nucleus_id           : int unsigned                 # id of nucleus from the flat segmentation  Equivalent to Allen: 'id'.
    nuclei_distance      : double                    # the distance to the closest nuclei (even if no matching nuclei found)
    n_nuclei_in_radius   : tinyint unsigned          # the number of nuclei within the search radius of 15000 belonging to that segment
    n_nuclei_in_bbox     : tinyint unsigned          # the number of nuclei within the bounding box of that soma


    # ------ Information Used For Excitatory Inhibitory Classification (Baylor Cell) -------- 
    cell_type_predicted: enum('excitatory','inhibitory','other','unknown') # morphology predicted by classifier
    spine_category: enum('no_spined','sparsely_spined','densely_spined')

    n_axons: tinyint unsigned             # Number of axon candidates identified
    n_apicals: tinyint unsigned             # Number of apicals identified

    axon_angle_maximum=NULL:double #the angle of an identified axon
    spine_density_classifier:double              # the number of spines divided by skeletal length for branches analyzed in classification
    n_branches_processed: int unsigned                 # the number branches used for the spine density analysis
    skeletal_length_processed: double                 # The total skeletal length of the viable branches used for the spine density analysis
    n_branches_in_search_radius: int unsigned                 # the number branches existing in the search radius used for spine density
    skeletal_length_in_search_radius : double         # The total skeletal length of the branches existing in the search radius used for spine density


    #---- allen classification info -----
    allen_e_i=NULL: enum('excitatory','inhibitory','other','unknown')
    allen_e_i_n_nuc=NULL: tinyint unsigned  
    allen_cell_type=NULL:varchar(256)
    allen_cell_type_n_nuc=NULL:tinyint unsigned  
    allen_cell_type_e_i=NULL:enum('excitatory','inhibitory','other','unknown')


    # ----- for the dendrite on axon filtering away --------
    dendrite_on_axon_merges_error_area=NULL : double #the area (in um ^ 2) of the faces canceled out by filter
    dendrite_on_axon_merges_error_length =NULL: double #the length (in um) of skeleton distance canceled out by filter

    # ----- attributes from the axon_features --- 
    cell_type_for_axon : enum('excitatory','inhibitory','other','unknown')
    axon_volume=NULL: double #volume of the oriented bounding box of axon (divided by 10^14)

    axon_length=NULL: double  # length (in um) of the classified axon skeleton
    axon_branch_length_median=NULL: double  # median branch length (in um) of the classified axon
    axon_branch_length_mean=NULL: double  # mean branch length (in um) of the classified axon

    # number of branches in the axon
    axon_n_branches=NULL: int unsigned  
    axon_n_short_branches=NULL:  int unsigned
    axon_n_long_branches=NULL:  int unsigned
    axon_n_medium_branches=NULL:  int unsigned

    #bounding box features
    axon_bbox_x_min=NULL: double 
    axon_bbox_y_min=NULL: double 
    axon_bbox_z_min=NULL: double 
    axon_bbox_x_max=NULL: double 
    axon_bbox_y_max=NULL: double 
    axon_bbox_z_max=NULL: double 

    axon_bbox_x_min_soma_relative=NULL: double 
    axon_bbox_y_min_soma_relative=NULL: double 
    axon_bbox_z_min_soma_relative=NULL: double 
    axon_bbox_x_max_soma_relative=NULL: double 
    axon_bbox_y_max_soma_relative=NULL: double 
    axon_bbox_z_max_soma_relative=NULL: double 

    run_time=NULL : double                   # the amount of time to run (seconds)
    """

    key_source = (minnie.Decomposition()
                  & du.proofreading_segment_id_restriction())

    def make(self, key):
        """
        Pseudocode:
        1) Pull Down all the Neuron Objects associated with a segment_id
        
        For each neuron:
        2) Run the full axon preprocessing
        3) Save off the neuron
        4) Save dict entry to list
        
        
        5) Write the new entry to the table

        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")

        whole_pass_time = time.time()

        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id,
            ignore_DecompositionAxon=True,
            ignore_DecompositionCellType=True)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        #For each neuron:
        dict_to_write = []

        # -------- getting the nuclei info to match
        #         ver = 88
        #         nucleus_ids,nucleus_centers = du.segment_to_nuclei(segment_id,
        #                                                                nuclei_version=ver)
        nucleus_ids, nucleus_centers = du.segment_to_nuclei(segment_id,
                                                            nuclei_version=ver)

        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")

        for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):

            if verbose:
                print(f"--> Working on Split Index {split_index} -----")

            st = time.time()

            # ------------- Does all of the processing -------------------

            #1) ------ Getting the paired nuclei ------
            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
                neuron_obj,
                "S0",
                nucleus_ids,
                nucleus_centers,
                nuclei_distance_threshold=15000,
                return_matching_info=True,
                verbose=True)

            # else:
            #     winning_nucleus_id = 12345
            #     nucleus_info = dict()
            #     nucleus_info["nucleus_id"] = winning_nucleus_id
            #     nucleus_info["nuclei_distance"] = 0
            #     nucleus_info["n_nuclei_in_radius"] = 1
            #     nucleus_info["n_nuclei_in_bbox"] = 1

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            #2) ------- Finding the Allen Cell Types -------
            allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(
                winning_nucleus_id)
            if verbose:
                print(f"allen_cell_type_info = {allen_cell_type_info}")

            #4) -------- Running the cell classification and stats--------------

            if verbose:
                print(
                    f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n"
                )

            filter_time = time.time()

            (inh_exc_class, spine_category, axon_angles, n_axons, n_apicals,
             neuron_spine_density, n_branches_processed,
             skeletal_length_processed, n_branches_in_search_radius,
             skeletal_length_in_search_radius
             ) = clu.inhibitory_excitatory_classifier(
                 neuron_obj,
                 return_spine_classification=True,
                 return_axon_angles=True,
                 return_n_axons=True,
                 return_n_apicals=True,
                 return_spine_statistics=True,
                 axon_limb_branch_dict_precomputed=None,
                 axon_angles_precomputed=None,
                 verbose=verbose)
            if verbose:
                print(
                    f"Total time for classification = {time.time() - filter_time}"
                )

            all_axon_angles = []
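            # Flatten the nested {limb_idx: {candidate_idx: angle}} mapping so the maximum
            # candidate angle across all limbs can be taken below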
            for limb_idx, limb_data in axon_angles.items():
                for candidate_idx, cand_angle in limb_data.items():
                    all_axon_angles.append(cand_angle)

            if len(all_axon_angles) > 0:
                axon_angle_maximum = np.max(all_axon_angles)
            else:
                axon_angle_maximum = 0

            if verbose:
                print("\n -- Cell Type Classification Results --")
                print(f"inh_exc_class={inh_exc_class}")
                print(f"spine_category={spine_category}")
                print(f"axon_angles={axon_angles}")
                print(f"n_axons={n_axons}")
                print(f"n_apicals={n_apicals}")
                print(f"neuron_spine_density={neuron_spine_density}")
                print(f"n_branches_processed={n_branches_processed}")
                print(f"skeletal_length_processed={skeletal_length_processed}")
                print(
                    f"n_branches_in_search_radius={n_branches_in_search_radius}"
                )
                print(
                    f"skeletal_length_in_search_radius={skeletal_length_in_search_radius}"
                )

            baylor_cell_type_info = dict(
                cell_type_predicted=inh_exc_class,
                spine_category=spine_category,
                axon_angle_maximum=axon_angle_maximum,
                n_axons=n_axons,
                n_apicals=n_apicals,
                spine_density_classifier=neuron_spine_density,
                n_branches_processed=n_branches_processed,
                skeletal_length_processed=skeletal_length_processed,
                n_branches_in_search_radius=n_branches_in_search_radius,
                skeletal_length_in_search_radius=
                skeletal_length_in_search_radius,
            )

            #5) ----- Deciding on cell type to use for axon
            e_i_class = inh_exc_class
            if inh_exc_class_to_use_for_axon == "Allen" and allen_cell_type_info[
                    "e_i"] is not None:
                e_i_class = allen_cell_type_info["e_i"]

            if verbose:
                print(
                    f"e_i_class = {e_i_class} with inh_exc_class_to_use_for_axon = {inh_exc_class_to_use_for_axon}"
                )

            #6) -------- If excitatory running the axon processing--------------
            """
            Pseudocode:
            If e_i class is excitatory:
            1) Filter away the axon on dendrite
            2) Do the higher fidelity axon processing
            3) Compute the axon features

            """

            if e_i_class == "excitatory" and neuron_obj.axon_limb_name is not None:
                if verbose:
                    print(
                        f"Excitatory so performing high fidelity axon and computing axon features"
                    )
            #     1) Filter away the axon on dendrite
            #     2) Do the higher fidelity axon processing

                o_neuron, filtering_info = au.complete_axon_processing(
                    neuron_obj,
                    perform_axon_classification=False,
                    return_filtering_info=True)
                filtering_info = {
                    k: np.round(v, 2)
                    for k, v in filtering_info.items()
                    if "area" in k or "length" in k
                }
                #3) Compute the axon features
                axon_features = au.axon_features_from_neuron_obj(o_neuron)
            else:
                nru.clear_all_branch_labels(neuron_obj, labels_to_clear="axon")
                o_neuron = neuron_obj
                axon_features = dict()
                filtering_info = dict()

            #3) ------ Adding the Synapses -----------
            o_neuron = syu.add_synapses_to_neuron_obj(
                o_neuron,
                validation=validation,
                verbose=True,
                original_mesh=None,
                plot_valid_error_synapses=False,
                calculate_synapse_soma_distance=False,
                add_valid_synapses=True,
                add_error_synapses=False)

            # ------- Saving off the neuron object ----------------
            save_time = time.time()
            ret_file_path = o_neuron.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                #output_folder = "./",
                file_name=
                f"{o_neuron.segment_id}_{split_index}_split_axon_v{au.axon_version}",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")

            n_dict = dict(
                key,
                split_index=split_index,
                axon_version=au.axon_version,
                decomposition=ret_file_path_str,
                run_time=np.round(time.time() - st, 2),
                cell_type_for_axon=e_i_class,
            )

            dicts_for_update = [
                baylor_cell_type_info, allen_cell_type_info, nucleus_info,
                filtering_info, axon_features
            ]

            for d in dicts_for_update:
                n_dict.update(d)

            self.insert1(n_dict,
                         skip_duplicates=True,
                         allow_direct_insert=True)
            #dict_to_write.append(n_dict)

        # Bulk write disabled; entries are inserted one at a time above
        #self.insert(dict_to_write,skip_duplicates=True,allow_direct_insert=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
class DecompositionSplitMultiSoma(dj.Computed):
    definition = """
    -> minnie.Decomposition()
    split_version: tinyint unsigned  #the version of the splitting algorithm used
    ---
    n_splits             : int unsigned                 # the number of cuts required to help split the neuron

    split_results: longblob #will store the results of how to split the limbs of neuron objects from original neuron
    red_blue_split_results: longblob
    
    run_time=NULL : double                   # the amount of time to run (seconds)

    """
    ''' Old keysource used for inhibitory excitatory check
    classified_table = (minnie.BaylorManualCellType() &
                        'nucleus_version=3' & 
                        "(cell_type = 'excitatory') or  (cell_type = 'inhibitory')")
    
    key_source = ((minnie.Decomposition & 
                (minnie.NeuronSplitSuggestions.proj()) & 
                (classified_table.proj()) 
                & f"n_somas<{max_n_somas}" & "n_error_limbs>0"))'''

    # This keysource accounts for segments that could have more than 1 possible soma
    # but no significant limb connecting them (i.e. no error limbs)
    key_source = (minnie.Decomposition()
                  & "n_somas>1"
                  & du.proofreading_segment_id_restriction())  #& dict(segment_id=864691136756471662)

    def make(self, key):
        """
        Pseudocode for process:

        1) Get the segment id from the key
        2) Get the decomposed neuron object from Decomposition table
        3) Run the multi_soma split suggestions algorithm
        4) Get the number of splits required for this neuron
        5) Split the neuron into a list of neuron objects
        6) For each neuron object in the list:
        - get the number of errored limbs (to indicate the success type)
        - Change the description to include the multiplicity
        - Compute the information on the largest soma faces and volume
        - Save the neuron object to the external
        - Add the new write key to a list to commit 
        7) Write all of the keys 
        """

        whole_pass_time = time.time()

        # 1) Get the segment id from the key
        segment_id = key["segment_id"]
        print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----")

        # 2) Get the decomposed neuron object from Decomposition table and the split suggestions
        neuron_obj_path = (minnie.Decomposition & key).fetch1("decomposition")
        neuron_obj = du.filepath_to_neuron_obj(neuron_obj_path)
        """ Old way that downloaded from another table
        # 3) Retrieve the multi soma suggestions
        split_results = (minnie.NeuronSplitSuggestions & key).fetch1("split_results")
        """
        #3) Calculate the split results
        split_results, red_blue_split_results = pru.multi_soma_split_suggestions(
            neuron_obj,
            plot_intermediates=False,
            only_multi_soma_paths=True,
            default_cut_edge="last",
            verbose=True,
            debug=False,
            output_red_blue_suggestions=True,
            split_red_blue_by_common_upstream=True,
            apply_valid_upstream_branches_restriction=True,
            debug_red_blue=False,
        )

        # 4) Get the number of splits required for this neuron
        n_paths_cut = pru.get_n_paths_cut(split_results)

        #7) Pass stats and file location to insert
        new_key = dict(key,
                       split_version=split_version,
                       n_splits=n_paths_cut,
                       split_results=split_results,
                       red_blue_split_results=red_blue_split_results,
                       run_time=np.round(time.time() - whole_pass_time, 4))

        self.insert1(new_key, allow_direct_insert=True, skip_duplicates=True)

        print(
            f"\n\n ------ Total time for {segment_id} = {time.time() - whole_pass_time} ------"
        )
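
# Hedged inspection sketch (assumes the table has been populated; `some_segment_id` is a
# placeholder): the stored suggestion blobs can be fetched back per segment with, e.g.
#   split_results, red_blue = (DecompositionSplitMultiSoma()
#                              & dict(segment_id=some_segment_id)).fetch1(
#                                  "split_results", "red_blue_split_results")
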
class DecompositionAxon(dj.Computed):
    definition="""
    -> minnie.Decomposition()
    split_index          : tinyint unsigned             # the index of the neuron object that resulted AFTER THE SPLITTING ALGORITHM
    -> minnie.DecompositonAxonVersion()             # the version of code used for this cell typing classification
    ---
    decomposition        : <decomposition> # saved neuron object with high fidelity axon
    axon_length: double  # length (in um) of the classified axon skeleton
    run_time=NULL : double                   # the amount of time to run (seconds)
    """
                             
    
    #key_source = minnie.Decomposition() & minnie.NucleiSegmentsRun2() & "segment_id=864691136540183458"
    key_source = minnie.Decomposition() & du.proofreading_segment_id_restriction()
    
    

    def make(self,key):
        """
        Pseudocode:
        1) Pull Down all the Neuron Objects associated with a segment_id
        
        For each neuron:
        2) Run the full axon preprocessing
        3) Save off the neuron
        4) Save dict entry to list
        
        
        5) Write the new entry to the table

        """
        
        
        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]
        
        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")
        
        whole_pass_time = time.time()
        
        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs,neuron_split_idxs = du.decomposition_with_spine_recalculation(segment_id)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        #For each neuron:
        dict_to_write = []
        for split_index,neuron_obj in zip(neuron_split_idxs,neuron_objs):
            
            if verbose:
                print(f"--> Working on Split Index {split_index} -----")
                
            st = time.time()
            #Run the Axon Decomposition
            neuron_obj_with_web = au.complete_axon_processing(neuron_obj,
                                                              verbose=True)

            save_time = time.time()
            ret_file_path = neuron_obj_with_web.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                #output_folder = "./",
                file_name=f"{neuron_obj_with_web.segment_id}_{split_index}_split_axon_v{au.axon_version}",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"
            
            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")
            
            n_dict = dict(key,
                          split_index=split_index,
                          axon_version=au.axon_version,
                          decomposition=ret_file_path_str,
                          axon_length=neuron_obj_with_web.axon_length,
                          run_time=np.round(time.time() - st, 2))
            
            dict_to_write.append(n_dict)
        
        #write the
        self.insert(dict_to_write,skip_duplicates=True,allow_direct_insert=True)

        print(f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***")
class DecompositionCellTypeV7(dj.Computed):
    definition = """
    -> minnie.Decomposition()
    split_index          : tinyint unsigned             # the index of the neuron object that resulted AFTER THE SPLITTING ALGORITHM
    -> minnie.DecompositonAxonVersion()             # the version of code used for this cell typing classification
    ---
    
    # -- attributes for the cell type classification ---
    decomposition        : <decomposition> # saved neuron object with high fidelity axon

    # ----- Nucleus Information ----#
    nucleus_id           : int unsigned                 # id of nucleus from the flat segmentation  Equivalent to Allen: 'id'.
    nuclei_distance      : double                    # the distance to the closest nuclei (even if no matching nuclei found)
    n_nuclei_in_radius   : tinyint unsigned          # the number of nuclei within the search radius of 15000 belonging to that segment
    n_nuclei_in_bbox     : tinyint unsigned          # the number of nuclei within the bounding box of that soma
    
    soma_x_nm               : double                 # x coordinate of soma mesh in nm
    soma_y_nm               : double                 # y coordinate of soma mesh in nm
    soma_z_nm               : double                 # z coordinate of soma mesh in nm

    # ---- chosen cell type for proofreading rules
    baylor_e_i=NULL: enum('excitatory','inhibitory','other','unknown')
    allen_e_i=NULL: enum('excitatory','inhibitory','other','unknown')
    cell_type_used=NULL: enum('allen','baylor')
    cell_type=NULL: enum('excitatory','inhibitory','other','unknown')

    # ------ Information Used For Excitatory Inhibitory Classification (Baylor Cell) -------- 
    syn_density_post: double
    syn_density_head: double
    syn_density_neck: double
    syn_density_shaft: double
    skeletal_length_processed_syn: double
    spine_density: double
    skeletal_length_processed_spine: double
    n_syn_pre: int unsigned
    n_syn_post: int unsigned
    
    # -- regarding the axon that was found ---
    axon_angle_max=NULL    : double
    axon_angle_min=NULL    : double
    n_axon_angles     : tinyint unsigned

    
    #---- allen classification info -----
    allen_e_i_n_nuc=NULL: tinyint unsigned  
    allen_cell_type=NULL:varchar(256)
    allen_cell_type_n_nuc=NULL:tinyint unsigned  
    allen_cell_type_e_i=NULL:enum('excitatory','inhibitory','other','unknown')
    
    
    # ----- for the dendrite on axon filtering away --------
    dendrite_on_axon_merges_error_area=NULL : double #the area (in um ^ 2) of the faces canceled out by filter
    dendrite_on_axon_merges_error_length =NULL: double #the length (in um) of skeleton distance canceled out by filter
    

    # ----- attributes from the axon_features---
    axon_skeleton: <skeleton>      # the skeleton of the axon of the final proofread neuron
    
    axon_volume=NULL: double #volume of the oriented bounding box of axon (divided by 10^14)

    axon_length=NULL: double  # length (in um) of the classified axon skeleton
    axon_branch_length_median=NULL: double  # median branch length (in um) of the classified axon
    axon_branch_length_mean=NULL: double  # mean branch length (in um) of the classified axon

    # number of branches in the axon
    axon_n_branches=NULL: int unsigned  
    axon_n_short_branches=NULL:  int unsigned
    axon_n_long_branches=NULL:  int unsigned
    axon_n_medium_branches=NULL:  int unsigned

    #bounding box features
    axon_bbox_volume=NULL: double
    axon_bbox_x_min=NULL: double 
    axon_bbox_y_min=NULL: double 
    axon_bbox_z_min=NULL: double 
    axon_bbox_x_max=NULL: double 
    axon_bbox_y_max=NULL: double 
    axon_bbox_z_max=NULL: double 

    axon_bbox_x_min_soma_relative=NULL: double 
    axon_bbox_y_min_soma_relative=NULL: double 
    axon_bbox_z_min_soma_relative=NULL: double 
    axon_bbox_x_max_soma_relative=NULL: double 
    axon_bbox_y_max_soma_relative=NULL: double 
    axon_bbox_z_max_soma_relative=NULL: double 


    run_time=NULL : double                   # the amount of time to run (seconds)
    """

    key_source = (minnie.Decomposition()
                  & du.proofreading_segment_id_restriction())

    #key_source = minnie.Decomposition & dict(n_error_limbs = 0,n_same_soma_multi_touching_limbs=0,n_multi_soma_touching_limbs=0) & "n_limbs > 5"

    def make(self, key):
        """
        Pseudocode:
        1) Pull Down all the Neuron Objects associated with a segment_id
        
        For each neuron:
        2) Run the full axon preprocessing
        3) Save off the neuron
        4) Save dict entry to list
        
        
        5) Write the new entry to the table

        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")

        whole_pass_time = time.time()

        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id,
            ignore_DecompositionAxon=True,
            ignore_DecompositionCellType=True)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        #For each neuron:
        dict_to_write = []
        ''' ------ Old way of getting the nucleus info for the manual proofread data -------
        # -------- getting the nuclei info to match
        try:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMap4() & dict(old_segment_id=segment_id)).fetch1()
        except:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMapInh() & dict(old_segment_id=segment_id)).fetch1()
        nucleus_id = segment_map_dict["nucleus_id"]
        nuc_center_coords = du.nuclei_id_to_nucleus_centers(nucleus_id)
        
        nucleus_ids = [nucleus_id]
        nucleus_centers = [nuc_center_coords]
        
        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")'''

        nucleus_ids, nucleus_centers = du.segment_to_nuclei(
            segment_id,
            #nuclei_version=ver
        )

        if verbose:
            print(f"Number of Corresponding Nuclei = {len(nucleus_ids)}")
            print(f"nucleus_ids = {nucleus_ids}")
            print(f"nucleus_centers = {nucleus_centers}")

        for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):

            if verbose:
                print(f"--> Working on Split Index {split_index} -----")

            st = time.time()

            # ------------- Does all of the processing -------------------

            #1) ------ Getting the paired nuclei ------
            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
                neuron_obj,
                "S0",
                nucleus_ids,
                nucleus_centers,
                nuclei_distance_threshold=15000,
                return_matching_info=True,
                verbose=True)

            # else:
            #     winning_nucleus_id = 12345
            #     nucleus_info = dict()
            #     nucleus_info["nucleus_id"] = winning_nucleus_id
            #     nucleus_info["nuclei_distance"] = 0
            #     nucleus_info["n_nuclei_in_radius"] = 1
            #     nucleus_info["n_nuclei_in_bbox"] = 1

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            #2) ------- Finding the Allen Cell Types -------
            allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(
                winning_nucleus_id)
            if verbose:
                print(f"allen_cell_type_info = {allen_cell_type_info}")

            # 3) ---- Doing Baylor Cell Type Classification ---------
            # 3a) --- Adding the synapses and spine labels
            if verbose:
                print(
                    f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n"
                )

            st = time.time()
            if verbose:
                print(f"Adding the synapses and the head_neck_shaft")
            neuron_obj = syu.add_synapses_to_neuron_obj(
                neuron_obj,
                validation=validation,
                verbose=verbose,
                original_mesh=None,
                plot_valid_error_synapses=False,
                calculate_synapse_soma_distance=False,
                add_valid_synapses=True,
                add_error_synapses=False,
            )
            neuron_obj = spu.add_head_neck_shaft_spine_objs(neuron_obj,
                                                            verbose=verbose)
            if verbose:
                print(
                    f"Done adding synapses and head_neck_shaft: {time.time() - st}"
                )

            # 3b) --- Running the stats for Baylor Classification

            filter_time = time.time()
            '''
            limb_branch_dict = ctu.postsyn_branches_near_soma_for_syn_post_density(
                                neuron_obj = neuron_obj,
                               verbose = False,)
            
            (syn_density_post,
             syn_density_head,
             syn_density_neck,
             syn_density_shaft,
             skeletal_length_processed_syn) = ctu.synapse_density_stats(neuron_obj = neuron_obj,
                          limb_branch_dict = limb_branch_dict,
                                            verbose = True)
            
            (spine_density,
             skeletal_length_processed_spine) = ctu.spine_density_near_soma(neuron_obj = neuron_obj,
                                                        verbose = True,
                                                        multiplier = 1000)

            if verbose:
                print(f"Total time for density calculations = {time.time() - filter_time}")

            # 4) ------ Predicting the E/I Group Based on the data collected --------
            
            baylor_cell_type_info = dict(
                        syn_density_post = syn_density_post,
                        syn_density_head = syn_density_head,
                        syn_density_neck = syn_density_neck,
                        syn_density_shaft = syn_density_shaft,
                        skeletal_length_processed_syn=skeletal_length_processed_syn,
                        spine_density=spine_density,
                        skeletal_length_processed_spine = skeletal_length_processed_spine
            )
            
            baylor_e_i = ctu.e_i_classification_single(data=[syn_density_shaft,spine_density],
                              features=["syn_density_shaft","spine_density"],
                             verbose = True,
                              return_label_name = True
                             )
            
            
            '''
            baylor_e_i, baylor_cell_type_info = ctu.e_i_classification_from_neuron_obj(
                neuron_obj, verbose=True, return_cell_type_info=True)

            baylor_cell_type_info["baylor_e_i"] = baylor_e_i

            #5) ----- Deciding on cell type to use for axon
            if inh_exc_class_to_use_for_axon == "Allen" and allen_cell_type_info[
                    "allen_e_i"] is not None:
                e_i_class = allen_cell_type_info["allen_e_i"]
                cell_type_used = "allen"
            else:
                e_i_class = baylor_e_i
                cell_type_used = "baylor"

            if verbose:
                print(
                    f"e_i_class = {e_i_class} with cell_type_used = {cell_type_used}"
                )

            #3) ------ Axon Classification (and getting the axon features)------------------

            o_neuron, filtering_info, axon_angles_dict = au.complete_axon_processing(
                neuron_obj,
                cell_type=e_i_class,
                add_synapses_and_head_neck_shaft_spines=False,
                validation=validation,
                plot_initial_axon=False,
                plot_axon_on_dendrite=False,
                return_filtering_info=True,
                return_axon_angle_info=True,
                verbose=verbose)
            filtering_info = {
                k: np.round(v, 2)
                for k, v in filtering_info.items()
                if "area" in k or "length" in k
            }

            axon_features = au.axon_features_from_neuron_obj(o_neuron)

            # ------- Saving off the neuron object ----------------
            save_time = time.time()
            rand_numb = np.random.randint(0, 3000)
            ret_file_path = o_neuron.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                #output_folder = "./",
                file_name=
                f"{o_neuron.segment_id}_{split_index}_split_axon_v{au.axon_version}_pipe_v{pipe_version}_bcm_{rand_numb}",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")

            # ----------------

            # ---- 8/10 Addition ----------

            if save_axon_skeleton:
                axon_skeleton_file = du.save_proofread_skeleton(
                    o_neuron.axon_skeleton,
                    segment_id=o_neuron.segment_id,
                    split_index=split_index,
                    file_name_ending=
                    f"decomp_cell_type_axon_skeleton_v{pipe_version}_bcm_{rand_numb}"
                )
            else:
                axon_skeleton_file = None

            #---- 8/29 Addition: Will compute the soma center of the mesh in nm ---
            soma_x_nm, soma_y_nm, soma_z_nm = neuron_obj["S0"].mesh_center
            if verbose:
                print(
                    f"soma_x_nm, soma_y_nm, soma_z_nm = {soma_x_nm, soma_y_nm, soma_z_nm}"
                )

            n_dict = dict(
                key,
                split_index=split_index,
                axon_version=au.axon_version,
                decomposition=ret_file_path_str,
                run_time=np.round(time.time() - st, 2),
                cell_type=e_i_class,
                cell_type_used=cell_type_used,
                axon_skeleton=str(axon_skeleton_file),
                soma_x_nm=soma_x_nm,
                soma_y_nm=soma_y_nm,
                soma_z_nm=soma_z_nm,
                n_syn_pre=neuron_obj.n_synapses_pre,
                n_syn_post=neuron_obj.n_synapses_post,
            )

            dicts_for_update = [
                baylor_cell_type_info, allen_cell_type_info, nucleus_info,
                filtering_info, axon_features, axon_angles_dict
            ]

            for d in dicts_for_update:
                n_dict.update(d)

            self.insert1(n_dict,
                         skip_duplicates=True,
                         allow_direct_insert=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
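
# Hedged query sketch (assumes the table has been populated): cell-type calls can be pulled
# back per segment with the standard DataJoint fetch API, e.g.
#   (DecompositionCellTypeV7() & "cell_type='inhibitory'").fetch(
#       "segment_id", "split_index", "cell_type_used")
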
class Decimation(dj.Computed):
    #     definition = minnie.Decimation.describe(printout=False)

    #key_source = minnie.Mesh.proj() * (minnie.DecimationConfig & 'decimation_ratio=0.25') & minnie.MultiSomaProofread2.proj()
    #key_source = (du.version_to_segment_id_table() - (minnie.Decimation() & dict(decimation_ratio=decimation_ratio_global)).proj())
    key_source = (du.proofreading_segment_id_restriction() -
                  (minnie.Decimation()
                   & dict(decimation_ratio=decimation_ratio_global)).proj()
                  ) & minnie.Mesh.proj()
    #key_source = minnie.Mesh() & dict(segment_id=864691136008592558)

    # Creates hf file at the proper location, returns the filepath of the newly created file
    @classmethod
    def make_file(cls, segment_id, version, decimation_ratio, vertices, faces):
        """Creates hf file at the proper location, returns the filepath of the newly created file"""

        assert vertices.ndim == 2 and vertices.shape[1] == 3
        assert faces.ndim == 2 and faces.shape[1] == 3

        filename = f'{segment_id}_{version}_{int(decimation_ratio*100):02}.h5'
        filepath = os.path.join(external_decimated_mesh_path, filename)
        with h5py.File(filepath, 'w') as hf:
            hf.create_dataset('segment_id', data=segment_id)
            hf.create_dataset('version', data=version)
            hf.create_dataset('decimation_ratio', data=float(decimation_ratio))
            hf.create_dataset('vertices', data=vertices)
            hf.create_dataset('faces', data=faces)

        return filepath
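
    # Example (illustrative, not from the original source): the file written by
    # make_file() can be read back with h5py to recover the decimated mesh,
    # since it stores 'vertices' and 'faces' datasets as shown above.
    #
    #   with h5py.File(filepath, 'r') as hf:
    #       vertices = hf['vertices'][()]                  # (N, 3) float array
    #       faces = hf['faces'][()]                        # (M, 3) int array
    #       ratio = float(hf['decimation_ratio'][()])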

    @classmethod
    def make_entry(cls, segment_id, version, decimation_ratio, vertices,
                   faces):
        key = dict(segment_id=segment_id,
                   version=version,
                   decimation_ratio=decimation_ratio,
                   n_vertices=len(vertices),
                   n_faces=len(faces))

        filepath = cls.make_file(segment_id, version, decimation_ratio,
                                 vertices, faces)

        cls.insert1(dict(key, mesh=filepath), allow_direct_insert=True)

    def make(self, key):
        print(key)
        segment_id = key['segment_id']
        # -------- 1/27: Inserting the data into Mesh and Segment first ----- #
        if perform_mesh_segment_insert:
            #2) Manually add segment ids to segment tables
            minnie.Segment.insert1(dict(segment_id=segment_id),
                                   skip_duplicates=True)

            #3) Fill in the Mesh Table
            du.fill_from_ids(segment_ids=[segment_id])

            if verbose:
                print("Done Inserting Mesh")

        if "version" not in key.keys():
            version = decimation_version

        if "decimation_ratio" not in key.keys():
            decimation_ratio = decimation_ratio_global

        mesh = (minnie.Mesh & key).fetch1('mesh')

        print(
            f"Mesh size: n_vertices = {len(mesh.vertices)}, n_faces = {len(mesh.faces)}"
        )

        if decimation_ratio not in meshlab_scripts:
            meshlab_scripts[decimation_ratio] = meshlab.Decimator(
                decimation_ratio, temporary_folder, overwrite=False)
        mls_func = meshlab_scripts[decimation_ratio]

        try:
            expected_filepath = os.path.join(external_decimated_mesh_path,
                                             f'{segment_id}_{version}.h5')
            if not os.path.isfile(expected_filepath):
                new_mesh, _path = mls_func(mesh.vertices, mesh.faces,
                                           segment_id)
                new_vertices, new_faces = new_mesh.vertices, new_mesh.faces

                self.make_entry(
                    segment_id=segment_id,
                    version=version,
                    decimation_ratio=decimation_ratio,
                    vertices=new_vertices,
                    faces=new_faces,
                )
            else:
                print('File already exists.')
                with h5py.File(expected_filepath, 'r') as hf:
                    vertices = hf['vertices'][()].astype(np.float64)
                    faces = hf['faces'][()].reshape(-1, 3).astype(np.uint32)
                self.insert1(dict(key,
                                  n_vertices=len(vertices),
                                  n_faces=len(faces),
                                  mesh=expected_filepath),
                             allow_direct_insert=True)
        except Exception as e:
            minnie.DecimationError.insert1(dict(key, log=str(e)))
            print(e)
            raise e
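
# Worked example (illustrative) of the file naming convention in make_file():
# with version 0 and decimation_ratio 0.25, segment 864691136008592558 would be
# written to
#
#   os.path.join(external_decimated_mesh_path, '864691136008592558_0_25.h5')
#
# since int(0.25 * 100) formats as '25' under the '{:02}' specifier.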
class AutoProofreadNeurons7(dj.Computed):
    definition = djt.AutoProofreadNeurons7

    key_source = (
        minnie.DecompositionCellTypeV7() & "n_syn_pre + n_syn_post > 0"
        & du.proofreading_segment_id_restriction()
        #& dict(segment_id=864691134884836602) #worked for excitatory
        #& dict(segment_id=864691135012928150)
    ) & (minnie.ManualCellTypesAllen()
         & "table_name != 'allen_v1_column_types_slanted'").proj()

    def make(self, key):
        """
        Pseudocode:
        1) Pull Down All of the Neurons
        2) Get the nucleus centers and the original mesh

        """

        whole_pass_time = time.time()

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]
        split_index = key["split_index"]

        if verbose:
            print(
                f"\n\n------- Working on Neuron {segment_id}_{split_index} -----"
            )

        cell_type, nucleus_id = (minnie.DecompositionCellTypeV7()
                                 & key).fetch1(f"{cell_type_used}_e_i",
                                               "nucleus_id")

        if verbose:
            print(f"---- Working on Neuron {segment_id}:{split_index}")
            print(f"nucleus_id = {nucleus_id},cell_type = {cell_type}")

        neuron_obj = du.decomposition_with_spine_recalculation(
            segment_id, split_index=split_index)

        if plot_data:
            nviz.plot_axon(neuron_obj)

        #2) Running the neuron proofreading
        neuron_obj_proof, filtering_info = pru.proofread_neuron_full(
            neuron_obj,

            # arguments for processing down in DecompositionCellTypeV7
            cell_type=cell_type,
            add_valid_synapses=False,
            validation=validation,
            add_spines=False,
            perform_axon_processing=False,
            return_after_axon_processing=False,

            #arguments for processing after DecompositionCellTypeV7 to Proofread Neuron
            plot_head_neck_shaft_synapses=plot_data,
            plot_soma_synapses=plot_data,
            proofread_verbose=proofread_verbose,
            verbose_outline=verbose,
            plot_limb_branch_filter_with_disconnect_effect=plot_data,
            plot_final_filtered_neuron=False,
            plot_synapses_after_proofread=False,
            plot_compartments=plot_data,
            plot_valid_synapses=plot_data,
            plot_error_synapses=plot_data,
            verbose=verbose,
            debug_time=verbose,
        )

        #3) Collect and Write Data to Synapse Table
        dj_keys_valid = syu.synapses_to_dj_keys(neuron_obj_proof,
                                                valid_synapses=True,
                                                verbose=verbose,
                                                nucleus_id=nucleus_id,
                                                split_index=split_index)

        dj_keys_error = syu.synapses_to_dj_keys(neuron_obj_proof,
                                                valid_synapses=False,
                                                verbose=verbose,
                                                nucleus_id=nucleus_id,
                                                split_index=split_index)

        if verbose:
            print(f"n_synapses_total = {neuron_obj_proof.n_synapses_total}")

        AutoProofreadSynapse7.insert(dj_keys_valid, skip_duplicates=True)
        AutoProofreadSynapseErrors7.insert(dj_keys_error, skip_duplicates=True)

        #4) Collect and Write Neuron Stats
        limb_branch_to_cancel = pru.extract_from_filter_info(
            filtering_info, name_to_extract="limb_branch_dict_to_cancel")

        red_blue_suggestions = pru.extract_from_filter_info(
            filtering_info, name_to_extract="red_blue_suggestions")

        filter_key = {
            k: np.round(v, 2)
            for k, v in filtering_info.items() if "area" in k or "length" in k
        }
        mesh_skeleton_file_paths = pru.save_off_meshes_skeletons(
            neuron_obj_proof,
            verbose=False,
            split_index=key["split_index"],
            file_name_ending=f"proofv{proof_version}")

        neuron_stats_dict = dict(
            key,
            proof_version=proof_version,
            limb_branch_to_cancel=limb_branch_to_cancel,
            red_blue_suggestions=red_blue_suggestions,
        )

        neuron_stats_dict.update(mesh_skeleton_file_paths)
        neuron_stats_dict.update(filter_key)

        #--------- 12/8: Adding the neuron_graph object that will be retrieved later ----
        if save_G_with_attrs:
            G = ctcu.G_with_attrs_from_neuron_obj(neuron_obj_proof,
                                                  plot_G=False)
            G_path = ctcu.save_G_with_attrs(G,
                                            segment_id=segment_id,
                                            split_index=split_index)
            if verbose:
                print(f"Saved G_path = {G_path}")
            neuron_stats_dict["neuron_graph"] = G_path

        AutoProofreadStats7.insert1(neuron_stats_dict, skip_duplicates=True)

        # 5) Collecting Stats for the AutoProofreadNeurons7 table

        #a) Neuron basics
        if verbose:
            print(f"\n--5a) Neuron basics")
        dicts_to_update = []

        multiplicity = du.multiplicity_from_segment_id(segment_id)
        soma_x, soma_y, soma_z = nru.soma_centers(neuron_obj,
                                                  soma_name="S0",
                                                  voxel_adjustment=True)

        basic_cell_dict = dict(multiplicity=multiplicity,
                               soma_x=soma_x,
                               soma_y=soma_y,
                               soma_z=soma_z,
                               cell_type=cell_type,
                               cell_type_used=cell_type_used)
        dicts_to_update.append(basic_cell_dict)

        #b) Neuron Overall Statistics
        if verbose:
            print(f"\n--5b) Neuron Overall Statistics")
        neuron_stats_dict = neuron_obj_proof.neuron_stats(
            stats_to_ignore=["axon_length", "axon_area"])
        dicts_to_update.append(neuron_stats_dict)

        #c) compartment Stats
        if verbose:
            print(f"\n--5c) compartment Stats")
        comp_stats = apu.compartments_stats(neuron_obj_proof,
                                            compartment_labels=None,
                                            verbose=False)
        dicts_to_update.append(comp_stats)

        #d) Synapse Stats
        if verbose:
            print(f"\n--5d) Synapse Stats")
        syn_stats = syu.complete_n_synapses_analysis(neuron_obj_proof)
        dicts_to_update.append(syn_stats)

        #e) Cell Typing Info after proofreading
        if verbose:
            print(f"\n--5e) Cell Typing Info after proofreading")
        baylor_e_i, baylor_cell_type_info = ctu.e_i_classification_from_neuron_obj(
            neuron_obj_proof, verbose=False, return_cell_type_info=True)

        baylor_cell_type_info["baylor_e_i"] = baylor_e_i
        baylor_cell_type_info = {
            f"{k}_after_proof": v
            for k, v in baylor_cell_type_info.items()
        }
        dicts_to_update.append(baylor_cell_type_info)

        #f) Compartment skeleton features (axon / apical / basal / dendrite)

        if verbose:
            print(f"\n--5f) Compartment skeleton features")
        axon_feature_dict = au.axon_features_from_neuron_obj(
            neuron_obj_proof, features_to_exclude=("length", "n_branches"))

        apical_feature_dict = apu.compartment_features_from_skeleton_and_soma_center(
            neuron_obj_proof,
            compartment_label="apical_total",
            name_prefix="apical",
            features_to_exclude=("length", "n_branches"),
        )

        basal_feature_dict = apu.compartment_features_from_skeleton_and_soma_center(
            neuron_obj_proof,
            compartment_label="basal",
            name_prefix="basal",
            features_to_exclude=("length", "n_branches"),
        )

        dendrite_feature_dict = apu.compartment_features_from_skeleton_and_soma_center(
            neuron_obj_proof,
            compartment_label="dendrite",
            name_prefix="dendrite",
            features_to_exclude=("length", "n_branches"),
        )

        dicts_to_update += [
            axon_feature_dict, apical_feature_dict, basal_feature_dict,
            dendrite_feature_dict
        ]

        #g) Repeating old features from DecompositionCellType table

        if verbose:
            print(
                f"\n--5g) Repeating old features from DecompositionCellTypeV7 table"
            )

        decomp_cell_type_features = [
            "nucleus_id",
            "nuclei_distance",
            "n_nuclei_in_radius",
            "n_nuclei_in_bbox",
            "soma_x_nm",
            "soma_y_nm",
            "soma_z_nm",
            "baylor_e_i",
            "allen_e_i",
            "cell_type_used",
            "cell_type",
            "axon_angle_max",
            "axon_angle_min",
            "n_axon_angles",
            "allen_e_i_n_nuc",
            "allen_cell_type",
            "allen_cell_type_n_nuc",
            "allen_cell_type_e_i",
        ]

        decomp_dict = (minnie.DecompositionCellTypeV7() & key).fetch(
            *decomp_cell_type_features, as_dict=True)[0]
        decomp_dict["cell_type_used_for_axon"] = decomp_dict["cell_type_used"]
        decomp_dict["cell_type_for_axon"] = decomp_dict["cell_type"]
        del decomp_dict["cell_type_used"]
        del decomp_dict["cell_type"]

        dicts_to_update.append(decomp_dict)

        if plot_data:
            nviz.plot_compartments(neuron_obj_proof)

        #h) Writing the Data

        if verbose:
            print(f"\n--5h) Writing the Data")

        neuron_proof_dict = dict(key,
                                 proof_version=proof_version,
                                 run_time=np.round(
                                     time.time() - whole_pass_time, 2))

        for d_u in dicts_to_update:
            neuron_proof_dict.update(d_u)

        AutoProofreadNeurons7.insert1(neuron_proof_dict, skip_duplicates=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
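
# Illustrative query sketch (not from the original source): after populate()
# has run, the valid synapses written above can be pulled back per segment;
# the segment_id here is one of the examples commented out in key_source.
#
#   valid_syn = (AutoProofreadSynapse7
#                & dict(segment_id=864691134884836602)).fetch(as_dict=True)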
class BaylorSegmentCentroid(dj.Computed):
    definition="""
    -> minnie.Decimation.proj(decimation_version='version')
    soma_index : tinyint unsigned #index given to this soma to account for multiple somas in one base segment
    ver : decimal(6,2) #the version number of the materialization
    ---
    centroid_x=NULL           : int unsigned                 # (EM voxels)
    centroid_y=NULL           : int unsigned                 # (EM voxels)
    centroid_z=NULL           : int unsigned                 # (EM voxels)
    n_vertices=NULL           : bigint                 #number of vertices
    n_faces=NULL            : bigint                  #number of faces
    mesh: <somas>  #datajoint adapter to get the somas mesh objects
    multiplicity=NULL         : tinyint unsigned             # the number of somas found for this base segment
    sdf=NULL                  : double                       # sdf width value for the soma
    volume=NULL               : double                       # the volume of the convex hull in billions of nm^3 (10^9 nm^3)
    max_side_ratio=NULL       : double                       # the maximum of the side length ratios used for check if soma
    bbox_volume_ratio=NULL    : double                       # ratio of bbox (axis aligned) volume to mesh volume to use for check if soma
    max_hole_length=NULL      : double                    #euclidean distance of the maximum hole size
    run_time=NULL : double                   # the amount of time to run (seconds)

    """

    key_source = ((minnie.Decimation & f"n_vertices > {verts_min}").proj(decimation_version='version')
                  & f"decimation_version={decimation_version}"
                  & f"decimation_ratio={decimation_ratio}"
                  ) & du.proofreading_segment_id_restriction()

    def make(self,key):
        """
        Pseudocode: 
        1) Compute all of the soma meshes (and their centers) from the decimated mesh
        2) Save the mesh as an h5 py file
        3) Store the saved path as the decomposition part of the dictionary and erase the vertices and faces
        4) Insert
        
        
        """
        
        #get the mesh data
        print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----")
        print(key)
        new_mesh = (minnie.Decimation() & key).fetch1("mesh")
        current_mesh_verts,current_mesh_faces = new_mesh.vertices,new_mesh.faces

        segment_id = key["segment_id"]

        (total_soma_list,
         run_time,
         total_soma_list_sdf,
         glia_pieces,
         nuclei_pieces) = sm.extract_soma_center(
            segment_id,
            current_mesh_verts,
            current_mesh_faces,
            return_glia_nuclei_pieces=True,
        )
        
        # -------- 1/9 Addition: Going to save off the glia and nuclei pieces ----------- #
        """
        Psuedocode:
        For both glia and nuclie pieces
        1) If the length of array is greater than 0 --> combine the mesh and map the indices to original mesh
        2) If not then just put None     
        """
        orig_mesh = trimesh.Trimesh(vertices=current_mesh_verts,
                                   faces=current_mesh_faces)
        
        if len(glia_pieces)>0:
            glia_faces = tu.original_mesh_faces_map(orig_mesh,tu.combine_meshes(glia_pieces))
            n_glia_faces = len(glia_faces)
        else:
            glia_faces = None
            n_glia_faces = 0
            
        if len(nuclei_pieces)>0:
            nuclei_faces = tu.original_mesh_faces_map(orig_mesh,tu.combine_meshes(nuclei_pieces))
            n_nuclei_faces = len(nuclei_faces)
        else:
            nuclei_faces = None
            n_nuclei_faces = 0
            
        # --------- saving off the nuclei and glia files --------- #
        glia_path,nuclei_path = du.save_glia_nuclei_files(glia_faces=glia_faces,
                                 nuclei_faces=nuclei_faces,
                                 segment_id=segment_id)
        
        print(f" glia_path = {glia_path} \n nuclei_path = {nuclei_path}")
            
        glia_nuclei_key = dict(key,
                               ver=current_version,
                               n_glia_faces=n_glia_faces,
                               #glia_faces = glia_faces,
                               glia_faces = glia_path,
                               n_nuclei_faces = n_nuclei_faces,
                               #nuclei_faces = nuclei_faces
                               nuclei_faces = nuclei_path,
                              )
        
        NeuronGliaNuclei.insert1(glia_nuclei_key,replace=True)
        print(f"Finished saving off glia and nuclei information : {glia_nuclei_key}")
        
        # ---------------- End of 1/9 Addition --------------------------------- #
        
        
        
        print(f"Run time was {run_time} \n    total_soma_list = {total_soma_list}"
             f"\n    with sdf values = {total_soma_list_sdf}")
        
        #check if soma list is empty and did not find soma
        if len(total_soma_list) <= 0:
            print("There were no somas found for this mesh so just writing empty data")
            

            returned_file_path = tu.write_h5_file(
                vertices=np.array([]),
                faces=np.array([]),
                segment_id=segment_id,
                filename=f'{segment_id}_0.h5',
                filepath=str(du.get_somas_path())
            )

            
            
            insert_dict = dict(key,
                              soma_index=0,
                               ver=current_version,
                              centroid_x=None,
                               centroid_y=None,
                               centroid_z=None,
                               #distance_from_prediction=None,
                               #prediction_matching_index = None,
                               n_vertices=0,
                               n_faces=0,
                               mesh=returned_file_path,
                               multiplicity=0,
                               sdf = None,
                               volume = None,
                               max_side_ratio = None,
                               bbox_volume_ratio = None,
                               max_hole_length=None,
                               run_time=run_time
                              )
            
            #raise Exception("to prevent writing because none were found")
            self.insert1(insert_dict,skip_duplicates=True)
            return
        
        #if there is one or more soma found, get the volume and side length checks
        max_side_ratio =  [np.max(sm.side_length_ratios(m)) for m in total_soma_list]
        bbox_volume_ratio =  [sm.soma_volume_ratio(m) for m in total_soma_list]
        dicts_to_insert = []


        for i,(current_soma,soma_sdf,sz_ratio,vol_ratio) in enumerate(zip(total_soma_list,total_soma_list_sdf,max_side_ratio,bbox_volume_ratio)):
            print("Trying to write off file")
            """ Currently don't need to export the meshes
            current_soma.export(f"{key['segment_id']}/{key['segment_id']}_soma_{i}.off")
            """
            auto_prediction_center = np.mean(current_soma.vertices,axis=0) / np.array([4,4,40])
            auto_prediction_center = auto_prediction_center.astype("int")
            print(f"Predicted Coordinates are {auto_prediction_center}")
            max_hole_length = tu.largest_hole_length(current_soma)
            
            returned_file_path = tu.write_h5_file(
                vertices=current_soma.vertices,
                faces=current_soma.faces,
                segment_id=segment_id,
                filename=f'{segment_id}_{i}.h5',
                filepath=str(du.get_somas_path())
            )



            insert_dict = dict(key,
                              soma_index=i+1,
                               ver=current_version,
                              centroid_x=auto_prediction_center[0],
                               centroid_y=auto_prediction_center[1],
                               centroid_z=auto_prediction_center[2],
                               n_vertices = len(current_soma.vertices),
                               n_faces = len(current_soma.faces),
                               mesh=returned_file_path,
                               multiplicity=len(total_soma_list),
                               sdf = np.round(soma_sdf,3),
                               volume = current_soma.convex_hull.volume/1000000000,
                               max_side_ratio = np.round(sz_ratio,3),
                               bbox_volume_ratio = np.round(vol_ratio,3),
                               max_hole_length = np.round(max_hole_length,3),
                               run_time=np.round(run_time,4)
                              )



            dicts_to_insert.append(insert_dict)
        self.insert(dicts_to_insert,skip_duplicates=True)
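
# Note (an assumption based on the [4, 4, 40] divisor above): centroids are
# stored in EM voxel coordinates at roughly 4 x 4 x 40 nm resolution, so
# converting a stored centroid back to nanometers is an elementwise multiply.
#
#   centroid_nm = np.array([centroid_x, centroid_y, centroid_z]) * np.array([4, 4, 40])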