def make(self, key):
        """
        Pseudocode:
        1) Pull Down all the Neuron Objects associated with a segment_id
        
        For each neuron:
        2) Run the full axon preprocessing
        3) Save off the neuron
        4) Save dict entry to list
        
        
        5) Write the new entry to the table

        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")

        whole_pass_time = time.time()

        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id,
            ignore_DecompositionAxon=True,
            ignore_DecompositionCellType=True)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        #For each neuron:
        dict_to_write = []

        # -------- getting the nuclei info to match
        #         ver = 88
        #         nucleus_ids,nucleus_centers = du.segment_to_nuclei(segment_id,
        #                                                                nuclei_version=ver)
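        # NOTE (assumption): `ver` (the nuclei table version) is expected to be defined at module/notebook scope, as in the commented-out `ver = 88` above.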
        nucleus_ids, nucleus_centers = du.segment_to_nuclei(segment_id,
                                                            nuclei_version=ver)

        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")

        for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):

            if verbose:
                print(f"--> Working on Split Index {split_index} -----")

            st = time.time()

            # ------------- Does all of the processing -------------------

            #1) ------ Getting the paired nuclei ------
            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
                neuron_obj,
                "S0",
                nucleus_ids,
                nucleus_centers,
                nuclei_distance_threshold=15000,
                return_matching_info=True,
                verbose=True)

            # else:
            #     winning_nucleus_id = 12345
            #     nucleus_info = dict()
            #     nucleus_info["nucleus_id"] = winning_nucleus_id
            #     nucleus_info["nuclei_distance"] = 0
            #     nucleus_info["n_nuclei_in_radius"] = 1
            #     nucleus_info["n_nuclei_in_bbox"] = 1

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            #2) ------- Finding the Allen Cell Types -------
            allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(
                winning_nucleus_id)
            if verbose:
                print(f"allen_cell_type_info = {allen_cell_type_info}")

            #4) -------- Running the cell classification and stats--------------

            if verbose:
                print(
                    f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n"
                )

            filter_time = time.time()

            (inh_exc_class, spine_category, axon_angles, n_axons, n_apicals,
             neuron_spine_density, n_branches_processed,
             skeletal_length_processed, n_branches_in_search_radius,
             skeletal_length_in_search_radius
             ) = clu.inhibitory_excitatory_classifier(
                 neuron_obj,
                 return_spine_classification=True,
                 return_axon_angles=True,
                 return_n_axons=True,
                 return_n_apicals=True,
                 return_spine_statistics=True,
                 axon_limb_branch_dict_precomputed=None,
                 axon_angles_precomputed=None,
                 verbose=verbose)
            if verbose:
                print(
                    f"Total time for classification = {time.time() - filter_time}"
                )

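            # Flatten the nested {limb: {candidate: angle}} axon-angle structure so the maximum can be taken across all candidates.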
            all_axon_angles = []
            for limb_idx, limb_data in axon_angles.items():
                for candidate_idx, cand_angle in limb_data.items():
                    all_axon_angles.append(cand_angle)

            if len(all_axon_angles) > 0:
                axon_angle_maximum = np.max(all_axon_angles)
            else:
                axon_angle_maximum = 0

            if verbose:
                print("\n -- Cell Type Classification Results --")
                print(f"inh_exc_class={inh_exc_class}")
                print(f"spine_category={spine_category}")
                print(f"axon_angles={axon_angles}")
                print(f"n_axons={n_axons}")
                print(f"n_apicals={n_apicals}")
                print(f"neuron_spine_density={neuron_spine_density}")
                print(f"n_branches_processed={n_branches_processed}")
                print(f"skeletal_length_processed={skeletal_length_processed}")
                print(
                    f"n_branches_in_search_radius={n_branches_in_search_radius}"
                )
                print(
                    f"skeletal_length_in_search_radius={skeletal_length_in_search_radius}"
                )

            baylor_cell_type_info = dict(
                cell_type_predicted=inh_exc_class,
                spine_category=spine_category,
                axon_angle_maximum=axon_angle_maximum,
                n_axons=n_axons,
                n_apicals=n_apicals,
                spine_density_classifier=neuron_spine_density,
                n_branches_processed=n_branches_processed,
                skeletal_length_processed=skeletal_length_processed,
                n_branches_in_search_radius=n_branches_in_search_radius,
                skeletal_length_in_search_radius=
                skeletal_length_in_search_radius,
            )

            #5) ----- Deciding on cell type to use for axon
            e_i_class = inh_exc_class
            if inh_exc_class_to_use_for_axon == "Allen" and allen_cell_type_info[
                    "e_i"] is not None:
                e_i_class = allen_cell_type_info["e_i"]

            if verbose:
                print(
                    f"e_i_class = {e_i_class} with inh_exc_class_to_use_for_axon = {inh_exc_class_to_use_for_axon}"
                )

            #6) -------- If excitatory running the axon processing--------------
            """
            Pseudocode:
            If e_i class is excitatory:
            1) Filter away the axon on dendrite
            2) Do the higher fidelity axon processing
            3) Compute the axon features

            """

            if e_i_class == "excitatory" and neuron_obj.axon_limb_name is not None:
                if verbose:
                    print(
                        f"Excitatory so performing high fidelity axon and computing axon features"
                    )
            #     1) Filter away the axon on dendrite
            #     2) Do the higher fidelity axon processing

                o_neuron, filtering_info = au.complete_axon_processing(
                    neuron_obj,
                    perform_axon_classification=False,
                    return_filtering_info=True)
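                # Keep only the area/length entries of the filtering statistics, rounded to 2 decimal places.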
                filtering_info = {
                    k: np.round(v, 2)
                    for k, v in filtering_info.items()
                    if "area" in k or "length" in k
                }
                #3) Compute the axon features
                axon_features = au.axon_features_from_neuron_obj(o_neuron)
            else:
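                # Not classified excitatory (or no axon limb identified): clear any existing axon labels and leave the axon features / filtering info empty.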
                nru.clear_all_branch_labels(neuron_obj, labels_to_clear="axon")
                o_neuron = neuron_obj
                axon_features = dict()
                filtering_info = dict()

            #3) ------ Adding the Synapses -----------
            o_neuron = syu.add_synapses_to_neuron_obj(
                o_neuron,
                validation=validation,
                verbose=True,
                original_mesh=None,
                plot_valid_error_synapses=False,
                calculate_synapse_soma_distance=False,
                add_valid_synapses=True,
                add_error_synapses=False)

            # ------- Saving off the neuron object ----------------
            save_time = time.time()
            ret_file_path = o_neuron.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                #output_folder = "./",
                file_name=
                f"{o_neuron.segment_id}_{split_index}_split_axon_v{au.axon_version}",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")

            n_dict = dict(
                key,
                split_index=split_index,
                axon_version=au.axon_version,
                decomposition=ret_file_path_str,
                run_time=np.round(time.time() - st, 2),
                cell_type_for_axon=e_i_class,
            )

            dicts_for_update = [
                baylor_cell_type_info, allen_cell_type_info, nucleus_info,
                filtering_info, axon_features
            ]

            for d in dicts_for_update:
                n_dict.update(d)

            self.insert1(n_dict,
                         skip_duplicates=True,
                         allow_direct_insert=True)
            #dict_to_write.append(n_dict)

        # Entries are inserted individually above; the batched write below is disabled.
        #self.insert(dict_to_write,skip_duplicates=True,allow_direct_insert=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
Example #2
    def make(self,key):
        """
        Pseudocode:
        1) Pull Down all the Neuron Objects associated with a segment_id
        
        For each neuron:
        2) Run the full axon preprocessing
        3) Save off the neuron
        4) Save dict entry to list
        
        
        5) Write the new entry to the table

        """
        
        
        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]
        
        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")
        
        whole_pass_time = time.time()
        
        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs,neuron_split_idxs = du.decomposition_with_spine_recalculation(segment_id,
                                                                            ignore_DecompositionAxon=True,
                                                                            ignore_DecompositionCellType = True)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        #For each neuron:
        dict_to_write = []
        
        
        # -------- getting the nuclei info to match
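        # Look up the nucleus id for this segment in the validation segment map; if it is not found there, fall back to the inhibitory map table.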
        try:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMap4() & dict(old_segment_id=segment_id)).fetch1()
        except Exception:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMapInh() & dict(old_segment_id=segment_id)).fetch1()
        nucleus_id = segment_map_dict["nucleus_id"]
        nuc_center_coords = du.nuclei_id_to_nucleus_centers(nucleus_id)
        
        nucleus_ids = [nucleus_id]
        nucleus_centers = [nuc_center_coords]
        
        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")
        
        for split_index,neuron_obj in zip(neuron_split_idxs,neuron_objs):
            
            if verbose:
                print(f"--> Working on Split Index {split_index} -----")
                
            st = time.time()
            
            
            # ------------- Does all of the processing -------------------
            
            #1) ------ Getting the paired nuclei ------
            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(neuron_obj,
                                     "S0",
                                      nucleus_ids,
                                      nucleus_centers,
                                     nuclei_distance_threshold = 15000,
                                      return_matching_info = True,
                                     verbose=True)
            
            # else:
            #     winning_nucleus_id = 12345
            #     nucleus_info = dict()
            #     nucleus_info["nucleus_id"] = winning_nucleus_id
            #     nucleus_info["nuclei_distance"] = 0
            #     nucleus_info["n_nuclei_in_radius"] = 1
            #     nucleus_info["n_nuclei_in_bbox"] = 1

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")
        

            #2) ------- Finding the Allen Cell Types -------
            allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(winning_nucleus_id)
            if verbose:
                print(f"allen_cell_type_info = {allen_cell_type_info}")

            
            #4) -------- Running the cell classification and stats--------------
            
            if verbose:
                print(f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n")
                
                
            filter_time = time.time()
            
            
            #---- adding the synapses and spines data -----#
            neuron_obj_exc_syn = syu.add_synapses_to_neuron_obj(neuron_obj,
                            validation = True,
                            verbose  = True,
                            original_mesh = None,
                            plot_valid_error_synapses = False,
                            calculate_synapse_soma_distance = False,
                            add_valid_synapses = True,
                              add_error_synapses=False,)
            neuron_obj_exc_syn_sp = spu.add_head_neck_shaft_spine_objs(neuron_obj_exc_syn,
                                                                       verbose = True
                                                                      )
            
                
            limb_branch_dict = ctu.postsyn_branches_near_soma_for_syn_post_density(
                                neuron_obj = neuron_obj_exc_syn_sp,
                               verbose = False,)
            
            (syn_density_post,
             syn_density_head,
             syn_density_neck,
             syn_density_shaft,
             skeletal_length_processed_syn) = ctu.synapse_density_stats(neuron_obj = neuron_obj_exc_syn_sp,
                          limb_branch_dict = limb_branch_dict,
                                            verbose = True)
            
            (spine_density,
             skeletal_length_processed_spine) = ctu.spine_density_near_soma(neuron_obj = neuron_obj_exc_syn_sp,
                                                        verbose = True,
                                                        multiplier = 1000)

            if verbose:
                print(f"Total time for density calculations = {time.time() - filter_time}")

            baylor_cell_type_info = dict(
                        syn_density_post = syn_density_post,
                        syn_density_head = syn_density_head,
                        syn_density_neck = syn_density_neck,
                        syn_density_shaft = syn_density_shaft,
                        skeletal_length_processed_syn=skeletal_length_processed_syn,
                        spine_density=spine_density,
                        skeletal_length_processed_spine = skeletal_length_processed_spine
            )

        
            
            n_dict = dict(key,
              split_index = split_index,
              run_time = np.round(time.time() - st,2),
             )
            
            dicts_for_update = [baylor_cell_type_info,
                                allen_cell_type_info,
                                nucleus_info,]
            
            for d in dicts_for_update:
                n_dict.update(d)

            self.insert1(n_dict,skip_duplicates=True,allow_direct_insert=True)

        print(f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***")
Example #3
    def make(self, key):
        """
        Pseudocode:
        1) Pull Down all the Neuron Objects associated with a segment_id
        
        For each neuron:
        2) Run the full axon preprocessing
        3) Save off the neuron
        4) Save dict entry to list
        
        
        5) Write the new entry to the table

        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

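        # Determine the manually assigned E/I label from whichever key-source table contains this segment.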
        if len(key_source_inh & dict(segment_id=segment_id)) > 0:
            manual_e_i = "inhibitory"
        elif len(key_source_exc & dict(segment_id=segment_id)) > 0:
            manual_e_i = "excitatory"
        else:
            raise Exception("Not in exc or inh table")

        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")

        whole_pass_time = time.time()

        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id,
            ignore_DecompositionAxon=True,
            ignore_DecompositionCellType=True)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        #For each neuron:
        dict_to_write = []
        ''' ------ Old way of getting the nucleus info for the manual proofread data -------
        # -------- getting the nuclei info to match
        try:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMap4() & dict(old_segment_id=segment_id)).fetch1()
        except:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMapInh() & dict(old_segment_id=segment_id)).fetch1()
        nucleus_id = segment_map_dict["nucleus_id"]
        nuc_center_coords = du.nuclei_id_to_nucleus_centers(nucleus_id)
        
        nucleus_ids = [nucleus_id]
        nucleus_centers = [nuc_center_coords]
        
        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")'''

        nucleus_ids, nucleus_centers = du.segment_to_nuclei(
            segment_id,
            #nuclei_version=ver
        )

        if verbose:
            print(f"Number of Corresponding Nuclei = {len(nucleus_ids)}")
            print(f"nucleus_ids = {nucleus_ids}")
            print(f"nucleus_centers = {nucleus_centers}")

        for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):

            if verbose:
                print(f"--> Working on Split Index {split_index} -----")

            st = time.time()

            # ------------- Does all of the processing -------------------

            #1) ------ Getting the paired nuclei ------
            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
                neuron_obj,
                "S0",
                nucleus_ids,
                nucleus_centers,
                nuclei_distance_threshold=15000,
                return_matching_info=True,
                verbose=True)

            # else:
            #     winning_nucleus_id = 12345
            #     nucleus_info = dict()
            #     nucleus_info["nucleus_id"] = winning_nucleus_id
            #     nucleus_info["nuclei_distance"] = 0
            #     nucleus_info["n_nuclei_in_radius"] = 1
            #     nucleus_info["n_nuclei_in_bbox"] = 1

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            #2) ------- Finding the Allen Cell Types -------
            allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(
                winning_nucleus_id)
            if verbose:
                print(f"allen_cell_type_info = {allen_cell_type_info}")

            # 3) ---- Doing Baylor Cell Type Classification ---------
            # 3a) --- Adding the synapses and spine labels
            if verbose:
                print(
                    f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n"
                )

            st = time.time()
            if verbose:
                print(f"Adding the synapses and the head_neck_shaft")
            neuron_obj = syu.add_synapses_to_neuron_obj(
                neuron_obj,
                validation=validation,
                verbose=verbose,
                original_mesh=None,
                plot_valid_error_synapses=False,
                calculate_synapse_soma_distance=False,
                add_valid_synapses=True,
                add_error_synapses=False,
            )
            neuron_obj = spu.add_head_neck_shaft_spine_objs(neuron_obj,
                                                            verbose=verbose)
            if verbose:
                print(
                    f"Done adding synapses and head_neck_shaft: {time.time() - st}"
                )

            # 3b) --- Running the stats for Baylor Classification

            filter_time = time.time()
            '''
            limb_branch_dict = ctu.postsyn_branches_near_soma_for_syn_post_density(
                                neuron_obj = neuron_obj,
                               verbose = False,)
            
            (syn_density_post,
             syn_density_head,
             syn_density_neck,
             syn_density_shaft,
             skeletal_length_processed_syn) = ctu.synapse_density_stats(neuron_obj = neuron_obj,
                          limb_branch_dict = limb_branch_dict,
                                            verbose = True)
            
            (spine_density,
             skeletal_length_processed_spine) = ctu.spine_density_near_soma(neuron_obj = neuron_obj,
                                                        verbose = True,
                                                        multiplier = 1000)

            if verbose:
                print(f"Total time for density calculations = {time.time() - filter_time}")

            # 4) ------ Predicting the E/I Group Based on the data collected --------
            
            baylor_cell_type_info = dict(
                        syn_density_post = syn_density_post,
                        syn_density_head = syn_density_head,
                        syn_density_neck = syn_density_neck,
                        syn_density_shaft = syn_density_shaft,
                        skeletal_length_processed_syn=skeletal_length_processed_syn,
                        spine_density=spine_density,
                        skeletal_length_processed_spine = skeletal_length_processed_spine
            )
            
            baylor_e_i = ctu.e_i_classification_single(data=[syn_density_shaft,spine_density],
                              features=["syn_density_shaft","spine_density"],
                             verbose = True,
                              return_label_name = True
                             )
            
            
            '''
            baylor_e_i, baylor_cell_type_info = ctu.e_i_classification_from_neuron_obj(
                neuron_obj,
                verbose=True,
                return_cell_type_info=True,
                return_dendrite_branch_stats=True)

            baylor_cell_type_info["baylor_e_i"] = baylor_e_i

            #5) ----- Deciding on cell type to use for axon
            if inh_exc_class_to_use_for_axon == "Allen" and allen_cell_type_info[
                    "allen_e_i"] is not None:
                e_i_class = allen_cell_type_info["allen_e_i"]
                cell_type_used = "allen"
            else:
                e_i_class = baylor_e_i
                cell_type_used = "baylor"

            if verbose:
                print(
                    f"e_i_class = {e_i_class} with cell_type_used = {cell_type_used}"
                )

            #3) ------ Axon Classification (and getting the axon features)------------------

            o_neuron, filtering_info, axon_angles_dict = au.complete_axon_processing(
                neuron_obj,
                cell_type=e_i_class,
                add_synapses_and_head_neck_shaft_spines=False,
                validation=validation,
                plot_initial_axon=False,
                plot_axon_on_dendrite=False,
                return_filtering_info=True,
                return_axon_angle_info=True,
                verbose=verbose)
            filtering_info = {
                k: np.round(v, 2)
                for k, v in filtering_info.items()
                if "area" in k or "length" in k
            }

            axon_features = au.axon_features_from_neuron_obj(o_neuron)

            # ------- Saving off the neuron object ----------------
            save_time = time.time()
            ret_file_path = o_neuron.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                #output_folder = "./",
                file_name=
                f"{o_neuron.segment_id}_{split_index}_split_axon_v{au.axon_version}_pipe_v6_e_i_val_3",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")

            # ----------------

            # ---- 8/10 Addition ----------
            if save_axon_skeleton:
                axon_skeleton_file = du.save_proofread_skeleton(
                    o_neuron.axon_skeleton,
                    segment_id=o_neuron.segment_id,
                    split_index=split_index,
                    file_name_ending=f"decomp_cell_type_axon_skeleton_e_i_val_3"
                )
            else:
                axon_skeleton_file = None

            #---- 8/29 Addition: Will compute the soma center of the mesh in nm ---
            soma_x_nm, soma_y_nm, soma_z_nm = neuron_obj["S0"].mesh_center
            if verbose:
                print(
                    f"soma_x_nm, soma_y_nm, soma_z_nm = {soma_x_nm, soma_y_nm, soma_z_nm}"
                )

            n_dict = dict(
                key,
                split_index=split_index,
                axon_version=au.axon_version,
                decomposition=ret_file_path_str,
                run_time=np.round(time.time() - st, 2),
                manual_e_i=manual_e_i,
                cell_type=e_i_class,
                cell_type_used=cell_type_used,
                axon_skeleton=str(axon_skeleton_file),
                soma_x_nm=soma_x_nm,
                soma_y_nm=soma_y_nm,
                soma_z_nm=soma_z_nm,
                n_syn_pre=neuron_obj.n_synapses_pre,
                n_syn_post=neuron_obj.n_synapses_post,
            )

            soma_stats_dict = ctu.soma_stats_for_cell_type(neuron_obj)

            dicts_for_update = [
                baylor_cell_type_info, allen_cell_type_info, nucleus_info,
                filtering_info, axon_features, axon_angles_dict,
                soma_stats_dict
            ]

            for d in dicts_for_update:
                n_dict.update(d)

            self.insert1(n_dict,
                         skip_duplicates=True,
                         allow_direct_insert=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
Example #4
    def make(self, key):
        """
        Pseudocode:
        1) Pull Down all the Neuron Objects associated with a segment_id
        
        For each neuron:
        2) Run the full axon preprocessing
        3) Save off the neuron
        4) Save dict entry to list
        
        
        5) Write the new entry to the table

        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")

        whole_pass_time = time.time()

        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id,
            ignore_DecompositionAxon=True,
            ignore_DecompositionCellType=True)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        #For each neuron:
        dict_to_write = []

        # -------- getting the nuclei info to match
        try:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMap4()
                                & dict(old_segment_id=segment_id)).fetch1()
        except Exception:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMapInh()
                                & dict(old_segment_id=segment_id)).fetch1()
        nucleus_id = segment_map_dict["nucleus_id"]
        nuc_center_coords = du.nuclei_id_to_nucleus_centers(nucleus_id)

        nucleus_ids = [nucleus_id]
        nucleus_centers = [nuc_center_coords]

        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")

        for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):

            if verbose:
                print(f"--> Working on Split Index {split_index} -----")

            st = time.time()

            # ------------- Does all of the processing -------------------

            #1) ------ Getting the paired nuclei ------
            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
                neuron_obj,
                "S0",
                nucleus_ids,
                nucleus_centers,
                nuclei_distance_threshold=15000,
                return_matching_info=True,
                verbose=True)

            # else:
            #     winning_nucleus_id = 12345
            #     nucleus_info = dict()
            #     nucleus_info["nucleus_id"] = winning_nucleus_id
            #     nucleus_info["nuclei_distance"] = 0
            #     nucleus_info["n_nuclei_in_radius"] = 1
            #     nucleus_info["n_nuclei_in_bbox"] = 1

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            #2) ------- Finding the Allen Cell Types -------
            allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(
                winning_nucleus_id)
            if verbose:
                print(f"allen_cell_type_info = {allen_cell_type_info}")

            #3) ------ Axon Classification (and getting the axon features)------------------

            import axon_utils as au
            o_neuron, filtering_info, axon_angles_dict = au.complete_axon_processing(
                neuron_obj,
                add_synapses_and_head_neck_shaft_spines=True,
                validation=validation,
                plot_initial_axon=False,
                plot_axon_on_dendrite=False,
                return_filtering_info=True,
                return_axon_angle_info=True,
                verbose=verbose)
            filtering_info = {
                k: np.round(v, 2)
                for k, v in filtering_info.items()
                if "area" in k or "length" in k
            }

            axon_features = au.axon_features_from_neuron_obj(o_neuron)

            #3)------- Running the cell classification and stats--------------

            if verbose:
                print(
                    f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n"
                )

            filter_time = time.time()

            #---- adding the synapses and spines data -----#
            limb_branch_dict = ctu.postsyn_branches_near_soma_for_syn_post_density(
                neuron_obj=o_neuron,
                verbose=False,
            )

            (syn_density_post, syn_density_head, syn_density_neck,
             syn_density_shaft,
             skeletal_length_processed_syn) = ctu.synapse_density_stats(
                 neuron_obj=o_neuron,
                 limb_branch_dict=limb_branch_dict,
                 verbose=True)

            (spine_density,
             skeletal_length_processed_spine) = ctu.spine_density_near_soma(
                 neuron_obj=o_neuron, verbose=True, multiplier=1000)

            if verbose:
                print(
                    f"Total time for density calculations = {time.time() - filter_time}"
                )

            baylor_cell_type_info = dict(
                syn_density_post=syn_density_post,
                syn_density_head=syn_density_head,
                syn_density_neck=syn_density_neck,
                syn_density_shaft=syn_density_shaft,
                skeletal_length_processed_syn=skeletal_length_processed_syn,
                spine_density=spine_density,
                skeletal_length_processed_spine=skeletal_length_processed_spine
            )

            # 4) ------ Predicting the E/I Group Based on the data collected --------
            ################ NEED TO INSERT CODE TO DO THIS ###########
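            # (Assumption) The E/I prediction code was elided in the original; as a plausible
            # completion, reuse ctu.e_i_classification_single on the shaft synapse density and
            # spine density computed above, mirroring the commented-out block in Example #2.
            # This also defines inh_exc_class for the cell-type decision below.
            inh_exc_class = ctu.e_i_classification_single(
                data=[syn_density_shaft, spine_density],
                features=["syn_density_shaft", "spine_density"],
                verbose=True,
                return_label_name=True)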

            #5) ----- Deciding on cell type to use for axon
            e_i_class = inh_exc_class
            if inh_exc_class_to_use_for_axon == "Allen" and allen_cell_type_info[
                    "e_i"] is not None:
                e_i_class = allen_cell_type_info["e_i"]

            if verbose:
                print(
                    f"e_i_class = {e_i_class} with inh_exc_class_to_use_for_axon = {inh_exc_class_to_use_for_axon}"
                )

            # ------- Saving off the neuron object ----------------
            save_time = time.time()
            ret_file_path = o_neuron.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                #output_folder = "./",
                file_name=
                f"{o_neuron.segment_id}_{split_index}_split_axon_v{au.axon_version}",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")

            # ----------------

            n_dict = dict(
                key,
                split_index=split_index,
                axon_version=au.axon_version,
                decomposition=ret_file_path_str,
                run_time=np.round(time.time() - st, 2),
                cell_type=e_i_class,
            )

            dicts_for_update = [
                baylor_cell_type_info, allen_cell_type_info, nucleus_info,
                filtering_info, axon_features, axon_angles_dict
            ]

            for d in dicts_for_update:
                n_dict.update(d)

            self.insert1(n_dict,
                         skip_duplicates=True,
                         allow_direct_insert=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )