def make(self, key):
        """
        Populate one table row per neuron split for this segment.

        Pseudocode:
        1) Pull down all the neuron objects associated with a segment_id
        For each neuron split:
        2) Pair the neuron to a nucleus and fetch the Allen cell-type info
        3) Run the inhibitory/excitatory classification and spine stats
        4) If excitatory, run the high-fidelity axon processing
        5) Add synapses, save off the neuron, and insert the row

        NOTE(review): relies on module-level globals (verbose, ver,
        validation, inh_exc_class_to_use_for_axon, du, nru, ctu, clu,
        au, syu, np, time) presumably defined elsewhere in the file --
        confirm.
        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")

        # Timer for the whole segment (all splits together)
        whole_pass_time = time.time()

        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id,
            ignore_DecompositionAxon=True,
            ignore_DecompositionCellType=True)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        # -------- getting the nuclei info to match --------
        nucleus_ids, nucleus_centers = du.segment_to_nuclei(segment_id,
                                                            nuclei_version=ver)

        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")

        for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):

            if verbose:
                print(f"--> Working on Split Index {split_index} -----")

            # Timer for this split (feeds the run_time column)
            st = time.time()

            # ------------- Does all of the processing -------------------

            #1) ------ Getting the paired nuclei ------
            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
                neuron_obj,
                "S0",
                nucleus_ids,
                nucleus_centers,
                nuclei_distance_threshold=15000,
                return_matching_info=True,
                verbose=True)

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            #2) ------- Finding the Allen Cell Types -------
            allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(
                winning_nucleus_id)
            if verbose:
                print(f"allen_cell_type_info = {allen_cell_type_info}")

            #4) -------- Running the cell classification and stats --------

            if verbose:
                print(
                    f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n"
                )

            filter_time = time.time()

            (inh_exc_class, spine_category, axon_angles, n_axons, n_apicals,
             neuron_spine_density, n_branches_processed,
             skeletal_length_processed, n_branches_in_search_radius,
             skeletal_length_in_search_radius
             ) = clu.inhibitory_excitatory_classifier(
                 neuron_obj,
                 return_spine_classification=True,
                 return_axon_angles=True,
                 return_n_axons=True,
                 return_n_apicals=True,
                 return_spine_statistics=True,
                 axon_limb_branch_dict_precomputed=None,
                 axon_angles_precomputed=None,
                 verbose=verbose)
            if verbose:
                print(
                    f"Total time for classification = {time.time() - filter_time}"
                )

            # Flatten the nested {limb: {candidate: angle}} mapping
            all_axon_angles = []
            for limb_idx, limb_data in axon_angles.items():
                for candidate_idx, cand_angle in limb_data.items():
                    all_axon_angles.append(cand_angle)

            # BUGFIX: guard on the flattened list, not the outer dict --
            # a limb entry with zero candidate angles previously let
            # np.max run on an empty list and raise ValueError
            if len(all_axon_angles) > 0:
                axon_angle_maximum = np.max(all_axon_angles)
            else:
                axon_angle_maximum = 0

            if verbose:
                print("\n -- Cell Type Classification Results --")
                print(f"inh_exc_class={inh_exc_class}")
                print(f"spine_category={spine_category}")
                print(f"axon_angles={axon_angles}")
                print(f"n_axons={n_axons}")
                print(f"n_apicals={n_apicals}")
                print(f"neuron_spine_density={neuron_spine_density}")
                print(f"n_branches_processed={n_branches_processed}")
                print(f"skeletal_length_processed={skeletal_length_processed}")
                print(
                    f"n_branches_in_search_radius={n_branches_in_search_radius}"
                )
                print(
                    f"skeletal_length_in_search_radius={skeletal_length_in_search_radius}"
                )

            baylor_cell_type_info = dict(
                cell_type_predicted=inh_exc_class,
                spine_category=spine_category,
                axon_angle_maximum=axon_angle_maximum,
                n_axons=n_axons,
                n_apicals=n_apicals,
                spine_density_classifier=neuron_spine_density,
                # BUGFIX: was mistakenly assigned neuron_spine_density
                n_branches_processed=n_branches_processed,
                skeletal_length_processed=skeletal_length_processed,
                n_branches_in_search_radius=n_branches_in_search_radius,
                skeletal_length_in_search_radius=
                skeletal_length_in_search_radius,
            )

            #5) ----- Deciding on cell type to use for axon
            # Prefer the Allen label when configured and available;
            # otherwise keep the Baylor classifier output
            e_i_class = inh_exc_class
            if inh_exc_class_to_use_for_axon == "Allen" and allen_cell_type_info[
                    "e_i"] is not None:
                e_i_class = allen_cell_type_info["e_i"]

            if verbose:
                print(
                    f"e_i_class = {e_i_class} with inh_exc_class_to_use_for_axon = {inh_exc_class_to_use_for_axon}"
                )

            #6) -------- If excitatory running the axon processing --------
            # If e_i class is excitatory:
            # 1) Filter away the axon on dendrite
            # 2) Do the higher fidelity axon processing
            # 3) Compute the axon features
            if e_i_class == "excitatory" and neuron_obj.axon_limb_name is not None:
                if verbose:
                    print(
                        f"Excitatory so performing high fidelity axon and computing axon features"
                    )

                o_neuron, filtering_info = au.complete_axon_processing(
                    neuron_obj,
                    perform_axon_classification=False,
                    return_filtering_info=True)
                # Keep only the rounded area/length filtering stats
                filtering_info = {
                    k: np.round(v, 2)
                    for k, v in filtering_info.items()
                    if "area" in k or "length" in k
                }
                #3) Compute the axon features
                axon_features = au.axon_features_from_neuron_obj(o_neuron)
            else:
                # Inhibitory (or no axon limb): strip any axon labels and
                # insert empty feature/filter dictionaries
                nru.clear_all_branch_labels(neuron_obj, labels_to_clear="axon")
                o_neuron = neuron_obj
                axon_features = dict()
                filtering_info = dict()

            #3) ------ Adding the Synapses -----------
            o_neuron = syu.add_synapses_to_neuron_obj(
                o_neuron,
                validation=validation,
                verbose=True,
                original_mesh=None,
                plot_valid_error_synapses=False,
                calculate_synapse_soma_distance=False,
                add_valid_synapses=True,
                add_error_synapses=False)

            # ------- Saving off the neuron object ----------------
            save_time = time.time()
            ret_file_path = o_neuron.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                file_name=
                f"{o_neuron.segment_id}_{split_index}_split_axon_v{au.axon_version}",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")

            n_dict = dict(
                key,
                split_index=split_index,
                axon_version=au.axon_version,
                decomposition=ret_file_path_str,
                run_time=np.round(time.time() - st, 2),
                cell_type_for_axon=e_i_class,
            )

            # Merge every computed stats dictionary into the row
            dicts_for_update = [
                baylor_cell_type_info, allen_cell_type_info, nucleus_info,
                filtering_info, axon_features
            ]

            for d in dicts_for_update:
                n_dict.update(d)

            self.insert1(n_dict,
                         skip_duplicates=True,
                         allow_direct_insert=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
Example #2
0
    def make(self, key):
        """
        Populate one table row per neuron split for this segment.

        Pseudocode:
        1) Pull down all the neuron objects associated with a segment_id
        For each neuron split:
        2) Pair the neuron to a nucleus and fetch the Allen cell-type info
        3) Run full axon processing and compute axon features
        4) Compute synapse/spine density stats near the soma
        5) Decide the E/I cell type, save the neuron, insert the row

        NOTE(review): relies on module-level globals (verbose, validation,
        inh_exc_class_to_use_for_axon, minnie, du, nru, ctu, syu, np, time)
        presumably defined elsewhere in the file -- confirm.
        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")

        # Timer for the whole segment (all splits together)
        whole_pass_time = time.time()

        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id,
            ignore_DecompositionAxon=True,
            ignore_DecompositionCellType=True)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        #For each neuron:
        dict_to_write = []  # NOTE(review): unused; rows are inserted one at a time below

        # -------- getting the nuclei info to match
        # Map the old segment id to its nucleus via the excitatory
        # validation segment-map table, falling back to the inhibitory one.
        # NOTE(review): bare except also swallows non-lookup errors (e.g.
        # connection failures) -- consider narrowing.
        try:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMap4()
                                & dict(old_segment_id=segment_id)).fetch1()
        except:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMapInh()
                                & dict(old_segment_id=segment_id)).fetch1()
        nucleus_id = segment_map_dict["nucleus_id"]
        nuc_center_coords = du.nuclei_id_to_nucleus_centers(nucleus_id)

        # Wrapped in single-element lists so the nucleus-pairing call below
        # can treat this the same as the multi-nucleus case
        nucleus_ids = [nucleus_id]
        nucleus_centers = [nuc_center_coords]

        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")

        for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):

            if verbose:
                print(f"--> Working on Split Index {split_index} -----")

            # Timer for this split (feeds the run_time column)
            st = time.time()

            # ------------- Does all of the processing -------------------

            #1) ------ Getting the paired nuclei ------
            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
                neuron_obj,
                "S0",
                nucleus_ids,
                nucleus_centers,
                nuclei_distance_threshold=15000,
                return_matching_info=True,
                verbose=True)

            # else:
            #     winning_nucleus_id = 12345
            #     nucleus_info = dict()
            #     nucleus_info["nucleus_id"] = winning_nucleus_id
            #     nucleus_info["nuclei_distance"] = 0
            #     nucleus_info["n_nuclei_in_radius"] = 1
            #     nucleus_info["n_nuclei_in_bbox"] = 1

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            #2) ------- Finding the Allen Cell Types -------
            allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(
                winning_nucleus_id)
            if verbose:
                print(f"allen_cell_type_info = {allen_cell_type_info}")

            #3) ------ Axon Classification (and getting the axon features)------------------

            # Function-level import; presumably shadows/refreshes any
            # module-level `au` -- confirm intent
            import axon_utils as au
            o_neuron, filtering_info, axon_angles_dict = au.complete_axon_processing(
                neuron_obj,
                add_synapses_and_head_neck_shaft_spines=True,
                validation=validation,
                plot_initial_axon=False,
                plot_axon_on_dendrite=False,
                return_filtering_info=True,
                return_axon_angle_info=True,
                verbose=verbose)
            # Keep only the rounded area/length filtering stats
            filtering_info = {
                k: np.round(v, 2)
                for k, v in filtering_info.items()
                if "area" in k or "length" in k
            }

            axon_features = au.axon_features_from_neuron_obj(o_neuron)

            #3)------- Running the cell classification and stats--------------

            if verbose:
                print(
                    f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n"
                )

            filter_time = time.time()

            #---- adding the synapses and spines data -----#
            # Branches near the soma used for postsynaptic density stats
            limb_branch_dict = ctu.postsyn_branches_near_soma_for_syn_post_density(
                neuron_obj=o_neuron,
                verbose=False,
            )

            (syn_density_post, syn_density_head, syn_density_neck,
             syn_density_shaft,
             skeletal_length_processed_syn) = ctu.synapse_density_stats(
                 neuron_obj=o_neuron,
                 limb_branch_dict=limb_branch_dict,
                 verbose=True)

            (spine_density,
             skeletal_length_processed_spine) = ctu.spine_density_near_soma(
                 neuron_obj=o_neuron, verbose=True, multiplier=1000)

            if verbose:
                print(
                    f"Total time for density calculations = {time.time() - filter_time}"
                )

            baylor_cell_type_info = dict(
                syn_density_post=syn_density_post,
                syn_density_head=syn_density_head,
                syn_density_neck=syn_density_neck,
                syn_density_shaft=syn_density_shaft,
                skeletal_length_processed_syn=skeletal_length_processed_syn,
                spine_density=spine_density,
                skeletal_length_processed_spine=skeletal_length_processed_spine
            )

            # 4) ------ Predicting the E/I Group Based on the data collected --------
            ################ NEED TO INSERT CODE TO DO THIS ###########

            #5) ----- Deciding on cell type to use for axon
            # NOTE(review): `inh_exc_class` is never assigned in this method
            # (the E/I prediction step above is still missing), so this line
            # raises NameError at runtime unless a module-level
            # `inh_exc_class` exists -- confirm before running.
            e_i_class = inh_exc_class
            if inh_exc_class_to_use_for_axon == "Allen" and allen_cell_type_info[
                    "e_i"] is not None:
                e_i_class = allen_cell_type_info["e_i"]

            if verbose:
                print(
                    f"e_i_class = {e_i_class} with inh_exc_class_to_use_for_axon = {inh_exc_class_to_use_for_axon}"
                )

            # ------- Saving off the neuron object ----------------
            save_time = time.time()
            ret_file_path = o_neuron.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                #output_folder = "./",
                file_name=
                f"{o_neuron.segment_id}_{split_index}_split_axon_v{au.axon_version}",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            # save_compressed_neuron compresses to a .pbz2 archive
            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")

            # ----------------

            n_dict = dict(
                key,
                split_index=split_index,
                axon_version=au.axon_version,
                decomposition=ret_file_path_str,
                run_time=np.round(time.time() - st, 2),
                cell_type=e_i_class,
            )

            # Merge every computed stats dictionary into the row
            dicts_for_update = [
                baylor_cell_type_info, allen_cell_type_info, nucleus_info,
                filtering_info, axon_features, axon_angles_dict
            ]

            for d in dicts_for_update:
                n_dict.update(d)

            self.insert1(n_dict,
                         skip_duplicates=True,
                         allow_direct_insert=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
    def make(self, key):
        """
        Populate one table row per neuron split: nucleus pairing, Allen and
        Baylor cell-type classification, axon processing, and soma stats.

        Pseudocode:
        1) Pull down all the neuron objects associated with a segment_id
        For each neuron split:
        2) Pair the neuron to a nucleus and fetch the Allen cell-type info
        3) Add synapses / spine labels and run the Baylor E/I classifier
        4) Choose the cell type (Allen vs Baylor) to drive axon processing
        5) Run full axon processing, save artifacts, and insert the row

        NOTE(review): relies on module-level globals (verbose, validation,
        key_source_inh, key_source_exc, inh_exc_class_to_use_for_axon,
        save_axon_skeleton, du, nru, ctu, au, syu, spu, np, time)
        presumably defined elsewhere in the file -- confirm.
        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        # The manual ground-truth E/I label is determined by which key
        # source table contains this segment
        if len(key_source_inh & dict(segment_id=segment_id)) > 0:
            manual_e_i = "inhibitory"
        elif len(key_source_exc & dict(segment_id=segment_id)) > 0:
            manual_e_i = "excitatory"
        else:
            raise Exception("Not in exc or inh table")

        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")

        # Timer for the whole segment (all splits together)
        whole_pass_time = time.time()

        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id,
            ignore_DecompositionAxon=True,
            ignore_DecompositionCellType=True)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        # -------- getting the nuclei info to match --------
        nucleus_ids, nucleus_centers = du.segment_to_nuclei(
            segment_id,
            #nuclei_version=ver
        )

        if verbose:
            print(f"Number of Corresponding Nuclei = {len(nucleus_ids)}")
            print(f"nucleus_ids = {nucleus_ids}")
            print(f"nucleus_centers = {nucleus_centers}")

        for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):

            if verbose:
                print(f"--> Working on Split Index {split_index} -----")

            # Timer for this split (feeds the run_time column)
            st = time.time()

            # ------------- Does all of the processing -------------------

            #1) ------ Getting the paired nuclei ------
            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
                neuron_obj,
                "S0",
                nucleus_ids,
                nucleus_centers,
                nuclei_distance_threshold=15000,
                return_matching_info=True,
                verbose=True)

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            #2) ------- Finding the Allen Cell Types -------
            allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(
                winning_nucleus_id)
            if verbose:
                print(f"allen_cell_type_info = {allen_cell_type_info}")

            # 3) ---- Doing Baylor Cell Type Classification ---------
            # 3a) --- Adding the synapses and spine labels
            if verbose:
                print(
                    f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n"
                )

            # BUGFIX: use a dedicated timer here instead of reassigning
            # `st` -- previously `st` was clobbered, so the run_time column
            # silently excluded the nucleus-pairing/Allen-lookup work above
            # (the other make() variants in this file time the whole split)
            syn_add_time = time.time()
            if verbose:
                print(f"Adding the synapses and the head_neck_shaft")
            neuron_obj = syu.add_synapses_to_neuron_obj(
                neuron_obj,
                validation=validation,
                verbose=verbose,
                original_mesh=None,
                plot_valid_error_synapses=False,
                calculate_synapse_soma_distance=False,
                add_valid_synapses=True,
                add_error_synapses=False,
            )
            neuron_obj = spu.add_head_neck_shaft_spine_objs(neuron_obj,
                                                            verbose=verbose)
            if verbose:
                print(
                    f"Done adding synapses and head_neck_shaft: {time.time() - syn_add_time}"
                )

            # 3b) --- Running the stats for Baylor Classification
            baylor_e_i, baylor_cell_type_info = ctu.e_i_classification_from_neuron_obj(
                neuron_obj,
                verbose=True,
                return_cell_type_info=True,
                return_dendrite_branch_stats=True)

            baylor_cell_type_info["baylor_e_i"] = baylor_e_i

            #5) ----- Deciding on cell type to use for axon
            # Prefer the Allen label when configured and available;
            # otherwise fall back to the Baylor classifier output
            if inh_exc_class_to_use_for_axon == "Allen" and allen_cell_type_info[
                    "allen_e_i"] is not None:
                e_i_class = allen_cell_type_info["allen_e_i"]
                cell_type_used = "allen"
            else:
                e_i_class = baylor_e_i
                cell_type_used = "baylor"

            if verbose:
                print(
                    f"e_i_class = {e_i_class} with cell_type_used = {cell_type_used}"
                )

            #3) ------ Axon Classification (and getting the axon features)------------------
            # Synapses/spines were already added above, hence
            # add_synapses_and_head_neck_shaft_spines=False here
            o_neuron, filtering_info, axon_angles_dict = au.complete_axon_processing(
                neuron_obj,
                cell_type=e_i_class,
                add_synapses_and_head_neck_shaft_spines=False,
                validation=validation,
                plot_initial_axon=False,
                plot_axon_on_dendrite=False,
                return_filtering_info=True,
                return_axon_angle_info=True,
                verbose=verbose)
            # Keep only the rounded area/length filtering stats
            filtering_info = {
                k: np.round(v, 2)
                for k, v in filtering_info.items()
                if "area" in k or "length" in k
            }

            axon_features = au.axon_features_from_neuron_obj(o_neuron)

            # ------- Saving off the neuron object ----------------
            save_time = time.time()
            ret_file_path = o_neuron.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                file_name=
                f"{o_neuron.segment_id}_{split_index}_split_axon_v{au.axon_version}_pipe_v6_e_i_val_3",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            # save_compressed_neuron compresses to a .pbz2 archive
            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")

            # ---- 8/10 Addition: optionally save the axon skeleton ----
            if save_axon_skeleton:
                axon_skeleton_file = du.save_proofread_skeleton(
                    o_neuron.axon_skeleton,
                    segment_id=o_neuron.segment_id,
                    split_index=split_index,
                    file_name_ending=f"decomp_cell_type_axon_skeleton_e_i_val_3"
                )
            else:
                axon_skeleton_file = None

            #---- 8/29 Addition: Will compute the soma center of the mesh in nm ---
            soma_x_nm, soma_y_nm, soma_z_nm = neuron_obj["S0"].mesh_center
            if verbose:
                print(
                    f"soma_x_nm, soma_y_nm, soma_z_nm = {soma_x_nm, soma_y_nm, soma_z_nm}"
                )

            n_dict = dict(
                key,
                split_index=split_index,
                axon_version=au.axon_version,
                decomposition=ret_file_path_str,
                run_time=np.round(time.time() - st, 2),
                manual_e_i=manual_e_i,
                cell_type=e_i_class,
                cell_type_used=cell_type_used,
                axon_skeleton=str(axon_skeleton_file),
                soma_x_nm=soma_x_nm,
                soma_y_nm=soma_y_nm,
                soma_z_nm=soma_z_nm,
                n_syn_pre=neuron_obj.n_synapses_pre,
                n_syn_post=neuron_obj.n_synapses_post,
            )

            soma_stats_dict = ctu.soma_stats_for_cell_type(neuron_obj)

            # Merge every computed stats dictionary into the row
            dicts_for_update = [
                baylor_cell_type_info, allen_cell_type_info, nucleus_info,
                filtering_info, axon_features, axon_angles_dict,
                soma_stats_dict
            ]

            for d in dicts_for_update:
                n_dict.update(d)

            self.insert1(n_dict,
                         skip_duplicates=True,
                         allow_direct_insert=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
Example #4
0
    def make(self, key):
        """
        Pseudocode:
        1) Pull Down All of the Neurons
        2) Get the nucleus centers and the original mesh

        """

        whole_pass_time = time.time()

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]
        split_index = key["split_index"]

        if verbose:
            print(
                f"\n\n------- Working on Neuron {segment_id}_{split_index} -----"
            )

        cell_type, nucleus_id = (minnie.DecompositionCellTypeV7()
                                 & key).fetch1(f"{cell_type_used}_e_i",
                                               "nucleus_id")

        if verbose:
            print(f"---- Working on Neuron {segment_id}:{split_index}")
            print(f"nucleus_id = {nucleus_id},cell_type = {cell_type}")

        neuron_obj = du.decomposition_with_spine_recalculation(
            segment_id, split_index=split_index)

        if plot_data:
            nviz.plot_axon(neuron_obj)

        #2) Running the neuron proofreading
        neuron_obj_proof, filtering_info = pru.proofread_neuron_full(
            neuron_obj,

            # arguments for processing down in DecompositionCellTypeV7
            cell_type=cell_type,
            add_valid_synapses=False,
            validation=validation,
            add_spines=False,
            perform_axon_processing=False,
            return_after_axon_processing=False,

            #arguments for processing after DecompositionCellTypeV7 to Proofread Neuron
            plot_head_neck_shaft_synapses=plot_data,
            plot_soma_synapses=plot_data,
            proofread_verbose=proofread_verbose,
            verbose_outline=verbose,
            plot_limb_branch_filter_with_disconnect_effect=plot_data,
            plot_final_filtered_neuron=False,
            plot_synapses_after_proofread=False,
            plot_compartments=plot_data,
            plot_valid_synapses=plot_data,
            plot_error_synapses=plot_data,
            verbose=verbose,
            debug_time=verbose,
        )

        #3) Collect and Write Data to Synapse Table
        dj_keys_valid = syu.synapses_to_dj_keys(neuron_obj_proof,
                                                valid_synapses=True,
                                                verbose=verbose,
                                                nucleus_id=nucleus_id,
                                                split_index=split_index)

        dj_keys_error = syu.synapses_to_dj_keys(neuron_obj_proof,
                                                valid_synapses=False,
                                                verbose=verbose,
                                                nucleus_id=nucleus_id,
                                                split_index=split_index)

        if verbose:
            print(f"n_synapses_total = {neuron_obj_proof.n_synapses_total}")

        AutoProofreadSynapse7.insert(dj_keys_valid, skip_duplicates=True)
        AutoProofreadSynapseErrors7.insert(dj_keys_error, skip_duplicates=True)

        #4) Collect and Write Neuron Stats
        # Pull the limb branches the filters decided to cancel ...
        limb_branch_to_cancel = pru.extract_from_filter_info(
            filtering_info, name_to_extract="limb_branch_dict_to_cancel")

        # ... and the red/blue split suggestions for human review.
        red_blue_suggestions = pru.extract_from_filter_info(
            filtering_info, name_to_extract="red_blue_suggestions")

        # Keep only the numeric area/length summary entries from
        # filtering_info, rounded to 2 decimals for storage.
        filter_key = {
            k: np.round(v, 2)
            for k, v in filtering_info.items() if "area" in k or "length" in k
        }
        # Persist the proofread mesh/skeleton artifacts; returns a dict of
        # file paths that gets folded into the stats row below.
        # NOTE(review): uses key["split_index"] here while the loop variable
        # split_index is used everywhere else in this section — confirm the
        # two always agree for this key.
        mesh_skeleton_file_paths = pru.save_off_meshes_skeletons(
            neuron_obj_proof,
            verbose=False,
            split_index=key["split_index"],
            file_name_ending=f"proofv{proof_version}")

        # Assemble the stats row: primary key fields from `key` plus the
        # proofreading outputs gathered above.
        neuron_stats_dict = dict(
            key,
            proof_version=proof_version,
            limb_branch_to_cancel=limb_branch_to_cancel,
            red_blue_suggestions=red_blue_suggestions,
        )

        neuron_stats_dict.update(mesh_skeleton_file_paths)
        neuron_stats_dict.update(filter_key)

        #--------- 12/8: Adding the neuron_graph object that will be retrieved later ----
        # Optionally serialize an attributed graph representation of the
        # proofread neuron and store its path alongside the stats.
        if save_G_with_attrs:
            G = ctcu.G_with_attrs_from_neuron_obj(neuron_obj_proof,
                                                  plot_G=False)
            G_path = ctcu.save_G_with_attrs(G,
                                            segment_id=segment_id,
                                            split_index=split_index)
            if verbose:
                print(f"Saved G_path = {G_path}")
            neuron_stats_dict["neuron_graph"] = G_path

        # Single-row insert; skip_duplicates keeps re-runs idempotent.
        AutoProofreadStats7.insert1(neuron_stats_dict, skip_duplicates=True)

        # 5) Collecting Stats for the AutoProofreadNeurons6 table
        # Each sub-step below builds one dict of column values; they are all
        # accumulated in dicts_to_update and merged into the final row in
        # step 5h.

        #a) Neuron basics
        if verbose:
            print(f"\n--5a) Neuron basics")
        dicts_to_update = []

        multiplicity = du.multiplicity_from_segment_id(segment_id)
        # NOTE(review): soma centers are taken from the PRE-proofread
        # neuron_obj (not neuron_obj_proof) — presumably the soma is
        # untouched by proofreading, but confirm this is intentional.
        soma_x, soma_y, soma_z = nru.soma_centers(neuron_obj,
                                                  soma_name="S0",
                                                  voxel_adjustment=True)

        basic_cell_dict = dict(multiplicity=multiplicity,
                               soma_x=soma_x,
                               soma_y=soma_y,
                               soma_z=soma_z,
                               cell_type=cell_type,
                               cell_type_used=cell_type_used)
        dicts_to_update.append(basic_cell_dict)

        #b) Neuron Overall Statistics
        if verbose:
            print(f"\n--5b) Neuron Overall Statistics")
        # Rebinds neuron_stats_dict (the earlier stats row was already
        # inserted in step 4, so the rebinding is safe).
        # axon_length/axon_area are excluded here; axon features come from
        # au.axon_features_from_neuron_obj in section 5f below.
        neuron_stats_dict = neuron_obj_proof.neuron_stats(
            stats_to_ignore=["axon_length", "axon_area"])
        dicts_to_update.append(neuron_stats_dict)

        #c) compartment Stats
        if verbose:
            print(f"\n--5c) compartment Stats")
        # compartment_labels=None — presumably computes stats for all
        # compartments; TODO confirm against apu.compartments_stats.
        comp_stats = apu.compartments_stats(neuron_obj_proof,
                                            compartment_labels=None,
                                            verbose=False)
        dicts_to_update.append(comp_stats)

        #d) Synapse Stats
        if verbose:
            print(f"\n--5d) Synapse Stats")
        syn_stats = syu.complete_n_synapses_analysis(neuron_obj_proof)
        dicts_to_update.append(syn_stats)

        #e) Cell Typing Info after proofreading
        # Re-run the excitatory/inhibitory classifier on the cleaned neuron
        # and suffix every key with "_after_proof" to distinguish these
        # columns from the pre-proofreading cell-type columns.
        if verbose:
            print(f"\n--5e) Cell Typing Info after proofreading")
        baylor_e_i, baylor_cell_type_info = ctu.e_i_classification_from_neuron_obj(
            neuron_obj_proof, verbose=False, return_cell_type_info=True)

        baylor_cell_type_info["baylor_e_i"] = baylor_e_i
        baylor_cell_type_info = {
            f"{k}_after_proof": v
            for k, v in baylor_cell_type_info.items()
        }
        dicts_to_update.append(baylor_cell_type_info)

        #c
        # NOTE(review): section label above and the print below are stale —
        # this is the compartment skeleton-feature step (logically "5f"),
        # not a repeat of 5e.

        if verbose:
            print(f"\n--5e) Cell Typing Info after proofreading")
        # Skeleton-derived features per compartment; "length"/"n_branches"
        # are excluded — presumably already covered by the stats dicts
        # collected above (TODO confirm to avoid duplicate columns).
        axon_feature_dict = au.axon_features_from_neuron_obj(
            neuron_obj_proof, features_to_exclude=("length", "n_branches"))

        apical_feature_dict = apu.compartment_features_from_skeleton_and_soma_center(
            neuron_obj_proof,
            compartment_label="apical_total",
            name_prefix="apical",
            features_to_exclude=("length", "n_branches"),
        )

        basal_feature_dict = apu.compartment_features_from_skeleton_and_soma_center(
            neuron_obj_proof,
            compartment_label="basal",
            name_prefix="basal",
            features_to_exclude=("length", "n_branches"),
        )

        dendrite_feature_dict = apu.compartment_features_from_skeleton_and_soma_center(
            neuron_obj_proof,
            compartment_label="dendrite",
            name_prefix="dendrite",
            features_to_exclude=("length", "n_branches"),
        )

        dicts_to_update += [
            axon_feature_dict, apical_feature_dict, basal_feature_dict,
            dendrite_feature_dict
        ]

        #g) Repeating old features from DecompositionCellType table
        # Carry forward selected columns from the upstream
        # DecompositionCellTypeV7 row for this key so the final table is
        # self-contained.

        if verbose:
            print(
                f"\n--5g) Repeating old features from DecompositionCellTypeV7 table"
            )

        # Column names to copy verbatim from DecompositionCellTypeV7.
        decomp_cell_type_features = [
            "nucleus_id",
            "nuclei_distance",
            "n_nuclei_in_radius",
            "n_nuclei_in_bbox",
            "soma_x_nm",
            "soma_y_nm",
            "soma_z_nm",
            "baylor_e_i",
            "allen_e_i",
            "cell_type_used",
            "cell_type_for_axon",
            "axon_angle_max",
            "axon_angle_min",
            "n_axon_angles",
            "allen_e_i_n_nuc",
            "allen_cell_type",
            "allen_cell_type_n_nuc",
            "allen_cell_type_e_i",
        ]

        # fetch(..., as_dict=True)[0]: the restriction by `key` is expected
        # to match exactly one row; [0] takes it (raises IndexError if the
        # upstream row is missing).
        decomp_dict = (minnie.DecompositionCellTypeV7() & key).fetch(
            *decomp_cell_type_features, as_dict=True)[0]
        # Rename cell_type columns so they don't clash with the
        # post-proofreading cell_type columns gathered in step 5a/5e.
        decomp_dict["cell_type_used_for_axon"] = decomp_dict["cell_type_used"]
        decomp_dict["cell_type_for_axon"] = decomp_dict["cell_type"]
        del decomp_dict["cell_type_used"]
        del decomp_dict["cell_type"]

        dicts_to_update.append(decomp_dict)

        if plot_data:
            nviz.plot_compartments(neuron_obj_proof)

        #h) Writing the Data
        # Merge every collected dict into one row keyed by `key` and insert
        # it; later dicts overwrite earlier ones on duplicate column names.

        if verbose:
            print(f"\n--5h) Writing the Data")

        neuron_proof_dict = dict(key,
                                 proof_version=proof_version,
                                 run_time=np.round(
                                     time.time() - whole_pass_time, 2))

        for d_u in dicts_to_update:
            neuron_proof_dict.update(d_u)

        AutoProofreadNeurons7.insert1(neuron_proof_dict, skip_duplicates=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )