Example #1
def save_glia_nuclei_files(glia_faces, nuclei_faces, segment_id,
                           filename_append=None):
    """
    Purpose: Will save off the glia and nuclei faces
    in the correct external storage and return the paths
    """
    if filename_append is None:
        filename = f"{segment_id}"
    else:
        filename = f"{segment_id}_{filename_append}"

    curr_faces_path = external_path_faces
    glia_path = curr_faces_path / Path(f"{filename}_glia.pbz2")
    nuclei_path = curr_faces_path / Path(f"{filename}_nuclei.pbz2")

    su.compressed_pickle(glia_faces, glia_path)
    su.compressed_pickle(nuclei_faces, nuclei_path)

    return glia_path, nuclei_path
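Note: these examples lean on su.compressed_pickle throughout but never show it. A minimal sketch of what such a helper typically looks like (pickle serialized through bz2 into a .pbz2 file), with the signature inferred from the call sites; this is an assumption, not the actual system_utils implementation:

import bz2
import pickle
from pathlib import Path

def compressed_pickle(obj, filename, folder=None, return_filepath=False):
    # Resolve the destination path and make sure it carries the .pbz2 suffix.
    path = Path(folder) / Path(filename) if folder is not None else Path(filename)
    if path.suffix != ".pbz2":
        path = Path(str(path) + ".pbz2")
    # Pickle the object straight into a bz2-compressed stream.
    with bz2.BZ2File(path, "wb") as fp:
        pickle.dump(obj, fp)
    if return_filepath:
        return path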
Example #2
def save_G_with_attrs(G, segment_id,
                      split_index=0,
                      file_append="",
                      file_path=graph_path,
                      return_filepath=True,
                      ):
    """
    To save a Graph after processing

    Ex:
    ctcu.save_G_with_attrs(G, segment_id=segment_id, split_index=split_index)
    """
    file_name = f"{segment_id}_{split_index}_neuron_graph"
    if len(file_append) > 0:
        file_name = file_name + file_append

    filepath = Path(file_path) / Path(file_name)
    f = su.compressed_pickle(G, filepath, return_filepath=True)

    if return_filepath:
        return f
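A quick round trip under the same assumptions; su.decompress_pickle is a hypothetical reading counterpart that never appears in these examples:

# Save the graph, then read it back and sanity-check the node set
# (assumes G is a networkx graph).
filepath = save_G_with_attrs(G, segment_id=segment_id, split_index=0)
G_loaded = su.decompress_pickle(filepath)  # assumed counterpart helper
assert set(G_loaded.nodes) == set(G.nodes)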
Example #3
    def make(self,key):
        """
        Purpose: To decimate a mesh by a perscribed
        decimation ratio and algorithm
        
        Pseudocode: 
        1) Get the current mesh,somas,glia and nuclie faces
        2) Get the parameters and run the neuron preprocessing
        
        """
        global_time = time.time()
        
        segment_id = key["segment_id"]
        decomposition_hash = key["decomposition_method"]
        ver = key["ver"]
        
        if verbose:
            print(f"\n\n--Working on {segment_id}: (decomposition_hash = {decomposition_hash})")
        
#         if len(self & dict(
#             ver=ver,
#             segment_id = segment_id,
#             decimation=decomposition_hash)) > 0:
#             if verbose:
#                 print(f"Already processed {segment_id} (decomposition_hash = {decomposition_hash})")
#             return 
            
        
        #1) 
        st = time.time()
        
        mesh = hdju.fetch_segment_id_mesh(segment_id)
        somas = hdju.get_soma_mesh_list_filtered(segment_id)
        print(f"somas = {somas}")

        glia_faces,nuclei_faces = hdju.get_segment_glia_nuclei_faces(segment_id)
        
        
        if plotting:
            soma_mesh_center = hdju.soma_info_center(segment_id,return_nm=True)
            # rotating the mesh
            nviz.plot_objects(hu.align_mesh_from_soma_coordinate(mesh,
                                                                 soma_center=soma_mesh_center
                                                                ))
            
        if verbose:
            print(f"Collecting Mesh Info: {time.time() - st}")
        
        
        #2) 
        
        preprocess_args = DecompositionMethod.restrict_one_part_with_hash(
            decomposition_hash).fetch1()
        
        fill_hole_size = preprocess_args["fill_hole_size"]
        
        
        current_preprocess_neuron_kwargs = {
            k:v for k,v in preprocess_args.items() if k in preprocess_neuron_kwargs.keys()} 
        
        current_spines_kwargs = {
            k:v for k,v in preprocess_args.items() if k in spines_kwargs.keys()} 
        
        print(f"current_preprocess_neuron_kwargs = \n{current_preprocess_neuron_kwargs}")
        print(f"current_spines_kwargs = \n{current_spines_kwargs}")
        
        params_to_change = [k for k in current_preprocess_neuron_kwargs if k.split("_")[-1] in ["cgal","map"]]
        if verbose:
            print(f"params_to_change = {params_to_change}")
            
        current_preprocess_neuron_kwargs["width_threshold_MAP"] = current_preprocess_neuron_kwargs["width_threshold_map"]
        current_preprocess_neuron_kwargs["size_threshold_MAP"] = current_preprocess_neuron_kwargs["size_threshold_map"]
        current_preprocess_neuron_kwargs["max_stitch_distance_CGAL"] = current_preprocess_neuron_kwargs["max_stitch_distance_cgal"]
        
        del current_preprocess_neuron_kwargs["width_threshold_map"]
        del current_preprocess_neuron_kwargs["size_threshold_map"]
        del current_preprocess_neuron_kwargs["max_stitch_distance_cgal"]
        
        
        description = "0_25"
        st = time.time()
        
        neuron_obj = neuron.Neuron(
            mesh=mesh,
            somas=somas,
            segment_id=segment_id,
            description=description,
            suppress_preprocessing_print=False,
            suppress_output=False,
            calculate_spines=True,
            widths_to_calculate=["no_spine_median_mesh_center"],
            glia_faces=glia_faces,
            nuclei_faces=nuclei_faces,
            decomposition_type="meshafterparty",
            preprocess_neuron_kwargs=current_preprocess_neuron_kwargs,
            # use the hash-restricted spine parameters computed above
            spines_kwargs=current_spines_kwargs,
            fill_hole_size=fill_hole_size,
        )

        
        
        neuron_obj_comb = nru.combined_somas_neuron_obj(neuron_obj,
                                                inplace = False,
                                                verbose = verbose,
                                                plot_soma_limb_network = plotting)
        if verbose:
            print(f"\n\n\n---- Total preprocessing time = {time.time() - st}")
        
        if plotting:
            nviz.visualize_neuron(neuron_obj_comb,
                     limb_branch_dict="all",
                     mesh_whole_neuron=True)
            
        #3) 
        st = time.time()
        stats_dict = neuron_obj_comb.neuron_stats(
            stats_to_ignore=[
                "n_boutons",
                "axon_length",
                "axon_area",
                "max_soma_volume",
                "max_soma_n_faces",
            ],
            include_skeletal_stats=True,
            include_centroids=True,
            voxel_adjustment_vector=voxel_adjustment_vector,
        )
        
        if verbose:
            print(f"-- Generating Stats: {time.time() - st}")
        
        #4) Save the neuron object in a certain location
        file_name = f"{neuron_obj_comb.segment_id}_{decomposition_hash}"
        file_name_decomp = f"{file_name}_{dataset}_decomposition"
        output_folder=str(target_dir_decomp)
        
        
        st = time.time()
        ret_file_path = neuron_obj_comb.save_compressed_neuron(
            output_folder=output_folder,
            file_name= file_name_decomp,
            return_file_path=True,
            export_mesh=False,
            suppress_output=True,
            )

        ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"
        
   
        if verbose:
            print(f"-- Neuron Object Save time: {time.time() - st}")
        
        #5) Outputting skeleton object, computing stats and saving
        st = time.time()
        
        sk_stats = nst.skeleton_stats_from_neuron_obj(
            neuron_obj_comb,
            include_centroids=True,
            voxel_adjustment_vector=voxel_adjustment_vector,
            verbose=True)
        
        skeleton = neuron_obj_comb.skeleton
        file_name_decomp_sk = f"{file_name}_{dataset}_decomposition_sk"
        ret_sk_filepath = su.compressed_pickle(
            skeleton,
            filename = file_name_decomp_sk,
            folder=str(target_dir_sk),
            return_filepath=True)
        
        if verbose:
            print(f"ret_sk_filepath = {ret_sk_filepath}")
            print(f"-- Skeleton Generation and Save time: {time.time() - st}")
        
        
        
        # 6) make the insertions
        run_time = np.round(time.time() - global_time, 4)

        # -- decomp table --
        decomp_dict = dict(key.copy(),
                           process_version=process_version,
                           index=0,
                           multiplicity=1,
                           decomposition=ret_file_path_str,
                           run_time=run_time)
        decomp_dict.update(stats_dict)

        self.insert1(decomp_dict,
                     allow_direct_insert=True,
                     ignore_extra_fields=True,
                     skip_duplicates=True)
        self.Object.insert1(decomp_dict,
                            allow_direct_insert=True,
                            ignore_extra_fields=True,
                            skip_duplicates=True)

        # -- sk table --
        sk_dict = dict(key.copy(),
                       process_version=process_version,
                       index=0,
                       multiplicity=1,
                       skeleton=ret_sk_filepath,
                       run_time=run_time)
        sk_dict.update(sk_stats)

        SkeletonDecomposition.insert1(sk_dict,
                                      allow_direct_insert=True,
                                      ignore_extra_fields=True,
                                      skip_duplicates=True)
        SkeletonDecomposition.Object.insert1(sk_dict,
                                             allow_direct_insert=True,
                                             ignore_extra_fields=True,
                                             skip_duplicates=True)
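The lower-case-to-upper-case parameter renaming in the middle of the make above could be factored into a small helper; a sketch (rename_keys is invented here, not part of the codebase):

def rename_keys(d, mapping):
    """Return a copy of d with keys renamed per mapping ({old: new})."""
    out = dict(d)
    for old, new in mapping.items():
        if old in out:
            out[new] = out.pop(old)
    return out

# e.g.
# current_preprocess_neuron_kwargs = rename_keys(
#     current_preprocess_neuron_kwargs,
#     {"width_threshold_map": "width_threshold_MAP",
#      "size_threshold_map": "size_threshold_MAP",
#      "max_stitch_distance_cgal": "max_stitch_distance_CGAL"})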
Example #4
    def make(self, key):
        """
        Purpose: To extract the axon/dendrite of a split neuron
        
        1) Pull down the neuron
        2) Get the neuron ids and nucleus centers corresponding to
        that segent id

        Iterate through all the neuron objects
        a0) Recompute the width
        a) Get the winning nucleus_id
        b) Get the cell type info from the central database
        c) Add synapses to neuron obj
        d) Add spine categories to neuorn object
        e) classifiy E/I cell type according to Baylor rules
        f) Pick the cell type to use
        g) Perfrom complete aon processing
        h) Get aon Features
        i) Save neurong object
        j) Save Axon/Dendrite before proofreading
        k) Write to dj table

        """
        global_time = time.time()

        segment_id = key["segment_id"]
        decomposition_cell_type_hash = key["decomposition_cell_type_method"]
        decomposition_split_method = hdju.decomposition_split_method_hash_from_segment_id(
            segment_id, verbose=True)

        if verbose:
            print(
                f"\n\n--Working on {segment_id}: (decomposition_cell_type_hash = "
                f"{decomposition_cell_type_hash}, decomposition_split_method = {decomposition_split_method})"
            )

        #0) Visualizing the neuron
        if plotting:
            print(f"Visualizing the intial neuron")
            hdju.plot_mesh_with_somas(
                segment_id=segment_id,
                #split_index=0,
                with_skeleton=True,
                align_from_soma_center=True)

        # ---1) Pulling down the neuron---
        st = time.time()

        n_objs, sp_indexes = hdju.neuron_objs_from_decomposition_stage(
            segment_id, verbose=True, return_one=False)

        if verbose:
            print(f"---1) Pulling down the neuron---: {time.time() - st}")
            st = time.time()

        # ---2) Get the nucleus ids and nucleus centers for that segment id---
        nucleus_ids, nucleus_centers = hdju.nuclei_from_segment_id(
            segment_id, return_centers=True, return_nm=True)

        if verbose:
            print(f"Number of Corresponding Nuclei = {len(nucleus_ids)}")
            print(f"nucleus_ids = {nucleus_ids}")
            print(f"nucleus_centers = {nucleus_centers}")

        curr_idx = 0
        neuron_obj_pre_filt = n_objs[curr_idx]
        split_index = sp_indexes[curr_idx]

        if plot_initial_neuron:
            neuron_obj_rot = hu.align_neuron_obj(neuron_obj_pre_filt)
            nviz.visualize_neuron(neuron_obj_rot, limb_branch_dict="all")

        if verbose:
            print(f"--> Working on Split Index {split_index} -----")

        if verbose:
            print(
                f"---2) Get the nucleus ids and nucleus centers--- {time.time() - st}"
            )
            st = time.time()

        # -- a0) Prep work: Recompute the Widths --
        if filter_low_branch_cluster_dendrite:
            neuron_obj, filtering_info_low_branch = pru.apply_proofreading_filters_to_neuron(
                input_neuron=neuron_obj_pre_filt,
                filter_list=[pru.low_branch_length_clusters_dendrite_filter],
                plot_limb_branch_filter_with_disconnect_effect=False,
                plot_limb_branch_filter_away=plot_limb_branch_filter_away_low_branch,
                plot_final_neuron=False,
                return_error_info=True,
                verbose=False,
                verbose_outline=verbose)
        else:
            neuron_obj = neuron_obj_pre_filt
            filtering_info_low_branch = {}

        neuron_obj = wu.neuron_width_calculation_standard(neuron_obj,
                                                          verbose=True)

        if verbose:
            print(f"a0) Prep work: Recompute the Widths: {time.time() - st}")
            st = time.time()

        # --- a) Get the winning nucleus_id and nucleus info
        winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
            neuron_obj,
            "S0",
            nucleus_ids,
            nucleus_centers,
            nuclei_distance_threshold=15000,
            return_matching_info=True,
            verbose=True)
        if verbose:
            print(f"nucleus_info = {nucleus_info}")
            print(f"winning_nucleus_id = {winning_nucleus_id}")

        if winning_nucleus_id is None:
            if verbose:
                print("No winning nucleus found, so assigning the only nucleus id")
            winning_nucleus_id = nucleus_ids[0]

        if verbose:
            print(
                f"--- a) Get the winning nucleus_id and nucleus info: {time.time() - st}"
            )
            st = time.time()

        # ---b) Get the cell type info from database
        database_cell_type_info = hdju.nuclei_classification_info_from_nucleus_id(
            winning_nucleus_id)
        database_e_i_class = database_cell_type_info[
            f"{data_type}_e_i_cell_type"]

        if verbose:
            print(f"database_cell_type_info = {database_cell_type_info}")
            print(f"database_e_i_class = {database_e_i_class}")

        if verbose:
            print(
                f"---b) Get the cell type info from database: {time.time() - st}"
            )
            st = time.time()

        # ---c/d) Add synapses and spine categories
        import synapse_utils as syu
        neuron_obj = syu.add_synapses_to_neuron_obj(
            neuron_obj,
            validation=False,
            verbose=verbose,
            original_mesh=None,
            plot_valid_error_synapses=False,
            calculate_synapse_soma_distance=False,
            add_valid_synapses=True,
            add_error_synapses=False,
        )
        neuron_obj = spu.add_head_neck_shaft_spine_objs(neuron_obj,
                                                        verbose=verbose)
        if plot_synapses:
            syu.plot_synapses(neuron_obj)

        if plot_spines:
            spu.plot_spines_head_neck(neuron_obj)

        if verbose:
            print(
                f"---c/d) Add synapses and spine categories: {time.time() - st}"
            )
            st = time.time()

        # --- e) Classify E/I cell type according to Baylor rules
        baylor_e_i, baylor_cell_type_info = ctu.e_i_classification_from_neuron_obj(
            neuron_obj,
            plot_on_model_map=False,
            plot_spines_and_sk_filter_for_syn=plot_spines_and_sk_filter_for_syn,
            plot_spines_and_sk_filter_for_spine=plot_spines_and_sk_filter_for_spine,
            verbose=True,
            return_cell_type_info=True)

        baylor_cell_type_info["baylor_e_i"] = baylor_e_i

        if verbose:
            print(f"baylor_cell_type_info = \n{baylor_cell_type_info}")

        if verbose:
            print(
                f"---e) classifiy E/I cell type according to Baylor rules: {time.time() - st}"
            )
            st = time.time()

        #--- f) Pick the cell type to use

        if (inh_exc_class_to_use_for_axon == "h01"
                and database_e_i_class in ["excitatory", "inhibitory"]):
            e_i_class = database_e_i_class
            if verbose:
                print(f"Using h01 e/i cell type")

            cell_type_used = "h01"
        else:
            if verbose:
                print(f"Using baylor e/i cell type")
            e_i_class = baylor_e_i
            cell_type_used = "baylor"

        if verbose:
            print(
                f"e_i_class = {e_i_class} with cell_type_used = {cell_type_used}"
            )

        if verbose:
            print(f"---f) Pick the cell type to use: {time.time() - st}")
            st = time.time()

        # --- g) Perform complete axon processing

        if plot_aligned_neuron:
            print(f"plot_aligned_neuron")
            neuron_obj_rot = hu.align_neuron_obj(neuron_obj)
            nviz.visualize_neuron(neuron_obj_rot, limb_branch_dict="all")

        o_neuron_unalign, filtering_info, axon_angles_dict = au.complete_axon_processing(
            neuron_obj,
            cell_type=e_i_class,
            add_synapses_and_head_neck_shaft_spines=False,
            validation=False,
            plot_initial_axon=plot_initial_axon,
            plot_axon_on_dendrite=plot_axon_on_dendrite,
            return_filtering_info=True,
            return_axon_angle_info=True,
            plot_high_fidelity_axon=plot_high_fidelity_axon,
            plot_boutons_web=plot_boutons_web,
            add_synapses_after_high_fidelity_axon=True,
            verbose=verbose)

        #o_neuron_unalign = hu.unalign_neuron_obj(o_neuron)

        #         if verbose:
        #             print(f"Readding Synapses to the high fidelity axon after all processing donw")
        #         o_neuron_unalign = syu.add_synapses_to_neuron_obj(o_neuron_unalign,
        #                 validation = False,
        #                 verbose  = verbose,
        #                 original_mesh = None,
        #                 plot_valid_error_synapses = False,
        #                 calculate_synapse_soma_distance = False,
        #                 add_valid_synapses = True,
        #                   add_error_synapses=False,
        #                 limb_branch_dict_to_add_synapses=o_neuron_unalign.axon_limb_branch_dict)

        if verbose:
            print(
                f"After add_synapses_after_high_fidelity_axon: # of neuron_obj.synapses_somas = {len(o_neuron_unalign.synapses_somas)}"
            )

        if plot_unaligned_synapses:
            syu.plot_synapses(o_neuron_unalign, total_synapses=True)

        if plot_unaligned_axon:
            nviz.plot_axon(o_neuron_unalign)

        if verbose:
            print(f"---g) Perfrom complete aon processing: {time.time() - st}")
            st = time.time()

        # --- h) Get the axon and dendrite stats ----
        dendrite_stats = nst.skeleton_stats_dendrite(o_neuron_unalign,
                                                     include_centroids=False)
        axon_stats = nst.skeleton_stats_axon(o_neuron_unalign,
                                             include_centroids=False)
        stats_dict = o_neuron_unalign.neuron_stats(
            stats_to_ignore=[
                "n_not_processed_soma_containing_meshes", "n_error_limbs",
                "n_same_soma_multi_touching_limbs",
                "n_multi_soma_touching_limbs", "n_somas", "spine_density"
            ],
            include_skeletal_stats=False,
            include_centroids=True,
            voxel_adjustment_vector=voxel_adjustment_vector,
        )

        if verbose:
            print(
                f"--- h) Get the axon and dendrite stats: {time.time() - st}")
            st = time.time()

        #---- i) Calculating the synapse info ------
        syn_dict = syu.n_synapses_analysis_axon_dendrite(o_neuron_unalign,
                                                         verbose=True)

        # --- j) saving neuron and skeleton ----

        #4) Save the neuron object in a certain location
        file_name = f"{o_neuron_unalign.segment_id}_{split_index}_{decomposition_cell_type_hash}"
        file_name_decomp = f"{file_name}_{dataset}_cell_type_decomp"
        output_folder = str(target_dir_decomp)

        ret_file_path = o_neuron_unalign.save_compressed_neuron(
            output_folder=output_folder,
            file_name=file_name_decomp,
            return_file_path=True,
            export_mesh=False,
            suppress_output=True,
        )

        ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

        axon_skeleton = o_neuron_unalign.axon_skeleton
        file_name_decomp_sk_axon = f"{file_name_decomp}_axon_sk"
        ret_sk_filepath_ax = su.compressed_pickle(
            axon_skeleton,
            filename=file_name_decomp_sk_axon,
            folder=str(target_dir_sk),
            return_filepath=True)

        dendrite_skeleton = o_neuron_unalign.dendrite_skeleton
        file_name_decomp_sk_dendr = f"{file_name_decomp}_dendr_sk"
        ret_sk_filepath_dendr = su.compressed_pickle(
            dendrite_skeleton,
            filename=file_name_decomp_sk_dendr,
            folder=str(target_dir_sk),
            return_filepath=True)

        if verbose:
            print(f"neuron ret_file_path_str = {ret_file_path_str}")
            print(f"ret_sk_filepath_ax = {ret_sk_filepath_ax}")
            print(f"ret_sk_filepath_dendr = {ret_sk_filepath_dendr}")

        if verbose:
            print(f"--- i) saving neuron and skeleton ----")
            st = time.time()

        # Everything computed above (nucleus_info, database_cell_type_info,
        # baylor_e_i / baylor_cell_type_info, e_i_class, cell_type_used,
        # filtering_info, axon_angles_dict, the dendrite/axon/neuron stats,
        # and the saved file paths) is merged into the insert row below.

        # k) make the insertions
        run_time = np.round(time.time() - global_time, 4)
        # -- decomp table --
        n_dict = dict(
            key.copy(),
            decomposition_split_method=decomposition_split_method,
            multiplicity=1,
            split_index=split_index,
            decomposition=str(ret_file_path_str),
            axon_skeleton=str(ret_sk_filepath_ax),
            dendrite_skeleton=str(ret_sk_filepath_dendr),

            #--- cell types
            h01_e_i_cell_type=database_e_i_class,
            cell_type=e_i_class,
            cell_type_used=cell_type_used,

            #----- synapses ---
            n_syn_pre=neuron_obj.n_synapses_pre,
            n_syn_post=neuron_obj.n_synapses_post,
            run_time=run_time,

            # statistics for the split
        )

        dicts_for_update = [
            nucleus_info, database_cell_type_info, filtering_info,
            axon_angles_dict, dendrite_stats, axon_stats, stats_dict,
            baylor_cell_type_info, filtering_info_low_branch, syn_dict
        ]

        for d in dicts_for_update:
            n_dict.update(d)

        print(f"n_dict = {n_dict}")

        for curr_obj in [self, SkeletonAxonDendrite]:

            curr_obj.insert1(n_dict,
                             allow_direct_insert=True,
                             ignore_extra_fields=True,
                             skip_duplicates=True)
            curr_obj.Object.insert1(n_dict,
                                    allow_direct_insert=True,
                                    ignore_extra_fields=True,
                                    skip_duplicates=True)
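Both make methods build their insert rows the same way: dict(key.copy(), **extras) to copy the key and add scalar columns, then update() to merge the computed stats dicts. The idiom in isolation, with toy values:

key = {"segment_id": 1, "ver": 2}                          # toy key
row = dict(key.copy(), multiplicity=1, run_time=12.3456)   # copy + extend
for extra in [{"n_syn_pre": 120}, {"n_syn_post": 87}]:
    row.update(extra)                                      # later keys win on collision
# row now carries the key columns plus every computed attribute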
Example #5
def export_model(model, path):
    # Persist a trained model with the same compressed-pickle convention
    # used throughout these examples.
    su.compressed_pickle(model, path)
if __name__ == "__main__":
    mesh_file = "./mesh.off"
    sk_filename = "terminal_sk_75"

    import sys
    sys.path.append("/meshAfterParty/")

    import time
    import skeleton_utils as sk
    import system_utils as su

    st_time = time.time()
    terminal_sk = sk.skeleton_cgal(
        mesh_path=mesh_file,
        quality_speed_tradeoff=0.2,
        medially_centered_speed_tradeoff=0.2,
        area_variation_factor=0.0001,
        max_iterations=500,
        min_edge_length=75,
    )
    print(f"\n\n Total time for skeletonization = {time.time() - st_time}")
    su.compressed_pickle(terminal_sk, sk_filename)
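Reading the pickled skeleton back, continuing inside the __main__ block, needs only the standard library, assuming compressed_pickle wrote <sk_filename>.pbz2 as sketched after Example #1:

    import bz2
    import pickle

    # Decompress and unpickle the skeleton that was just saved.
    with bz2.BZ2File(sk_filename + ".pbz2", "rb") as fp:
        terminal_sk_loaded = pickle.load(fp)
    print(f"Loaded skeleton of type {type(terminal_sk_loaded)}")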
Example #7
    def make(self, key):
        """
        Purpose: To decimate a mesh by a prescribed
        decimation ratio and algorithm
        
        Pseudocode: 
        1) Fetch neuron object
        2) Get the parameters for the mesh split
        3) Calculate the split results
        4) Apply the split results to the neuron
        5) Save the neuron and write to the table (including the skeleton table)
        """
        global_time = time.time()

        segment_id = key["segment_id"]
        decomposition_split_hash = key["decomposition_split_method"]

        ver = key["ver"]

        if verbose:
            print(
                f"\n\n--Working on {segment_id}: (decomposition_split_hash = "
                f"{decomposition_split_hash})")

        #1) Fetch neuron object
        st = time.time()

        neuron_obj = hdju.neuron_obj_from_table(
            segment_id=segment_id,
            table=h01auto.Decomposition.Object(),
            verbose=True,
            return_one=True,
        )

        if verbose:
            print(f"Downloading Neuron Object: {time.time() - st}")

        if plotting:
            nviz.plot_soma_limb_concept_network(neuron_obj)
            for limb_idx in nru.error_limbs(neuron_obj):
                print(f"Error Limb: {limb_idx}")
                nviz.visualize_neuron_specific_limb(neuron_obj, limb_idx)

        #2) Get the parameters for the mesh split

        split_args = DecompositionSplitMethod.restrict_one_part_with_hash(
            decomposition_split_hash).fetch1()

        min_skeletal_length_limb = copy.copy(
            split_args["min_skeletal_length_limb"])
        del split_args["min_skeletal_length_limb"]

        #3) Calculate the split results
        split_results = pru.multi_soma_split_suggestions(
            neuron_obj,
            plot_suggestions=plotting,
            verbose=verbose,
            #**split_args
        )

        n_paths_cut = pru.get_n_paths_cut(split_results)

        if verbose:
            print(f"n_paths_cut = {n_paths_cut}")

        # 4) Apply the split results to the neuron
        (neuron_list, neuron_list_errored_limbs_area,
         neuron_list_errored_limbs_skeletal_length,
         neuron_list_n_multi_soma_errors,
         neuron_list_n_same_soma_errors) = pru.split_neuron(
             neuron_obj,
             limb_results=split_results,
             verbose=verbose,
             return_error_info=True,
             #min_skeletal_length_limb=min_skeletal_length_limb,
         )

        #5) Save the neuron and write to the table (including the skeleton table)

        if len(neuron_list) > 1:
            raise Exception("More than one neuron after splitting")

        neuron_idx = 0

        neuron_obj_comb = neuron_list[neuron_idx]
        error_imbs_cancelled_area = neuron_list_errored_limbs_area[neuron_idx]
        error_imbs_cancelled_skeletal_length = neuron_list_errored_limbs_skeletal_length[
            neuron_idx]
        n_multi_soma_limbs_cancelled = neuron_list_n_multi_soma_errors[
            neuron_idx]
        n_same_soma_limbs_cancelled = neuron_list_n_same_soma_errors[
            neuron_idx]

        if plotting:
            nviz.visualize_neuron_limbs(neuron_obj_comb)

        st = time.time()
        stats_dict = neuron_obj_comb.neuron_stats(
            stats_to_ignore=[
                "n_boutons",
                "axon_length",
                "axon_area",
                "max_soma_volume",
                "max_soma_n_faces",
            ],
            include_skeletal_stats=True,
            include_centroids=True,
            voxel_adjustment_vector=voxel_adjustment_vector,
        )

        if verbose:
            print(f"-- Generating Stats: {time.time() - st}")

        #4) Save the neuron object in a certain location
        #file_name = f"{neuron_obj_comb.segment_id}_{neuron_idx}"
        file_name = f"{neuron_obj_comb.segment_id}_{neuron_idx}_{decomposition_split_hash}"
        file_name_decomp = f"{file_name}_{dataset}_decomposition"
        output_folder = str(target_dir_decomp)

        st = time.time()
        ret_file_path = neuron_obj_comb.save_compressed_neuron(
            output_folder=output_folder,
            file_name=file_name_decomp,
            return_file_path=True,
            export_mesh=False,
            suppress_output=True,
        )

        ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

        if verbose:
            print(f"-- Neuron Object Save time: {time.time() - st}")

        #5) Outputting skeleton object, computing stats and saving
        st = time.time()

        sk_stats = nst.skeleton_stats_from_neuron_obj(
            neuron_obj_comb,
            include_centroids=True,
            voxel_adjustment_vector=voxel_adjustment_vector,
            verbose=True)

        skeleton = neuron_obj_comb.skeleton
        file_name_decomp_sk = f"{file_name}_{dataset}_decomposition_sk"
        ret_sk_filepath = su.compressed_pickle(skeleton,
                                               filename=file_name_decomp_sk,
                                               folder=str(target_dir_sk),
                                               return_filepath=True)

        if verbose:
            print(f"ret_sk_filepath = {ret_sk_filepath}")

        if verbose:
            print(f"-- Skeleton Generation and Save time: {time.time() - st}")

        # 6) Calculating the split statistics:
        split_success = pru.split_success(neuron_obj_comb)
        if verbose:
            print(f"Calculating split statistics dict")
            print(f"split_success = {split_success}")

        split_stats_dict = dict(
            n_splits=n_paths_cut,
            split_success=split_success,
            n_error_limbs_cancelled=len(error_imbs_cancelled_area),
            n_multi_soma_limbs_cancelled=n_multi_soma_limbs_cancelled,
            n_same_soma_limbs_cancelled=n_same_soma_limbs_cancelled,
            error_imbs_cancelled_area=np.round(
                np.sum(error_imbs_cancelled_area), 4),
            error_imbs_cancelled_skeletal_length=np.round(
                np.sum(error_imbs_cancelled_skeletal_length) / 1000, 4),
            split_results=split_results,
        )

        # 7) make the insertions
        run_time = np.round(time.time() - global_time, 4)
        # -- decomp table --
        decomp_dict = dict(
            key.copy(),
            multiplicity=1,
            split_index=neuron_idx,
            decomposition=ret_file_path_str,
            run_time=run_time,

            # statistics for the split
        )

        decomp_dict.update(stats_dict)
        decomp_dict.update(split_stats_dict)

        self.insert1(decomp_dict,
                     allow_direct_insert=True,
                     ignore_extra_fields=True,
                     skip_duplicates=True)
        self.Object.insert1(decomp_dict,
                            allow_direct_insert=True,
                            ignore_extra_fields=True,
                            skip_duplicates=True)

        #-- sk table
        sk_dict = dict(key.copy(),
                       multiplicity=1,
                       split_index=neuron_idx,
                       skeleton=ret_sk_filepath,
                       run_time=run_time)
        sk_dict.update(sk_stats)

        SkeletonDecompositionSplit.insert1(sk_dict,
                                           allow_direct_insert=True,
                                           ignore_extra_fields=True,
                                           skip_duplicates=True)
        SkeletonDecompositionSplit.Object.insert1(sk_dict,
                                                  allow_direct_insert=True,
                                                  ignore_extra_fields=True,
                                                  skip_duplicates=True)
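These make(self, key) methods follow the DataJoint computed-table pattern. For context, a minimal, hypothetical table skeleton they could live in (the schema name, upstream table, and attribute names are all invented for illustration):

import datajoint as dj

schema = dj.schema("example_auto_proofreading")  # hypothetical schema

@schema
class DecompositionSplit(dj.Computed):
    definition = """
    # one split decomposition per segment and method
    -> Decomposition                         # hypothetical upstream table
    decomposition_split_method : varchar(32)
    ---
    split_index   : int
    multiplicity  : int
    decomposition : varchar(1000)            # path to the saved .pbz2 neuron
    run_time      : float                    # seconds
    """

    class Object(dj.Part):
        definition = """
        -> master
        ---
        decomposition : varchar(1000)
        """

    def make(self, key):
        # populate logic as in the examples above
        ...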