def fetch_proofread_mesh(segment_id,
                         split_index=0,
                         original_mesh=None,
                         return_error_mesh=False,
                         plot_mesh=False,
                         ):
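    """
    Purpose: to fetch the proofread ("neuron") compartment mesh
    for a segment, optionally along with the error mesh
    (the original mesh minus the proofread mesh)

    Ex (usage sketch; segment_id/original_mesh below are placeholders):
    original_mesh = du.fetch_segment_id_mesh(segment_id)

    proof_mesh, error_mesh = pv.fetch_proofread_mesh(segment_id,
                                 original_mesh=original_mesh,
                                 return_error_mesh=True,
                                 plot_mesh=True)
    """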
    if original_mesh is None:
        original_mesh = du.fetch_segment_id_mesh(segment_id)
        
    proof_mesh = pv.fetch_compartment_mesh("neuron",
                                          segment_id,
                                          split_index,
                                          original_mesh=original_mesh,
                                          )
    if return_error_mesh:
        error_mesh = tu.subtract_mesh(original_mesh,proof_mesh)
        
        if plot_mesh:
            nviz.plot_objects(proof_mesh,
                             meshes=[error_mesh],
                             meshes_colors=["red"])
        
        return proof_mesh,error_mesh
    
    if plot_mesh:
        nviz.plot_objects(proof_mesh,
                         #meshes=[error_mesh],
                         #meshes_colors=["red"]
                         )
    return proof_mesh
def fetch_compartments_meshes(compartments,
                              segment_id,
                              split_index=0,
                              original_mesh=None,
                              verbose=False,
                              plot_mesh=False,
                              mesh_alpha=1):
    """
    Purpose: to retrieve the requested
    compartment meshes that were saved off

    Ex: 
    import apical_utils as apu
    pv.fetch_compartments_meshes(apu.default_compartment_order,
                                segment_id,
                                split_index,
                                 original_mesh = original_mesh,
                                plot_mesh=True)
    """
    compartments = nu.convert_to_array_like(compartments)
    
    if original_mesh is None:
        original_mesh = du.fetch_segment_id_mesh(segment_id)
        
    comp_meshes = [pv.fetch_compartment_mesh(c,segment_id,split_index,
                                            original_mesh=original_mesh) for c in compartments]
    
    if plot_mesh:
        comp_meshes_colors = apu.colors_from_compartments(compartments)
        nviz.plot_objects(original_mesh,
                         meshes = comp_meshes,
                         meshes_colors=comp_meshes_colors,
                         mesh_alpha=mesh_alpha)
        
    return comp_meshes
    def make(self, key):
        global_start = time.time()
        segment_id = key["segment_id"]
        verbose = True

        print(f"\n\n----- Working on {segment_id}-------")
        whole_pass_time = time.time()

        #new method: check whether the information already exists in the error table and, if not, recompute it
        error_table = (minnie.DecompositionErrorLabels()
                       & dict(segment_id=segment_id))
        if len(error_table) > 0:
            print("using quick fetch")
            current_mesh = du.fetch_segment_id_mesh(segment_id, minnie=minnie)
            returned_error_faces = error_table.fetch1("face_idx_for_error")

        else:
            neuron_obj = (minnie.Decomposition() & key).fetch1("decomposition")

            returned_error_faces = ed.error_faces_by_axons(
                neuron_obj, verbose=True, visualize_errors_at_end=False)
            current_mesh = neuron_obj.mesh

        #------- Doing the synapse Exclusion Writing ---------- #
        data_to_write_new, n_synapses, n_errored_synapses = ed.get_error_synapse_inserts(
            current_mesh,
            segment_id,
            returned_error_faces,
            minnie=minnie,
            return_synapse_stats=True,
            verbose=True)

        if len(data_to_write_new) > 0:
            print("Preparing to write errored synapses")
            minnie.SynapseExclude.insert(data_to_write_new,
                                         skip_duplicates=True)

        #------- Doing the Label Writing ---------- #
        new_key = dict(key,
                       n_face_errors=len(returned_error_faces),
                       face_idx_for_error=returned_error_faces,
                       n_synapses=n_synapses,
                       n_errored_synapses=n_errored_synapses)

        self.insert1(new_key, allow_direct_insert=True, skip_duplicates=True)

        print(
            f"\n\n ------ Total time for {segment_id} = {time.time() - global_start} ------"
        )
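# Usage sketch (assumption: this `make` belongs to a DataJoint computed table in the
# `minnie` schema; the class name below is a placeholder). Computed tables are filled
# by calling `populate`, which invokes `make` once for every missing key:
#
#   DecompositionErrorLabelsTable.populate(
#       reserve_jobs=True,       # lets several workers share the key queue
#       suppress_errors=True,    # log failures instead of aborting the loop
#       display_progress=True,
#   )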
def fetch_compartment_mesh(compartment,
                           segment_id,
                           split_index=0,
                           original_mesh=None,
                           verbose=False,
                           plot_mesh=False):
    """
    Purpose: To get the mesh belonging to a certain compartment
    
    Ex: 
    original_mesh = du.fetch_segment_id_mesh(segment_id)

    comp_mesh = pv.fetch_compartment_mesh("apical_shaft",
                              segment_id,
                              split_index,
                            original_mesh=original_mesh,
                                          verbose = True,
                                          plot_mesh = True,
                             )
    """
    if compartment == "apical_total":
        compartment_faces = np.concatenate([pv.fetch_compartment_faces(compartment = c,
                                                             segment_id=segment_id,
                                                             split_index=split_index,
                                                         ) for c in apu.apical_total]).astype("int")
    else:
        compartment_faces = pv.fetch_compartment_faces(compartment = compartment,
                                                             segment_id=segment_id,
                                                             split_index=split_index,
                                                         )
    if verbose:
        print(f"# of faces = {len(compartment_faces)}")
        
    if original_mesh is None:
        original_mesh = du.fetch_segment_id_mesh(segment_id)
        
    compartment_mesh = original_mesh.submesh([compartment_faces],append=True)
    
    if not tu.is_mesh(compartment_mesh):
        compartment_mesh = tu.empty_mesh()
    
    if plot_mesh:
        print(f"Plotting {compartment}")
        nviz.plot_objects(original_mesh,
                         meshes=[compartment_mesh],
                         meshes_colors="red")
    
    return compartment_mesh
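# Note: passing compartment="apical_total" concatenates the face indices of every
# compartment listed in `apu.apical_total` into a single submesh; any other compartment
# name fetches only that compartment's faces. Usage sketch (identifiers are placeholders):
#
#   apical_mesh = pv.fetch_compartment_mesh("apical_total",
#                                           segment_id,
#                                           split_index,
#                                           original_mesh=original_mesh,
#                                           verbose=True)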
def fetch_compartment_skeleton(compartment,
                               segment_id,
                               split_index=0,
                               verbose=False,
                               plot_skeleton=False,
                               original_mesh=None,
                               ):
    """
    Purpose: To retrieve the DataJoint-stored
    skeleton for that compartment
    
    Ex: 
    comp_skeleton = pv.fetch_compartment_skeleton("apical_shaft",
                             segment_id,
                             split_index,
                            plot_skeleton = True)
    """
    segment_id,split_index = pv.segment_id_and_split_index(segment_id,split_index)
    
    if compartment == "apical_total":
        comp_skeleton = sk.stack_skeletons([(du.proofreading_stats_table() & dict(segment_id=segment_id,
                                        split_index=split_index)).fetch1(f"{c}_skeleton") for c in apu.apical_total])
    else:
        comp_skeleton = (du.proofreading_stats_table() & dict(segment_id=segment_id,
                                        split_index=split_index)).fetch1(f"{compartment}_skeleton")
    
    if len(comp_skeleton) == 0:
        comp_skeleton = np.array([]).reshape(-1,2,3)
    if verbose:
        print(f"{compartment} skeleton = {sk.calculate_skeleton_distance(comp_skeleton)}")
            
    if plot_skeleton:
        if original_mesh is None:
            original_mesh = du.fetch_segment_id_mesh(segment_id)
        nviz.plot_objects(original_mesh,
                         skeletons = [comp_skeleton])
        
    return comp_skeleton
def fetch_compartments_skeletons(
    compartments,
    segment_id,
    split_index=0,
    verbose=False,

    #plotting arguments
    plot_skeleton = False,
    original_mesh=None,):
    """
    
    Ex: 
    import apical_utils as apu
    pv.fetch_compartments_skeletons(apu.default_compartment_order,
                                segment_id,
                                split_index,
                                 original_mesh = original_mesh,
                                plot_skeleton=True)
    
    """
    
    
    compartments = nu.convert_to_array_like(compartments)
    
    comp_skeletons = [pv.fetch_compartment_skeleton(c,segment_id,split_index,
                                                   verbose=verbose) for c in compartments]
    
    if plot_skeleton:
        if original_mesh is None:
            original_mesh = du.fetch_segment_id_mesh(segment_id)
        
        nviz.plot_objects(original_mesh,
                         skeletons=comp_skeletons,
                         skeletons_colors = apu.colors_from_compartments(compartments))
        
    return comp_skeletons
Example #7
    def make(self, key):
        """
        Pseudocode for process:

        1) Get the segment id from the key
        2) Get the decimated mesh
        3) Get the somas info
        4) Run the preprocessing
        5) Calculate all starter stats
        6) Save the file in a certain location
        7) Pass stats and file location to insert
        """
        whole_pass_time = time.time()
        #1) Get the segment id from the key
        segment_id = key["segment_id"]
        description = str(key['decimation_version']) + "_25"
        print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----")
        global_start = time.time()

        #2) Get the decimated mesh
        current_neuron_mesh = du.fetch_segment_id_mesh(segment_id)

        #3) Get the somas info
        somas = du.get_soma_mesh_list(segment_id)
        soma_ver = du.get_soma_mesh_list_ver(segment_id)

        print(f"somas = {somas}")

        #3b) Get the glia and nuclei information
        glia_faces, nuclei_faces = du.get_segment_glia_nuclei_faces(
            segment_id, return_empty_list=True)

        #4) Run the preprocessing

        total_neuron_process_time = time.time()

        print(f"\n--- Beginning preprocessing of {segment_id}---")
        recovered_neuron = neuron.Neuron(
            mesh=current_neuron_mesh,
            somas=somas,
            segment_id=segment_id,
            description=description,
            suppress_preprocessing_print=False,
            suppress_output=False,
            calculate_spines=True,
            widths_to_calculate=["no_spine_median_mesh_center"],
            glia_faces=glia_faces,
            nuclei_faces=nuclei_faces,
        )

        print(
            f"\n\n\n---- Total preprocessing time = {time.time() - total_neuron_process_time}"
        )

        #5) No separate stats processing needed here anymore because it is handled inside the neuron object
        stats_dict = recovered_neuron.neuron_stats()

        #6) Save the file in a certain location
        save_time = time.time()
        ret_file_path = recovered_neuron.save_compressed_neuron(
            output_folder=str(du.get_decomposition_path()),
            return_file_path=True,
            export_mesh=False,
            suppress_output=True)

        ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"
        print(f"Save time = {time.time() - save_time}")

        #7) Pass stats and file location to insert
        new_key = dict(key,
                       ver=soma_ver,
                       process_version=process_version,
                       index=0,
                       multiplicity=1,
                       decomposition=ret_file_path_str,
                       n_vertices=len(current_neuron_mesh.vertices),
                       n_faces=len(current_neuron_mesh.faces),
                       run_time=np.round(time.time() - whole_pass_time, 4))
        new_key.update(stats_dict)

        keys_to_delete = [
            "axon_length", "axon_area", "max_soma_volume", "max_soma_n_faces"
        ]

        for k_to_delete in keys_to_delete:
            del new_key[k_to_delete]

        self.insert1(new_key, allow_direct_insert=True, skip_duplicates=True)

        print(
            f"\n\n ------ Total time for {segment_id} = {time.time() - global_start} ------"
        )
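    # Note (assumption): the `decomposition` file path inserted above appears to be what
    # downstream steps read back, e.g. the error-labeling `make` earlier in this file
    # reloads it with `(minnie.Decomposition() & key).fetch1("decomposition")`.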
    def make(self, key):
        """
        Pseudocode for process:

        1) Get the segment id from the key
        2) Get the decimated mesh
        3) Get the somas info
        4) Run the preprocessing
        5) Calculate all starter stats
        6) Save the file in a certain location
        7) Pass stats and file location to insert
        """
        whole_pass_time = time.time()
        #1) Get the segment id from the key
        segment_id = key["segment_id"]
        description = str(key['decimation_version']) + "_25"
        print(f"\n\n----- Working on {segment_id}-------")
        global_start = time.time()

        #2) Get the decimated mesh
        current_neuron_mesh = du.fetch_segment_id_mesh(segment_id,
                                                       minnie=minnie)

        #3) Get the somas info *************************** Need to change this when actually run *******************
        somas = du.get_soma_mesh_list(segment_id, minnie=minnie)
        print(f"somas = {somas}")
        #4) Run the preprocessing

        total_neuron_process_time = time.time()

        print(f"\n--- Beginning preprocessing of {segment_id}---")
        recovered_neuron = neuron.Neuron(
            mesh=current_neuron_mesh,
            somas=somas,
            segment_id=segment_id,
            description=description,
            suppress_preprocessing_print=False,
            suppress_output=False,
            calculate_spines=True,
            widths_to_calculate=["no_spine_median_mesh_center"])

        print(
            f"\n\n\n---- Total preprocessing time = {time.time() - total_neuron_process_time}"
        )

        #5) No separate stats processing needed here anymore because it is handled inside the neuron object
        stats_dict = recovered_neuron.neuron_stats()

        #6) Save the file in a certain location
        save_time = time.time()
        ret_file_path = recovered_neuron.save_compressed_neuron(
            output_folder=str(du.get_decomposition_path()),
            return_file_path=True,
            export_mesh=False,
            suppress_output=True)

        ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"
        print(f"Save time = {time.time() - save_time}")

        #7) Pass stats and file location to insert
        new_key = dict(key,
                       decomposition=ret_file_path_str,
                       n_vertices=len(current_neuron_mesh.vertices),
                       n_faces=len(current_neuron_mesh.faces),
                       run_time=np.round(time.time() - whole_pass_time, 4))
        new_key.update(stats_dict)

        self.insert1(new_key, allow_direct_insert=True, skip_duplicates=True)

        print(
            f"\n\n ------ Total time for {segment_id} = {time.time() - global_start} ------"
        )
    def make(self,key):
        """
        Pseudocode:
        1) Pull Down All of the Neurons
        2) Get the nucleus centers and the original mesh

        """
        
        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]
        
        print(f"\n\n------- AutoProofreadNeuron {segment_id}  ----------")
        
        neuron_objs,neuron_split_idxs = du.decomposition_with_spine_recalculation(segment_id)
        
        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")
        
        
        # 2)  ----- Pre-work ------

        nucleus_ids,nucleus_centers = du.segment_to_nuclei(segment_id)

        if verbose:
            print(f"Number of Corresponding Nuclei = {len(nucleus_ids)}")
            print(f"nucleus_ids = {nucleus_ids}")
            print(f"nucleus_centers = {nucleus_centers}")



        original_mesh = du.fetch_segment_id_mesh(segment_id)
        original_mesh_kdtree = KDTree(original_mesh.triangles_center)
        
        
        
        # 3) ----- Iterate through all of the Neurons and Proofread --------
        
        # lists to help save stats until write to ProofreadStats Table
        filtering_info_list = []
        synapse_stats_list = []
        total_error_synapse_ids_list = []
        
        
        for split_index,neuron_obj_pre_split in zip(neuron_split_idxs,neuron_objs):
            
            whole_pass_time = time.time()
    
            if verbose:
                print(f"\n-----Working on Neuron Split {split_index}-----")

                
            
            neuron_obj = neuron_obj_pre_split
#             if neuron_obj_pre_split.n_error_limbs > 0:
#                 if verbose:
#                     print(f"   ---> Pre-work: Splitting Neuron Limbs Because still error limbs exist--- ")
#                 neuron_objs_split = pru.split_neuron(neuron_obj_pre_split,
#                                              verbose=False)
                
#                 if len(neuron_objs_split) > 1:
#                     raise Exception(f"After splitting the neuron there were more than 1: {neuron_objs_split}")

#                 neuron_obj= neuron_objs_split[0]
#             else:
#                 neuron_obj = neuron_obj_pre_split
            
            

            # Part A: Proofreading the Neuron
            if verbose:
                print(f"\n   --> Part A: Proofreading the Neuron ----")


        #     nviz.visualize_neuron(neuron_obj,
        #                       limb_branch_dict="all")
        
        

            output_dict= pru.proofread_neuron(neuron_obj,
                                plot_limb_branch_filter_with_disconnect_effect=False,
                                plot_final_filtered_neuron=False,
                                verbose=True)

            filtered_neuron = output_dict["filtered_neuron"]
            cell_type_info = output_dict["cell_type_info"]
            filtering_info = output_dict["filtering_info"]

            
            


            # Part B: Getting Soma Centers and Matching To Nuclei
            if verbose:
                print(f"\n\n    --> Part B: Getting Soma Centers and Matching To Nuclei ----")


            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(neuron_obj,
                                     "S0",
                                      nucleus_ids,
                                      nucleus_centers,
                                     nuclei_distance_threshold = 15000,
                                      return_matching_info = True,
                                     verbose=True)

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            





            # Part C: Getting the Faces of the Original Mesh
            if verbose:
                print(f"\n\n    --> Part C: Getting the Faces of the Original Mesh ----")

            original_mesh_faces = tu.original_mesh_faces_map(original_mesh,
                                                        filtered_neuron.mesh,
                                                        exact_match=True,
                                                        original_mesh_kdtree=original_mesh_kdtree)
            
            original_mesh_faces_file = du.save_proofread_faces(original_mesh_faces,
                                                              segment_id=segment_id,
                                                              split_index=split_index)

            

        #     nviz.plot_objects(recovered_mesh)

            # Part D: Getting the Synapse Information
            if verbose:
                print(f"\n\n    --> Part D: Getting the Synapse Information ----")


            (keys_to_write,
             synapse_stats,
             total_error_synapse_ids) = pru.synapse_filtering(
                filtered_neuron,
                split_index,
                nucleus_id=winning_nucleus_id,
                segment_id=None,
                return_synapse_filter_info=True,
                return_synapse_center_data=False,
                return_error_synapse_ids=True,
                mapping_threshold=500,
                plot_synapses=False,
                verbose=True,
                original_mesh_method=True,
                original_mesh=original_mesh,
                original_mesh_kdtree=original_mesh_kdtree,
                valid_faces_on_original_mesh=original_mesh_faces,
            )

            soma_x,soma_y,soma_z = nru.soma_centers(filtered_neuron,
                                               soma_name="S0",
                                               voxel_adjustment=True)

            #7) Creating the dictionary to insert into the AutoProofreadNeuron
            new_key = dict(key,
                           split_index=split_index,
                           proof_version=proof_version,
                           multiplicity=len(neuron_objs),

                           # -------- Important Excitatory/Inhibitory Classification ------- #
                           cell_type_predicted=cell_type_info["inh_exc_class"],
                           spine_category=cell_type_info["spine_category"],
                           n_axons=cell_type_info["n_axons"],
                           n_apicals=cell_type_info["n_apicals"],

                           # ----- Soma Information ---- #
                           nucleus_id=nucleus_info["nuclei_id"],
                           nuclei_distance=np.round(nucleus_info["nuclei_distance"], 2),
                           n_nuclei_in_radius=nucleus_info["n_nuclei_in_radius"],
                           n_nuclei_in_bbox=nucleus_info["n_nuclei_in_bbox"],

                           soma_x=soma_x,
                           soma_y=soma_y,
                           soma_z=soma_z,

                           # ---------- Mesh Faces ------ #
                           mesh_faces=original_mesh_faces_file,

                           # ----- The Regular Neuron Information (computed in the stats dict below) ----- #

                           # ------ Information Used For Excitatory/Inhibitory Classification ------ #
                           axon_angle_maximum=cell_type_info["axon_angle_maximum"],
                           spine_density_classifier=cell_type_info["neuron_spine_density"],
                           n_branches_processed=cell_type_info["n_branches_processed"],
                           skeletal_length_processed=cell_type_info["skeletal_length_processed"],
                           n_branches_in_search_radius=cell_type_info["n_branches_in_search_radius"],
                           skeletal_length_in_search_radius=cell_type_info["skeletal_length_in_search_radius"],

                           run_time=np.round(time.time() - whole_pass_time, 4))

            stats_dict = filtered_neuron.neuron_stats()
            new_key.update(stats_dict)

            
            # ------ Writing the Data To the Tables ----- #
            SynapseProofread.insert(keys_to_write,skip_duplicates=True)
            
            self.insert1(new_key,skip_duplicates=True,allow_direct_insert=True)
            
            
            
            #saving following information for later processing:
            filtering_info_list.append(filtering_info)
            synapse_stats_list.append(synapse_stats)
            total_error_synapse_ids_list.append(total_error_synapse_ids)
            
            
        
        # Once have inserted all the new neurons need to compute the stats
        if verbose:
            print("Computing the overall stats")
            
        overall_syn_error_rates = pru.calculate_error_rate(total_error_synapse_ids_list,
                        synapse_stats_list,
                        verbose=True)
        
        
        # Final Part: Create the stats table entries and insert
        
        proofread_stats_entries = []
        
        stats_to_make_sure_in_proofread_stats = [
            
         'axon_on_dendrite_merges_error_area',
         'axon_on_dendrite_merges_error_length',
         'low_branch_clusters_error_area',
         'low_branch_clusters_error_length',
         'dendrite_on_axon_merges_error_area',
         'dendrite_on_axon_merges_error_length',
         'double_back_and_width_change_error_area',
         'double_back_and_width_change_error_length',
         'crossovers_error_area',
         'crossovers_error_length',
         'high_degree_coordinates_error_area',
         'high_degree_coordinates_error_length',
        ]
        
        
        for sp_idx,split_index in enumerate(neuron_split_idxs):
            synapse_stats = synapse_stats_list[sp_idx]
            filtering_info = filtering_info_list[sp_idx]
            
            curr_key = dict(key,
                            split_index=split_index,
                            proof_version=proof_version,

                            # ------------ For local valid synapses to that split_index
                            n_valid_syn_presyn_for_split=synapse_stats["n_valid_syn_presyn"],
                            n_valid_syn_postsyn_for_split=synapse_stats["n_valid_syn_postsyn"],
                            )
            
            
            for s in stats_to_make_sure_in_proofread_stats:
                if s not in filtering_info.keys():
                    curr_key[s] = None
            
            filter_key = {k:np.round(v,2) for k,v in filtering_info.items() if "area" in k or "length" in k}
            curr_key.update(filter_key)
            curr_key.update(overall_syn_error_rates)
            
            proofread_stats_entries.append(curr_key)
            
        
        ProofreadStats.insert(proofread_stats_entries,skip_duplicates=True)

            

        print(f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***")
def plot_proofread_neuron(
    segment_id,
    split_index = 0,
    cell_type = None,
    original_mesh = None,
    
    plot_proofread_skeleton = False,
    
    proofread_mesh_color = "green",
    proofread_mesh_alpha = None,
    proofread_skeleton_color = "black",

    plot_nucleus = True,
    nucleus_size = 1,
    nucleus_color = "proofread_mesh_color",#"black",


    plot_synapses = True,
    synapses_size = 0.05,
    synapse_plot_type = "spine_bouton",#"compartment"#  "valid_error" #"valid_presyn_postsyn"
    synapse_compartments = None,
    synapse_spine_bouton_labels = None,
    plot_error_synapses = False,
    valid_synapses_color = "orange",
    error_synapses_color = "aliceblue",
    synapse_queries = None,
    synapse_queries_colors = None,

    plot_error_mesh = False,
    error_mesh_color = "black",
    error_mesh_alpha = 1,


    compartments = None,
    #compartments = ["apical_total"]
    #compartments= ["axon","dendrite"]
    plot_compartment_meshes = True,
    compartment_mesh_alpha = 0.3,
    plot_compartment_skeletons = True,

    verbose = False,
    print_spine_colors = True,
    print_compartment_colors = True,
    
    #arguments for plotting more scatters
    scatters = None,
    scatter_sizes = 0.2,
    scatters_colors = "yellow",
    
    show_at_end = True,
    append_figure = False,
    ):
    
    """
    Purpose: Will plot the saved
    proofread information of a neuron

    Ex: 
    #trying on inhibitory
    segment_id,split_index = (864691134917559306,0)

    original_mesh = du.fetch_segment_id_mesh(segment_id)

    pv.plot_proofread_neuron(
        segment_id,
        split_index,
        original_mesh=original_mesh,
        plot_error_mesh=False,
        verbose = True)


    """
    if not append_figure:
        import ipyvolume as ipv
        ipv.clear()
    
    su.ignore_warnings()
    
    if type(segment_id) == str:
        segment_id,split_index = pv.segment_id_and_split_index_from_node_name(segment_id)
    
    if verbose:
        print(f"Plotting {segment_id}_{split_index} (nucleus_id={pv.nucleus_id_from_segment_id(segment_id,split_index)})")
    
    if cell_type is None:
        cell_type = pv.cell_type_from_segment_id(segment_id,split_index)
        if verbose:
            print(f"cell_type = {cell_type}")
    if synapse_compartments is None:
        synapse_compartments = apu.compartments_to_plot(cell_type)
        
    if synapse_spine_bouton_labels is None:
        synapse_spine_bouton_labels = spu.spine_bouton_labels_to_plot()
        
    if compartments is None:
        compartments = apu.compartments_to_plot(cell_type)

    meshes = []
    meshes_colors = []
    skeletons = []
    skeletons_colors = []
    meshes_alpha = []
    
    
    if scatters is not None:
        scatters_colors = nu.convert_to_array_like(scatters_colors)
        if len(scatters_colors) == 1:
            scatters_colors = scatters_colors*len(scatters)
        scatter_sizes = nu.convert_to_array_like(scatter_sizes)
        if len(scatter_sizes) == 1:
            scatter_sizes = scatter_sizes*len(scatters)
    else:
        scatters = []
        scatters_colors = []
        scatter_sizes = []

    if original_mesh is None:
        original_mesh = du.fetch_segment_id_mesh(segment_id)

    compartment_color_dict = dict(valid_mesh=proofread_mesh_color)
        
    proof_mesh,error_mesh = pv.fetch_proofread_mesh(segment_id,
                            split_index = split_index,
                            original_mesh=original_mesh,
                            return_error_mesh=True)

    if plot_proofread_skeleton:
        proof_skeleton = pv.fetch_proofread_skeleton(segment_id,
                                   split_index,
                                   plot_skeleton=False,
                                   #original_mesh=original_mesh
                                                    )
    else:
        proof_skeleton = None

    if plot_error_mesh:
        meshes.append(error_mesh)
        meshes_colors.append(error_mesh_color)
        meshes_alpha.append(error_mesh_alpha)
        compartment_color_dict["error_mesh"] = error_mesh_color
    
    


    if plot_nucleus:
        nuc_center = pv.nucleus_center_from_segment_id(segment_id,
                                         split_index)
        
        if nucleus_color == "proofread_mesh_color":
            nucleus_color = proofread_mesh_color
        
        if nuc_center is None:
            print(f"No nucleus to plot")
        else:
            scatters += [nuc_center.reshape(-1,3)]
            scatters_colors += [nucleus_color]
            scatter_sizes += [nucleus_size]


    #get the synapse groups
    if plot_synapses:
    
        synapses_objs = pv.syanpse_objs_from_segment_id(segment_id,split_index)
        
        (syn_scatters,
        syn_colors,
        syn_sizes) = syu.synapse_plot_items_by_type_or_query(
                        synapses_objs,
                        synapses_size = synapses_size,
                        synapse_plot_type = synapse_plot_type,#"compartment"#  "valid_error"
                        synapse_compartments = synapse_compartments,
                        synapse_spine_bouton_labels = synapse_spine_bouton_labels,
                        plot_error_synapses = plot_error_synapses,
                        valid_synapses_color = valid_synapses_color,
                        error_synapses_color = error_synapses_color,
                        synapse_queries = synapse_queries,
                        synapse_queries_colors = synapse_queries_colors,
        
                        verbose = verbose,
                        print_spine_colors = print_spine_colors)

        scatters += syn_scatters
        scatters_colors += syn_colors
        scatter_sizes += syn_sizes
        
   

    if compartments is not None and len(compartments) > 0:
        comp_colors = apu.colors_from_compartments(compartments)
        if plot_compartment_meshes:
            comp_meshes = pv.fetch_compartments_meshes(compartments,
                                                      segment_id,
                                                     split_index,
                                                       original_mesh = original_mesh,
                                                     )
            meshes += comp_meshes
            meshes_colors += comp_colors
            meshes_alpha += [compartment_mesh_alpha]*len(comp_meshes)

        if plot_compartment_skeletons:
            comp_sk = pv.fetch_compartments_skeletons(compartments,
                                                      segment_id,
                                                     split_index,
                                                     )

            skeletons += comp_sk
            skeletons_colors += comp_colors
            
        compartment_color_dict.update({k:v for k,v in zip(compartments,comp_colors)})

    if print_compartment_colors:
        print(f"\nCompartment Colors:")
        for k,v in compartment_color_dict.items():
            print(f"  {k}:{v}")

    
#     print(f"proof_mesh== {proof_mesh}")
#     print(f"proof_skeleton = {proof_skeleton}")
#     if len(proof_skeleton) == 0:
#         proof_skeleton = None
#     print(f"skeletons = {skeletons}")
#     print(f"meshes = {meshes}")
#     print(f"scatters = {scatters}")
    nviz.plot_objects(main_mesh = proof_mesh,
                      main_mesh_alpha=proofread_mesh_alpha,
                      main_mesh_color=proofread_mesh_color,

                      main_skeleton=proof_skeleton,
                      main_skeleton_color=proofread_skeleton_color,

                     skeletons=skeletons,
                     skeletons_colors=skeletons_colors,

                     meshes=meshes,
                     meshes_colors=meshes_colors,
                     mesh_alpha=meshes_alpha,

                     scatters=scatters,
                     scatter_size=scatter_sizes,
                     scatters_colors=scatters_colors,
                      
                     show_at_end = show_at_end,
                    append_figure = append_figure,
                     )
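# Usage sketch (hedged; `segment_id`, `original_mesh`, and the coordinate below are
# placeholders): extra scatter points can be overlaid on the stored proofread mesh via
# the `scatters` / `scatter_sizes` / `scatters_colors` arguments.
#
#   pts = np.array([[1000, 2000, 3000]])
#   pv.plot_proofread_neuron(segment_id,
#                            split_index=0,
#                            original_mesh=original_mesh,
#                            scatters=[pts.reshape(-1, 3)],
#                            scatter_sizes=0.3,
#                            scatters_colors="yellow")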