def make(self,key):
        """
        Pseudocode:
        1) Pull Down All of the Neurons
        2) Get the nucleus centers and the original mesh

        """
        
        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]
        
        print(f"\n\n------- AutoProofreadNeuron {segment_id}  ----------")
        
        neuron_objs,neuron_split_idxs = du.decomposition_with_spine_recalculation(segment_id)
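        # neuron_objs: one decomposed neuron object per split of the segment;
        # neuron_split_idxs: the corresponding split indices (assumed return order
        # of du.decomposition_with_spine_recalculation)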
        
        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")
        
        
        # 2)  ----- Pre-work ------

        nucleus_ids,nucleus_centers = du.segment_to_nuclei(segment_id)

        if verbose:
            print(f"Number of Corresponding Nuclei = {len(nucleus_ids)}")
            print(f"nucleus_ids = {nucleus_ids}")
            print(f"nucleus_centers = {nucleus_centers}")



        original_mesh = du.fetch_segment_id_mesh(segment_id)
        original_mesh_kdtree = KDTree(original_mesh.triangles_center)
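        # KD-tree over the original mesh's triangle centers, built once here and
        # reused for the face mapping in Part C and the synapse filtering in Part D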
        
        
        
        # 3) ----- Iterate through all of the Neurons and Proofread --------
        
        # Lists to accumulate per-split stats until the write to the ProofreadStats table
        filtering_info_list = []
        synapse_stats_list = []
        total_error_synapse_ids_list = []
        
        
        segment_start_time = time.time()  # single timer covering all splits of this segment

        for split_index,neuron_obj_pre_split in zip(neuron_split_idxs,neuron_objs):
            
            whole_pass_time = time.time()
    
            if verbose:
                print(f"\n-----Working on Neuron Split {split_index}-----")

                
            
            neuron_obj = neuron_obj_pre_split
#             if neuron_obj_pre_split.n_error_limbs > 0:
#                 if verbose:
#                     print(f"   ---> Pre-work: Splitting Neuron Limbs Because still error limbs exist--- ")
#                 neuron_objs_split = pru.split_neuron(neuron_obj_pre_split,
#                                              verbose=False)
                
#                 if len(neuron_objs_split) > 1:
#                     raise Exception(f"After splitting the neuron there were more than 1: {neuron_objs_split}")

#                 neuron_obj= neuron_objs_split[0]
#             else:
#                 neuron_obj = neuron_obj_pre_split
            
            

            # Part A: Proofreading the Neuron
            if verbose:
                print(f"\n   --> Part A: Proofreading the Neuron ----")


        #     nviz.visualize_neuron(neuron_obj,
        #                       limb_branch_dict="all")
        
        

            output_dict= pru.proofread_neuron(neuron_obj,
                                plot_limb_branch_filter_with_disconnect_effect=False,
                                plot_final_filtered_neuron=False,
                                verbose=True)

            filtered_neuron = output_dict["filtered_neuron"]
            cell_type_info = output_dict["cell_type_info"]
            filtering_info = output_dict["filtering_info"]

            
            


            # Part B: Getting Soma Centers and Matching To Nuclei
            if verbose:
                print(f"\n\n    --> Part B: Getting Soma Centers and Matching To Nuclei ----")


            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(neuron_obj,
                                     "S0",
                                      nucleus_ids,
                                      nucleus_centers,
                                     nuclei_distance_threshold = 15000,
                                      return_matching_info = True,
                                     verbose=True)
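            # winning_nucleus_id: the nucleus matched to soma S0 (no match is possible if
            # nothing falls within nuclei_distance_threshold, assumed to be in nm);
            # nucleus_info holds the matching statistics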

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            





            # Part C: Getting the Faces of the Original Mesh
            if verbose:
                print(f"\n\n    --> Part C: Getting the Faces of the Original Mesh ----")

            original_mesh_faces = tu.original_mesh_faces_map(original_mesh,
                                                        filtered_neuron.mesh,
                                                        exact_match=True,
                                                        original_mesh_kdtree=original_mesh_kdtree)
            
            original_mesh_faces_file = du.save_proofread_faces(original_mesh_faces,
                                                              segment_id=segment_id,
                                                              split_index=split_index)
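            # The saved face indices reference the original segment mesh, so the proofread
            # submesh can later be recovered from the original mesh without a separate mesh file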

            

        #     nviz.plot_objects(recovered_mesh)






            # Part D: Getting the Synapse Information
            if verbose:
                print(f"\n\n    --> Part D: Getting the Synapse Information ----")


            (keys_to_write,
             synapse_stats,
             total_error_synapse_ids) = pru.synapse_filtering(filtered_neuron,
                            split_index,
                            nucleus_id=winning_nucleus_id,
                            segment_id=None,
                            return_synapse_filter_info = True,
                            return_synapse_center_data = False,
                            return_error_synapse_ids = True,
                            mapping_threshold = 500,
                              plot_synapses=False,
                            verbose = True,
                            original_mesh_method = True,
                            original_mesh = original_mesh,
                            original_mesh_kdtree = original_mesh_kdtree,
                            valid_faces_on_original_mesh=original_mesh_faces, 
                                                          
                            )
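            # keys_to_write: the per-synapse rows inserted into SynapseProofread below;
            # synapse_stats / total_error_synapse_ids are accumulated across splits for the
            # ProofreadStats entries and the overall error-rate calculation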


            



            soma_x,soma_y,soma_z = nru.soma_centers(filtered_neuron,
                                               soma_name="S0",
                                               voxel_adjustment=True)
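            # Soma center of the filtered neuron; voxel_adjustment=True is assumed to
            # return the coordinates in voxel space rather than nm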

        
        
        
            
            # Part E: Creating the dictionary to insert into AutoProofreadNeuron
            new_key = dict(key,
                           split_index = split_index,
                           proof_version = proof_version,
                           
                           multiplicity = len(neuron_objs),
                           
                           # -------- Important Excitatory/Inhibitory Classification ------- #
                        cell_type_predicted = cell_type_info["inh_exc_class"],
                        spine_category=cell_type_info["spine_category"],

                        n_axons=cell_type_info["n_axons"],
                        n_apicals=cell_type_info["n_apicals"],
                           
                           
                        
    
                        # ----- Soma Information ----#
                        nucleus_id         = nucleus_info["nuclei_id"],
                        nuclei_distance      = np.round(nucleus_info["nuclei_distance"],2),
                        n_nuclei_in_radius   = nucleus_info["n_nuclei_in_radius"],
                        n_nuclei_in_bbox     = nucleus_info["n_nuclei_in_bbox"],

                        soma_x           = soma_x,
                        soma_y           =soma_y,
                        soma_z           =soma_z,

                        # ---------- Mesh Faces ------ #
                        mesh_faces = original_mesh_faces_file,

                           
                        # ------------- The Regular Neuron Information (will be computed in the stats dict) ----------------- #
                        
                        
                        
                           # ------ Information Used For Excitatory Inhibitory Classification -------- 
                        axon_angle_maximum=cell_type_info["axon_angle_maximum"],
                        spine_density_classifier=cell_type_info["neuron_spine_density"],
                        n_branches_processed=cell_type_info["n_branches_processed"],
                        skeletal_length_processed=cell_type_info["skeletal_length_processed"],
                        n_branches_in_search_radius=cell_type_info["n_branches_in_search_radius"],
                        skeletal_length_in_search_radius=cell_type_info["skeletal_length_in_search_radius"],

                           
                        
                           
                           run_time=np.round(time.time() - whole_pass_time,4)
                          )
            
            
            
            
            
            
            
            stats_dict = filtered_neuron.neuron_stats()
            new_key.update(stats_dict)

            
            # ------ Writing the Data To the Tables ----- #
            SynapseProofread.insert(keys_to_write,skip_duplicates=True)
            
            self.insert1(new_key,skip_duplicates=True,allow_direct_insert=True)
            
            
            
            # Saving the following information for the ProofreadStats processing after the loop
            filtering_info_list.append(filtering_info)
            synapse_stats_list.append(synapse_stats)
            total_error_synapse_ids_list.append(total_error_synapse_ids)
            
            
        
        # Once all the new neurons have been inserted, compute the overall stats
        if verbose:
            print("Computing the overall stats")
            
        overall_syn_error_rates = pru.calculate_error_rate(total_error_synapse_ids_list,
                        synapse_stats_list,
                        verbose=True)
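        # overall_syn_error_rates is computed across all splits of the segment and is
        # copied into every per-split ProofreadStats entry below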
        
        
        # Final Part: Create the stats table entries and insert
        
        proofread_stats_entries = []
        
        stats_to_make_sure_in_proofread_stats = [
            
         'axon_on_dendrite_merges_error_area',
         'axon_on_dendrite_merges_error_length',
         'low_branch_clusters_error_area',
         'low_branch_clusters_error_length',
         'dendrite_on_axon_merges_error_area',
         'dendrite_on_axon_merges_error_length',
         'double_back_and_width_change_error_area',
         'double_back_and_width_change_error_length',
         'crossovers_error_area',
         'crossovers_error_length',
         'high_degree_coordinates_error_area',
         'high_degree_coordinates_error_length',
        ]
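        # Filters that did not run leave no entry in filtering_info, so any of the
        # expected stats missing below are explicitly filled with None before the insert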
        
        
        for sp_idx,split_index in enumerate(neuron_split_idxs):
            synapse_stats = synapse_stats_list[sp_idx]
            filtering_info = filtering_info_list[sp_idx]
            
            curr_key = dict(key,
                           split_index = split_index,
                           proof_version = proof_version,
                           

                            # ------------ Valid synapse counts local to this split_index
                            n_valid_syn_presyn_for_split=synapse_stats["n_valid_syn_presyn"],
                            n_valid_syn_postsyn_for_split=synapse_stats["n_valid_syn_postsyn"],

                           
                           
                           )
            
            
            for s in stats_to_make_sure_in_proofread_stats:
                if s not in filtering_info.keys():
                    curr_key[s] = None
            
            filter_key = {k:np.round(v,2) for k,v in filtering_info.items() if "area" in k or "length" in k}
            curr_key.update(filter_key)
            curr_key.update(overall_syn_error_rates)
            
            proofread_stats_entries.append(curr_key)
            
        
        ProofreadStats.insert(proofread_stats_entries,skip_duplicates=True)

            

        print(f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - segment_start_time} ------ ***")
    def make(self,key):
        """
        Purpose: To decimate a mesh by a perscribed
        decimation ratio and algorithm
        
        Pseudocode: 
        1) Get the current mesh
        2) Decimationt he current mesh with the corresponding decimate method
        3) Get n_verts,n_faces and save as h5py file
        
        """
        
        segment_id = key["segment_id"]
        decimation_method =  key["decimation_method"]
        mesh_fragment_method_set = key["mesh_fragment_method_set"]
        
        ver =key["ver"]
        
        if verbose:
            print(f"\n\n--Working on {segment_id}: (decimation_method = {decimation_method})")
        
        
        
        
        param_tables = MeshFragmentMethodSet.restrict_one_part_with_hash(mesh_fragment_method_set)
        soma_kwargs = MeshFragmentMethod.restrict_one_part_with_hash(
            (param_tables & "table_type = 'Soma'").fetch1("mesh_fragment_method")).fetch1()
        glia_kwargs = MeshFragmentMethod.restrict_one_part_with_hash(
            (param_tables & "table_type = 'Glia'").fetch1("mesh_fragment_method")).fetch1()
        nuclei_kwargs = MeshFragmentMethod.restrict_one_part_with_hash(
            (param_tables & "table_type = 'Nuclei'").fetch1("mesh_fragment_method")).fetch1()
        all_kwargs = gu.merge_dicts([glia_kwargs,nuclei_kwargs,soma_kwargs])
        
        mesh_fragment_method_soma = (param_tables & dict(table_type='Soma')).fetch1("mesh_fragment_method")
        mesh_fragment_method_nuclei = (param_tables & dict(table_type='Nuclei')).fetch1("mesh_fragment_method")
        mesh_fragment_method_glia = (param_tables & dict(table_type='Glia')).fetch1("mesh_fragment_method")
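        # Each table_type ('Soma', 'Glia', 'Nuclei') has its own parameter hash: the fetched
        # kwargs are merged into all_kwargs for sm.extract_soma_center, while the method ids
        # are stored on the corresponding part-table rows below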
            
        
        st = time.time()
        #1) Get the current mesh
        new_mesh = (h01mor.Decimation.Object() & key).fetch1("mesh")
        
        if verbose:
            print(f"Fetching Decimation time: {time.time()-st}")

        
        current_mesh_verts,current_mesh_faces = new_mesh.vertices,new_mesh.faces

        segment_id = key["segment_id"]

        (total_soma_list, 
         run_time, 
         total_soma_list_sdf,
         glia_pieces,
         nuclei_pieces) = sm.extract_soma_center(
                            segment_id,
                            current_mesh_verts,
                            current_mesh_faces,
            return_glia_nuclei_pieces=True,
            verbose = True,
            **all_kwargs
        )
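        # Returns the candidate soma meshes with their sdf values, plus the glia and
        # nuclei submesh pieces (return_glia_nuclei_pieces=True) and the extraction run time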
        
        # -------- 1/9 Addition: Going to save off the glia and nuclei pieces ----------- #
        """
        Psuedocode:
        For both glia and nuclie pieces
        1) If the length of array is greater than 0 --> combine the mesh and map the indices to original mesh
        2) If not then just put None     
        """
        orig_mesh = new_mesh
        
        if len(glia_pieces)>0:
            glia_faces = tu.original_mesh_faces_map(orig_mesh,tu.combine_meshes(glia_pieces))
            n_glia_faces = len(glia_faces)
        else:
            glia_faces = None
            n_glia_faces = 0
            
        if len(nuclei_pieces)>0:
            nuclei_faces = tu.original_mesh_faces_map(orig_mesh,tu.combine_meshes(nuclei_pieces))
            n_nuclei_faces = len(nuclei_faces)
        else:
            nuclei_faces = None
            n_nuclei_faces = 0
            
        # --------- Saving the glia and nuclei face files ---------
        glia_path,nuclei_path = hdju.save_glia_nuclei_files(
            glia_faces=glia_faces,
            nuclei_faces=nuclei_faces,
            segment_id=segment_id,
            filename_append = filename_append)
        
        print(f" glia_path = {glia_path} \n nuclei_path = {nuclei_path}")
            
            
            
        
        glia_key = dict(key,
                        mesh_fragment_method = mesh_fragment_method_glia,
                        n_faces = n_glia_faces,
                        faces=glia_path)
        
        nuclei_key = dict(key,
                        mesh_fragment_method = mesh_fragment_method_nuclei,
                        n_faces = n_nuclei_faces,
                        faces=nuclei_path)
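        # Note: glia_key / nuclei_key are only inserted into the Glia/Nuclei part tables
        # further below, after the no-soma early-return case has been handled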
                        
        
        # ---------------- End of 1/9 Addition --------------------------------- #
        
        
        self.insert1(key,
            skip_duplicates=True,
            ignore_extra_fields=True,
           allow_direct_insert=True)
        
        
        
        print(f"Run time was {run_time} \n    total_soma_list = {total_soma_list}"
             f"\n    with sdf values = {total_soma_list_sdf}")
        
        # Check whether the soma list is empty (no soma was found)
        if len(total_soma_list) <= 0:
            print("There were no somas found for this mesh so just writing empty data")
            

            returned_file_path = tu.write_h5_file(
                                                vertices=np.array([]),
                                                  faces=np.array([]),
                                                  segment_id=segment_id,
                                                  filename = f'{segment_id}_soma_0.h5',
                                                    filepath=str(hdju.external_path_mesh)
                                                 )
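            # An empty placeholder h5 file is written so the mesh attribute of the
            # empty-soma row still points at a valid external file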

            
            
            insert_dict = dict(key,
                              soma_index=0,
                              centroid_x=None,
                               centroid_y=None,
                               centroid_z=None,
                               centroid_x_nm=None,
                               centroid_y_nm=None,
                               centroid_z_nm=None,
                               #distance_from_prediction=None,
                               #prediction_matching_index = None,
                               n_vertices=0,
                               n_faces=0,
                               mesh=returned_file_path,
                               multiplicity=0,
                               sdf = None,
                               volume = None,
                               surface_area = None,
                               max_side_ratio = None,
                               bbox_volume_ratio = None,
                               max_hole_length=None,
                               run_time=np.round(run_time,4),
                               mesh_fragment_method = mesh_fragment_method_soma,
                              )
            
            #raise Exception("to prevent writing because none were found")
            self.Soma.insert1(insert_dict,
                          allow_direct_insert = True,
                         ignore_extra_fields = True,
                         skip_duplicates=True)
            self.SomaObject.insert1(insert_dict,
                          allow_direct_insert = True,
                         ignore_extra_fields = True,
                         skip_duplicates=True)
            
            return
        
        # If one or more somas were found, compute the volume and side-length ratio checks
        max_side_ratio =  [np.max(sm.side_length_ratios(m)) for m in total_soma_list]
        bbox_volume_ratio =  [sm.soma_volume_ratio(m) for m in total_soma_list]
        dicts_to_insert = []


        for i,(current_soma,soma_sdf,sz_ratio,vol_ratio) in enumerate(zip(total_soma_list,total_soma_list_sdf,max_side_ratio,bbox_volume_ratio)):
            print("Trying to write off file")
            """ Currently don't need to export the meshes
            current_soma.export(f"{key['segment_id']}/{key['segment_id']}_soma_{i}.off")
            """
            auto_prediction_center_nm = np.mean(current_soma.vertices,axis=0) 
            auto_prediction_center = auto_prediction_center_nm / hu.voxel_to_nm_scaling
            auto_prediction_center = auto_prediction_center.astype("int")
            print(f"Predicted Coordinates are {auto_prediction_center}")
            max_hole_length = tu.largest_hole_length(current_soma)
            if max_hole_length is not None:
                max_hole_length = np.round(max_hole_length,3)
            
            returned_file_path = tu.write_h5_file(
                                            vertices=current_soma.vertices,
                                              faces=current_soma.faces,
                                              segment_id=segment_id,
                                              filename = f'{segment_id}_soma_{i+1}.h5',
                                                filepath=str(hdju.external_path_mesh)
                                             )


            divisor=1000000
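            # Assuming the mesh units are nm: volume is reported in um^3 (/1e9) and
            # surface area in um^2 (/divisor = 1e6)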
            insert_dict = dict(key,
                              soma_index=i+1,
                              centroid_x=auto_prediction_center[0],
                               centroid_y=auto_prediction_center[1],
                               centroid_z=auto_prediction_center[2],
                               centroid_x_nm=auto_prediction_center_nm[0],
                               centroid_y_nm=auto_prediction_center_nm[1],
                               centroid_z_nm=auto_prediction_center_nm[2],
                               n_vertices = len(current_soma.vertices),
                               n_faces = len(current_soma.faces),
                               mesh=returned_file_path,
                               multiplicity=len(total_soma_list),
                               sdf = np.round(soma_sdf,3),
                               volume = np.round(current_soma.convex_hull.volume/1000000000,3),
                               surface_area = np.round(current_soma.area/divisor,3),
                               max_side_ratio = np.round(sz_ratio,3),
                               bbox_volume_ratio = np.round(vol_ratio,3),
                               max_hole_length = max_hole_length,
                               run_time=np.round(run_time,4),
                               mesh_fragment_method = mesh_fragment_method_soma,
                              )



            dicts_to_insert.append(insert_dict)
            
            
        
        self.Glia.insert1(glia_key,
                          allow_direct_insert = True,
                         ignore_extra_fields = True,
                         skip_duplicates=True)
        self.GliaObject.insert1(glia_key,
                          allow_direct_insert = True,
                         ignore_extra_fields = True,
                         skip_duplicates=True)
        
        self.Nuclei.insert1(nuclei_key,
                          allow_direct_insert = True,
                         ignore_extra_fields = True,
                         skip_duplicates=True)
        self.NucleiObject.insert1(nuclei_key,
                          allow_direct_insert = True,
                         ignore_extra_fields = True,
                         skip_duplicates=True)
        
        self.Soma.insert(dicts_to_insert,
                         allow_direct_insert = True,
                         ignore_extra_fields = True,
                         skip_duplicates=True)
        self.SomaObject.insert(dicts_to_insert,
                         allow_direct_insert = True,
                         ignore_extra_fields = True,
                         skip_duplicates=True)
    def make(self,key):
        """
        Pseudocode: 
        1) Compute all of the
        2) Save the mesh as an h5 py file
        3) Store the saved path as the decomposition part of the dictionary and erase the vertices and faces
        4) Insert
        
        
        """
        
        #get the mesh data
        print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----")
        print(key)
        new_mesh = (minnie.Decimation() & key).fetch1("mesh")
        current_mesh_verts,current_mesh_faces = new_mesh.vertices,new_mesh.faces

        segment_id = key["segment_id"]

        (total_soma_list, 
         run_time, 
         total_soma_list_sdf,
         glia_pieces,
         nuclei_pieces) = sm.extract_soma_center(
                            segment_id,
                            current_mesh_verts,
                            current_mesh_faces,
            return_glia_nuclei_pieces=True,
        )
        
        # -------- 1/9 Addition: Going to save off the glia and nuclei pieces ----------- #
        """
        Psuedocode:
        For both glia and nuclie pieces
        1) If the length of array is greater than 0 --> combine the mesh and map the indices to original mesh
        2) If not then just put None     
        """
        orig_mesh = trimesh.Trimesh(vertices=current_mesh_verts,
                                   faces=current_mesh_faces)
        
        if len(glia_pieces)>0:
            glia_faces = tu.original_mesh_faces_map(orig_mesh,tu.combine_meshes(glia_pieces))
            n_glia_faces = len(glia_faces)
        else:
            glia_faces = None
            n_glia_faces = 0
            
        if len(nuclei_pieces)>0:
            nuclei_faces = tu.original_mesh_faces_map(orig_mesh,tu.combine_meshes(nuclei_pieces))
            n_nuclei_faces = len(nuclei_faces)
        else:
            nuclei_faces = None
            n_nuclei_faces = 0
            
        # --------- Saving the glia and nuclei face files ---------
        glia_path,nuclei_path = du.save_glia_nuclei_files(glia_faces=glia_faces,
                                 nuclei_faces=nuclei_faces,
                                 segment_id=segment_id)
        
        print(f" glia_path = {glia_path} \n nuclei_path = {nuclei_path}")
            
        glia_nuclei_key = dict(key,
                               ver=current_version,
                               n_glia_faces=n_glia_faces,
                               #glia_faces = glia_faces,
                               glia_faces = glia_path,
                               n_nuclei_faces = n_nuclei_faces,
                               #nuclei_faces = nuclei_faces
                               nuclei_faces = nuclei_path,
                              )
        
        NeuronGliaNuclei.insert1(glia_nuclei_key,replace=True)
        print(f"Finished saving off glia and nuclei information : {glia_nuclei_key}")
        
        # ---------------- End of 1/9 Addition --------------------------------- #
        
        
        
        print(f"Run time was {run_time} \n    total_soma_list = {total_soma_list}"
             f"\n    with sdf values = {total_soma_list_sdf}")
        
        # Check whether the soma list is empty (no soma was found)
        if len(total_soma_list) <= 0:
            print("There were no somas found for this mesh so just writing empty data")
            

            returned_file_path = tu.write_h5_file(
                                                vertices=np.array([]),
                                                  faces=np.array([]),
                                                  segment_id=segment_id,
                                                  filename = f'{segment_id}_0.h5',
                                                    filepath=str(du.get_somas_path())
                                                 )

            
            
            insert_dict = dict(key,
                              soma_index=0,
                               ver=current_version,
                              centroid_x=None,
                               centroid_y=None,
                               centroid_z=None,
                               #distance_from_prediction=None,
                               #prediction_matching_index = None,
                               n_vertices=0,
                               n_faces=0,
                               mesh=returned_file_path,
                               multiplicity=0,
                               sdf = None,
                               volume = None,
                               max_side_ratio = None,
                               bbox_volume_ratio = None,
                               max_hole_length=None,
                               run_time=run_time
                              )
            
            #raise Exception("to prevent writing because none were found")
            self.insert1(insert_dict,skip_duplicates=True)
            return
        
        # If one or more somas were found, compute the volume and side-length ratio checks
        max_side_ratio =  [np.max(sm.side_length_ratios(m)) for m in total_soma_list]
        bbox_volume_ratio =  [sm.soma_volume_ratio(m) for m in total_soma_list]
        dicts_to_insert = []


        for i,(current_soma,soma_sdf,sz_ratio,vol_ratio) in enumerate(zip(total_soma_list,total_soma_list_sdf,max_side_ratio,bbox_volume_ratio)):
            print("Trying to write off file")
            """ Currently don't need to export the meshes
            current_soma.export(f"{key['segment_id']}/{key['segment_id']}_soma_{i}.off")
            """
            auto_prediction_center = np.mean(current_soma.vertices,axis=0) / np.array([4,4,40])
            auto_prediction_center = auto_prediction_center.astype("int")
            print(f"Predicted Coordinates are {auto_prediction_center}")
            max_hole_length = tu.largest_hole_length(current_soma)
            if max_hole_length is not None:
                max_hole_length = np.round(max_hole_length,3)
            
            returned_file_path = tu.write_h5_file(
                                            vertices=current_soma.vertices,
                                              faces=current_soma.faces,
                                              segment_id=segment_id,
                                              filename = f'{segment_id}_{i}.h5',
                                                filepath=str(du.get_somas_path())
                                             )



            insert_dict = dict(key,
                              soma_index=i+1,
                               ver=current_version,
                              centroid_x=auto_prediction_center[0],
                               centroid_y=auto_prediction_center[1],
                               centroid_z=auto_prediction_center[2],
                               n_vertices = len(current_soma.vertices),
                               n_faces = len(current_soma.faces),
                               mesh=returned_file_path,
                               multiplicity=len(total_soma_list),
                               sdf = np.round(soma_sdf,3),
                               volume = current_soma.convex_hull.volume/1000000000,
                               max_side_ratio = np.round(sz_ratio,3),
                               bbox_volume_ratio = np.round(vol_ratio,3),
                               max_hole_length = max_hole_length,
                               run_time=np.round(run_time,4)
                              )



            dicts_to_insert.append(insert_dict)
        self.insert(dicts_to_insert,skip_duplicates=True)