def make(self, key):
        """
        Populate one row per detected soma for this segment.

        Workflow:
        1) Fetch the decimated mesh for this key and run soma extraction.
        2) Write each soma mesh (or an empty placeholder when no soma is
           found) to an h5 file.
        3) Insert one row per soma, storing the saved h5 path plus
           centroid and shape statistics instead of raw geometry.
        """

        # Fetch the decimated mesh for this segment
        print(f"\n\n\n---- Working on {key['segment_id']} ----")
        print(key)
        decimated_mesh = (minnie.Decimation() & key).fetch1("mesh")
        verts = decimated_mesh.vertices
        faces = decimated_mesh.faces

        segment_id = key["segment_id"]

        total_soma_list, run_time, total_soma_list_sdf = extract_soma_center(
            segment_id, verts, faces)

        print(
            f"Run time was {run_time} \n    total_soma_list = {total_soma_list}"
            f"\n    with sdf values = {total_soma_list_sdf}")

        # No soma found: save an empty placeholder mesh and a sentinel row
        # (soma_index=0, multiplicity=0, NULL statistics).
        if len(total_soma_list) <= 0:
            print(
                "There were no somas found for this mesh so just writing empty data"
            )

            empty_mesh_path = tu.write_h5_file(
                vertices=np.array([]),
                faces=np.array([]),
                segment_id=segment_id,
                filename=f'{segment_id}_0.h5',
                filepath=str(du.get_somas_path()))

            sentinel_row = {
                **key,
                "soma_index": 0,
                "centroid_x": None,
                "centroid_y": None,
                "centroid_z": None,
                "n_vertices": 0,
                "n_faces": 0,
                "mesh": empty_mesh_path,
                "multiplicity": 0,
                "sdf": None,
                "max_side_ratio": None,
                "bbox_volume_ratio": None,
                "max_hole_length": None,
                "run_time": run_time,
            }

            self.insert1(sentinel_row, skip_duplicates=True)
            return

        # One or more somas found: precompute per-soma shape statistics
        side_ratios = [np.max(side_length_ratios(m)) for m in total_soma_list]
        volume_ratios = [soma_volume_ratio(m) for m in total_soma_list]

        soma_rows = []
        per_soma = zip(total_soma_list, total_soma_list_sdf,
                       side_ratios, volume_ratios)
        for soma_idx, (soma_mesh, sdf_val, side_ratio, vol_ratio) in enumerate(per_soma):
            print("Trying to write off file")
            # (Mesh export to .off is currently not needed.)

            # Predicted centroid scaled by [4, 4, 40] -- presumably the
            # dataset's nm-per-voxel resolution; confirm against pipeline docs.
            predicted_center = (np.mean(soma_mesh.vertices, axis=0)
                                / np.array([4, 4, 40])).astype("int")
            print(f"Predicted Coordinates are {predicted_center}")
            hole_length = tu.largest_hole_length(soma_mesh)

            soma_mesh_path = tu.write_h5_file(
                vertices=soma_mesh.vertices,
                faces=soma_mesh.faces,
                segment_id=segment_id,
                filename=f'{segment_id}_{soma_idx}.h5',
                filepath=str(du.get_somas_path()))

            soma_rows.append({
                **key,
                "soma_index": soma_idx + 1,
                "centroid_x": predicted_center[0],
                "centroid_y": predicted_center[1],
                "centroid_z": predicted_center[2],
                "n_vertices": len(soma_mesh.vertices),
                "n_faces": len(soma_mesh.faces),
                "mesh": soma_mesh_path,
                "multiplicity": len(total_soma_list),
                "sdf": np.round(sdf_val, 3),
                "max_side_ratio": np.round(side_ratio, 3),
                "bbox_volume_ratio": np.round(vol_ratio, 3),
                "max_hole_length": np.round(hole_length, 3),
                "run_time": np.round(run_time, 4),
            })
        self.insert(soma_rows, skip_duplicates=True)
    def make(self,key):
        """
        Populate soma rows for one segment, additionally archiving the
        glia and nuclei submeshes (the "1/9 Addition").

        Steps:
        1) Fetch the decimated mesh for this key.
        2) Run soma extraction with return_glia_nuclei_pieces=True so the
           glia/nuclei mesh pieces come back alongside the soma list.
        3) Map the glia/nuclei pieces back to face indices of the original
           mesh, save them to disk, and record the paths in NeuronGliaNuclei.
        4) Write each soma mesh (or an empty placeholder when none are
           found) to an h5 file and insert one row per soma with centroid
           and shape statistics, storing the h5 path instead of raw geometry.

        NOTE(review): this redefines the earlier ``make`` in the same
        class, so the earlier copy is shadowed dead code -- confirm it can
        be deleted.
        """
        
        #get the mesh data
        print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----")
        print(key)
        new_mesh = (minnie.Decimation() & key).fetch1("mesh")
        current_mesh_verts,current_mesh_faces = new_mesh.vertices,new_mesh.faces

        segment_id = key["segment_id"]

        # Soma extraction; the flag also returns glia and nuclei pieces so
        # they can be mapped and saved below.
        (total_soma_list, 
         run_time, 
         total_soma_list_sdf,
         glia_pieces,
         nuclei_pieces) = sm.extract_soma_center(
                            segment_id,
                            current_mesh_verts,
                            current_mesh_faces,
            return_glia_nuclei_pieces=True,
        )
        
        # -------- 1/9 Addition: Going to save off the glia and nuclei pieces ----------- #
        """
        Psuedocode:
        For both glia and nuclie pieces
        1) If the length of array is greater than 0 --> combine the mesh and map the indices to original mesh
        2) If not then just put None     
        """
        # Rebuild the full mesh so piece faces can be mapped back onto it.
        orig_mesh = trimesh.Trimesh(vertices=current_mesh_verts,
                                   faces=current_mesh_faces)
        
        # Glia: combine all pieces and map them to face indices of orig_mesh.
        if len(glia_pieces)>0:
            glia_faces = tu.original_mesh_faces_map(orig_mesh,tu.combine_meshes(glia_pieces))
            n_glia_faces = len(glia_faces)
        else:
            glia_faces = None
            n_glia_faces = 0
            
        # Nuclei: same combine-and-map treatment as glia.
        if len(nuclei_pieces)>0:
            nuclei_faces = tu.original_mesh_faces_map(orig_mesh,tu.combine_meshes(nuclei_pieces))
            n_nuclei_faces = len(nuclei_faces)
        else:
            nuclei_faces = None
            n_nuclei_faces = 0
            
        # --------- saving the nuclei and glia saves
        glia_path,nuclei_path = du.save_glia_nuclei_files(glia_faces=glia_faces,
                                 nuclei_faces=nuclei_faces,
                                 segment_id=segment_id)
        
        print(f" glia_path = {glia_path} \n nuclei_path = {nuclei_path}")
            
        # The stored row holds the saved file paths, not the raw face arrays
        # (the commented-out lines show the older raw-array behavior).
        glia_nuclei_key = dict(key,
                               ver=current_version,
                               n_glia_faces=n_glia_faces,
                               #glia_faces = glia_faces,
                               glia_faces = glia_path,
                               n_nuclei_faces = n_nuclei_faces,
                               #nuclei_faces = nuclei_faces
                               nuclei_faces = nuclei_path,
                              )
        
        # replace=True: re-running this key overwrites any prior row.
        NeuronGliaNuclei.insert1(glia_nuclei_key,replace=True)
        print(f"Finished saving off glia and nuclei information : {glia_nuclei_key}")
        
        # ---------------- End of 1/9 Addition --------------------------------- #
        
        
        
        print(f"Run time was {run_time} \n    total_soma_list = {total_soma_list}"
             f"\n    with sdf values = {total_soma_list_sdf}")
        
        #check if soma list is empty and did not find soma
        if len(total_soma_list) <= 0:
            print("There were no somas found for this mesh so just writing empty data")
            
            # Placeholder empty mesh so downstream consumers always find a file.
            returned_file_path = tu.write_h5_file(
                                                vertices=np.array([]),
                                                  faces=np.array([]),
                                                  segment_id=segment_id,
                                                  filename = f'{segment_id}_0.h5',
                                                    filepath=str(du.get_somas_path())
                                                 )

            
            # Sentinel row: soma_index=0, multiplicity=0, NULL statistics.
            insert_dict = dict(key,
                              soma_index=0,
                               ver=current_version,
                              centroid_x=None,
                               centroid_y=None,
                               centroid_z=None,
                               #distance_from_prediction=None,
                               #prediction_matching_index = None,
                               n_vertices=0,
                               n_faces=0,
                               mesh=returned_file_path,
                               multiplicity=0,
                               sdf = None,
                               volume = None,
                               max_side_ratio = None,
                               bbox_volume_ratio = None,
                               max_hole_length=None,
                               run_time=run_time
                              )
            
            #raise Exception("to prevent writing because none were found")
            self.insert1(insert_dict,skip_duplicates=True)
            return
        
        #if there is one or more soma found, get the volume and side length checks
        max_side_ratio =  [np.max(sm.side_length_ratios(m)) for m in total_soma_list]
        bbox_volume_ratio =  [sm.soma_volume_ratio(m) for m in total_soma_list]
        dicts_to_insert = []


        for i,(current_soma,soma_sdf,sz_ratio,vol_ratio) in enumerate(zip(total_soma_list,total_soma_list_sdf,max_side_ratio,bbox_volume_ratio)):
            print("Trying to write off file")
            """ Currently don't need to export the meshes
            current_soma.export(f"{key['segment_id']}/{key['segment_id']}_soma_{i}.off")
            """
            # Centroid scaled by [4, 4, 40] -- assumes that is the dataset's
            # nm-per-voxel resolution; TODO confirm against pipeline config.
            auto_prediction_center = np.mean(current_soma.vertices,axis=0) / np.array([4,4,40])
            auto_prediction_center = auto_prediction_center.astype("int")
            print(f"Predicted Coordinates are {auto_prediction_center}")
            max_hole_length = tu.largest_hole_length(current_soma)
            
            # One h5 file per soma, indexed by position in total_soma_list.
            returned_file_path = tu.write_h5_file(
                                            vertices=current_soma.vertices,
                                              faces=current_soma.faces,
                                              segment_id=segment_id,
                                              filename = f'{segment_id}_{i}.h5',
                                                filepath=str(du.get_somas_path())
                                             )



            insert_dict = dict(key,
                              soma_index=i+1,
                               ver=current_version,
                              centroid_x=auto_prediction_center[0],
                               centroid_y=auto_prediction_center[1],
                               centroid_z=auto_prediction_center[2],
                               n_vertices = len(current_soma.vertices),
                               n_faces = len(current_soma.faces),
                               mesh=returned_file_path,
                               multiplicity=len(total_soma_list),
                               sdf = np.round(soma_sdf,3),
                               # Convex-hull volume divided by 1e9 -- presumably
                               # converting nm^3 to um^3; verify units.
                               volume = current_soma.convex_hull.volume/1000000000,
                               max_side_ratio = np.round(sz_ratio,3),
                               bbox_volume_ratio = np.round(vol_ratio,3),
                               max_hole_length = np.round(max_hole_length,3),
                               run_time=np.round(run_time,4)
                              )



            dicts_to_insert.append(insert_dict)
        self.insert(dicts_to_insert,skip_duplicates=True)