def make(self, key):
    """
    Populate one cell-type/axon entry per neuron split of a segment.

    Pseudocode:
    1) Pull Down all the Neuron Objects associated with a segment_id

    For each neuron:
    2) Run the full axon preprocessing
    3) Save off the neuron
    4) Save dict entry to list
    5) Write the new entry to the table
    """
    # 1) Pull Down All of the Neurons
    segment_id = key["segment_id"]

    if verbose:
        print(f"------- Working on Neuron {segment_id} -----")

    whole_pass_time = time.time()

    # 1) Pull Down all the Neuron Objects associated with a segment_id
    neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
        segment_id,
        ignore_DecompositionAxon=True,
        ignore_DecompositionCellType=True)

    if verbose:
        print(f"Number of Neurons found ={len(neuron_objs)}")

    # -------- getting the nuclei info to match
    # NOTE(review): `ver` is assumed to be a module-level nuclei version
    # constant (a commented-out `ver = 88` used to live here) — confirm it
    # is defined at module scope.
    nucleus_ids, nucleus_centers = du.segment_to_nuclei(segment_id,
                                                        nuclei_version=ver)

    print(f"nucleus_ids = {nucleus_ids}")
    print(f"nucleus_centers = {nucleus_centers}")

    for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):
        if verbose:
            print(f"--> Working on Split Index {split_index} -----")

        # per-split timer used for the inserted run_time
        st = time.time()

        # ------------- Does all of the processing -------------------

        # 1) ------ Getting the paired nuclei ------
        winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
            neuron_obj,
            "S0",
            nucleus_ids,
            nucleus_centers,
            nuclei_distance_threshold=15000,
            return_matching_info=True,
            verbose=True)

        if verbose:
            print(f"nucleus_info = {nucleus_info}")
            print(f"winning_nucleus_id = {winning_nucleus_id}")

        # 2) ------- Finding the Allen Cell Types -------
        allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(
            winning_nucleus_id)

        if verbose:
            print(f"allen_cell_type_info = {allen_cell_type_info}")

        # 4) -------- Running the cell classification and stats --------------
        if verbose:
            print(
                f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n"
            )

        filter_time = time.time()

        (inh_exc_class,
         spine_category,
         axon_angles,
         n_axons,
         n_apicals,
         neuron_spine_density,
         n_branches_processed,
         skeletal_length_processed,
         n_branches_in_search_radius,
         skeletal_length_in_search_radius) = clu.inhibitory_excitatory_classifier(
            neuron_obj,
            return_spine_classification=True,
            return_axon_angles=True,
            return_n_axons=True,
            return_n_apicals=True,
            return_spine_statistics=True,
            axon_limb_branch_dict_precomputed=None,
            axon_angles_precomputed=None,
            verbose=verbose)

        if verbose:
            print(
                f"Total time for classification = {time.time() - filter_time}"
            )

        # Flatten the per-limb / per-candidate angle dict into one list
        all_axon_angles = []
        for limb_idx, limb_data in axon_angles.items():
            for candidate_idx, cand_angle in limb_data.items():
                all_axon_angles.append(cand_angle)

        # BUG FIX: guard on the flattened list, not on the outer dict —
        # axon_angles can be non-empty while every per-limb dict is empty,
        # which would make np.max crash on an empty sequence.
        if len(all_axon_angles) > 0:
            axon_angle_maximum = np.max(all_axon_angles)
        else:
            axon_angle_maximum = 0

        if verbose:
            print("\n -- Cell Type Classification Results --")
            print(f"inh_exc_class={inh_exc_class}")
            print(f"spine_category={spine_category}")
            print(f"axon_angles={axon_angles}")
            print(f"n_axons={n_axons}")
            print(f"n_apicals={n_apicals}")
            print(f"neuron_spine_density={neuron_spine_density}")
            print(f"n_branches_processed={n_branches_processed}")
            print(f"skeletal_length_processed={skeletal_length_processed}")
            print(
                f"n_branches_in_search_radius={n_branches_in_search_radius}"
            )
            print(
                f"skeletal_length_in_search_radius={skeletal_length_in_search_radius}"
            )

        baylor_cell_type_info = dict(
            cell_type_predicted=inh_exc_class,
            spine_category=spine_category,
            axon_angle_maximum=axon_angle_maximum,
            n_axons=n_axons,
            n_apicals=n_apicals,
            spine_density_classifier=neuron_spine_density,
            # BUG FIX: this was copy-pasted as neuron_spine_density,
            # silently storing the spine density under the branch count.
            n_branches_processed=n_branches_processed,
            skeletal_length_processed=skeletal_length_processed,
            n_branches_in_search_radius=n_branches_in_search_radius,
            skeletal_length_in_search_radius=skeletal_length_in_search_radius,
        )

        # 5) ----- Deciding on cell type to use for axon
        e_i_class = inh_exc_class
        if inh_exc_class_to_use_for_axon == "Allen" and allen_cell_type_info[
                "e_i"] is not None:
            e_i_class = allen_cell_type_info["e_i"]

        if verbose:
            print(
                f"e_i_class = {e_i_class} with inh_exc_class_to_use_for_axon = {inh_exc_class_to_use_for_axon}"
            )

        # 6) -------- If excitatory running the axon processing --------------
        # If e_i class is excitatory:
        #   1) Filter away the axon on dendrite
        #   2) Do the higher fidelity axon processing
        #   3) Compute the axon features
        if e_i_class == "excitatory" and neuron_obj.axon_limb_name is not None:
            if verbose:
                print(
                    f"Excitatory so performing high fidelity axon and computing axon features"
                )

            # 1) Filter away the axon on dendrite
            # 2) Do the higher fidelity axon processing
            o_neuron, filtering_info = au.complete_axon_processing(
                neuron_obj,
                perform_axon_classification=False,
                return_filtering_info=True)

            # Keep only the rounded area/length filtering stats
            filtering_info = {
                k: np.round(v, 2)
                for k, v in filtering_info.items()
                if "area" in k or "length" in k
            }

            # 3) Compute the axon features
            axon_features = au.axon_features_from_neuron_obj(o_neuron)
        else:
            # Inhibitory (or no axon limb): clear any axon labels and skip
            # the axon-feature computation entirely.
            nru.clear_all_branch_labels(neuron_obj, labels_to_clear="axon")
            o_neuron = neuron_obj
            axon_features = dict()
            filtering_info = dict()

        # 3) ------ Adding the Synapses -----------
        o_neuron = syu.add_synapses_to_neuron_obj(
            o_neuron,
            validation=validation,
            verbose=True,
            original_mesh=None,
            plot_valid_error_synapses=False,
            calculate_synapse_soma_distance=False,
            add_valid_synapses=True,
            add_error_synapses=False)

        # ------- Saving off the neuron object ----------------
        save_time = time.time()
        ret_file_path = o_neuron.save_compressed_neuron(
            output_folder=str(du.get_decomposition_path()),
            file_name=
            f"{o_neuron.segment_id}_{split_index}_split_axon_v{au.axon_version}",
            return_file_path=True,
            export_mesh=False,
            suppress_output=True)

        ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

        if verbose:
            print(f"ret_file_path_str = {ret_file_path_str}")
            print(f"Save time = {time.time() - save_time}")

        n_dict = dict(
            key,
            split_index=split_index,
            axon_version=au.axon_version,
            decomposition=ret_file_path_str,
            run_time=np.round(time.time() - st, 2),
            cell_type_for_axon=e_i_class,
        )

        dicts_for_update = [
            baylor_cell_type_info, allen_cell_type_info, nucleus_info,
            filtering_info, axon_features
        ]

        for d in dicts_for_update:
            n_dict.update(d)

        self.insert1(n_dict, skip_duplicates=True, allow_direct_insert=True)

    print(
        f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
    )
def make(self, key):
    """
    Proofread every neuron split of a segment and insert the results.

    Pseudocode:
    1) Pull Down All of the Neurons
    2) Get the nucleus centers and the original mesh
    3) For each split: proofread, pair the soma to a nucleus, map the
       filtered mesh back onto the original mesh faces, filter synapses,
       and insert the AutoProofreadNeuron row
    4) Compute overall synapse error rates and insert the ProofreadStats rows
    """
    # 1) Pull Down All of the Neurons
    segment_id = key["segment_id"]

    # BUG FIX: segment-level timer for the final "Total time" print.
    # The per-split timer below is reset every iteration, so the old print
    # reported only the last split's time (and raised NameError when the
    # split list was empty).
    total_start_time = time.time()

    print(f"\n\n------- AutoProofreadNeuron {segment_id} ----------")

    neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(segment_id)

    if verbose:
        print(f"Number of Neurons found ={len(neuron_objs)}")

    # 2) ----- Pre-work ------
    nucleus_ids, nucleus_centers = du.segment_to_nuclei(segment_id)

    if verbose:
        print(f"Number of Corresponding Nuclei = {len(nucleus_ids)}")
        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")

    original_mesh = du.fetch_segment_id_mesh(segment_id)
    # KD-tree over face centers, reused for all face-mapping queries below
    original_mesh_kdtree = KDTree(original_mesh.triangles_center)

    # 3) ----- Iterate through all of the Neurons and Proofread --------

    # lists to help save stats until write to ProofreadStats Table
    filtering_info_list = []
    synapse_stats_list = []
    total_error_synapse_ids_list = []

    for split_index, neuron_obj_pre_split in zip(neuron_split_idxs, neuron_objs):
        whole_pass_time = time.time()

        if verbose:
            print(f"\n-----Working on Neuron Split {split_index}-----")

        neuron_obj = neuron_obj_pre_split

        # Part A: Proofreading the Neuron
        if verbose:
            print(f"\n --> Part A: Proofreading the Neuron ----")

        output_dict = pru.proofread_neuron(
            neuron_obj,
            plot_limb_branch_filter_with_disconnect_effect=False,
            plot_final_filtered_neuron=False,
            verbose=True)

        filtered_neuron = output_dict["filtered_neuron"]
        cell_type_info = output_dict["cell_type_info"]
        filtering_info = output_dict["filtering_info"]

        # Part B: Getting Soma Centers and Matching To Nuclei
        if verbose:
            print(f"\n\n --> Part B: Getting Soma Centers and Matching To Nuclei ----")

        winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
            neuron_obj,
            "S0",
            nucleus_ids,
            nucleus_centers,
            nuclei_distance_threshold=15000,
            return_matching_info=True,
            verbose=True)

        if verbose:
            print(f"nucleus_info = {nucleus_info}")
            print(f"winning_nucleus_id = {winning_nucleus_id}")

        # Part C: Getting the Faces of the Original Mesh
        if verbose:
            print(f"\n\n --> Part C: Getting the Faces of the Original Mesh ----")

        original_mesh_faces = tu.original_mesh_faces_map(
            original_mesh,
            filtered_neuron.mesh,
            exact_match=True,
            original_mesh_kdtree=original_mesh_kdtree)

        original_mesh_faces_file = du.save_proofread_faces(
            original_mesh_faces,
            segment_id=segment_id,
            split_index=split_index)

        # Part D: Getting the Synapse Information
        if verbose:
            print(f"\n\n --> Part D: Getting the Synapse Information ----")

        (keys_to_write,
         synapse_stats,
         total_error_synapse_ids) = pru.synapse_filtering(
            filtered_neuron,
            split_index,
            nucleus_id=winning_nucleus_id,
            segment_id=None,
            return_synapse_filter_info=True,
            return_synapse_center_data=False,
            return_error_synapse_ids=True,
            mapping_threshold=500,
            plot_synapses=False,
            verbose=True,
            original_mesh_method=True,
            original_mesh=original_mesh,
            original_mesh_kdtree=original_mesh_kdtree,
            valid_faces_on_original_mesh=original_mesh_faces,
        )

        soma_x, soma_y, soma_z = nru.soma_centers(
            filtered_neuron,
            soma_name="S0",
            voxel_adjustment=True)

        # 7) Creating the dictionary to insert into the AutoProofreadNeuron
        new_key = dict(
            key,
            split_index=split_index,
            proof_version=proof_version,
            multiplicity=len(neuron_objs),

            # -------- Important Excitatory Inhibitory Classfication ------- #
            cell_type_predicted=cell_type_info["inh_exc_class"],
            spine_category=cell_type_info["spine_category"],
            n_axons=cell_type_info["n_axons"],
            # BUG FIX: was copy-pasted as cell_type_info["n_axons"]
            n_apicals=cell_type_info["n_apicals"],

            # ----- Soma Information ----#
            nucleus_id=nucleus_info["nuclei_id"],
            nuclei_distance=np.round(nucleus_info["nuclei_distance"], 2),
            n_nuclei_in_radius=nucleus_info["n_nuclei_in_radius"],
            n_nuclei_in_bbox=nucleus_info["n_nuclei_in_bbox"],

            soma_x=soma_x,
            soma_y=soma_y,
            soma_z=soma_z,

            # ---------- Mesh Faces ------ #
            mesh_faces=original_mesh_faces_file,

            # ------ Information Used For Excitatory Inhibitory Classification --------
            axon_angle_maximum=cell_type_info["axon_angle_maximum"],
            spine_density_classifier=cell_type_info["neuron_spine_density"],
            n_branches_processed=cell_type_info["n_branches_processed"],
            skeletal_length_processed=cell_type_info["skeletal_length_processed"],
            n_branches_in_search_radius=cell_type_info["n_branches_in_search_radius"],
            skeletal_length_in_search_radius=cell_type_info["skeletal_length_in_search_radius"],

            run_time=np.round(time.time() - whole_pass_time, 4)
        )

        stats_dict = filtered_neuron.neuron_stats()
        new_key.update(stats_dict)

        # ------ Writing the Data To the Tables ----- #
        SynapseProofread.insert(keys_to_write, skip_duplicates=True)

        self.insert1(new_key, skip_duplicates=True, allow_direct_insert=True)

        # saving following information for later processing:
        filtering_info_list.append(filtering_info)
        synapse_stats_list.append(synapse_stats)
        total_error_synapse_ids_list.append(total_error_synapse_ids)

    # Once have inserted all the new neurons need to compute the stats
    if verbose:
        print("Computing the overall stats")

    overall_syn_error_rates = pru.calculate_error_rate(
        total_error_synapse_ids_list,
        synapse_stats_list,
        verbose=True)

    # Final Part: Create the stats table entries and insert
    proofread_stats_entries = []

    # Columns the ProofreadStats table expects even when the corresponding
    # filter never ran for a split (filled with None below).
    stats_to_make_sure_in_proofread_stats = [
        'axon_on_dendrite_merges_error_area',
        'axon_on_dendrite_merges_error_length',
        'low_branch_clusters_error_area',
        'low_branch_clusters_error_length',
        'dendrite_on_axon_merges_error_area',
        'dendrite_on_axon_merges_error_length',
        'double_back_and_width_change_error_area',
        'double_back_and_width_change_error_length',
        'crossovers_error_area',
        'crossovers_error_length',
        'high_degree_coordinates_error_area',
        'high_degree_coordinates_error_length',
    ]

    for sp_idx, split_index in enumerate(neuron_split_idxs):
        synapse_stats = synapse_stats_list[sp_idx]
        filtering_info = filtering_info_list[sp_idx]

        curr_key = dict(
            key,
            split_index=split_index,
            proof_version=proof_version,

            # ------------ For local valid synapses to that split_index
            n_valid_syn_presyn_for_split=synapse_stats["n_valid_syn_presyn"],
            n_valid_syn_postsyn_for_split=synapse_stats["n_valid_syn_postsyn"],
        )

        for s in stats_to_make_sure_in_proofread_stats:
            if s not in filtering_info.keys():
                curr_key[s] = None

        filter_key = {
            k: np.round(v, 2)
            for k, v in filtering_info.items()
            if "area" in k or "length" in k
        }
        curr_key.update(filter_key)
        curr_key.update(overall_syn_error_rates)

        proofread_stats_entries.append(curr_key)

    ProofreadStats.insert(proofread_stats_entries, skip_duplicates=True)

    print(f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - total_start_time} ------ ***")
def make(self, key):
    """
    Populate one validation cell-type/axon entry per neuron split of a
    segment, recording both the manual (curated) and predicted e/i labels.

    Pseudocode:
    1) Pull Down all the Neuron Objects associated with a segment_id

    For each neuron:
    2) Run the full axon preprocessing
    3) Save off the neuron
    4) Save dict entry to list
    5) Write the new entry to the table
    """
    # 1) Pull Down All of the Neurons
    segment_id = key["segment_id"]

    # Manual ground-truth e/i label looked up from the curated key sources
    if len(key_source_inh & dict(segment_id=segment_id)) > 0:
        manual_e_i = "inhibitory"
    elif len(key_source_exc & dict(segment_id=segment_id)) > 0:
        manual_e_i = "excitatory"
    else:
        raise Exception("Not in exc or inh table")

    if verbose:
        print(f"------- Working on Neuron {segment_id} -----")

    whole_pass_time = time.time()

    # 1) Pull Down all the Neuron Objects associated with a segment_id
    neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
        segment_id,
        ignore_DecompositionAxon=True,
        ignore_DecompositionCellType=True)

    if verbose:
        print(f"Number of Neurons found ={len(neuron_objs)}")

    # -------- getting the nuclei info to match
    nucleus_ids, nucleus_centers = du.segment_to_nuclei(
        segment_id,
        #nuclei_version=ver
    )

    if verbose:
        print(f"Number of Corresponding Nuclei = {len(nucleus_ids)}")
        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")

    for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):
        if verbose:
            print(f"--> Working on Split Index {split_index} -----")

        # Per-split timer: feeds the inserted run_time, so it must not be
        # reused for intermediate stage timing.
        st = time.time()

        # ------------- Does all of the processing -------------------

        # 1) ------ Getting the paired nuclei ------
        winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
            neuron_obj,
            "S0",
            nucleus_ids,
            nucleus_centers,
            nuclei_distance_threshold=15000,
            return_matching_info=True,
            verbose=True)

        if verbose:
            print(f"nucleus_info = {nucleus_info}")
            print(f"winning_nucleus_id = {winning_nucleus_id}")

        # 2) ------- Finding the Allen Cell Types -------
        allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(
            winning_nucleus_id)

        if verbose:
            print(f"allen_cell_type_info = {allen_cell_type_info}")

        # 3) ---- Doing Baylor Cell Type Classification ---------
        # 3a) --- Adding the synapses and spine labels
        if verbose:
            print(
                f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n"
            )

        # BUG FIX: use a dedicated stage timer instead of clobbering `st`;
        # previously run_time silently excluded the nucleus pairing and
        # Allen lookup above.
        stage_time = time.time()
        if verbose:
            print(f"Adding the synapses and the head_neck_shaft")

        neuron_obj = syu.add_synapses_to_neuron_obj(
            neuron_obj,
            validation=validation,
            verbose=verbose,
            original_mesh=None,
            plot_valid_error_synapses=False,
            calculate_synapse_soma_distance=False,
            add_valid_synapses=True,
            add_error_synapses=False,
        )

        neuron_obj = spu.add_head_neck_shaft_spine_objs(neuron_obj,
                                                        verbose=verbose)

        if verbose:
            print(
                f"Done adding synapses and head_neck_shaft: {time.time() - stage_time}"
            )

        # 3b) --- Running the stats for Baylor Classification
        baylor_e_i, baylor_cell_type_info = ctu.e_i_classification_from_neuron_obj(
            neuron_obj,
            verbose=True,
            return_cell_type_info=True,
            return_dendrite_branch_stats=True)

        baylor_cell_type_info["baylor_e_i"] = baylor_e_i

        # 5) ----- Deciding on cell type to use for axon
        if inh_exc_class_to_use_for_axon == "Allen" and allen_cell_type_info[
                "allen_e_i"] is not None:
            e_i_class = allen_cell_type_info["allen_e_i"]
            cell_type_used = "allen"
        else:
            e_i_class = baylor_e_i
            cell_type_used = "baylor"

        if verbose:
            print(
                f"e_i_class = {e_i_class} with cell_type_used = {cell_type_used}"
            )

        # 3) ------ Axon Classification (and getting the axon features) ------
        o_neuron, filtering_info, axon_angles_dict = au.complete_axon_processing(
            neuron_obj,
            cell_type=e_i_class,
            add_synapses_and_head_neck_shaft_spines=False,
            validation=validation,
            plot_initial_axon=False,
            plot_axon_on_dendrite=False,
            return_filtering_info=True,
            return_axon_angle_info=True,
            verbose=verbose)

        # Keep only the rounded area/length filtering stats
        filtering_info = {
            k: np.round(v, 2)
            for k, v in filtering_info.items()
            if "area" in k or "length" in k
        }

        axon_features = au.axon_features_from_neuron_obj(o_neuron)

        # ------- Saving off the neuron object ----------------
        save_time = time.time()
        ret_file_path = o_neuron.save_compressed_neuron(
            output_folder=str(du.get_decomposition_path()),
            file_name=
            f"{o_neuron.segment_id}_{split_index}_split_axon_v{au.axon_version}_pipe_v6_e_i_val_3",
            return_file_path=True,
            export_mesh=False,
            suppress_output=True)

        ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

        if verbose:
            print(f"ret_file_path_str = {ret_file_path_str}")
            print(f"Save time = {time.time() - save_time}")

        # ---- 8/10 Addition: optionally save off the axon skeleton ----------
        if save_axon_skeleton:
            axon_skeleton_file = du.save_proofread_skeleton(
                o_neuron.axon_skeleton,
                segment_id=o_neuron.segment_id,
                split_index=split_index,
                file_name_ending=f"decomp_cell_type_axon_skeleton_e_i_val_3")
        else:
            axon_skeleton_file = None

        # ---- 8/29 Addition: soma center of the mesh in nm ----
        soma_x_nm, soma_y_nm, soma_z_nm = neuron_obj["S0"].mesh_center

        if verbose:
            print(
                f"soma_x_nm, soma_y_nm, soma_z_nm = {soma_x_nm, soma_y_nm, soma_z_nm}"
            )

        n_dict = dict(
            key,
            split_index=split_index,
            axon_version=au.axon_version,
            decomposition=ret_file_path_str,
            run_time=np.round(time.time() - st, 2),
            manual_e_i=manual_e_i,
            cell_type=e_i_class,
            cell_type_used=cell_type_used,
            axon_skeleton=str(axon_skeleton_file),
            soma_x_nm=soma_x_nm,
            soma_y_nm=soma_y_nm,
            soma_z_nm=soma_z_nm,
            n_syn_pre=neuron_obj.n_synapses_pre,
            n_syn_post=neuron_obj.n_synapses_post,
        )

        soma_stats_dict = ctu.soma_stats_for_cell_type(neuron_obj)

        dicts_for_update = [
            baylor_cell_type_info, allen_cell_type_info, nucleus_info,
            filtering_info, axon_features, axon_angles_dict, soma_stats_dict
        ]

        for d in dicts_for_update:
            n_dict.update(d)

        self.insert1(n_dict, skip_duplicates=True, allow_direct_insert=True)

    print(
        f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
    )