def make(self, key):
    """
    Purpose: To extract the axon/dendrite of a split neuron

    1) Pull down the neuron
    2) Get the nucleus ids and nucleus centers corresponding to that segment id
    Iterate through all the neuron objects
    a0) Recompute the width
    a) Get the winning nucleus_id
    b) Get the cell type info from the central database
    c) Add synapses to neuron obj
    d) Add spine categories to neuron object
    e) classify E/I cell type according to Baylor rules
    f) Pick the cell type to use
    g) Perform complete axon processing
    h) Get axon features
    i) Save neuron object
    j) Save Axon/Dendrite before proofreading
    k) Write to dj table
    """
    global_time = time.time()
    segment_id = key["segment_id"]
    decomposition_cell_type_hash = key["decomposition_cell_type_method"]
    decomposition_split_method = hdju.decomposition_split_method_hash_from_segment_id(
        segment_id, verbose=True)

    if verbose:
        print(
            f"\n\n--Working on {segment_id}: (decomposition_cell_type_hash = "
            f"{decomposition_cell_type_hash}, decomposition_split_method = {decomposition_split_method})"
        )

    # 0) Visualizing the neuron
    if plotting:
        print(f"Visualizing the intial neuron")
        hdju.plot_mesh_with_somas(
            segment_id=segment_id,
            #split_index=0,
            with_skeleton=True,
            align_from_soma_center=True)

    # ---1) Pulling down the neuron---
    st = time.time()
    n_objs, sp_indexes = hdju.neuron_objs_from_decomposition_stage(
        segment_id, verbose=True, return_one=False)

    if verbose:
        print(f"---1) Pulling down the neuron---: {time.time() - st}")
        st = time.time()

    # ---2) Get the nucleus ids and nucleus centers for that segment id---
    nucleus_ids, nucleus_centers = hdju.nuclei_from_segment_id(
        segment_id, return_centers=True, return_nm=True)

    if verbose:
        print(f"Number of Corresponding Nuclei = {len(nucleus_ids)}")
        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")

    # Only the first split is processed here.
    curr_idx = 0
    neuron_obj_pre_filt = n_objs[curr_idx]
    split_index = sp_indexes[curr_idx]

    if plot_initial_neuron:
        neuron_obj_rot = hu.align_neuron_obj(neuron_obj_pre_filt)
        nviz.visualize_neuron(neuron_obj_rot, limb_branch_dict="all")

    if verbose:
        print(f"--> Working on Split Index {split_index} -----")

    if verbose:
        print(
            f"---2) Get the nucleus ids and nucleus centers--- {time.time() - st}"
        )
        st = time.time()

    # -- a0) Prep work: optionally filter away low-branch dendrite clusters,
    # then recompute the widths --
    if filter_low_branch_cluster_dendrite:
        neuron_obj, filtering_info_low_branch = pru.apply_proofreading_filters_to_neuron(
            input_neuron=neuron_obj_pre_filt,
            filter_list=[pru.low_branch_length_clusters_dendrite_filter],
            plot_limb_branch_filter_with_disconnect_effect=False,
            plot_limb_branch_filter_away=plot_limb_branch_filter_away_low_branch,
            plot_final_neuron=False,
            return_error_info=True,
            verbose=False,
            verbose_outline=verbose)
    else:
        neuron_obj = neuron_obj_pre_filt
        filtering_info_low_branch = {}

    neuron_obj = wu.neuron_width_calculation_standard(neuron_obj,
                                                      verbose=True)

    if verbose:
        print(f"a0) Prep work: Recompute the Widths: {time.time() - st}")
        st = time.time()

    # --- a) Get the winning nucleus_id and nucleus info
    winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
        neuron_obj,
        "S0",
        nucleus_ids,
        nucleus_centers,
        nuclei_distance_threshold=15000,
        return_matching_info=True,
        verbose=True)

    if verbose:
        print(f"nucleus_info = {nucleus_info}")
        print(f"winning_nucleus_id = {winning_nucleus_id}")

    # Fall back to the first nucleus when no nucleus matched within threshold.
    if winning_nucleus_id is None:
        if verbose:
            print(
                f"No winning nuclues found so assigning the only nucleus id"
            )
        winning_nucleus_id = nucleus_ids[0]

    if verbose:
        print(
            f"--- a) Get the winning nucleus_id and nucleus info: {time.time() - st}"
        )
        st = time.time()

    # ---b) Get the cell type info from database
    database_cell_type_info = hdju.nuclei_classification_info_from_nucleus_id(
        winning_nucleus_id)
    database_e_i_class = database_cell_type_info[
        f"{data_type}_e_i_cell_type"]

    if verbose:
        print(f"database_cell_type_info = {database_cell_type_info}")
        print(f"database_e_i_class = {database_e_i_class}")

    if verbose:
        print(
            f"---b) Get the cell type info from database: {time.time() - st}"
        )
        st = time.time()

    # ---c/d) Add synapses and spine categories
    import synapse_utils as syu
    neuron_obj = syu.add_synapses_to_neuron_obj(
        neuron_obj,
        validation=False,
        verbose=verbose,
        original_mesh=None,
        plot_valid_error_synapses=False,
        calculate_synapse_soma_distance=False,
        add_valid_synapses=True,
        add_error_synapses=False,
    )
    neuron_obj = spu.add_head_neck_shaft_spine_objs(neuron_obj,
                                                    verbose=verbose)

    if plot_synapses:
        syu.plot_synapses(neuron_obj)
    if plot_spines:
        spu.plot_spines_head_neck(neuron_obj)

    if verbose:
        print(
            f"---c/d) Add synapses and spine categories: {time.time() - st}"
        )
        st = time.time()

    # ---e) classifiy E/I cell type according to Baylor rules
    baylor_e_i, baylor_cell_type_info = ctu.e_i_classification_from_neuron_obj(
        neuron_obj,
        plot_on_model_map=False,
        plot_spines_and_sk_filter_for_syn=plot_spines_and_sk_filter_for_syn,
        plot_spines_and_sk_filter_for_spine=plot_spines_and_sk_filter_for_spine,
        verbose=True,
        return_cell_type_info=True)
    baylor_cell_type_info["baylor_e_i"] = baylor_e_i

    if verbose:
        print(f"baylor_cell_type_info = \n{baylor_cell_type_info}")

    if verbose:
        print(
            f"---e) classifiy E/I cell type according to Baylor rules: {time.time() - st}"
        )
        st = time.time()

    # --- f) Pick the cell type to use: prefer the database (h01) class when
    # configured AND the database actually returned a definitive class.
    if (inh_exc_class_to_use_for_axon == "h01"
            and database_e_i_class in ["excitatory", "inhibitory"]):
        e_i_class = database_e_i_class
        if verbose:
            print(f"Using h01 e/i cell type")
        cell_type_used = "h01"
    else:
        if verbose:
            print(f"Using baylor e/i cell type")
        e_i_class = baylor_e_i
        cell_type_used = "baylor"

    if verbose:
        print(
            f"e_i_class = {e_i_class} with cell_type_used = {cell_type_used}"
        )

    if verbose:
        print(f"---f) Pick the cell type to use: {time.time() - st}")
        st = time.time()

    # --- g) Perfrom complete aon processing
    if plot_aligned_neuron:
        print(f"plot_aligned_neuron")
        neuron_obj_rot = hu.align_neuron_obj(neuron_obj)
        nviz.visualize_neuron(neuron_obj_rot, limb_branch_dict="all")

    o_neuron_unalign, filtering_info, axon_angles_dict = au.complete_axon_processing(
        neuron_obj,
        cell_type=e_i_class,
        add_synapses_and_head_neck_shaft_spines=False,
        validation=False,
        plot_initial_axon=plot_initial_axon,
        plot_axon_on_dendrite=plot_axon_on_dendrite,
        return_filtering_info=True,
        return_axon_angle_info=True,
        plot_high_fidelity_axon=plot_high_fidelity_axon,
        plot_boutons_web=plot_boutons_web,
        add_synapses_after_high_fidelity_axon=True,
        verbose=verbose)

    if verbose:
        print(
            f"After add_synapses_after_high_fidelity_axon: # of neuron_obj.synapses_somas = {len(o_neuron_unalign.synapses_somas)}"
        )

    if plot_unaligned_synapses:
        syu.plot_synapses(o_neuron_unalign, total_synapses=True)
    if plot_unaligned_axon:
        nviz.plot_axon(o_neuron_unalign)

    if verbose:
        print(f"---g) Perfrom complete aon processing: {time.time() - st}")
        st = time.time()

    # --- h) Get the axon and dendrite stats ----
    dendrite_stats = nst.skeleton_stats_dendrite(o_neuron_unalign,
                                                 include_centroids=False)
    axon_stats = nst.skeleton_stats_axon(o_neuron_unalign,
                                         include_centroids=False)
    stats_dict = o_neuron_unalign.neuron_stats(
        stats_to_ignore=[
            "n_not_processed_soma_containing_meshes", "n_error_limbs",
            "n_same_soma_multi_touching_limbs", "n_multi_soma_touching_limbs",
            "n_somas", "spine_density"
        ],
        include_skeletal_stats=False,
        include_centroids=True,
        voxel_adjustment_vector=voxel_adjustment_vector,
    )

    if verbose:
        print(
            f"--- h) Get the axon and dendrite stats: {time.time() - st}")
        st = time.time()

    # ---- i) Calculating the synapse info ------
    syn_dict = syu.n_synapses_analysis_axon_dendrite(o_neuron_unalign,
                                                     verbose=True)

    # --- j) saving neuron and skeleton ----
    # Save the compressed neuron object plus the axon and dendrite skeletons
    # as separate compressed pickles; file names encode segment/split/hash.
    file_name = f"{o_neuron_unalign.segment_id}_{split_index}_{decomposition_cell_type_hash}"
    file_name_decomp = f"{file_name}_{dataset}_cell_type_decomp"
    output_folder = str(target_dir_decomp)

    ret_file_path = o_neuron_unalign.save_compressed_neuron(
        output_folder=output_folder,
        file_name=file_name_decomp,
        return_file_path=True,
        export_mesh=False,
        suppress_output=True,
    )
    ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

    axon_skeleton = o_neuron_unalign.axon_skeleton
    file_name_decomp_sk_axon = f"{file_name_decomp}_axon_sk"
    ret_sk_filepath_ax = su.compressed_pickle(
        axon_skeleton,
        filename=file_name_decomp_sk_axon,
        folder=str(target_dir_sk),
        return_filepath=True)

    dendrite_skeleton = o_neuron_unalign.dendrite_skeleton
    file_name_decomp_sk_dendr = f"{file_name_decomp}_dendr_sk"
    ret_sk_filepath_dendr = su.compressed_pickle(
        dendrite_skeleton,
        filename=file_name_decomp_sk_dendr,
        folder=str(target_dir_sk),
        return_filepath=True)

    if verbose:
        print(f"neuron ret_file_path_str = {ret_file_path_str}")
        print(f"ret_sk_filepath_ax = {ret_sk_filepath_ax}")
        print(f"ret_sk_filepath_dendr = {ret_sk_filepath_dendr}")

    if verbose:
        print(f"--- i) saving neuron and skeleton ----")
        st = time.time()

    # --- k) make the insertions ---
    run_time = np.round(time.time() - global_time, 4)

    # -- decomp table --
    # NOTE(review): n_syn_pre/n_syn_post are read off the pre-axon-processing
    # neuron_obj, not o_neuron_unalign — confirm this is intentional.
    n_dict = dict(
        key.copy(),
        decomposition_split_method=decomposition_split_method,
        multiplicity=1,
        split_index=split_index,
        decomposition=str(ret_file_path_str),
        axon_skeleton=str(ret_sk_filepath_ax),
        dendrite_skeleton=str(ret_sk_filepath_dendr),
        #--- cell types
        h01_e_i_cell_type=database_e_i_class,
        cell_type=e_i_class,
        cell_type_used=cell_type_used,
        #----- synapses ---
        n_syn_pre=neuron_obj.n_synapses_pre,
        n_syn_post=neuron_obj.n_synapses_post,
        run_time=run_time,
        # statistics for the split
    )

    # Merge in every per-stage stats dict gathered above.
    dicts_for_update = [
        nucleus_info, database_cell_type_info, filtering_info,
        axon_angles_dict, dendrite_stats, axon_stats, stats_dict,
        baylor_cell_type_info, filtering_info_low_branch, syn_dict
    ]
    for d in dicts_for_update:
        n_dict.update(d)

    print(f"n_dict = {n_dict}")

    # Insert into both this table and SkeletonAxonDendrite (and their
    # .Object part tables) with the same combined row.
    for curr_obj in [self, SkeletonAxonDendrite]:
        curr_obj.insert1(n_dict,
                         allow_direct_insert=True,
                         ignore_extra_fields=True,
                         skip_duplicates=True)
        curr_obj.Object.insert1(n_dict,
                                allow_direct_insert=True,
                                ignore_extra_fields=True,
                                skip_duplicates=True)
def make(self, key):
    """
    Populate method: proofread a decomposed neuron for axon-on-dendrite
    merge errors only, then write the summary stats row.

    Pseudocode:
    1) Pull Down All of the Neurons
    2) Get the nucleus centers and the original mesh
    """
    whole_pass_time = time.time()

    # 1) Pull Down All of the Neurons
    segment_id = key["segment_id"]
    split_index = key["split_index"]

    if verbose:
        print(f"\n\n------- Working on Neuron {segment_id}_{split_index} -----")

    # Fetch the e/i class (chosen by cell_type_used) and nucleus pairing
    # computed in the upstream DecompositionCellTypeV7 stage.
    cell_type, nucleus_id = (minnie.DecompositionCellTypeV7() &
                             key).fetch1(f"{cell_type_used}_e_i", "nucleus_id")

    if verbose:
        print(f"---- Working on Neuron {segment_id}:{split_index}")
        print(f"nucleus_id = {nucleus_id},cell_type = {cell_type}")

    neuron_obj = du.decomposition_with_spine_recalculation(segment_id,
                                                           split_index=split_index)

    # Label axon-on-dendrite limb/branches (clearing any stale labels first)
    # so the axon-on-dendrite filter below has fresh annotations to act on.
    import axon_utils as au
    au.compute_axon_on_dendrite_limb_branch_dict(neuron_obj=neuron_obj,
                                                 plot_axon_on_dendrite=False,
                                                 verbose=True,
                                                 clean_prior_labels=True)

    neuron_obj = syu.add_synapses_to_neuron_obj(neuron_obj,
                                                validation=False,
                                                verbose=verbose,
                                                original_mesh=None,
                                                plot_valid_error_synapses=False,
                                                calculate_synapse_soma_distance=False,
                                                add_valid_synapses=True,
                                                add_error_synapses=False,)

    if plot_data:
        nviz.plot_axon(neuron_obj)

    # 2) Running the neuron proofreading, restricted to the single
    # axon-on-dendrite-merges filter.
    o_neuron, filtering_info = pru.proofread_neuron_class_predetermined(
        neuron_obj=neuron_obj,
        inh_exc_class=cell_type,
        plot_limb_branch_filter_with_disconnect_effect=plot_data,
        verbose=verbose,
        verbose_outline=verbose,
        high_fidelity_axon_on_excitatory=False,
        plot_final_filtered_neuron=plot_data,
        filter_list=[pru.exc_axon_on_dendrite_merges_filter()],
    )

    # 4) Collect and Write Neuron Stats
    limb_branch_to_cancel = pru.extract_from_filter_info(
        filtering_info, name_to_extract="limb_branch_dict_to_cancel")
    red_blue_suggestions = pru.extract_from_filter_info(
        filtering_info, name_to_extract="red_blue_suggestions")
    # Keep only the scalar area/length metrics, rounded for storage.
    filter_key = {
        k: np.round(v, 2)
        for k, v in filtering_info.items() if "area" in k or "length" in k
    }

    neuron_stats_dict = dict(key,
                             proof_version=proof_version,
                             limb_branch_to_cancel=limb_branch_to_cancel,
                             red_blue_suggestions=red_blue_suggestions,
                             cell_type=cell_type,
                             nucleus_id=nucleus_id)
    neuron_stats_dict.update(filter_key)

    AutoProofreadAxonOnDendrite.insert1(neuron_stats_dict,
                                        skip_duplicates=True,
                                        ignore_extra_fields=True,)

    print(f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***")
def make(self, key):
    """
    Populate method: run the full proofreading pipeline on one decomposed
    neuron split, then write synapses, stats, and the final neuron row to
    the AutoProofread* tables.

    Pseudocode:
    1) Pull Down All of the Neurons
    2) Get the nucleus centers and the original mesh
    """
    whole_pass_time = time.time()

    # 1) Pull Down All of the Neurons
    segment_id = key["segment_id"]
    split_index = key["split_index"]

    if verbose:
        print(
            f"\n\n------- Working on Neuron {segment_id}_{split_index} -----"
        )

    # Fetch the e/i class (column chosen by cell_type_used) and nucleus id
    # computed in the upstream DecompositionCellTypeV7 stage.
    cell_type, nucleus_id = (minnie.DecompositionCellTypeV7() &
                             key).fetch1(f"{cell_type_used}_e_i",
                                         "nucleus_id")

    if verbose:
        print(f"---- Working on Neuron {segment_id}:{split_index}")
        print(f"nucleus_id = {nucleus_id},cell_type = {cell_type}")

    neuron_obj = du.decomposition_with_spine_recalculation(
        segment_id, split_index=split_index)

    if plot_data:
        nviz.plot_axon(neuron_obj)

    #2) Running the neuron proofreading
    # Axon processing / synapse / spine steps are skipped here because they
    # were already performed in the DecompositionCellTypeV7 stage.
    neuron_obj_proof, filtering_info = pru.proofread_neuron_full(
        neuron_obj,
        # arguments for processing down in DecompositionCellTypeV7
        cell_type=cell_type,
        add_valid_synapses=False,
        validation=validation,
        add_spines=False,
        perform_axon_processing=False,
        return_after_axon_processing=False,
        #arguments for processing after DecompositionCellTypeV7 to Proofread Neuron
        plot_head_neck_shaft_synapses=plot_data,
        plot_soma_synapses=plot_data,
        proofread_verbose=proofread_verbose,
        verbose_outline=verbose,
        plot_limb_branch_filter_with_disconnect_effect=plot_data,
        plot_final_filtered_neuron=False,
        plot_synapses_after_proofread=False,
        plot_compartments=plot_data,
        plot_valid_synapses=plot_data,
        plot_error_synapses=plot_data,
        verbose=verbose,
        debug_time=verbose,
    )

    #3) Collect and Write Data to Synapse Table
    # Valid and error synapses go to separate tables below.
    dj_keys_valid = syu.synapses_to_dj_keys(neuron_obj_proof,
                                            valid_synapses=True,
                                            verbose=verbose,
                                            nucleus_id=nucleus_id,
                                            split_index=split_index)
    dj_keys_error = syu.synapses_to_dj_keys(neuron_obj_proof,
                                            valid_synapses=False,
                                            verbose=verbose,
                                            nucleus_id=nucleus_id,
                                            split_index=split_index)

    if verbose:
        print(f"n_synapses_total = {neuron_obj_proof.n_synapses_total}")

    AutoProofreadSynapse7.insert(dj_keys_valid, skip_duplicates=True)
    AutoProofreadSynapseErrors7.insert(dj_keys_error, skip_duplicates=True)

    #4) Collect and Write Neuron Stats
    limb_branch_to_cancel = pru.extract_from_filter_info(
        filtering_info, name_to_extract="limb_branch_dict_to_cancel")
    red_blue_suggestions = pru.extract_from_filter_info(
        filtering_info, name_to_extract="red_blue_suggestions")
    # Keep only the scalar area/length metrics, rounded for storage.
    filter_key = {
        k: np.round(v, 2)
        for k, v in filtering_info.items() if "area" in k or "length" in k
    }

    mesh_skeleton_file_paths = pru.save_off_meshes_skeletons(
        neuron_obj_proof,
        verbose=False,
        split_index=key["split_index"],
        file_name_ending=f"proofv{proof_version}")

    neuron_stats_dict = dict(
        key,
        proof_version=proof_version,
        limb_branch_to_cancel=limb_branch_to_cancel,
        red_blue_suggestions=red_blue_suggestions,
    )
    neuron_stats_dict.update(mesh_skeleton_file_paths)
    neuron_stats_dict.update(filter_key)

    #--------- 12/8: Adding the neuron_graph object that will be retrieved later ----
    if save_G_with_attrs:
        G = ctcu.G_with_attrs_from_neuron_obj(neuron_obj_proof, plot_G=False)
        G_path = ctcu.save_G_with_attrs(G,
                                        segment_id=segment_id,
                                        split_index=split_index)
        if verbose:
            print(f"Saved G_path = {G_path}")
        neuron_stats_dict["neuron_graph"] = G_path

    AutoProofreadStats7.insert1(neuron_stats_dict, skip_duplicates=True)

    # 5) Collecting Stats for the AutoProofreadNeurons6 table
    # Each sub-step below appends a dict to dicts_to_update; they are all
    # merged into one row at step 5h.

    #a) Neuron basics
    if verbose:
        print(f"\n--5a) Neuron basics")
    dicts_to_update = []
    multiplicity = du.multiplicity_from_segment_id(segment_id)
    # NOTE(review): soma center is taken from the pre-proofread neuron_obj,
    # not neuron_obj_proof — confirm intentional.
    soma_x, soma_y, soma_z = nru.soma_centers(neuron_obj,
                                              soma_name="S0",
                                              voxel_adjustment=True)
    basic_cell_dict = dict(multiplicity=multiplicity,
                           soma_x=soma_x,
                           soma_y=soma_y,
                           soma_z=soma_z,
                           cell_type=cell_type,
                           cell_type_used=cell_type_used)
    dicts_to_update.append(basic_cell_dict)

    #b) Neuron Overall Statistics
    if verbose:
        print(f"\n--5b) Neuron Overall Statistics")
    # NOTE(review): neuron_stats_dict is rebound here for a different
    # purpose than the stats row inserted above — the earlier value has
    # already been written, so the reuse is safe but easy to misread.
    neuron_stats_dict = neuron_obj_proof.neuron_stats(
        stats_to_ignore=["axon_length", "axon_area"])
    dicts_to_update.append(neuron_stats_dict)

    #c) compartment Stats
    if verbose:
        print(f"\n--5c) compartment Stats")
    comp_stats = apu.compartments_stats(neuron_obj_proof,
                                        compartment_labels=None,
                                        verbose=False)
    dicts_to_update.append(comp_stats)

    #d) Synapse Stats
    if verbose:
        print(f"\n--5d) Synapse Stats")
    syn_stats = syu.complete_n_synapses_analysis(neuron_obj_proof)
    dicts_to_update.append(syn_stats)

    #e) Cell Typing Info after proofreading
    if verbose:
        print(f"\n--5e) Cell Typing Info after proofreading")
    baylor_e_i, baylor_cell_type_info = ctu.e_i_classification_from_neuron_obj(
        neuron_obj_proof, verbose=False, return_cell_type_info=True)
    baylor_cell_type_info["baylor_e_i"] = baylor_e_i
    # Suffix the keys so they don't collide with the pre-proofread values.
    baylor_cell_type_info = {
        f"{k}_after_proof": v
        for k, v in baylor_cell_type_info.items()
    }
    dicts_to_update.append(baylor_cell_type_info)

    #c
    # (f) Compartment skeleton features relative to the soma center.
    if verbose:
        print(f"\n--5e) Cell Typing Info after proofreading")
    axon_feature_dict = au.axon_features_from_neuron_obj(
        neuron_obj_proof, features_to_exclude=("length", "n_branches"))
    apical_feature_dict = apu.compartment_features_from_skeleton_and_soma_center(
        neuron_obj_proof,
        compartment_label="apical_total",
        name_prefix="apical",
        features_to_exclude=("length", "n_branches"),
    )
    basal_feature_dict = apu.compartment_features_from_skeleton_and_soma_center(
        neuron_obj_proof,
        compartment_label="basal",
        name_prefix="basal",
        features_to_exclude=("length", "n_branches"),
    )
    dendrite_feature_dict = apu.compartment_features_from_skeleton_and_soma_center(
        neuron_obj_proof,
        compartment_label="dendrite",
        name_prefix="dendrite",
        features_to_exclude=("length", "n_branches"),
    )
    dicts_to_update += [
        axon_feature_dict, apical_feature_dict, basal_feature_dict,
        dendrite_feature_dict
    ]

    #g) Repeating old features from DecompositionCellType table
    if verbose:
        print(
            f"\n--5g) Repeating old features from DecompositionCellTypeV7 table"
        )
    decomp_cell_type_features = [
        "nucleus_id",
        "nuclei_distance",
        "n_nuclei_in_radius",
        "n_nuclei_in_bbox",
        "soma_x_nm",
        "soma_y_nm",
        "soma_z_nm",
        "baylor_e_i",
        "allen_e_i",
        "cell_type_used",
        "cell_type",
        "axon_angle_max",
        "axon_angle_min",
        "n_axon_angles",
        "allen_e_i_n_nuc",
        "allen_cell_type",
        "allen_cell_type_n_nuc",
        "allen_cell_type_e_i",
    ]
    decomp_dict = (minnie.DecompositionCellTypeV7() & key).fetch(
        *decomp_cell_type_features, as_dict=True)[0]
    # Rename cell_type columns so they indicate the axon-processing stage
    # they came from and don't clash with this table's own cell_type fields.
    decomp_dict["cell_type_used_for_axon"] = decomp_dict["cell_type_used"]
    decomp_dict["cell_type_for_axon"] = decomp_dict["cell_type"]
    del decomp_dict["cell_type_used"]
    del decomp_dict["cell_type"]
    dicts_to_update.append(decomp_dict)

    if plot_data:
        nviz.plot_compartments(neuron_obj_proof)

    #h) Writing the Data
    if verbose:
        print(f"\n--5h) Writing the Data")
    neuron_proof_dict = dict(key,
                             proof_version=proof_version,
                             run_time=np.round(
                                 time.time() - whole_pass_time, 2))
    for d_u in dicts_to_update:
        neuron_proof_dict.update(d_u)

    AutoProofreadNeurons7.insert1(neuron_proof_dict, skip_duplicates=True)

    print(
        f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
    )