# Assumed module-level context (not shown in this section): `import time`,
# `import numpy as np`, the project modules `du`, `pru`, `minnie`, `nviz`,
# and the globals `verbose` and `split_version` defined elsewhere.
def make(self, key):
    """
    Pseudocode for process:

    1) Get the segment id from the key
    2) Get the decomposed neuron object from the Decomposition table
    3) Run the multi_soma split suggestions algorithm
    4) Get the number of splits required for this neuron
    5) Split the neuron into a list of neuron objects
    6) For each neuron object in the list:
       - get the number of errored limbs (to indicate the success type)
       - Change the description to include the multiplicity
       - Compute the information on the largest soma faces and volume
       - Save the neuron object to the external store
       - Add the new write key to a list to commit
    7) Write all of the keys
    """
    whole_pass_time = time.time()

    # 1) Get the segment id from the key
    segment_id = key["segment_id"]
    print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----")

    # 2) Get the decomposed neuron object from the Decomposition table
    neuron_obj_path = (minnie.Decomposition & key).fetch1("decomposition")
    neuron_obj = du.filepath_to_neuron_obj(neuron_obj_path)

    """
    Old way that downloaded from another table

    # 3) Retrieve the multi soma suggestions
    split_results = (minnie.NeuronSplitSuggestions & key).fetch1("split_results")
    """

    # 3) Calculate the split results
    split_results = pru.multi_soma_split_suggestions(neuron_obj,
                                                     plot_intermediates=False)

    # 4) Get the number of splits required for this neuron
    n_paths_cut = pru.get_n_paths_cut(split_results)

    if verbose:
        print(f"n_paths_cut = {n_paths_cut}")

    # 5) Split the neuron into a list of neuron objects
    (neuron_list,
     neuron_list_errored_limbs_area,
     neuron_list_errored_limbs_skeletal_length,
     neuron_list_n_multi_soma_errors,
     neuron_list_n_same_soma_errors) = pru.split_neuron(
         neuron_obj,
         limb_results=split_results,
         verbose=verbose,
         return_error_info=True)

    print(f"neuron_list = {neuron_list}")
    print(f"neuron_list_errored_limbs_area = {neuron_list_errored_limbs_area}")
    print(f"neuron_list_n_multi_soma_errors = {neuron_list_n_multi_soma_errors}")
    print(f"neuron_list_n_same_soma_errors = {neuron_list_n_same_soma_errors}")

    if verbose:
        print(f"Number of neurons: {len(neuron_list)}")

    neuron_entries = []
    for neuron_idx in range(len(neuron_list)):
        # 6) For each neuron object in the list:
        #    - get the number of errored limbs (to indicate the success type)
        #    - Compute the information on the largest soma faces and volume
        #    - Save the neuron object to the external store
        #    - Add the new write key to a list to commit
        n = neuron_list[neuron_idx]
        # NOTE: the "imbs" (for "limbs") spelling is kept to match the
        # attribute names used in the insert below.
        error_imbs_cancelled_area = neuron_list_errored_limbs_area[neuron_idx]
        error_imbs_cancelled_skeletal_length = (
            neuron_list_errored_limbs_skeletal_length[neuron_idx])
        n_multi_soma_limbs_cancelled = neuron_list_n_multi_soma_errors[neuron_idx]
        n_same_soma_limbs_cancelled = neuron_list_n_same_soma_errors[neuron_idx]

        # for n in neuron_list:
        #     nviz.visualize_neuron(n, limb_branch_dict="all")

        # - get the number of errored limbs (to indicate the success type)
        if n.n_error_limbs == 0:
            split_success = 0
        elif n.multi_soma_touching_limbs == 0:
            split_success = 1
        elif n.same_soma_multi_touching_limbs == 0:
            split_success = 2
        else:
            split_success = 3

        if verbose:
            print(f"split_success = {split_success}")

        # - Compute the information on the largest soma faces and volume
        #   (volumes assumed to be in nm^3, converted to um^3)
        soma_volumes = [n[k].volume / 1000000000
                        for k in n.get_soma_node_names()]
        soma_n_faces = [len(n[k].mesh.faces)
                        for k in n.get_soma_node_names()]

        largest_n_faces = np.max(soma_n_faces)
        largest_volume = np.max(soma_volumes)

        if verbose:
            print(f"largest_n_faces = {largest_n_faces}")
            print(f"largest_volume = {largest_volume}")

        if "split" not in n.description:
            n.description += "_soma_0_split"

        # 6) Save the file in a certain location
        if True:  # set to False to skip the save and store a dummy path
            save_time = time.time()
            ret_file_path = n.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)
            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"
            print(f"Save time = {time.time() - save_time}")
        else:
            print("Storing a dummy value for neuron")
            ret_file_path_str = "dummy"

        # 7) Pass stats and file location to insert
        new_key = dict(
            key,
            split_index=neuron_idx,
            split_version=split_version,
            multiplicity=len(neuron_list),
            n_splits=n_paths_cut,
            split_success=split_success,
            n_error_limbs_cancelled=len(error_imbs_cancelled_area),
            n_multi_soma_limbs_cancelled=n_multi_soma_limbs_cancelled,
            n_same_soma_limbs_cancelled=n_same_soma_limbs_cancelled,
            error_imbs_cancelled_area=np.round(
                np.sum(error_imbs_cancelled_area), 4),
            # skeletal length assumed to be in nm, converted to um
            error_imbs_cancelled_skeletal_length=np.round(
                np.sum(error_imbs_cancelled_skeletal_length) / 1000, 4),
            split_results=split_results,
            max_soma_n_faces=largest_n_faces,
            max_soma_volume=largest_volume,
            decomposition=ret_file_path_str,
            n_vertices=len(n.mesh.vertices),
            n_faces=len(n.mesh.faces),
            run_time=np.round(time.time() - whole_pass_time, 4))

        stats_dict = n.neuron_stats()
        new_key.update(stats_dict)

        attributes_to_remove = ["axon_length", "axon_area", "n_boutons"]
        for k in attributes_to_remove:
            del new_key[k]

        neuron_entries.append(new_key)

    self.insert(neuron_entries,
                allow_direct_insert=True,
                skip_duplicates=True)

    print(f"\n\n ------ Total time for {segment_id} = "
          f"{time.time() - whole_pass_time} ------")
def make(self, key):
    """
    Pseudocode for process:

    1) Get the segment id from the key
    2) Get the decomposed neuron object from the Decomposition table
    3) Run the multi_soma split suggestions algorithm (also collecting
       red/blue split suggestions)
    4) Get the number of splits required for this neuron
    5) Insert the split statistics and suggestion results
    """
    whole_pass_time = time.time()

    # 1) Get the segment id from the key
    segment_id = key["segment_id"]
    print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----")

    # 2) Get the decomposed neuron object from the Decomposition table
    neuron_obj_path = (minnie.Decomposition & key).fetch1("decomposition")
    neuron_obj = du.filepath_to_neuron_obj(neuron_obj_path)

    """
    Old way that downloaded from another table

    # 3) Retrieve the multi soma suggestions
    split_results = (minnie.NeuronSplitSuggestions & key).fetch1("split_results")
    """

    # 3) Calculate the split results (and red/blue split suggestions)
    split_results, red_blue_split_results = pru.multi_soma_split_suggestions(
        neuron_obj,
        plot_intermediates=False,
        only_multi_soma_paths=True,
        default_cut_edge="last",
        verbose=True,
        debug=False,
        output_red_blue_suggestions=True,
        split_red_blue_by_common_upstream=True,
        apply_valid_upstream_branches_restriction=True,
        debug_red_blue=False,
    )

    # 4) Get the number of splits required for this neuron
    n_paths_cut = pru.get_n_paths_cut(split_results)

    # 5) Pass stats and suggestion results to insert
    new_key = dict(key,
                   split_version=split_version,
                   n_splits=n_paths_cut,
                   split_results=split_results,
                   red_blue_split_results=red_blue_split_results,
                   run_time=np.round(time.time() - whole_pass_time, 4))

    self.insert1(new_key,
                 allow_direct_insert=True,
                 skip_duplicates=True)

    print(f"\n\n ------ Total time for {segment_id} = "
          f"{time.time() - whole_pass_time} ------")
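
# Consumption sketch (hedged): the stored suggestions can be fetched back and
# applied with pru.split_neuron(), mirroring the commented-out "old way" in
# the first make() above. The restriction key is illustrative, and the return
# shape of pru.split_neuron() without return_error_info=True is assumed to be
# just the neuron list:
#
#   key = dict(segment_id=segment_id)
#   split_results = (minnie.NeuronSplitSuggestions & key).fetch1("split_results")
#   neuron_obj = du.filepath_to_neuron_obj(
#       (minnie.Decomposition & key).fetch1("decomposition"))
#   neuron_list = pru.split_neuron(neuron_obj, limb_results=split_results)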