def make(self, key): """ Pseudocode for process: 1) Get the segment id from the key 2) Get the decomposed neurong object from Decomposition table 3) Run the multi_soma split suggestions algorithm 4) Get the number of splits required for this neuron 5) Split the neuron into a list of neuron objects 6) For each neuron object in the list: - get the number of errored limbs (to indicate the success type) - Change the description to include the multiplicity - Compute the information on the largest soma faces and volume - Save the neuron object to the external - Add the new write key to a list to commit 7) Write all of the keys """ whole_pass_time = time.time() # 1) Get the segment id from the key segment_id = key["segment_id"] print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----") # 2) Get the decomposed neuron object from Decomposition table and the split suggestions neuron_obj_path = (minnie.Decomposition & key).fetch1("decomposition") neuron_obj = du.filepath_to_neuron_obj(neuron_obj_path) """ Old way that downloaded from another table # 3) Retrieve the multi soma suggestions split_results = (minnie.NeuronSplitSuggestions & key).fetch1("split_results") """ #3) Calculated the split results split_results, red_blue_split_results = pru.multi_soma_split_suggestions( neuron_obj, plot_intermediates=False, only_multi_soma_paths=True, default_cut_edge="last", verbose=True, debug=False, output_red_blue_suggestions=True, split_red_blue_by_common_upstream=True, apply_valid_upstream_branches_restriction=True, debug_red_blue=False, ) # 4) Get the number of splits required for this neuron n_paths_cut = pru.get_n_paths_cut(split_results) #7) Pass stats and file location to insert new_key = dict(key, split_version=split_version, n_splits=n_paths_cut, split_results=split_results, red_blue_split_results=red_blue_split_results, run_time=np.round(time.time() - whole_pass_time, 4)) self.insert1(new_key, allow_direct_insert=True, skip_duplicates=True) print( f"\n\n ------ Total time for {segment_id} = {time.time() - whole_pass_time} ------" )
def make(self, key): """ Pseudocode for process: 1) Get the segment id from the key 2) Get the decomposed neurong object from Decomposition table 3) Run the multi_soma split suggestions algorithm 4) Get the number of splits required for this neuron 5) Split the neuron into a list of neuron objects 6) For each neuron object in the list: - get the number of errored limbs (to indicate the success type) - Change the description to include the multiplicity - Compute the information on the largest soma faces and volume - Save the neuron object to the external - Add the new write key to a list to commit 7) Write all of the keys """ whole_pass_time = time.time() # 1) Get the segment id from the key segment_id = key["segment_id"] print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----") # 2) Get the decomposed neuron object from Decomposition table and the split suggestions neuron_obj_path = (minnie.Decomposition & key).fetch1("decomposition") neuron_obj = du.filepath_to_neuron_obj(neuron_obj_path) """ Old way that downloaded from another table # 3) Retrieve the multi soma suggestions split_results = (minnie.NeuronSplitSuggestions & key).fetch1("split_results") """ #3) Calculated the split results split_results = pru.multi_soma_split_suggestions( neuron_obj, plot_intermediates=False) # 4) Get the number of splits required for this neuron n_paths_cut = pru.get_n_paths_cut(split_results) if verbose: print(f"n_paths_cut = {n_paths_cut}") # 5) Split the neuron into a list of neuron objects (neuron_list, neuron_list_errored_limbs_area, neuron_list_errored_limbs_skeletal_length, neuron_list_n_multi_soma_errors, neuron_list_n_same_soma_errors) = pru.split_neuron( neuron_obj, limb_results=split_results, verbose=verbose, return_error_info=True) print(f"neuron_list = {neuron_list}") print( f"neuron_list_errored_limbs_area = {neuron_list_errored_limbs_area}" ) print( f"neuron_list_n_multi_soma_errors = {neuron_list_n_multi_soma_errors}" ) print( f"neuron_list_n_same_soma_errors = {neuron_list_n_same_soma_errors}" ) if verbose: print(f"Number of neurons: {len(neuron_list)}") neuron_entries = [] for neuron_idx in range(len(neuron_list)): """ # 6) For each neuron object in the list: # - get the number of errored limbs (to indicate the success type) # - Compute the information on the largest soma faces and volume # - Save the neuron object to the external # - Add the new write key to a list to commit """ n = neuron_list[neuron_idx] error_imbs_cancelled_area = neuron_list_errored_limbs_area[ neuron_idx] error_imbs_cancelled_skeletal_length = neuron_list_errored_limbs_skeletal_length[ neuron_idx] n_multi_soma_limbs_cancelled = neuron_list_n_multi_soma_errors[ neuron_idx] n_same_soma_limbs_cancelled = neuron_list_n_same_soma_errors[ neuron_idx] #for n in neuron_list: # nviz.visualize_neuron(n, # limb_branch_dict="all") # - get the number of errored limbs (to indicate the success type) if n.n_error_limbs == 0: split_success = 0 elif n.multi_soma_touching_limbs == 0: split_successs = 1 elif n.same_soma_multi_touching_limbs == 0: split_success = 2 else: split_success = 3 if verbose: print(f"split_success = {split_success}") # - Compute the information on the largest soma faces and volume soma_volumes = [ n[k].volume / 1000000000 for k in n.get_soma_node_names() ] soma_n_faces = [ len(n[k].mesh.faces) for k in n.get_soma_node_names() ] largest_n_faces = np.max(soma_n_faces) largest_volume = np.max(soma_volumes) if verbose: print(f"largest_n_faces = {largest_n_faces}") print(f"largest_volume = {largest_volume}") if 
"split" not in n.description: n.description += "_soma_0_split" #6) Save the file in a certain location if True: save_time = time.time() ret_file_path = n.save_compressed_neuron(output_folder=str( du.get_decomposition_path()), return_file_path=True, export_mesh=False, suppress_output=True) ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2" print(f"Save time = {time.time() - save_time}") else: print("Storing a dummy value for neuron") ret_file_path_str = "dummy" #7) Pass stats and file location to insert new_key = dict( key, split_index=neuron_idx, split_version=split_version, multiplicity=len(neuron_list), n_splits=n_paths_cut, split_success=split_success, n_error_limbs_cancelled=len(error_imbs_cancelled_area), n_multi_soma_limbs_cancelled=n_multi_soma_limbs_cancelled, n_same_soma_limbs_cancelled=n_same_soma_limbs_cancelled, error_imbs_cancelled_area=np.round( np.sum(error_imbs_cancelled_area), 4), error_imbs_cancelled_skeletal_length=np.round( np.sum(error_imbs_cancelled_skeletal_length) / 1000, 4), split_results=split_results, max_soma_n_faces=largest_n_faces, max_soma_volume=largest_volume, decomposition=ret_file_path_str, n_vertices=len(n.mesh.vertices), n_faces=len(n.mesh.faces), run_time=np.round(time.time() - whole_pass_time, 4)) stats_dict = n.neuron_stats() new_key.update(stats_dict) attributes_to_remove = ["axon_length", "axon_area", "n_boutons"] for k in attributes_to_remove: del new_key[k] neuron_entries.append(new_key) self.insert(neuron_entries, allow_direct_insert=True, skip_duplicates=True) print( f"\n\n ------ Total time for {segment_id} = {time.time() - whole_pass_time} ------" )
def make(self, key): """ Purpose: To decimate a mesh by a perscribed decimation ratio and algorithm Pseudocode: 1) Fetch neuron object 2) Get the parameters for the mesh split 3) Calculate the split results 4) Apply the split results to the neuron 5) Save the neuron and write to the table (including the skeleton table) """ global_time = time.time() segment_id = key["segment_id"] decomposition_split_hash = key["decomposition_split_method"] ver = key["ver"] if verbose: print( f"\n\n--Working on {segment_id}: (decomposition_split_hash = " f"{decomposition_split_hash})") #1) Fetch neuron object st = time.time() neuron_obj = hdju.neuron_obj_from_table( segment_id=segment_id, table=h01auto.Decomposition.Object(), verbose=True, return_one=True, ) if verbose: print(f"Downloading Neuron Object: {time.time() - st}") if plotting: nviz.plot_soma_limb_concept_network(neuron_obj) for limb_idx in nru.error_limbs(neuron_obj): print(f"Error Limb: {limb_idx}") nviz.visualize_neuron_specific_limb(neuron_obj, limb_idx) #2) Get the parameters for the mesh split split_args = DecompositionSplitMethod.restrict_one_part_with_hash( decomposition_split_hash).fetch1() min_skeletal_length_limb = copy.copy( split_args["min_skeletal_length_limb"]) del split_args["min_skeletal_length_limb"] #3) Calculated the split results split_results = pru.multi_soma_split_suggestions( neuron_obj, plot_suggestions=plotting, verbose=verbose, #**split_args ) n_paths_cut = pru.get_n_paths_cut(split_results) if verbose: print(f"n_paths_cut = {n_paths_cut}") # 4) Apply the split results to the neuron (neuron_list, neuron_list_errored_limbs_area, neuron_list_errored_limbs_skeletal_length, neuron_list_n_multi_soma_errors, neuron_list_n_same_soma_errors) = pru.split_neuron( neuron_obj, limb_results=split_results, verbose=verbose, return_error_info=True, #min_skeletal_length_limb=min_skeletal_length_limb, ) #5) Save the neuron and write to the table (including the skeleton table) if len(neuron_list) > 1: raise Exception("More than one neuron after splitting") neuron_idx = 0 neuron_obj_comb = neuron_list[neuron_idx] error_imbs_cancelled_area = neuron_list_errored_limbs_area[neuron_idx] error_imbs_cancelled_skeletal_length = neuron_list_errored_limbs_skeletal_length[ neuron_idx] n_multi_soma_limbs_cancelled = neuron_list_n_multi_soma_errors[ neuron_idx] n_same_soma_limbs_cancelled = neuron_list_n_same_soma_errors[ neuron_idx] if plotting: nviz.visualize_neuron_limbs(neuron_obj_comb) st = time.time() stats_dict = neuron_obj_comb.neuron_stats( stats_to_ignore=[ "n_boutons", "axon_length", "axon_area", "max_soma_volume", "max_soma_n_faces", ], include_skeletal_stats=True, include_centroids=True, voxel_adjustment_vector=voxel_adjustment_vector, ) if verbose: print(f"-- Generating Stats: {time.time() - st}") #4) Save the neuron object in a certain location #file_name = f"{neuron_obj_comb.segment_id}_{neuron_idx}" file_name = f"{neuron_obj_comb.segment_id}_{neuron_idx}_{decomposition_split_hash}" file_name_decomp = f"{file_name}_{dataset}_decomposition" output_folder = str(target_dir_decomp) st = time.time() ret_file_path = neuron_obj_comb.save_compressed_neuron( output_folder=output_folder, file_name=file_name_decomp, return_file_path=True, export_mesh=False, suppress_output=True, ) ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2" if verbose: print(f"-- Neuron Object Save time: {time.time() - st}") #5) Outputting skeleton object, computing stats and saving st = time.time() sk_stats = nst.skeleton_stats_from_neuron_obj( neuron_obj_comb, 
include_centroids=True, voxel_adjustment_vector=voxel_adjustment_vector, verbose=True) skeleton = neuron_obj_comb.skeleton file_name_decomp_sk = f"{file_name}_{dataset}_decomposition_sk" ret_sk_filepath = su.compressed_pickle(skeleton, filename=file_name_decomp_sk, folder=str(target_dir_sk), return_filepath=True) if verbose: print(f"ret_sk_filepath = {ret_sk_filepath}") if verbose: print(f"-- Skeleton Generation and Save time: {time.time() - st}") # 6) Calcuating the split statistics: split_success = pru.split_success(neuron_obj_comb) if verbose: print(f"Calculating split statistics dict") print(f"split_success = {split_success}") split_stats_dict = dict( n_splits=n_paths_cut, split_success=split_success, n_error_limbs_cancelled=len(error_imbs_cancelled_area), n_multi_soma_limbs_cancelled=n_multi_soma_limbs_cancelled, n_same_soma_limbs_cancelled=n_same_soma_limbs_cancelled, error_imbs_cancelled_area=np.round( np.sum(error_imbs_cancelled_area), 4), error_imbs_cancelled_skeletal_length=np.round( np.sum(error_imbs_cancelled_skeletal_length) / 1000, 4), split_results=split_results, ) # 7) make the insertions run_time = run_time = np.round(time.time() - global_time, 4) # -- decomp table -- decomp_dict = dict( key.copy(), multiplicity=1, split_index=neuron_idx, decomposition=ret_file_path_str, run_time=run_time, # statistics for the split ) decomp_dict.update(stats_dict) decomp_dict.update(split_stats_dict) self.insert1(decomp_dict, allow_direct_insert=True, ignore_extra_fields=True, skip_duplicates=True) self.Object.insert1(decomp_dict, allow_direct_insert=True, ignore_extra_fields=True, skip_duplicates=True) #-- sk table sk_dict = dict(key.copy(), multiplicity=1, split_index=neuron_idx, skeleton=ret_sk_filepath, run_time=run_time) sk_dict.update(sk_stats) SkeletonDecompositionSplit.insert1(sk_dict, allow_direct_insert=True, ignore_extra_fields=True, skip_duplicates=True) SkeletonDecompositionSplit.Object.insert1(sk_dict, allow_direct_insert=True, ignore_extra_fields=True, skip_duplicates=True)
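# --- Readback sketch (illustrative) ---
# Downstream code can recover the saved artifacts from the stored file paths.
# du.filepath_to_neuron_obj appears in the first make() above;
# su.decompress_pickle is assumed to be the counterpart of the
# su.compressed_pickle call used for the skeleton. The table name
# DecompositionSplit and the restriction values are placeholders.

restriction = dict(segment_id=864691135012345,  # placeholder id
                   split_index=0)

# Reload the compressed neuron object from its saved .pbz2 path
decomp_path = (DecompositionSplit & restriction).fetch1("decomposition")
neuron_obj = du.filepath_to_neuron_obj(decomp_path)

# Reload the matching skeleton array
sk_path = (SkeletonDecompositionSplit & restriction).fetch1("skeleton")
skeleton = su.decompress_pickle(sk_path)  # assumed helper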