Code Example #1
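
Two DataJoint-style make methods from an auto-proofreading pipeline: the first delegates the whole pass to pru.proofreading_table_processing and then writes the results; the second spells the pass out step by step. Names such as verbose, proof_version, and axon_version, and the module aliases (pru, nru, tu, du, np, time, KDTree) are assumed to be defined at module level alongside the table classes.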
    def make(self, key):
        """
        Pseudocode:
        1) Pull Down All of the Neurons
        2) Get the nucleus centers and the original mesh

        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        whole_pass_time = time.time()

        curr_output = pru.proofreading_table_processing(
            key,
            proof_version=proof_version,
            axon_version=axon_version,
            compute_synapse_to_soma_skeletal_distance=True,
            perform_axon_classification=False,
            high_fidelity_axon_on_excitatory=False,
            verbose=True,
        )
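
        # curr_output bundles the per-split table keys, meshes, skeletons, and
        # filtering/synapse stats that the rest of this method unpacks and writes.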
        # ------ Writing the Data To the Tables ----- #

        AutoProofreadSynapse_keys = curr_output["AutoProofreadSynapse_keys"]
        AutoProofreadNeurons_keys = curr_output["AutoProofreadNeurons_keys"]
        filtering_info_list = curr_output["filtering_info_list"]
        synapse_stats_list = curr_output["synapse_stats_list"]
        total_error_synapse_ids_list = curr_output[
            "total_error_synapse_ids_list"]
        neuron_mesh_list = curr_output["neuron_mesh_list"]
        axon_mesh_list = curr_output["axon_mesh_list"]
        neuron_split_idxs = curr_output["neuron_split_idxs"]

        axon_skeleton_list = curr_output["axon_skeleton_list"]
        dendrite_skeleton_list = curr_output["dendrite_skeleton_list"]

        # Once all of the new neurons have been inserted, compute the overall stats
        if verbose:
            print("Computing the overall stats")

        overall_syn_error_rates = pru.calculate_error_rate(
            total_error_synapse_ids_list, synapse_stats_list, verbose=True)

        # Final Part: Create the stats table entries and insert

        proofread_stats_entries = []

        stats_to_make_sure_in_proofread_stats = [
            'axon_on_dendrite_merges_error_area',
            'axon_on_dendrite_merges_error_length',
            'low_branch_clusters_error_area',
            'low_branch_clusters_error_length',
            'dendrite_on_axon_merges_error_area',
            'dendrite_on_axon_merges_error_length',
            'double_back_and_width_change_error_area',
            'double_back_and_width_change_error_length',
            'crossovers_error_area',
            'crossovers_error_length',
            'high_degree_coordinates_error_area',
            'high_degree_coordinates_error_length',
        ]

        for sp_idx, split_index in enumerate(neuron_split_idxs):

            # Write the AutoProofreadNeurons and AutoProofreadSynapse tables
            keys_to_write = AutoProofreadSynapse_keys[sp_idx]
            AutoProofreadSynapse4.insert(keys_to_write, skip_duplicates=True)

            new_key = AutoProofreadNeurons_keys[sp_idx]
            self.insert1(new_key,
                         skip_duplicates=True,
                         allow_direct_insert=True)

            synapse_stats = synapse_stats_list[sp_idx]
            filtering_info = filtering_info_list[sp_idx]
            limb_branch_to_cancel = pru.extract_from_filter_info(
                filtering_info, name_to_extract="limb_branch_dict_to_cancel")

            red_blue_suggestions = pru.extract_from_filter_info(
                filtering_info, name_to_extract="red_blue_suggestions")

            curr_key = dict(
                key,
                split_index=split_index,
                proof_version=proof_version,
                mesh_faces=neuron_mesh_list[sp_idx],
                axon_faces=axon_mesh_list[sp_idx],
                axon_skeleton=axon_skeleton_list[sp_idx],
                dendrite_skeleton=dendrite_skeleton_list[sp_idx],

                # ---- Valid synapse counts local to this split_index ----
                n_valid_syn_presyn_for_split=synapse_stats[
                    "n_valid_syn_presyn"],
                n_valid_syn_postsyn_for_split=synapse_stats[
                    "n_valid_syn_postsyn"],
                n_presyn_error_syn_non_axon=synapse_stats[
                    "n_errored_syn_presyn_non_axon"],
                limb_branch_to_cancel=limb_branch_to_cancel,
                red_blue_suggestions=red_blue_suggestions,
            )

            # Default any missing filtering stats to None so the insert succeeds
            for s in stats_to_make_sure_in_proofread_stats:
                if s not in filtering_info:
                    curr_key[s] = None

            filter_key = {
                k: np.round(v, 2)
                for k, v in filtering_info.items()
                if "area" in k or "length" in k
            }
            curr_key.update(filter_key)
            curr_key.update(overall_syn_error_rates)

            proofread_stats_entries.append(curr_key)

        for pse in proofread_stats_entries:
            AutoProofreadStats4.insert1(pse, skip_duplicates=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
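
    # Usage sketch (assumption, not from the source): make() methods like these
    # are normally invoked by DataJoint's populate() on the Computed table that
    # defines them, rather than called directly. Table and key names below are
    # hypothetical.
    #
    #   AutoProofreadNeurons4.populate(
    #       {"segment_id": SEGMENT_ID},  # fill in a real segment id
    #       reserve_jobs=True,           # let multiple workers share the queue
    #       suppress_errors=True,        # log failures instead of halting
    #   )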
    def make(self, key):
        """
        Pseudocode:
        1) Pull down all of the neurons
        2) Get the nucleus centers and the original mesh
        3) Iterate through the neurons: proofread, match to nuclei,
           map faces, filter synapses, and write the results
        """
        
        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        # Total timer for the final report (the per-split timer is set in the loop)
        global_start_time = time.time()

        print(f"\n\n------- AutoProofreadNeuron {segment_id} ----------")

        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(segment_id)

        if verbose:
            print(f"Number of Neurons found = {len(neuron_objs)}")

        # 2) ----- Pre-work ------
        nucleus_ids, nucleus_centers = du.segment_to_nuclei(segment_id)

        if verbose:
            print(f"Number of Corresponding Nuclei = {len(nucleus_ids)}")
            print(f"nucleus_ids = {nucleus_ids}")
            print(f"nucleus_centers = {nucleus_centers}")



        original_mesh = du.fetch_segment_id_mesh(segment_id)
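        # Build a KDTree over the original mesh's triangle centers once so the
        # per-split face mapping and synapse filtering below can reuse it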
        original_mesh_kdtree = KDTree(original_mesh.triangles_center)

        # 3) ----- Iterate through all of the Neurons and Proofread --------

        # Lists that accumulate per-split stats until the ProofreadStats write at the end
        filtering_info_list = []
        synapse_stats_list = []
        total_error_synapse_ids_list = []

        for split_index, neuron_obj_pre_split in zip(neuron_split_idxs, neuron_objs):

            whole_pass_time = time.time()

            if verbose:
                print(f"\n-----Working on Neuron Split {split_index}-----")

            neuron_obj = neuron_obj_pre_split
            # (A pru.split_neuron pass for any remaining error limbs was
            # previously performed here; neurons are assumed to arrive
            # already split.)

            # Part A: Proofreading the Neuron
            if verbose:
                print(f"\n   --> Part A: Proofreading the Neuron ----")

            output_dict = pru.proofread_neuron(
                neuron_obj,
                plot_limb_branch_filter_with_disconnect_effect=False,
                plot_final_filtered_neuron=False,
                verbose=True,
            )

            filtered_neuron = output_dict["filtered_neuron"]
            cell_type_info = output_dict["cell_type_info"]
            filtering_info = output_dict["filtering_info"]

            # Part B: Getting Soma Centers and Matching To Nuclei
            if verbose:
                print(f"\n\n    --> Part B: Getting Soma Centers and Matching To Nuclei ----")

            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
                neuron_obj,
                "S0",
                nucleus_ids,
                nucleus_centers,
                nuclei_distance_threshold=15000,
                return_matching_info=True,
                verbose=True,
            )

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            # Part C: Getting the Faces of the Original Mesh
            if verbose:
                print(f"\n\n    --> Part C: Getting the Faces of the Original Mesh ----")

            original_mesh_faces = tu.original_mesh_faces_map(
                original_mesh,
                filtered_neuron.mesh,
                exact_match=True,
                original_mesh_kdtree=original_mesh_kdtree,
            )

            original_mesh_faces_file = du.save_proofread_faces(
                original_mesh_faces,
                segment_id=segment_id,
                split_index=split_index,
            )

            # Part D: Getting the Synapse Information
            if verbose:
                print(f"\n\n    --> Part D: Getting the Synapse Information ----")

            # Labels each synapse as valid or errored against the proofread mesh
            # and returns the table rows, per-split stats, and errored synapse ids
            (keys_to_write,
             synapse_stats,
             total_error_synapse_ids) = pru.synapse_filtering(
                filtered_neuron,
                split_index,
                nucleus_id=winning_nucleus_id,
                segment_id=None,
                return_synapse_filter_info=True,
                return_synapse_center_data=False,
                return_error_synapse_ids=True,
                mapping_threshold=500,
                plot_synapses=False,
                verbose=True,
                original_mesh_method=True,
                original_mesh=original_mesh,
                original_mesh_kdtree=original_mesh_kdtree,
                valid_faces_on_original_mesh=original_mesh_faces,
            )

            # Soma center (voxel-adjusted) for the AutoProofreadNeuron entry below
            soma_x, soma_y, soma_z = nru.soma_centers(
                filtered_neuron,
                soma_name="S0",
                voxel_adjustment=True,
            )

            # 7) Creating the dictionary to insert into the AutoProofreadNeuron table
            new_key = dict(
                key,
                split_index=split_index,
                proof_version=proof_version,
                multiplicity=len(neuron_objs),

                # -------- Important Excitatory/Inhibitory Classification ------- #
                cell_type_predicted=cell_type_info["inh_exc_class"],
                spine_category=cell_type_info["spine_category"],
                n_axons=cell_type_info["n_axons"],
                # Fix: originally read "n_axons" here, an apparent copy-paste bug
                n_apicals=cell_type_info["n_apicals"],

                # ----- Soma Information ---- #
                nucleus_id=nucleus_info["nuclei_id"],
                nuclei_distance=np.round(nucleus_info["nuclei_distance"], 2),
                n_nuclei_in_radius=nucleus_info["n_nuclei_in_radius"],
                n_nuclei_in_bbox=nucleus_info["n_nuclei_in_bbox"],
                soma_x=soma_x,
                soma_y=soma_y,
                soma_z=soma_z,

                # ---------- Mesh Faces ------ #
                mesh_faces=original_mesh_faces_file,

                # (The regular neuron information is merged in below from neuron_stats())

                # ------ Information Used For Excitatory/Inhibitory Classification ------
                axon_angle_maximum=cell_type_info["axon_angle_maximum"],
                spine_density_classifier=cell_type_info["neuron_spine_density"],
                n_branches_processed=cell_type_info["n_branches_processed"],
                skeletal_length_processed=cell_type_info["skeletal_length_processed"],
                n_branches_in_search_radius=cell_type_info["n_branches_in_search_radius"],
                skeletal_length_in_search_radius=cell_type_info["skeletal_length_in_search_radius"],

                run_time=np.round(time.time() - whole_pass_time, 4),
            )

            stats_dict = filtered_neuron.neuron_stats()
            new_key.update(stats_dict)

            # ------ Writing the Data To the Tables ----- #
            SynapseProofread.insert(keys_to_write, skip_duplicates=True)
            self.insert1(new_key, skip_duplicates=True, allow_direct_insert=True)

            # Save this split's stats for the ProofreadStats write below
            filtering_info_list.append(filtering_info)
            synapse_stats_list.append(synapse_stats)
            total_error_synapse_ids_list.append(total_error_synapse_ids)

        # Once all of the new neurons have been inserted, compute the overall stats
        if verbose:
            print("Computing the overall stats")

        overall_syn_error_rates = pru.calculate_error_rate(
            total_error_synapse_ids_list,
            synapse_stats_list,
            verbose=True,
        )

        # Final Part: Create the stats table entries and insert
        proofread_stats_entries = []

        stats_to_make_sure_in_proofread_stats = [
            'axon_on_dendrite_merges_error_area',
            'axon_on_dendrite_merges_error_length',
            'low_branch_clusters_error_area',
            'low_branch_clusters_error_length',
            'dendrite_on_axon_merges_error_area',
            'dendrite_on_axon_merges_error_length',
            'double_back_and_width_change_error_area',
            'double_back_and_width_change_error_length',
            'crossovers_error_area',
            'crossovers_error_length',
            'high_degree_coordinates_error_area',
            'high_degree_coordinates_error_length',
        ]

        for sp_idx, split_index in enumerate(neuron_split_idxs):
            synapse_stats = synapse_stats_list[sp_idx]
            filtering_info = filtering_info_list[sp_idx]

            curr_key = dict(
                key,
                split_index=split_index,
                proof_version=proof_version,

                # ---- Valid synapse counts local to this split_index ----
                n_valid_syn_presyn_for_split=synapse_stats["n_valid_syn_presyn"],
                n_valid_syn_postsyn_for_split=synapse_stats["n_valid_syn_postsyn"],
            )

            # Default any missing filtering stats to None so the insert succeeds
            for s in stats_to_make_sure_in_proofread_stats:
                if s not in filtering_info:
                    curr_key[s] = None

            filter_key = {
                k: np.round(v, 2)
                for k, v in filtering_info.items()
                if "area" in k or "length" in k
            }
            curr_key.update(filter_key)
            curr_key.update(overall_syn_error_rates)

            proofread_stats_entries.append(curr_key)
        
        ProofreadStats.insert(proofread_stats_entries, skip_duplicates=True)

        # whole_pass_time only covers the last split, so report the total timer
        print(f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - global_start_time} ------ ***")