def make(self, key):
        """
        Pseudocode for process:

        1) Get the segment id from the key
        2) Get the decomposed neuron object from the Decomposition table
        3) Run the multi_soma split suggestions algorithm
        4) Get the number of splits required for this neuron
        5) Split the neuron into a list of neuron objects
        6) For each neuron object in the list:
        - get the number of errored limbs (to indicate the success type)
        - Change the description to include the multiplicity
        - Compute the information on the largest soma faces and volume
        - Save the neuron object to the external
        - Add the new write key to a list to commit 
        7) Write all of the keys 
        """

        whole_pass_time = time.time()

        # 1) Get the segment id from the key
        segment_id = key["segment_id"]
        print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----")

        # 2) Get the decomposed neuron object from Decomposition table and the split suggestions
        neuron_obj_path = (minnie.Decomposition & key).fetch1("decomposition")
        neuron_obj = du.filepath_to_neuron_obj(neuron_obj_path)
        """ Old way that downloaded from another table
        # 3) Retrieve the multi soma suggestions
        split_results = (minnie.NeuronSplitSuggestions & key).fetch1("split_results")
        """
        #3) Calculate the split results
        split_results = pru.multi_soma_split_suggestions(
            neuron_obj, plot_intermediates=False)

        # 4) Get the number of splits required for this neuron
        n_paths_cut = pru.get_n_paths_cut(split_results)

        if verbose:
            print(f"n_paths_cut = {n_paths_cut}")

        # 5) Split the neuron into a list of neuron objects
        (neuron_list, neuron_list_errored_limbs_area,
         neuron_list_errored_limbs_skeletal_length,
         neuron_list_n_multi_soma_errors,
         neuron_list_n_same_soma_errors) = pru.split_neuron(
             neuron_obj,
             limb_results=split_results,
             verbose=verbose,
             return_error_info=True)

        print(f"neuron_list = {neuron_list}")
        print(
            f"neuron_list_errored_limbs_area = {neuron_list_errored_limbs_area}"
        )
        print(
            f"neuron_list_n_multi_soma_errors = {neuron_list_n_multi_soma_errors}"
        )
        print(
            f"neuron_list_n_same_soma_errors = {neuron_list_n_same_soma_errors}"
        )

        if verbose:
            print(f"Number of neurons: {len(neuron_list)}")

        neuron_entries = []
        for neuron_idx in range(len(neuron_list)):
            """
            # 6) For each neuron object in the list:
            # - get the number of errored limbs (to indicate the success type)
            # - Compute the information on the largest soma faces and volume
            # - Save the neuron object to the external
            # - Add the new write key to a list to commit 
            """
            n = neuron_list[neuron_idx]

            error_imbs_cancelled_area = neuron_list_errored_limbs_area[
                neuron_idx]
            error_imbs_cancelled_skeletal_length = neuron_list_errored_limbs_skeletal_length[
                neuron_idx]
            n_multi_soma_limbs_cancelled = neuron_list_n_multi_soma_errors[
                neuron_idx]
            n_same_soma_limbs_cancelled = neuron_list_n_same_soma_errors[
                neuron_idx]

            #for n in neuron_list:
            #     nviz.visualize_neuron(n,
            #                          limb_branch_dict="all")

            # - get the number of errored limbs (to indicate the success type)
            if n.n_error_limbs == 0:
                split_success = 0
            elif n.multi_soma_touching_limbs == 0:
                split_success = 1
            elif n.same_soma_multi_touching_limbs == 0:
                split_success = 2
            else:
                split_success = 3
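            # Success codes implied by the checks above:
            #   0 = no error limbs remain (clean split)
            #   1 = error limbs remain, but none touch multiple somas
            #   2 = multi-soma touches remain, but no same-soma multi-touch limbs
            #   3 = both error types remain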

            if verbose:
                print(f"split_success = {split_success}")

            # - Compute the information on the largest soma faces and volume
            soma_volumes = [
                n[k].volume / 1000000000 for k in n.get_soma_node_names()
            ]
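            # assuming mesh units are nm, dividing the raw volume by 1e9
            # reports soma volume in um^3 (unit convention inferred, not confirmed)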
            soma_n_faces = [
                len(n[k].mesh.faces) for k in n.get_soma_node_names()
            ]

            largest_n_faces = np.max(soma_n_faces)
            largest_volume = np.max(soma_volumes)

            if verbose:
                print(f"largest_n_faces = {largest_n_faces}")
                print(f"largest_volume = {largest_volume}")

            if "split" not in n.description:
                n.description += "_soma_0_split"

            #6) Save the file in a certain location
            save_time = time.time()
            ret_file_path = n.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"
            print(f"Save time = {time.time() - save_time}")

            #7) Pass stats and file location to insert
            new_key = dict(
                key,
                split_index=neuron_idx,
                split_version=split_version,
                multiplicity=len(neuron_list),
                n_splits=n_paths_cut,
                split_success=split_success,
                n_error_limbs_cancelled=len(error_imbs_cancelled_area),
                n_multi_soma_limbs_cancelled=n_multi_soma_limbs_cancelled,
                n_same_soma_limbs_cancelled=n_same_soma_limbs_cancelled,
                error_imbs_cancelled_area=np.round(
                    np.sum(error_imbs_cancelled_area), 4),
                error_imbs_cancelled_skeletal_length=np.round(
                    np.sum(error_imbs_cancelled_skeletal_length) / 1000, 4),
                split_results=split_results,
                max_soma_n_faces=largest_n_faces,
                max_soma_volume=largest_volume,
                decomposition=ret_file_path_str,
                n_vertices=len(n.mesh.vertices),
                n_faces=len(n.mesh.faces),
                run_time=np.round(time.time() - whole_pass_time, 4))

            stats_dict = n.neuron_stats()
            new_key.update(stats_dict)

            attributes_to_remove = ["axon_length", "axon_area", "n_boutons"]

            for k in attributes_to_remove:
                del new_key[k]

            neuron_entries.append(new_key)

        self.insert(neuron_entries,
                    allow_direct_insert=True,
                    skip_duplicates=True)

        print(
            f"\n\n ------ Total time for {segment_id} = {time.time() - whole_pass_time} ------"
        )
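
For context, a make method like this one runs inside a DataJoint Computed table, and populate() invokes it once per pending key. A minimal sketch of the surrounding class (the table name and attribute list here are hypothetical, not the actual schema):

import datajoint as dj

schema = dj.schema("minnie")  # hypothetical schema name

@schema
class DecompositionSplit(dj.Computed):  # hypothetical table name
    definition = """
    -> Decomposition
    split_index   : int          # index of the split-off neuron
    ---
    decomposition : varchar(200) # external path of the saved neuron file
    run_time      : double       # processing time in seconds
    """

    def make(self, key):
        ...  # body as in the example above

# usage: DecompositionSplit.populate(reserve_jobs=True)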
Example 2
    def make(self, key):
        """
        Pseudocode for process:

        1) Get the segment id from the key
        2) Get the decimated mesh
        3) Get the somas info
        4) Run the preprocessing
        5) Calculate all starter stats
        6) Save the file in a certain location
        7) Pass stats and file location to insert
        """
        whole_pass_time = time.time()
        #1) Get the segment id from the key
        segment_id = key["segment_id"]
        description = str(key['decimation_version']) + "_25"
        print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----")
        global_start = time.time()

        #2) Get the decimated mesh
        current_neuron_mesh = du.fetch_segment_id_mesh(segment_id)

        #3) Get the somas info
        somas = du.get_soma_mesh_list(segment_id)
        soma_ver = du.get_soma_mesh_list_ver(segment_id)

        print(f"somas = {somas}")

        #3b) Get the glia and nuclei information
        glia_faces, nuclei_faces = du.get_segment_glia_nuclei_faces(
            segment_id, return_empty_list=True)

        #4) Run the preprocessing

        total_neuron_process_time = time.time()

        print(f"\n--- Beginning preprocessing of {segment_id}---")
        recovered_neuron = neuron.Neuron(
            mesh=current_neuron_mesh,
            somas=somas,
            segment_id=segment_id,
            description=description,
            suppress_preprocessing_print=False,
            suppress_output=False,
            calculate_spines=True,
            widths_to_calculate=["no_spine_median_mesh_center"],
            glia_faces=glia_faces,
            nuclei_faces=nuclei_faces,
        )

        print(
            f"\n\n\n---- Total preprocessing time = {time.time() - total_neuron_process_time}"
        )

        #5) Calculate all starter stats (the neuron object computes these itself)
        stats_dict = recovered_neuron.neuron_stats()

        #6) Save the file in a certain location
        save_time = time.time()
        ret_file_path = recovered_neuron.save_compressed_neuron(
            output_folder=str(du.get_decomposition_path()),
            return_file_path=True,
            export_mesh=False,
            suppress_output=True)

        ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"
        print(f"Save time = {time.time() - save_time}")

        #7) Pass stats and file location to insert
        new_key = dict(key,
                       ver=soma_ver,
                       process_version=process_version,
                       index=0,
                       multiplicity=1,
                       decomposition=ret_file_path_str,
                       n_vertices=len(current_neuron_mesh.vertices),
                       n_faces=len(current_neuron_mesh.faces),
                       run_time=np.round(time.time() - whole_pass_time, 4))
        new_key.update(stats_dict)

        keys_to_delete = [
            "axon_length", "axon_area", "max_soma_volume", "max_soma_n_faces"
        ]

        for k_to_delete in keys_to_delete:
            del new_key[k_to_delete]

        self.insert1(new_key, allow_direct_insert=True, skip_duplicates=True)

        print(
            f"\n\n ------ Total time for {segment_id} = {time.time() - global_start} ------"
        )
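
The examples here save neurons with save_compressed_neuron and record a path ending in ".pbz2". A minimal sketch of that file convention, assuming the files are bz2-compressed pickles (the extension suggests this, but the actual save_compressed_neuron implementation may differ):

import bz2
import pickle

def save_compressed(obj, path):
    # write obj as a bz2-compressed pickle, mirroring the ".pbz2" convention
    with bz2.BZ2File(f"{path}.pbz2", "wb") as f:
        pickle.dump(obj, f)

def load_compressed(path):
    # load an object previously written by save_compressed
    with bz2.BZ2File(path, "rb") as f:
        return pickle.load(f)

# usage: neuron_obj = load_compressed(ret_file_path_str)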
Example 3
    def make(self, key):
        """
        Pseudocode:
        1) Pull Down all the Neuron Objects associated with a segment_id
        
        For each neuron:
        2) Run the full axon preprocessing
        3) Save off the neuron
        4) Save dict entry to list
        
        
        5) Write the new entry to the table

        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")

        whole_pass_time = time.time()

        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        #For each neuron:
        dict_to_write = []
        for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):

            if verbose:
                print(f"--> Working on Split Index {split_index} -----")

            st = time.time()
            #Run the Axon Decomposition
            neuron_obj_with_web = au.complete_axon_processing(neuron_obj,
                                                              verbose=True)

            save_time = time.time()
            ret_file_path = neuron_obj_with_web.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                #output_folder = "./",
                file_name=
                f"{neuron_obj_with_web.segment_id}_{split_index}_split_axon_v{au.axon_version}",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")

            n_dict = dict(key,
                          split_index=split_index,
                          axon_version=au.axon_version,
                          decomposition=ret_file_path_str,
                          axon_length=neuron_obj_with_web.axon_length,
                          run_time=np.round(time.time() - st, 2))

            dict_to_write.append(n_dict)

        #5) Write the new entries to the table
        self.insert(dict_to_write,
                    skip_duplicates=True,
                    allow_direct_insert=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
    def make(self, key):
        """
        Pseudocode for process:

        1) Get the segment id from the key
        2) Get the decimated mesh
        3) Get the somas info
        4) Run the preprocessing
        5) Calculate all starter stats
        6) Save the file in a certain location
        7) Pass stats and file location to insert
        """
        whole_pass_time = time.time()
        #1) Get the segment id from the key
        segment_id = key["segment_id"]
        description = str(key['decimation_version']) + "_25"
        print(f"\n\n----- Working on {segment_id}-------")
        global_start = time.time()

        #2) Get the decimated mesh
        current_neuron_mesh = du.fetch_segment_id_mesh(segment_id,
                                                       minnie=minnie)

        #3) Get the somas info (TODO: revisit this before the production run)
        somas = du.get_soma_mesh_list(segment_id, minnie=minnie)
        print(f"somas = {somas}")
        #4) Run the preprocessing

        total_neuron_process_time = time.time()

        print(f"\n--- Beginning preprocessing of {segment_id}---")
        recovered_neuron = neuron.Neuron(
            mesh=current_neuron_mesh,
            somas=somas,
            segment_id=segment_id,
            description=description,
            suppress_preprocessing_print=False,
            suppress_output=False,
            calculate_spines=True,
            widths_to_calculate=["no_spine_median_mesh_center"])

        print(
            f"\n\n\n---- Total preprocessing time = {time.time() - total_neuron_process_time}"
        )

        #5) Calculate all starter stats (the neuron object computes these itself)
        stats_dict = recovered_neuron.neuron_stats()

        #6) Save the file in a certain location
        save_time = time.time()
        ret_file_path = recovered_neuron.save_compressed_neuron(
            output_folder=str(du.get_decomposition_path()),
            return_file_path=True,
            export_mesh=False,
            suppress_output=True)

        ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"
        print(f"Save time = {time.time() - save_time}")

        #7) Pass stats and file location to insert
        new_key = dict(key,
                       decomposition=ret_file_path_str,
                       n_vertices=len(current_neuron_mesh.vertices),
                       n_faces=len(current_neuron_mesh.faces),
                       run_time=np.round(time.time() - whole_pass_time, 4))
        new_key.update(stats_dict)

        self.insert1(new_key, allow_direct_insert=True, skip_duplicates=True)

        print(
            f"\n\n ------ Total time for {segment_id} = {time.time() - global_start} ------"
        )
    def make(self, key):
        """
        Pseudocode:
        1) Pull down the neuron object
        2) Run the complete axon preprocessing on the neuron
        3) Run the borders attributes dictionary
        4) Save off the neuron object
        5) Write the Attribute records

        """
        print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----")

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        whole_pass_time = time.time()
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id)
        neuron_obj = neuron_objs[0]

        #2) Run the complete axon preprocessing on the neuron
        neuron_obj_with_web = au.complete_axon_processing(neuron_obj,
                                                          verbose=True)

        #3) Run the borders attributes dictionary
        branch_attr = vu.neuron_to_border_branching_attributes(
            neuron_obj_with_web,
            plot_valid_border_branches=False,
            plot_invalid_border_branches=False,
            verbose=False)

        # Attach the key info and the axon version to each border record
        branch_attr_keys = []
        for k in branch_attr:
            new_dict = dict(key)
            new_dict.update(k)
            new_dict["axon_version"] = axon_version
            branch_attr_keys.append(new_dict)
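        # equivalent, more idiomatic construction (assuming plain dicts):
        # branch_attr_keys = [{**key, **k, "axon_version": axon_version}
        #                     for k in branch_attr]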

        if verbose:
            print(f"\n\nlen(branch_attr_keys) = {len(branch_attr_keys)}")

        #4) Save the file in a certain location
        save_time = time.time()
        ret_file_path = neuron_obj_with_web.save_compressed_neuron(
            output_folder=str(du.get_decomposition_path()),
            file_name=f"{neuron_obj_with_web.segment_id}_validation_full_axon",
            return_file_path=True,
            export_mesh=False,
            suppress_output=True)

        ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"
        print(f"ret_file_path_str = {ret_file_path_str}")
        print(f"Save time = {time.time() - save_time}")

        n_dict = dict(key, decomposition=ret_file_path_str)

        AutoProofreadValidationBorderNeurons.insert1(n_dict,
                                                     skip_duplicates=True)

        #5) Write the Attribute records
        if len(branch_attr_keys) > 0:
            AutoProofreadValidationBorder.insert(branch_attr_keys,
                                                 skip_duplicates=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
    def make(self, key):
        whole_pass_time = time.time()

        # ----------- Doing the v4 Processing ------- #

        segment_id = key["segment_id"]
        if verbose:
            print(f"\n-- Working on neuron {segment_id}---")

        segment_map_dict = (minnie.AutoProofreadValidationSegmentMap4()
                            & dict(segment_id=segment_id)).fetch1()

        #1) Find the coordinates of the nucleus for that new segment
        nucleus_id = segment_map_dict["nucleus_id"]
        nuc_center_coords = du.nuclei_id_to_nucleus_centers(nucleus_id)
        if verbose:
            print(f"nuc_center_coords = {nuc_center_coords}")

        #2) Make sure that same number of DecompositionAxon objects as in Decomposition
        old_segment_id = segment_map_dict["old_segment_id"]
        if verbose:
            print(f"old_segment_id = {old_segment_id}")

        search_key = dict(segment_id=old_segment_id)
        n_somas = len(minnie.BaylorSegmentCentroid() & search_key)
        n_decomp_axon = len(minnie.DecompositionAxon() & search_key)
        if verbose:
            print(
                f"# of somas = {n_somas} and # of DecompositionAxon = {n_decomp_axon}"
            )

        if n_somas != n_decomp_axon:
            raise Exception(
                f"# of somas = {n_somas} NOT MATCH # of DecompositionAxon = {n_decomp_axon}"
            )

        #3) Pick the neuron object that is closest and within a certain range of the nucleus
        neuron_objs, split_idxs = du.decomposition_with_spine_recalculation(
            old_segment_id)
        if n_somas > 1:
            """
            Finding the closest soma:
            1) For each neuron object get the mesh center of the soma object
            2) Find the distance of each from the nucleus center
            3) Find the arg min distance and make sure within threshold
            4) Mark the current neuron and the current split index
            """
            nuclei_distance_threshold = 15000

            soma_center_coords = [k["S0"].mesh_center for k in neuron_objs]
            soma_distances = [
                np.linalg.norm(k - nuc_center_coords)
                for k in soma_center_coords
            ]
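            # np.linalg.norm of the coordinate difference is the Euclidean
            # distance from each soma center to the nucleus center (assumed nm)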
            min_dist_arg = np.argmin(soma_distances)
            min_dist = soma_distances[min_dist_arg]

            if verbose:
                print(f"soma_distances = {soma_distances}")
                print(
                    f"min_dist_arg = {min_dist_arg}, with min distance = {min_dist}"
                )

            if min_dist > nuclei_distance_threshold:
                raise Exception(
                    f"min_dist ({min_dist}) larger than nuclei_distance_threshold ({nuclei_distance_threshold})"
                )

            neuron_obj = neuron_objs[min_dist_arg]
            split_index = split_idxs[min_dist_arg]

            if verbose:
                print(f"Winning split_index = {split_index}")
        else:
            split_index = split_idxs[0]
            neuron_obj = neuron_objs[0]

        (filt_neuron, return_synapse_df_revised, return_synapse_df_errors,
         return_validation_df_revised,
         return_validation_df_extension) = vu.filtered_neuron_score(
             neuron_obj=neuron_obj,
             filter_list=pru.v4_exc_filters(),
             plot_limb_branch_filter_with_disconnect_effect=False,
             verbose=True,
             plot_score=False,
             nucleus_id=nucleus_id,
             return_synapse_df_errors=True,
             return_validation_df_extension=True,
             split_index=split_index)

        print(f"\n\n ----- Done Filtering ----------")

        #------- saving off the filtered neuron

        save_time = time.time()
        file_name = f"{filt_neuron.segment_id}_{filt_neuron.description}_v4_val"
        ret_file_path = filt_neuron.save_compressed_neuron(
            output_folder=str(du.get_decomposition_path()),
            file_name=file_name,
            return_file_path=True,
            export_mesh=False,
            suppress_output=True)

        ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"
        print(f"Save time = {time.time() - save_time}")

        # ---------- Getting the scores of the proofreading ----- #
        presyn_scores_dict = vu.scores_presyn(return_validation_df_revised)
        postsyn_scores_dict = vu.scores_postsyn(return_validation_df_revised)

        cat = vu.synapse_validation_df_to_category_counts(
            return_validation_df_revised,
            print_postsyn=True,
            print_presyn=False)

        run_time = np.round(time.time() - whole_pass_time, 2)

        final_dict = dict(
            key,
            split_index=split_index,
            decomposition=ret_file_path_str,
            axon_length=filt_neuron.axon_length,
            validation_df=return_validation_df_revised.to_numpy(),
            validation_df_ext=return_validation_df_extension.to_numpy(),
            pre_tp=cat["presyn"]["TP"],
            pre_tn=cat["presyn"]["TN"],
            pre_fp=cat["presyn"]["FP"],
            pre_fn=cat["presyn"]["FN"],
            pre_precision=presyn_scores_dict["precision"],
            pre_recall=presyn_scores_dict["recall"],
            pre_f1=presyn_scores_dict["f1"],
            post_tp=cat["postsyn"]["TP"],
            post_tn=cat["postsyn"]["TN"],
            post_fp=cat["postsyn"]["FP"],
            post_fn=cat["postsyn"]["FN"],
            post_precision=postsyn_scores_dict["precision"],
            post_recall=postsyn_scores_dict["recall"],
            post_f1=postsyn_scores_dict["f1"],
            run_time=run_time)

        self.insert1(final_dict,
                     skip_duplicates=True,
                     allow_direct_insert=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {run_time} ------ ***"
        )
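
The precision/recall/f1 fields above come from vu.scores_presyn / vu.scores_postsyn; for reference, the standard definitions computed from the TP/FP/FN counts (a sketch, not the library's implementation):

def precision_recall_f1(tp, fp, fn):
    # standard definitions, guarding against zero denominators
    precision = tp / (tp + fp) if (tp + fp) else 0.0
    recall = tp / (tp + fn) if (tp + fn) else 0.0
    f1 = (2 * precision * recall / (precision + recall)
          if (precision + recall) else 0.0)
    return precision, recall, f1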
    def make(self, key):
        """
        Pseudocode:
        1) Pull Down all the Neuron Objects associated with a segment_id
        
        For each neuron:
        2) Run the full axon preprocessing
        3) Save off the neuron
        4) Save dict entry to list
        
        
        5) Write the new entry to the table

        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")

        whole_pass_time = time.time()

        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id,
            ignore_DecompositionAxon=True,
            ignore_DecompositionCellType=True)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        #For each neuron:
        dict_to_write = []

        # -------- getting the nuclei info to match
        ver = 88  # nuclei materialization version used for matching
        nucleus_ids, nucleus_centers = du.segment_to_nuclei(segment_id,
                                                            nuclei_version=ver)

        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")

        for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):

            if verbose:
                print(f"--> Working on Split Index {split_index} -----")

            st = time.time()

            # ------------- Does all of the processing -------------------

            #1) ------ Getting the paired nuclei ------
            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
                neuron_obj,
                "S0",
                nucleus_ids,
                nucleus_centers,
                nuclei_distance_threshold=15000,
                return_matching_info=True,
                verbose=True)

            # else:
            #     winning_nucleus_id = 12345
            #     nucleus_info = dict()
            #     nucleus_info["nucleus_id"] = winning_nucleus_id
            #     nucleus_info["nuclei_distance"] = 0
            #     nucleus_info["n_nuclei_in_radius"] = 1
            #     nucleus_info["n_nuclei_in_bbox"] = 1

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            #2) ------- Finding the Allen Cell Types -------
            allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(
                winning_nucleus_id)
            if verbose:
                print(f"allen_cell_type_info = {allen_cell_type_info}")

            #4) -------- Running the cell classification and stats--------------

            if verbose:
                print(
                    f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n"
                )

            filter_time = time.time()

            (inh_exc_class, spine_category, axon_angles, n_axons, n_apicals,
             neuron_spine_density, n_branches_processed,
             skeletal_length_processed, n_branches_in_search_radius,
             skeletal_length_in_search_radius
             ) = clu.inhibitory_excitatory_classifier(
                 neuron_obj,
                 return_spine_classification=True,
                 return_axon_angles=True,
                 return_n_axons=True,
                 return_n_apicals=True,
                 return_spine_statistics=True,
                 axon_limb_branch_dict_precomputed=None,
                 axon_angles_precomputed=None,
                 verbose=verbose)
            if verbose:
                print(
                    f"Total time for classification = {time.time() - filter_time}"
                )

            all_axon_angles = []
            for limb_idx, limb_data in axon_angles.items():
                for candidate_idx, cand_angle in limb_data.items():
                    all_axon_angles.append(cand_angle)
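            # equivalently:
            # all_axon_angles = [a for limb in axon_angles.values()
            #                    for a in limb.values()]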

            if len(all_axon_angles) > 0:
                axon_angle_maximum = np.max(all_axon_angles)
            else:
                axon_angle_maximum = 0

            if verbose:
                print("\n -- Cell Type Classification Results --")
                print(f"inh_exc_class={inh_exc_class}")
                print(f"spine_category={spine_category}")
                print(f"axon_angles={axon_angles}")
                print(f"n_axons={n_axons}")
                print(f"n_apicals={n_apicals}")
                print(f"neuron_spine_density={neuron_spine_density}")
                print(f"n_branches_processed={n_branches_processed}")
                print(f"skeletal_length_processed={skeletal_length_processed}")
                print(
                    f"n_branches_in_search_radius={n_branches_in_search_radius}"
                )
                print(
                    f"skeletal_length_in_search_radius={skeletal_length_in_search_radius}"
                )

            baylor_cell_type_info = dict(
                cell_type_predicted=inh_exc_class,
                spine_category=spine_category,
                axon_angle_maximum=axon_angle_maximum,
                n_axons=n_axons,
                n_apicals=n_apicals,
                spine_density_classifier=neuron_spine_density,
                n_branches_processed=n_branches_processed,
                skeletal_length_processed=skeletal_length_processed,
                n_branches_in_search_radius=n_branches_in_search_radius,
                skeletal_length_in_search_radius=
                skeletal_length_in_search_radius,
            )

            #5) ----- Deciding on cell type to use for axon
            e_i_class = inh_exc_class
            if inh_exc_class_to_use_for_axon == "Allen" and allen_cell_type_info[
                    "e_i"] is not None:
                e_i_class = allen_cell_type_info["e_i"]

            if verbose:
                print(
                    f"e_i_class = {e_i_class} with inh_exc_class_to_use_for_axon = {inh_exc_class_to_use_for_axon}"
                )

            #6) -------- If excitatory running the axon processing--------------
            """
            Pseudocode:
            If e_i class is excitatory:
            1) Filter away the axon on dendrite
            2) Do the higher fidelity axon processing
            3) Compute the axon features

            """

            if e_i_class == "excitatory" and neuron_obj.axon_limb_name is not None:
                if verbose:
                    print(
                        f"Excitatory so performing high fidelity axon and computing axon features"
                    )
                # 1) Filter away the axon on dendrite
                # 2) Do the higher fidelity axon processing
                o_neuron, filtering_info = au.complete_axon_processing(
                    neuron_obj,
                    perform_axon_classification=False,
                    return_filtering_info=True)
                filtering_info = {
                    k: np.round(v, 2)
                    for k, v in filtering_info.items()
                    if "area" in k or "length" in k
                }
                #3) Compute the axon features
                axon_features = au.axon_features_from_neuron_obj(o_neuron)
            else:
                nru.clear_all_branch_labels(neuron_obj, labels_to_clear="axon")
                o_neuron = neuron_obj
                axon_features = dict()
                filtering_info = dict()

            #3) ------ Adding the Synapses -----------
            o_neuron = syu.add_synapses_to_neuron_obj(
                o_neuron,
                validation=validation,
                verbose=True,
                original_mesh=None,
                plot_valid_error_synapses=False,
                calculate_synapse_soma_distance=False,
                add_valid_synapses=True,
                add_error_synapses=False)

            # ------- Saving off the neuron object ----------------
            save_time = time.time()
            ret_file_path = o_neuron.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                #output_folder = "./",
                file_name=
                f"{o_neuron.segment_id}_{split_index}_split_axon_v{au.axon_version}",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")

            n_dict = dict(
                key,
                split_index=split_index,
                axon_version=au.axon_version,
                decomposition=ret_file_path_str,
                run_time=np.round(time.time() - st, 2),
                cell_type_for_axon=e_i_class,
            )

            dicts_for_update = [
                baylor_cell_type_info, allen_cell_type_info, nucleus_info,
                filtering_info, axon_features
            ]

            for d in dicts_for_update:
                n_dict.update(d)

            self.insert1(n_dict,
                         skip_duplicates=True,
                         allow_direct_insert=True)
            #dict_to_write.append(n_dict)

        # batch-insert alternative (rows are inserted one-by-one above instead)
        #self.insert(dict_to_write,skip_duplicates=True,allow_direct_insert=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
    def make(self, key):
        """
        Pseudocode:
        1) Pull Down all the Neuron Objects associated with a segment_id
        
        For each neuron:
        2) Run the full axon preprocessing
        3) Save off the neuron
        4) Save dict entry to list
        
        
        5) Write the new entry to the table

        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        if len(key_source_inh & dict(segment_id=segment_id)) > 0:
            manual_e_i = "inhibitory"
        elif len(key_source_exc & dict(segment_id=segment_id)) > 0:
            manual_e_i = "excitatory"
        else:
            raise Exception("Not in exc or inh table")

        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")

        whole_pass_time = time.time()

        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id,
            ignore_DecompositionAxon=True,
            ignore_DecompositionCellType=True)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        #For each neuron:
        dict_to_write = []
        ''' ------ Old way of getting the nucleus info for the manual proofread data -------
        # -------- getting the nuclei info to match
        try:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMap4() & dict(old_segment_id=segment_id)).fetch1()
        except:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMapInh() & dict(old_segment_id=segment_id)).fetch1()
        nucleus_id = segment_map_dict["nucleus_id"]
        nuc_center_coords = du.nuclei_id_to_nucleus_centers(nucleus_id)
        
        nucleus_ids = [nucleus_id]
        nucleus_centers = [nuc_center_coords]
        
        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")'''

        nucleus_ids, nucleus_centers = du.segment_to_nuclei(
            segment_id,
            #nuclei_version=ver
        )

        if verbose:
            print(f"Number of Corresponding Nuclei = {len(nucleus_ids)}")
            print(f"nucleus_ids = {nucleus_ids}")
            print(f"nucleus_centers = {nucleus_centers}")

        for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):

            if verbose:
                print(f"--> Working on Split Index {split_index} -----")

            st = time.time()

            # ------------- Does all of the processing -------------------

            #1) ------ Getting the paired nuclei ------
            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
                neuron_obj,
                "S0",
                nucleus_ids,
                nucleus_centers,
                nuclei_distance_threshold=15000,
                return_matching_info=True,
                verbose=True)

            # else:
            #     winning_nucleus_id = 12345
            #     nucleus_info = dict()
            #     nucleus_info["nucleus_id"] = winning_nucleus_id
            #     nucleus_info["nuclei_distance"] = 0
            #     nucleus_info["n_nuclei_in_radius"] = 1
            #     nucleus_info["n_nuclei_in_bbox"] = 1

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            #2) ------- Finding the Allen Cell Types -------
            allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(
                winning_nucleus_id)
            if verbose:
                print(f"allen_cell_type_info = {allen_cell_type_info}")

            # 3) ---- Doing Baylor Cell Type Classification ---------
            # 3a) --- Adding the synapses and spine labels
            if verbose:
                print(
                    f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n"
                )

            st = time.time()
            if verbose:
                print(f"Adding the synapses and the head_neck_shaft")
            neuron_obj = syu.add_synapses_to_neuron_obj(
                neuron_obj,
                validation=validation,
                verbose=verbose,
                original_mesh=None,
                plot_valid_error_synapses=False,
                calculate_synapse_soma_distance=False,
                add_valid_synapses=True,
                add_error_synapses=False,
            )
            neuron_obj = spu.add_head_neck_shaft_spine_objs(neuron_obj,
                                                            verbose=verbose)
            if verbose:
                print(
                    f"Done adding synapses and head_neck_shaft: {time.time() - st}"
                )

            # 3b) --- Running the stats for Baylor Classification

            filter_time = time.time()
            '''
            limb_branch_dict = ctu.postsyn_branches_near_soma_for_syn_post_density(
                                neuron_obj = neuron_obj,
                               verbose = False,)
            
            (syn_density_post,
             syn_density_head,
             syn_density_neck,
             syn_density_shaft,
             skeletal_length_processed_syn) = ctu.synapse_density_stats(neuron_obj = neuron_obj,
                          limb_branch_dict = limb_branch_dict,
                                            verbose = True)
            
            (spine_density,
             skeletal_length_processed_spine) = ctu.spine_density_near_soma(neuron_obj = neuron_obj,
                                                        verbose = True,
                                                        multiplier = 1000)

            if verbose:
                print(f"Total time for density calculations = {time.time() - filter_time}")

            # 4) ------ Predicting the E/I Group Based on the data collected --------
            
            baylor_cell_type_info = dict(
                        syn_density_post = syn_density_post,
                        syn_density_head = syn_density_head,
                        syn_density_neck = syn_density_neck,
                        syn_density_shaft = syn_density_shaft,
                        skeletal_length_processed_syn=skeletal_length_processed_syn,
                        spine_density=spine_density,
                        skeletal_length_processed_spine = skeletal_length_processed_spine
            )
            
            baylor_e_i = ctu.e_i_classification_single(data=[syn_density_shaft,spine_density],
                              features=["syn_density_shaft","spine_density"],
                             verbose = True,
                              return_label_name = True
                             )
            
            
            '''
            baylor_e_i, baylor_cell_type_info = ctu.e_i_classification_from_neuron_obj(
                neuron_obj,
                verbose=True,
                return_cell_type_info=True,
                return_dendrite_branch_stats=True)

            baylor_cell_type_info["baylor_e_i"] = baylor_e_i

            #5) ----- Deciding on cell type to use for axon
            if inh_exc_class_to_use_for_axon == "Allen" and allen_cell_type_info[
                    "allen_e_i"] is not None:
                e_i_class = allen_cell_type_info["allen_e_i"]
                cell_type_used = "allen"
            else:
                e_i_class = baylor_e_i
                cell_type_used = "baylor"

            if verbose:
                print(
                    f"e_i_class = {e_i_class} with cell_type_used = {cell_type_used}"
                )

            #3) ------ Axon Classification (and getting the axon features)------------------

            o_neuron, filtering_info, axon_angles_dict = au.complete_axon_processing(
                neuron_obj,
                cell_type=e_i_class,
                add_synapses_and_head_neck_shaft_spines=False,
                validation=validation,
                plot_initial_axon=False,
                plot_axon_on_dendrite=False,
                return_filtering_info=True,
                return_axon_angle_info=True,
                verbose=verbose)
            filtering_info = {
                k: np.round(v, 2)
                for k, v in filtering_info.items()
                if "area" in k or "length" in k
            }

            axon_features = au.axon_features_from_neuron_obj(o_neuron)

            # ------- Saving off the neuron object ----------------
            save_time = time.time()
            ret_file_path = o_neuron.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                #output_folder = "./",
                file_name=
                f"{o_neuron.segment_id}_{split_index}_split_axon_v{au.axon_version}_pipe_v6_e_i_val_3",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")

            # ----------------

            # ---- 8/10 Addition ----------
            if save_axon_skeleton:
                axon_skeleton_file = du.save_proofread_skeleton(
                    o_neuron.axon_skeleton,
                    segment_id=o_neuron.segment_id,
                    split_index=split_index,
                    file_name_ending=f"decomp_cell_type_axon_skeleton_e_i_val_3"
                )
            else:
                axon_skeleton_file = None

            #---- 8/29 Addition: Will compute the soma center of the mesh in nm ---
            soma_x_nm, soma_y_nm, soma_z_nm = neuron_obj["S0"].mesh_center
            if verbose:
                print(
                    f"soma_x_nm, soma_y_nm, soma_z_nm = {soma_x_nm, soma_y_nm, soma_z_nm}"
                )

            n_dict = dict(
                key,
                split_index=split_index,
                axon_version=au.axon_version,
                decomposition=ret_file_path_str,
                run_time=np.round(time.time() - st, 2),
                manual_e_i=manual_e_i,
                cell_type=e_i_class,
                cell_type_used=cell_type_used,
                axon_skeleton=str(axon_skeleton_file),
                soma_x_nm=soma_x_nm,
                soma_y_nm=soma_y_nm,
                soma_z_nm=soma_z_nm,
                n_syn_pre=neuron_obj.n_synapses_pre,
                n_syn_post=neuron_obj.n_synapses_post,
            )

            soma_stats_dict = ctu.soma_stats_for_cell_type(neuron_obj)

            dicts_for_update = [
                baylor_cell_type_info, allen_cell_type_info, nucleus_info,
                filtering_info, axon_features, axon_angles_dict,
                soma_stats_dict
            ]

            for d in dicts_for_update:
                n_dict.update(d)

            self.insert1(n_dict,
                         skip_duplicates=True,
                         allow_direct_insert=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
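
The Allen-vs-Baylor decision above (prefer the Allen label when requested and available, otherwise fall back to the Baylor classifier) can be factored into a small helper; a sketch assuming the dict keys shown in this example:

def choose_e_i_class(allen_cell_type_info, baylor_e_i, prefer="Allen"):
    # prefer the Allen E/I label when requested and present,
    # otherwise fall back to the Baylor classifier output
    if prefer == "Allen" and allen_cell_type_info.get("allen_e_i") is not None:
        return allen_cell_type_info["allen_e_i"], "allen"
    return baylor_e_i, "baylor"

# usage:
# e_i_class, cell_type_used = choose_e_i_class(
#     allen_cell_type_info, baylor_e_i, prefer=inh_exc_class_to_use_for_axon)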
Example 9
    def make(self, key):
        """
        Pseudocode:
        1) Pull Down all the Neuron Objects associated with a segment_id
        
        For each neuron:
        2) Run the full axon preprocessing
        3) Save off the neuron
        4) Save dict entry to list
        
        
        5) Write the new entry to the table

        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")

        whole_pass_time = time.time()

        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id,
            ignore_DecompositionAxon=True,
            ignore_DecompositionCellType=True)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        #For each neuron:
        dict_to_write = []

        # -------- getting the nuclei info to match
        try:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMap4()
                                & dict(old_segment_id=segment_id)).fetch1()
        except:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMapInh()
                                & dict(old_segment_id=segment_id)).fetch1()
        nucleus_id = segment_map_dict["nucleus_id"]
        nuc_center_coords = du.nuclei_id_to_nucleus_centers(nucleus_id)

        nucleus_ids = [nucleus_id]
        nucleus_centers = [nuc_center_coords]

        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")

        for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):

            if verbose:
                print(f"--> Working on Split Index {split_index} -----")

            st = time.time()

            # ------------- Does all of the processing -------------------

            #1) ------ Getting the paired nuclei ------
            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
                neuron_obj,
                "S0",
                nucleus_ids,
                nucleus_centers,
                nuclei_distance_threshold=15000,
                return_matching_info=True,
                verbose=True)

            # else:
            #     winning_nucleus_id = 12345
            #     nucleus_info = dict()
            #     nucleus_info["nucleus_id"] = winning_nucleus_id
            #     nucleus_info["nuclei_distance"] = 0
            #     nucleus_info["n_nuclei_in_radius"] = 1
            #     nucleus_info["n_nuclei_in_bbox"] = 1

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            #2) ------- Finding the Allen Cell Types -------
            allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(
                winning_nucleus_id)
            if verbose:
                print(f"allen_cell_type_info = {allen_cell_type_info}")

            #3) ------ Axon Classification (and getting the axon features)------------------

            import axon_utils as au
            o_neuron, filtering_info, axon_angles_dict = au.complete_axon_processing(
                neuron_obj,
                add_synapses_and_head_neck_shaft_spines=True,
                validation=validation,
                plot_initial_axon=False,
                plot_axon_on_dendrite=False,
                return_filtering_info=True,
                return_axon_angle_info=True,
                verbose=verbose)
            filtering_info = {
                k: np.round(v, 2)
                for k, v in filtering_info.items()
                if "area" in k or "length" in k
            }

            axon_features = au.axon_features_from_neuron_obj(o_neuron)

            #3)------- Running the cell classification and stats--------------

            if verbose:
                print(
                    f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n"
                )

            filter_time = time.time()

            #---- adding the synapses and spines data -----#
            limb_branch_dict = ctu.postsyn_branches_near_soma_for_syn_post_density(
                neuron_obj=o_neuron,
                verbose=False,
            )

            (syn_density_post, syn_density_head, syn_density_neck,
             syn_density_shaft,
             skeletal_length_processed_syn) = ctu.synapse_density_stats(
                 neuron_obj=o_neuron,
                 limb_branch_dict=limb_branch_dict,
                 verbose=True)

            (spine_density,
             skeletal_length_processed_spine) = ctu.spine_density_near_soma(
                 neuron_obj=o_neuron, verbose=True, multiplier=1000)

            if verbose:
                print(
                    f"Total time for density calculations = {time.time() - filter_time}"
                )

            baylor_cell_type_info = dict(
                syn_density_post=syn_density_post,
                syn_density_head=syn_density_head,
                syn_density_neck=syn_density_neck,
                syn_density_shaft=syn_density_shaft,
                skeletal_length_processed_syn=skeletal_length_processed_syn,
                spine_density=spine_density,
                skeletal_length_processed_spine=skeletal_length_processed_spine
            )

            # 4) ------ Predicting the E/I Group Based on the data collected --------
            # (same density-based classification call as in the previous example)
            inh_exc_class = ctu.e_i_classification_single(
                data=[syn_density_shaft, spine_density],
                features=["syn_density_shaft", "spine_density"],
                verbose=True,
                return_label_name=True)

            #5) ----- Deciding on cell type to use for axon
            e_i_class = inh_exc_class
            if inh_exc_class_to_use_for_axon == "Allen" and allen_cell_type_info[
                    "e_i"] is not None:
                e_i_class = allen_cell_type_info["e_i"]

            if verbose:
                print(
                    f"e_i_class = {e_i_class} with inh_exc_class_to_use_for_axon = {inh_exc_class_to_use_for_axon}"
                )

            # ------- Saving off the neuron object ----------------
            save_time = time.time()
            ret_file_path = o_neuron.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                #output_folder = "./",
                file_name=
                f"{o_neuron.segment_id}_{split_index}_split_axon_v{au.axon_version}",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")

            # ----------------

            n_dict = dict(
                key,
                split_index=split_index,
                axon_version=au.axon_version,
                decomposition=ret_file_path_str,
                run_time=np.round(time.time() - st, 2),
                cell_type=e_i_class,
            )

            dicts_for_update = [
                baylor_cell_type_info, allen_cell_type_info, nucleus_info,
                filtering_info, axon_features, axon_angles_dict
            ]

            for d in dicts_for_update:
                n_dict.update(d)

            self.insert1(n_dict,
                         skip_duplicates=True,
                         allow_direct_insert=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )