Example 1
    def make(self, key):
        """
        Pseudocode:
        1) Pull Down all the Neuron Objects associated with a segment_id
        
        For each neuron:
        2) Run the full axon preprocessing
        3) Save off the neuron
        4) Save dict entry to list
        
        
        5) Write the new entry to the table

        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")

        whole_pass_time = time.time()

        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        #For each neuron:
        dict_to_write = []
        for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):

            if verbose:
                print(f"--> Working on Split Index {split_index} -----")

            st = time.time()
            #Run the Axon Decomposition
            neuron_obj_with_web = au.complete_axon_processing(neuron_obj,
                                                              verbose=True)

            save_time = time.time()
            ret_file_path = neuron_obj_with_web.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                #output_folder = "./",
                file_name=
                f"{neuron_obj_with_web.segment_id}_{split_index}_split_axon_v{au.axon_version}",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)
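            # save_compressed_neuron presumably writes a bz2-compressed pickle
            # and returns the path without its extension, so ".pbz2" is appended
            # below to record the full on-disk filename.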

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")

            n_dict = dict(key,
                          split_index=split_index,
                          axon_version=au.axon_version,
                          decomposition=ret_file_path_str,
                          axon_length=neuron_obj_with_web.axon_length,
                          run_time=np.round(time.time() - st, 2))

            dict_to_write.append(n_dict)

        #5) Write the new entries to the table
        self.insert(dict_to_write,
                    skip_duplicates=True,
                    allow_direct_insert=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
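    # Usage sketch (assumption: this make() lives on a DataJoint Computed table,
    # called DecompositionAxon here purely for illustration):
    #
    #   DecompositionAxon.populate(reserve_jobs=True, suppress_errors=True)
    #
    # populate() calls make() once for every key in the table's key_source that
    # has not yet been computed.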
    def make(self, key):
        """
        Pseudocode:
        1) Pull down the neuron object
        2) Run the complete axon preprocessing on the neuron
        3) Run the borders attributes dictionary
        4) Save off the neuron object
        5) Write the Attribute records

        """
        print(f"\n\n\n---- Working on Neuron {key['segment_id']} ----")

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        whole_pass_time = time.time()
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id)
        neuron_obj = neuron_objs[0]

        #2) Run the complete axon preprocessing on the neuron
        neuron_obj_with_web = au.complete_axon_processing(neuron_obj,
                                                          verbose=True)

        branch_attr = vu.neuron_to_border_branching_attributes(
            neuron_obj_with_web,
            plot_valid_border_branches=False,
            plot_invalid_border_branches=False,
            verbose=False)
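        # branch_attr is presumably a list of per-border-branch attribute dicts;
        # each one is merged with the primary key below so it can be inserted
        # as its own row.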

        #3) Build the border-branch attribute records (merge each border-branch dict with the primary key)
        branch_attr_keys = []
        for k in branch_attr:
            new_dict = dict(key)
            new_dict.update(k)
            new_dict["axon_version"] = axon_version
            branch_attr_keys.append(new_dict)

        if verbose:
            print(f"\n\nlen(branch_attr_keys) = {len(branch_attr_keys)}")

        #4) Save the file in a certain location
        save_time = time.time()
        ret_file_path = neuron_obj_with_web.save_compressed_neuron(
            output_folder=str(du.get_decomposition_path()),
            file_name=f"{neuron_obj_with_web.segment_id}_validation_full_axon",
            return_file_path=True,
            export_mesh=False,
            suppress_output=True)

        ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"
        print(f"ret_file_path_str = {ret_file_path_str}")
        print(f"Save time = {time.time() - save_time}")

        n_dict = dict(key, decomposition=ret_file_path_str)

        AutoProofreadValidationBorderNeurons.insert1(n_dict,
                                                     skip_duplicates=True)

        #5) Write the Attribute records
        if len(branch_attr_keys) > 0:
            AutoProofreadValidationBorder.insert(branch_attr_keys,
                                                 skip_duplicates=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
    def make(self, key):
        """
        Pseudocode:
        1) Pull Down all the Neuron Objects associated with a segment_id
        
        For each neuron:
        2) Run the full axon preprocessing
        3) Save off the neuron
        4) Save dict entry to list
        
        
        5) Write the new entry to the table

        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")

        whole_pass_time = time.time()

        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id,
            ignore_DecompositionAxon=True,
            ignore_DecompositionCellType=True)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        #For each neuron:
        dict_to_write = []

        # -------- getting the nuclei info to match --------
        ver = 88  # nuclei table version (value taken from the originally commented-out call)
        nucleus_ids, nucleus_centers = du.segment_to_nuclei(segment_id,
                                                            nuclei_version=ver)

        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")

        for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):

            if verbose:
                print(f"--> Working on Split Index {split_index} -----")

            st = time.time()

            # ------------- Does all of the processing -------------------

            #1) ------ Getting the paired nuclei ------
            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
                neuron_obj,
                "S0",
                nucleus_ids,
                nucleus_centers,
                nuclei_distance_threshold=15000,
                return_matching_info=True,
                verbose=True)

            # else:
            #     winning_nucleus_id = 12345
            #     nucleus_info = dict()
            #     nucleus_info["nucleus_id"] = winning_nucleus_id
            #     nucleus_info["nuclei_distance"] = 0
            #     nucleus_info["n_nuclei_in_radius"] = 1
            #     nucleus_info["n_nuclei_in_bbox"] = 1

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            #2) ------- Finding the Allen Cell Types -------
            allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(
                winning_nucleus_id)
            if verbose:
                print(f"allen_cell_type_info = {allen_cell_type_info}")

            #4) -------- Running the cell classification and stats--------------

            if verbose:
                print(
                    f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n"
                )

            filter_time = time.time()

            (inh_exc_class, spine_category, axon_angles, n_axons, n_apicals,
             neuron_spine_density, n_branches_processed,
             skeletal_length_processed, n_branches_in_search_radius,
             skeletal_length_in_search_radius
             ) = clu.inhibitory_excitatory_classifier(
                 neuron_obj,
                 return_spine_classification=True,
                 return_axon_angles=True,
                 return_n_axons=True,
                 return_n_apicals=True,
                 return_spine_statistics=True,
                 axon_limb_branch_dict_precomputed=None,
                 axon_angles_precomputed=None,
                 verbose=verbose)
            if verbose:
                print(
                    f"Total time for classification = {time.time() - filter_time}"
                )
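            # axon_angles is a nested dict of {limb_idx: {candidate_idx: angle}};
            # flatten it so the maximum candidate angle can be taken across limbs.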

            all_axon_angles = []
            for limb_idx, limb_data in axon_angles.items():
                for candidate_idx, cand_angle in limb_data.items():
                    all_axon_angles.append(cand_angle)

            if len(all_axon_angles) > 0:
                axon_angle_maximum = np.max(all_axon_angles)
            else:
                axon_angle_maximum = 0

            if verbose:
                print("\n -- Cell Type Classification Results --")
                print(f"inh_exc_class={inh_exc_class}")
                print(f"spine_category={spine_category}")
                print(f"axon_angles={axon_angles}")
                print(f"n_axons={n_axons}")
                print(f"n_apicals={n_apicals}")
                print(f"neuron_spine_density={neuron_spine_density}")
                print(f"n_branches_processed={n_branches_processed}")
                print(f"skeletal_length_processed={skeletal_length_processed}")
                print(
                    f"n_branches_in_search_radius={n_branches_in_search_radius}"
                )
                print(
                    f"skeletal_length_in_search_radius={skeletal_length_in_search_radius}"
                )

            baylor_cell_type_info = dict(
                cell_type_predicted=inh_exc_class,
                spine_category=spine_category,
                axon_angle_maximum=axon_angle_maximum,
                n_axons=n_axons,
                n_apicals=n_apicals,
                spine_density_classifier=neuron_spine_density,
                n_branches_processed=n_branches_processed,
                skeletal_length_processed=skeletal_length_processed,
                n_branches_in_search_radius=n_branches_in_search_radius,
                skeletal_length_in_search_radius=
                skeletal_length_in_search_radius,
            )

            #5) ----- Deciding on cell type to use for axon
            e_i_class = inh_exc_class
            if inh_exc_class_to_use_for_axon == "Allen" and allen_cell_type_info[
                    "e_i"] is not None:
                e_i_class = allen_cell_type_info["e_i"]

            if verbose:
                print(
                    f"e_i_class = {e_i_class} with inh_exc_class_to_use_for_axon = {inh_exc_class_to_use_for_axon}"
                )

            #6) -------- If excitatory running the axon processing--------------
            """
            Pseudocode:
            If e_i class is excitatory:
            1) Filter away the axon on dendrite
            2) Do the higher fidelity axon processing
            3) Compute the axon features

            """

            if e_i_class == "excitatory" and neuron_obj.axon_limb_name is not None:
                if verbose:
                    print(
                        f"Excitatory so performing high fidelity axon and computing axon features"
                    )
            #     1) Filter away the axon on dendrite
            #     2) Do the higher fidelity axon processing

                o_neuron, filtering_info = au.complete_axon_processing(
                    neuron_obj,
                    perform_axon_classification=False,
                    return_filtering_info=True)
                filtering_info = {
                    k: np.round(v, 2)
                    for k, v in filtering_info.items()
                    if "area" in k or "length" in k
                }
                #3) Compute the axon features
                axon_features = au.axon_features_from_neuron_obj(o_neuron)
            else:
                nru.clear_all_branch_labels(neuron_obj, labels_to_clear="axon")
                o_neuron = neuron_obj
                axon_features = dict()
                filtering_info = dict()

            #7) ------ Adding the Synapses -----------
            o_neuron = syu.add_synapses_to_neuron_obj(
                o_neuron,
                validation=validation,
                verbose=True,
                original_mesh=None,
                plot_valid_error_synapses=False,
                calculate_synapse_soma_distance=False,
                add_valid_synapses=True,
                add_error_synapses=False)

            # ------- Saving off the neuron object ----------------
            save_time = time.time()
            ret_file_path = o_neuron.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                #output_folder = "./",
                file_name=
                f"{o_neuron.segment_id}_{split_index}_split_axon_v{au.axon_version}",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")

            n_dict = dict(
                key,
                split_index=split_index,
                axon_version=au.axon_version,
                decomposition=ret_file_path_str,
                run_time=np.round(time.time() - st, 2),
                cell_type_for_axon=e_i_class,
            )

            dicts_for_update = [
                baylor_cell_type_info, allen_cell_type_info, nucleus_info,
                filtering_info, axon_features
            ]

            for d in dicts_for_update:
                n_dict.update(d)

            self.insert1(n_dict,
                         skip_duplicates=True,
                         allow_direct_insert=True)
            #dict_to_write.append(n_dict)

        # (rows were inserted per split above, so no batched write is needed here)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
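    # Note on au.complete_axon_processing across these examples: it returns
    # additional values only when the corresponding flags are passed
    # (return_filtering_info, return_axon_angle_info), which is why the
    # unpacking differs between pipeline versions.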
    def make(self, key):
        """
        Pseudocode:
        1) Pull Down all the Neuron Objects associated with a segment_id
        
        For each neuron:
        2) Run the full axon preprocessing
        3) Save off the neuron
        4) Save dict entry to list
        
        
        5) Write the new entry to the table

        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        if len(key_source_inh & dict(segment_id=segment_id)) > 0:
            manual_e_i = "inhibitory"
        elif len(key_source_exc & dict(segment_id=segment_id)) > 0:
            manual_e_i = "excitatory"
        else:
            raise Exception("Not in exc or inh table")

        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")

        whole_pass_time = time.time()

        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id,
            ignore_DecompositionAxon=True,
            ignore_DecompositionCellType=True)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        #For each neuron:
        dict_to_write = []
        ''' ------ Old way of getting the nucleus info for the manual proofread data -------
        # -------- getting the nuclei info to match
        try:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMap4() & dict(old_segment_id=segment_id)).fetch1()
        except:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMapInh() & dict(old_segment_id=segment_id)).fetch1()
        nucleus_id = segment_map_dict["nucleus_id"]
        nuc_center_coords = du.nuclei_id_to_nucleus_centers(nucleus_id)
        
        nucleus_ids = [nucleus_id]
        nucleus_centers = [nuc_center_coords]
        
        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")'''

        nucleus_ids, nucleus_centers = du.segment_to_nuclei(
            segment_id,
            #nuclei_version=ver
        )

        if verbose:
            print(f"Number of Corresponding Nuclei = {len(nucleus_ids)}")
            print(f"nucleus_ids = {nucleus_ids}")
            print(f"nucleus_centers = {nucleus_centers}")

        for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):

            if verbose:
                print(f"--> Working on Split Index {split_index} -----")

            st = time.time()

            # ------------- Does all of the processing -------------------

            #1) ------ Getting the paired nuclei ------
            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
                neuron_obj,
                "S0",
                nucleus_ids,
                nucleus_centers,
                nuclei_distance_threshold=15000,
                return_matching_info=True,
                verbose=True)

            # else:
            #     winning_nucleus_id = 12345
            #     nucleus_info = dict()
            #     nucleus_info["nucleus_id"] = winning_nucleus_id
            #     nucleus_info["nuclei_distance"] = 0
            #     nucleus_info["n_nuclei_in_radius"] = 1
            #     nucleus_info["n_nuclei_in_bbox"] = 1

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            #2) ------- Finding the Allen Cell Types -------
            allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(
                winning_nucleus_id)
            if verbose:
                print(f"allen_cell_type_info = {allen_cell_type_info}")

            # 3) ---- Doing Baylor Cell Type Classification ---------
            # 3a) --- Adding the synapses and spine labels
            if verbose:
                print(
                    f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n"
                )

            syn_time = time.time()  # separate timer so the per-split run_time (st) is not reset
            if verbose:
                print(f"Adding the synapses and the head_neck_shaft")
            neuron_obj = syu.add_synapses_to_neuron_obj(
                neuron_obj,
                validation=validation,
                verbose=verbose,
                original_mesh=None,
                plot_valid_error_synapses=False,
                calculate_synapse_soma_distance=False,
                add_valid_synapses=True,
                add_error_synapses=False,
            )
            neuron_obj = spu.add_head_neck_shaft_spine_objs(neuron_obj,
                                                            verbose=verbose)
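            # The head/neck/shaft spine labels (together with the synapses added
            # above) feed the synapse- and spine-density features used by the
            # Baylor E/I classifier below.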
            if verbose:
                print(
                    f"Done adding synapses and head_neck_shaft: {time.time() - st}"
                )

            # 3b) --- Running the stats for Baylor Classification

            filter_time = time.time()
            '''
            limb_branch_dict = ctu.postsyn_branches_near_soma_for_syn_post_density(
                                neuron_obj = neuron_obj,
                               verbose = False,)
            
            (syn_density_post,
             syn_density_head,
             syn_density_neck,
             syn_density_shaft,
             skeletal_length_processed_syn) = ctu.synapse_density_stats(neuron_obj = neuron_obj,
                          limb_branch_dict = limb_branch_dict,
                                            verbose = True)
            
            (spine_density,
             skeletal_length_processed_spine) = ctu.spine_density_near_soma(neuron_obj = neuron_obj,
                                                        verbose = True,
                                                        multiplier = 1000)

            if verbose:
                print(f"Total time for density calculations = {time.time() - filter_time}")

            # 4) ------ Predicting the E/I Group Based on the data collected --------
            
            baylor_cell_type_info = dict(
                        syn_density_post = syn_density_post,
                        syn_density_head = syn_density_head,
                        syn_density_neck = syn_density_neck,
                        syn_density_shaft = syn_density_shaft,
                        skeletal_length_processed_syn=skeletal_length_processed_syn,
                        spine_density=spine_density,
                        skeletal_length_processed_spine = skeletal_length_processed_spine
            )
            
            baylor_e_i = ctu.e_i_classification_single(data=[syn_density_shaft,spine_density],
                              features=["syn_density_shaft","spine_density"],
                             verbose = True,
                              return_label_name = True
                             )
            
            
            '''
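            # e_i_classification_from_neuron_obj presumably wraps the density
            # computations and threshold rule shown in the commented block above.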
            baylor_e_i, baylor_cell_type_info = ctu.e_i_classification_from_neuron_obj(
                neuron_obj,
                verbose=True,
                return_cell_type_info=True,
                return_dendrite_branch_stats=True)

            baylor_cell_type_info["baylor_e_i"] = baylor_e_i

            #5) ----- Deciding on cell type to use for axon
            if inh_exc_class_to_use_for_axon == "Allen" and allen_cell_type_info[
                    "allen_e_i"] is not None:
                e_i_class = allen_cell_type_info["allen_e_i"]
                cell_type_used = "allen"
            else:
                e_i_class = baylor_e_i
                cell_type_used = "baylor"

            if verbose:
                print(
                    f"e_i_class = {e_i_class} with cell_type_used = {cell_type_used}"
                )

            #6) ------ Axon Classification (and getting the axon features) ------------------

            o_neuron, filtering_info, axon_angles_dict = au.complete_axon_processing(
                neuron_obj,
                cell_type=e_i_class,
                add_synapses_and_head_neck_shaft_spines=False,
                validation=validation,
                plot_initial_axon=False,
                plot_axon_on_dendrite=False,
                return_filtering_info=True,
                return_axon_angle_info=True,
                verbose=verbose)
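            # keep only the rounded area/length entries of filtering_info so they
            # can be written into the table row with the other features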
            filtering_info = {
                k: np.round(v, 2)
                for k, v in filtering_info.items()
                if "area" in k or "length" in k
            }

            axon_features = au.axon_features_from_neuron_obj(o_neuron)

            # ------- Saving off the neuron object ----------------
            save_time = time.time()
            ret_file_path = o_neuron.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                #output_folder = "./",
                file_name=
                f"{o_neuron.segment_id}_{split_index}_split_axon_v{au.axon_version}_pipe_v6_e_i_val_3",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")

            # ----------------

            # ---- 8/10 Addition ----------
            if save_axon_skeleton:
                axon_skeleton_file = du.save_proofread_skeleton(
                    o_neuron.axon_skeleton,
                    segment_id=o_neuron.segment_id,
                    split_index=split_index,
                    file_name_ending=f"decomp_cell_type_axon_skeleton_e_i_val_3"
                )
            else:
                axon_skeleton_file = None

            #---- 8/29 Addition: Will compute the soma center of the mesh in nm ---
            soma_x_nm, soma_y_nm, soma_z_nm = neuron_obj["S0"].mesh_center
            if verbose:
                print(
                    f"soma_x_nm, soma_y_nm, soma_z_nm = {soma_x_nm, soma_y_nm, soma_z_nm}"
                )

            n_dict = dict(
                key,
                split_index=split_index,
                axon_version=au.axon_version,
                decomposition=ret_file_path_str,
                run_time=np.round(time.time() - st, 2),
                manual_e_i=manual_e_i,
                cell_type=e_i_class,
                cell_type_used=cell_type_used,
                axon_skeleton=str(axon_skeleton_file),
                soma_x_nm=soma_x_nm,
                soma_y_nm=soma_y_nm,
                soma_z_nm=soma_z_nm,
                n_syn_pre=neuron_obj.n_synapses_pre,
                n_syn_post=neuron_obj.n_synapses_post,
            )

            soma_stats_dict = ctu.soma_stats_for_cell_type(neuron_obj)

            dicts_for_update = [
                baylor_cell_type_info, allen_cell_type_info, nucleus_info,
                filtering_info, axon_features, axon_angles_dict,
                soma_stats_dict
            ]

            for d in dicts_for_update:
                n_dict.update(d)

            self.insert1(n_dict,
                         skip_duplicates=True,
                         allow_direct_insert=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )
Example 5
    def make(self, key):
        """
        Purpose: To extract the axon/dendrite of a split neuron
        
        1) Pull down the neuron
        2) Get the nucleus ids and nucleus centers corresponding to
        that segment id

        Iterate through all the neuron objects
        a0) Recompute the width
        a) Get the winning nucleus_id
        b) Get the cell type info from the central database
        c) Add synapses to neuron obj
        d) Add spine categories to neuron object
        e) Classify E/I cell type according to Baylor rules
        f) Pick the cell type to use
        g) Perform complete axon processing
        h) Get axon features
        i) Save neuron object
        j) Save Axon/Dendrite before proofreading
        k) Write to dj table

        """
        global_time = time.time()

        segment_id = key["segment_id"]
        decomposition_cell_type_hash = key["decomposition_cell_type_method"]
        decomposition_split_method = hdju.decomposition_split_method_hash_from_segment_id(
            segment_id, verbose=True)

        if verbose:
            print(
                f"\n\n--Working on {segment_id}: (decomposition_cell_type_hash = "
                f"{decomposition_cell_type_hash}, decomposition_split_method = {decomposition_split_method})"
            )

        #0) Visualizing the neuron
        if plotting:
            print(f"Visualizing the intial neuron")
            hdju.plot_mesh_with_somas(
                segment_id=segment_id,
                #split_index=0,
                with_skeleton=True,
                align_from_soma_center=True)

        # ---1) Pulling down the neuron---
        st = time.time()

        n_objs, sp_indexes = hdju.neuron_objs_from_decomposition_stage(
            segment_id, verbose=True, return_one=False)

        if verbose:
            print(f"---1) Pulling down the neuron---: {time.time() - st}")
            st = time.time()

        # ---2) Get the nucleus ids and nucleus centers for that segment id---
        nucleus_ids, nucleus_centers = hdju.nuclei_from_segment_id(
            segment_id, return_centers=True, return_nm=True)

        if verbose:
            print(f"Number of Corresponding Nuclei = {len(nucleus_ids)}")
            print(f"nucleus_ids = {nucleus_ids}")
            print(f"nucleus_centers = {nucleus_centers}")

        curr_idx = 0
        neuron_obj_pre_filt = n_objs[curr_idx]
        split_index = sp_indexes[curr_idx]

        if plot_initial_neuron:
            neuron_obj_rot = hu.align_neuron_obj(neuron_obj_pre_filt)
            nviz.visualize_neuron(neuron_obj_rot, limb_branch_dict="all")

        if verbose:
            print(f"--> Working on Split Index {split_index} -----")

        if verbose:
            print(
                f"---2) Get the nucleus ids and nucleus centers--- {time.time() - st}"
            )
            st = time.time()

        # -- a0) Prep work: Recompute the Widths --
        if filter_low_branch_cluster_dendrite:
            neuron_obj, filtering_info_low_branch = pru.apply_proofreading_filters_to_neuron(
                input_neuron=neuron_obj_pre_filt,
                filter_list=[pru.low_branch_length_clusters_dendrite_filter],
                plot_limb_branch_filter_with_disconnect_effect=False,
                plot_limb_branch_filter_away=
                plot_limb_branch_filter_away_low_branch,
                plot_final_neuron=False,
                return_error_info=True,
                verbose=False,
                verbose_outline=verbose)
        else:
            neuron_obj = neuron_obj_pre_filt
            filtering_info_low_branch = {}

        neuron_obj = wu.neuron_width_calculation_standard(neuron_obj,
                                                          verbose=True)

        if verbose:
            print(f"a0) Prep work: Recompute the Widths: {time.time() - st}")
            st = time.time()

        # --- a) Get the winning nucleus_id and nucleus info
        winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
            neuron_obj,
            "S0",
            nucleus_ids,
            nucleus_centers,
            nuclei_distance_threshold=15000,
            return_matching_info=True,
            verbose=True)
        if verbose:
            print(f"nucleus_info = {nucleus_info}")
            print(f"winning_nucleus_id = {winning_nucleus_id}")

        if winning_nucleus_id is None:
            if verbose:
                print(
                    f"No winning nuclues found so assigning the only nucleus id"
                )
            winning_nucleus_id = nucleus_ids[0]

        if verbose:
            print(
                f"--- a) Get the winning nucleus_id and nucleus info: {time.time() - st}"
            )
            st = time.time()

        # ---b) Get the cell type info from database
        database_cell_type_info = hdju.nuclei_classification_info_from_nucleus_id(
            winning_nucleus_id)
        database_e_i_class = database_cell_type_info[
            f"{data_type}_e_i_cell_type"]

        if verbose:
            print(f"database_cell_type_info = {database_cell_type_info}")
            print(f"database_e_i_class = {database_e_i_class}")

        if verbose:
            print(
                f"---b) Get the cell type info from database: {time.time() - st}"
            )
            st = time.time()

        # ---c/d) Add synapses and spine categories
        import synapse_utils as syu
        neuron_obj = syu.add_synapses_to_neuron_obj(
            neuron_obj,
            validation=False,
            verbose=verbose,
            original_mesh=None,
            plot_valid_error_synapses=False,
            calculate_synapse_soma_distance=False,
            add_valid_synapses=True,
            add_error_synapses=False,
        )
        neuron_obj = spu.add_head_neck_shaft_spine_objs(neuron_obj,
                                                        verbose=verbose)
        if plot_synapses:
            syu.plot_synapses(neuron_obj)

        if plot_spines:
            spu.plot_spines_head_neck(neuron_obj)

        if verbose:
            print(
                f"---c/d) Add synapses and spine categories: {time.time() - st}"
            )
            st = time.time()

        #---e) Classify E/I cell type according to Baylor rules
        baylor_e_i, baylor_cell_type_info = ctu.e_i_classification_from_neuron_obj(
            neuron_obj,
            plot_on_model_map=False,
            plot_spines_and_sk_filter_for_syn=plot_spines_and_sk_filter_for_syn,
            plot_spines_and_sk_filter_for_spine=
            plot_spines_and_sk_filter_for_spine,
            verbose=True,
            return_cell_type_info=True)

        baylor_cell_type_info["baylor_e_i"] = baylor_e_i

        if verbose:
            print(f"baylor_cell_type_info = \n{baylor_cell_type_info}")

        if verbose:
            print(
                f"---e) classifiy E/I cell type according to Baylor rules: {time.time() - st}"
            )
            st = time.time()

        #--- f) Pick the cell type to use

        if (inh_exc_class_to_use_for_axon == "h01"
                and database_e_i_class in ["excitatory", "inhibitory"]):
            e_i_class = database_e_i_class
            if verbose:
                print(f"Using h01 e/i cell type")

            cell_type_used = "h01"
        else:
            if verbose:
                print(f"Using baylor e/i cell type")
            e_i_class = baylor_e_i
            cell_type_used = "baylor"

        if verbose:
            print(
                f"e_i_class = {e_i_class} with cell_type_used = {cell_type_used}"
            )

        if verbose:
            print(f"---f) Pick the cell type to use: {time.time() - st}")
            st = time.time()

        #--- g) Perform complete axon processing

        if plot_aligned_neuron:
            print(f"plot_aligned_neuron")
            neuron_obj_rot = hu.align_neuron_obj(neuron_obj)
            nviz.visualize_neuron(neuron_obj_rot, limb_branch_dict="all")

        o_neuron_unalign, filtering_info, axon_angles_dict = au.complete_axon_processing(
            neuron_obj,
            cell_type=e_i_class,
            add_synapses_and_head_neck_shaft_spines=False,
            validation=False,
            plot_initial_axon=plot_initial_axon,
            plot_axon_on_dendrite=plot_axon_on_dendrite,
            return_filtering_info=True,
            return_axon_angle_info=True,
            plot_high_fidelity_axon=plot_high_fidelity_axon,
            plot_boutons_web=plot_boutons_web,
            add_synapses_after_high_fidelity_axon=True,
            verbose=verbose)

        #o_neuron_unalign = hu.unalign_neuron_obj(o_neuron)

        #         if verbose:
        #             print(f"Readding Synapses to the high fidelity axon after all processing donw")
        #         o_neuron_unalign = syu.add_synapses_to_neuron_obj(o_neuron_unalign,
        #                 validation = False,
        #                 verbose  = verbose,
        #                 original_mesh = None,
        #                 plot_valid_error_synapses = False,
        #                 calculate_synapse_soma_distance = False,
        #                 add_valid_synapses = True,
        #                   add_error_synapses=False,
        #                 limb_branch_dict_to_add_synapses=o_neuron_unalign.axon_limb_branch_dict)

        if verbose:
            print(
                f"After add_synapses_after_high_fidelity_axon: # of neuron_obj.synapses_somas = {len(o_neuron_unalign.synapses_somas)}"
            )

        if plot_unaligned_synapses:
            syu.plot_synapses(o_neuron_unalign, total_synapses=True)

        if plot_unaligned_axon:
            nviz.plot_axon(o_neuron_unalign)

        if verbose:
            print(f"---g) Perfrom complete aon processing: {time.time() - st}")
            st = time.time()

        # --- h) Get the axon and dendrite stats ----
        dendrite_stats = nst.skeleton_stats_dendrite(o_neuron_unalign,
                                                     include_centroids=False)
        axon_stats = nst.skeleton_stats_axon(o_neuron_unalign,
                                             include_centroids=False)
        stats_dict = o_neuron_unalign.neuron_stats(
            stats_to_ignore=[
                "n_not_processed_soma_containing_meshes", "n_error_limbs",
                "n_same_soma_multi_touching_limbs",
                "n_multi_soma_touching_limbs", "n_somas", "spine_density"
            ],
            include_skeletal_stats=False,
            include_centroids=True,
            voxel_adjustment_vector=voxel_adjustment_vector,
        )

        if verbose:
            print(
                f"--- h) Get the axon and dendrite stats: {time.time() - st}")
            st = time.time()

        #---- i) Calculating the synapse info ------
        syn_dict = syu.n_synapses_analysis_axon_dendrite(o_neuron_unalign,
                                                         verbose=True)

        # --- j) saving neuron and skeleton ----

        #4) Save the neuron object in a certain location
        file_name = f"{o_neuron_unalign.segment_id}_{split_index}_{decomposition_cell_type_hash}"
        file_name_decomp = f"{file_name}_{dataset}_cell_type_decomp"
        output_folder = str(target_dir_decomp)

        ret_file_path = o_neuron_unalign.save_compressed_neuron(
            output_folder=output_folder,
            file_name=file_name_decomp,
            return_file_path=True,
            export_mesh=False,
            suppress_output=True,
        )

        ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

        axon_skeleton = o_neuron_unalign.axon_skeleton
        file_name_decomp_sk_axon = f"{file_name_decomp}_axon_sk"
        ret_sk_filepath_ax = su.compressed_pickle(
            axon_skeleton,
            filename=file_name_decomp_sk_axon,
            folder=str(target_dir_sk),
            return_filepath=True)

        dendrite_skeleton = o_neuron_unalign.dendrite_skeleton
        file_name_decomp_sk_dendr = f"{file_name_decomp}_dendr_sk"
        ret_sk_filepath_dendr = su.compressed_pickle(
            dendrite_skeleton,
            filename=file_name_decomp_sk_dendr,
            folder=str(target_dir_sk),
            return_filepath=True)

        if verbose:
            print(f"neuron ret_file_path_str = {ret_file_path_str}")
            print(f"ret_sk_filepath_ax = {ret_sk_filepath_ax}")
            print(f"ret_sk_filepath_dendr = {ret_sk_filepath_dendr}")

        if verbose:
            print(f"--- i) saving neuron and skeleton ----")
            st = time.time()

        # Everything needed for the insert is now available: nucleus_info,
        # database_cell_type_info (including database_e_i_class), the Baylor
        # classification (baylor_e_i, baylor_cell_type_info), the chosen
        # e_i_class / cell_type_used, filtering_info, axon_angles_dict,
        # o_neuron_unalign, dendrite_stats, axon_stats, stats_dict, and the
        # saved file paths (ret_file_path_str, ret_sk_filepath_ax,
        # ret_sk_filepath_dendr).

        # 7) make the insertions
        run_time = np.round(time.time() - global_time, 4)
        # -- decomp table --
        n_dict = dict(
            key.copy(),
            decomposition_split_method=decomposition_split_method,
            multiplicity=1,
            split_index=split_index,
            decomposition=str(ret_file_path_str),
            axon_skeleton=str(ret_sk_filepath_ax),
            dendrite_skeleton=str(ret_sk_filepath_dendr),

            #--- cell types
            h01_e_i_cell_type=database_e_i_class,
            cell_type=e_i_class,
            cell_type_used=cell_type_used,

            #----- synapses ---
            n_syn_pre=neuron_obj.n_synapses_pre,
            n_syn_post=neuron_obj.n_synapses_post,
            run_time=run_time,

            # statistics for the split
        )

        dicts_for_update = [
            nucleus_info, database_cell_type_info, filtering_info,
            axon_angles_dict, dendrite_stats, axon_stats, stats_dict,
            baylor_cell_type_info, filtering_info_low_branch, syn_dict
        ]

        for d in dicts_for_update:
            n_dict.update(d)

        print(f"n_dict = {n_dict}")

        for curr_obj in [self, SkeletonAxonDendrite]:

            curr_obj.insert1(n_dict,
                             allow_direct_insert=True,
                             ignore_extra_fields=True,
                             skip_duplicates=True)
            curr_obj.Object.insert1(n_dict,
                                    allow_direct_insert=True,
                                    ignore_extra_fields=True,
                                    skip_duplicates=True)
Example 6
    def make(self, key):
        """
        Pseudocode:
        1) Pull Down all the Neuron Objects associated with a segment_id
        
        For each neuron:
        2) Run the full axon preprocessing
        3) Save off the neuron
        4) Save dict entry to list
        
        
        5) Write the new entry to the table

        """

        # 1) Pull Down All of the Neurons
        segment_id = key["segment_id"]

        if verbose:
            print(f"------- Working on Neuron {segment_id} -----")

        whole_pass_time = time.time()

        #1) Pull Down all the Neuron Objects associated with a segment_id
        neuron_objs, neuron_split_idxs = du.decomposition_with_spine_recalculation(
            segment_id,
            ignore_DecompositionAxon=True,
            ignore_DecompositionCellType=True)

        if verbose:
            print(f"Number of Neurons found ={len(neuron_objs)}")

        #For each neuron:
        dict_to_write = []

        # -------- getting the nuclei info to match
        try:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMap4()
                                & dict(old_segment_id=segment_id)).fetch1()
        except:
            segment_map_dict = (minnie.AutoProofreadValidationSegmentMapInh()
                                & dict(old_segment_id=segment_id)).fetch1()
        nucleus_id = segment_map_dict["nucleus_id"]
        nuc_center_coords = du.nuclei_id_to_nucleus_centers(nucleus_id)

        nucleus_ids = [nucleus_id]
        nucleus_centers = [nuc_center_coords]

        print(f"nucleus_ids = {nucleus_ids}")
        print(f"nucleus_centers = {nucleus_centers}")

        for split_index, neuron_obj in zip(neuron_split_idxs, neuron_objs):

            if verbose:
                print(f"--> Working on Split Index {split_index} -----")

            st = time.time()

            # ------------- Does all of the processing -------------------

            #1) ------ Getting the paired nuclei ------
            winning_nucleus_id, nucleus_info = nru.pair_neuron_obj_to_nuclei(
                neuron_obj,
                "S0",
                nucleus_ids,
                nucleus_centers,
                nuclei_distance_threshold=15000,
                return_matching_info=True,
                verbose=True)

            # else:
            #     winning_nucleus_id = 12345
            #     nucleus_info = dict()
            #     nucleus_info["nucleus_id"] = winning_nucleus_id
            #     nucleus_info["nuclei_distance"] = 0
            #     nucleus_info["n_nuclei_in_radius"] = 1
            #     nucleus_info["n_nuclei_in_bbox"] = 1

            if verbose:
                print(f"nucleus_info = {nucleus_info}")
                print(f"winning_nucleus_id = {winning_nucleus_id}")

            #2) ------- Finding the Allen Cell Types -------
            allen_cell_type_info = ctu.allen_nuclei_classification_info_from_nucleus_id(
                winning_nucleus_id)
            if verbose:
                print(f"allen_cell_type_info = {allen_cell_type_info}")

            #3) ------ Axon Classification (and getting the axon features)------------------

            import axon_utils as au
            o_neuron, filtering_info, axon_angles_dict = au.complete_axon_processing(
                neuron_obj,
                add_synapses_and_head_neck_shaft_spines=True,
                validation=validation,
                plot_initial_axon=False,
                plot_axon_on_dendrite=False,
                return_filtering_info=True,
                return_axon_angle_info=True,
                verbose=verbose)
            filtering_info = {
                k: np.round(v, 2)
                for k, v in filtering_info.items()
                if "area" in k or "length" in k
            }

            axon_features = au.axon_features_from_neuron_obj(o_neuron)

            #3)------- Running the cell classification and stats--------------

            if verbose:
                print(
                    f"\n\n ------ Part C: Inhibitory Excitatory Classification ---- \n\n"
                )

            filter_time = time.time()

            #---- adding the synapses and spines data -----#
            limb_branch_dict = ctu.postsyn_branches_near_soma_for_syn_post_density(
                neuron_obj=o_neuron,
                verbose=False,
            )

            (syn_density_post, syn_density_head, syn_density_neck,
             syn_density_shaft,
             skeletal_length_processed_syn) = ctu.synapse_density_stats(
                 neuron_obj=o_neuron,
                 limb_branch_dict=limb_branch_dict,
                 verbose=True)

            (spine_density,
             skeletal_length_processed_spine) = ctu.spine_density_near_soma(
                 neuron_obj=o_neuron, verbose=True, multiplier=1000)

            if verbose:
                print(
                    f"Total time for density calculations = {time.time() - filter_time}"
                )

            baylor_cell_type_info = dict(
                syn_density_post=syn_density_post,
                syn_density_head=syn_density_head,
                syn_density_neck=syn_density_neck,
                syn_density_shaft=syn_density_shaft,
                skeletal_length_processed_syn=skeletal_length_processed_syn,
                spine_density=spine_density,
                skeletal_length_processed_spine=skeletal_length_processed_spine
            )

            # 4) ------ Predicting the E/I Group Based on the data collected --------
            # E/I prediction from the shaft-synapse and spine densities, using the
            # same classifier call as the other pipeline versions (assumed to be
            # the step intended here):
            inh_exc_class = ctu.e_i_classification_single(
                data=[syn_density_shaft, spine_density],
                features=["syn_density_shaft", "spine_density"],
                verbose=True,
                return_label_name=True)

            #5) ----- Deciding on cell type to use for axon
            e_i_class = inh_exc_class
            if inh_exc_class_to_use_for_axon == "Allen" and allen_cell_type_info[
                    "e_i"] is not None:
                e_i_class = allen_cell_type_info["e_i"]

            if verbose:
                print(
                    f"e_i_class = {e_i_class} with inh_exc_class_to_use_for_axon = {inh_exc_class_to_use_for_axon}"
                )

            # ------- Saving off the neuron object ----------------
            save_time = time.time()
            ret_file_path = o_neuron.save_compressed_neuron(
                output_folder=str(du.get_decomposition_path()),
                #output_folder = "./",
                file_name=
                f"{o_neuron.segment_id}_{split_index}_split_axon_v{au.axon_version}",
                return_file_path=True,
                export_mesh=False,
                suppress_output=True)

            ret_file_path_str = str(ret_file_path.absolute()) + ".pbz2"

            if verbose:
                print(f"ret_file_path_str = {ret_file_path_str}")
                print(f"Save time = {time.time() - save_time}")

            # ----------------

            n_dict = dict(
                key,
                split_index=split_index,
                axon_version=au.axon_version,
                decomposition=ret_file_path_str,
                run_time=np.round(time.time() - st, 2),
                cell_type=e_i_class,
            )

            dicts_for_update = [
                baylor_cell_type_info, allen_cell_type_info, nucleus_info,
                filtering_info, axon_features, axon_angles_dict
            ]

            for d in dicts_for_update:
                n_dict.update(d)

            self.insert1(n_dict,
                         skip_duplicates=True,
                         allow_direct_insert=True)

        print(
            f"\n\n ***------ Total time for {key['segment_id']} = {time.time() - whole_pass_time} ------ ***"
        )