Code example #1
0
File: stats.py  Project: markuspleijzier/neuroboom
def postfocality_to_dendrogram_coloring(
    x: pd.DataFrame,
    p_val: float,
    neuron: navis.TreeNeuron
):

    """
    Function to take the results of synaptic focality tests and create colour dict for plotting
    """

    # Keep only partners whose focality test passed the significance cut-off.
    significant = x[x.p_val < p_val].copy()
    partner_dict = dict(zip(significant.partner_neuron, significant.partner_type))

    # fetching synapse connections
    conn = nvneu.fetch_synapse_connections(
        target_criteria=neuron.id,
        source_criteria=significant.partner_neuron.tolist())

    # filtering for highly probably synapses
    conn_thresh = conn[(conn.confidence_pre > 0.9) & (conn.confidence_post > 0.9)].copy()

    # One distinct colour per significant partner.
    palette = sns.color_palette('turbo', len(partner_dict))
    partner_to_colour = dict(zip(partner_dict.keys(), palette))

    nodes_matched = nbm.match_connectors_to_nodes(conn_thresh, neuron, synapse_type='post')

    # connector id -> presynaptic partner id, then connector id -> colour.
    c2n = dict(zip(nodes_matched.connector, nodes_matched.bodyId_pre))
    c2color = {connector: partner_to_colour[partner] for connector, partner in c2n.items()}

    return (c2color, c2n, conn_thresh, partner_dict)
Code example #2
0
File: stats.py  Project: markuspleijzier/neuroboom
def aba_postsyn_focality(
    neuron: navis.TreeNeuron,
    confidence_threshold: Tuple = (0.0, 0.0),
    n_iter: int = 100,
    syn_thresh: int = 1
):
    """
    All-by-all postsynaptic focality: for each upstream partner of `neuron`,
    compute the observed statistic T_obs and a random-draw sample distribution.

    Returns a DataFrame with one row per unique presynaptic partner and
    columns: unique_ids, T_obs, An, Bn, rdsd.
    """

    print('Fetching synaptic connections...')
    syn = nvneu.fetch_synapse_connections(target_criteria=neuron.id)

    print('Thresholding synapses by confidences...')
    conf_mask = (syn.confidence_pre > confidence_threshold[0]) \
        & (syn.confidence_post > confidence_threshold[1])
    syn = syn[conf_mask].copy()

    print('Thresholding synapses by synapse count...')
    # Drop partners with too few synapses onto this neuron.
    count_dict = dict(Counter(syn.bodyId_pre).most_common())
    syn = syn[[count_dict[b] > syn_thresh for b in syn.bodyId_pre]].copy()

    print('Matching connections to nodes...')
    # syn_wmc = synaptic connections with connectors matched
    syn_wmc = nbm.match_connectors_to_nodes(syn, neuron, synapse_type='post')

    connector2node = dict(zip(neuron.connectors.connector_id, neuron.connectors.node_id))
    syn_wmc['node'] = syn_wmc.connector.map(connector2node).to_numpy()

    unique_usns = syn_wmc.bodyId_pre.unique()

    # Partner id -> unique skeleton nodes receiving its synapses.
    neuron_to_uNodes = {b: syn_wmc[syn_wmc.bodyId_pre == b].node.unique()
                        for b in unique_usns}

    print('Calculating all by all geodesic matrix for nodes...')
    g_mat = navis.geodesic_matrix(neuron)

    df = pd.DataFrame()
    df['unique_ids'] = unique_usns

    results = {'T_obs': [], 'An': [], 'Bn': [], 'rdsd': []}

    print('Calculating T obs and drawing from random samples...')
    for partner in unique_usns:

        T_obs, An, Bn = calculate_T_obs(neuron_id=partner,
                                        neuron_to_node_dict=neuron_to_uNodes,
                                        gmat=g_mat)

        results['T_obs'].append(T_obs)
        results['An'].append(An)
        results['Bn'].append(Bn)
        results['rdsd'].append(random_draw_sample_dist(n_iter, g_mat, T_obs, An, Bn))

    for column, values in results.items():
        df[column] = values

    return df
Code example #3
0
File: stats.py  Project: markuspleijzier/neuroboom
def synaptic_focality_KS_test(
    x: navis.TreeNeuron,
    synapse_type: str = 'pre',
    confidence_threshold: Tuple = (0.9, 0.9)
):
    """
    Two-sample KS test of synaptic focality for each partner of `x`.

    For every synaptic partner, compares the mean geodesic distances of the
    nodes bearing that partner's synapses against those of all other nodes.

    Parameters
    --------
    x:                     navis.TreeNeuron to analyse.
    synapse_type:          'pre' to test downstream partners (x is the source),
                           'post' to test upstream partners (x is the target).
    confidence_threshold:  (pre, post) confidence cut-offs applied to synapses.

    Returns
    --------
    df:  DataFrame with columns partner_id, gT, 'KS statistic', 'KS pval', n_syn.

    Raises
    --------
    ValueError: if synapse_type is neither 'pre' nor 'post'.
    """

    # The 'pre' and 'post' cases differ only in the fetch direction and in
    # which bodyId column identifies the partner; everything else is shared.
    if synapse_type == 'pre':
        syn = nvneu.fetch_synapse_connections(source_criteria=x.id)
        partner_col = 'bodyId_post'
    elif synapse_type == 'post':
        syn = nvneu.fetch_synapse_connections(target_criteria=x.id)
        partner_col = 'bodyId_pre'
    else:
        # Previously this fell through and raised NameError on an undefined df.
        raise ValueError(f"synapse_type must be 'pre' or 'post', got {synapse_type!r}")

    g_mat = navis.geodesic_matrix(x)

    syn = syn[(syn.confidence_pre > confidence_threshold[0])
              & (syn.confidence_post > confidence_threshold[1])].copy()
    syn = nbm.match_connectors_to_nodes(syn, x, synapse_type=synapse_type)

    df = pd.DataFrame()
    df['partner_id'] = syn[partner_col].unique()
    partner_gt = {}
    partner_statistic = {}
    partner_pval = {}

    for j in df.partner_id:

        nodes = syn[syn[partner_col] == j].node.tolist()

        # Rows of the geodesic matrix belonging to this partner's nodes.
        truth_array = np.isin(g_mat.index, nodes)

        partner_geo_dist_vals = g_mat[truth_array].values.mean(axis=1)
        total_geo_dist_vals = g_mat[~truth_array].values.mean(axis=1)

        partner_gt[j] = partner_geo_dist_vals

        KS_test = ks_2samp(partner_geo_dist_vals, total_geo_dist_vals)
        partner_statistic[j] = KS_test.statistic
        partner_pval[j] = KS_test.pvalue

    df['gT'] = df.partner_id.map(partner_gt)
    df['KS statistic'] = df.partner_id.map(partner_statistic)
    df['KS pval'] = df.partner_id.map(partner_pval)
    df['n_syn'] = [len(i) for i in df.gT]

    return df
Code example #4
0
File: stats.py  Project: markuspleijzier/neuroboom
def presynapse_focality(
    x: Union[navis.TreeNeuron, navis.NeuronList],
    heal_fragmented_neuron: bool = False,
    confidence_threshold: Tuple = (0.9, 0.9),
    num_synapses_threshold: int = 1,
):

    """
    Finds the connections that are downstream of 'x', where the presynpases of 'x' are focalised

    Parameters
    --------
    x:                       navis.TreeNeuron (or single-neuron NeuronList)
                             whose presynaptic connections are fetched.

    heal_fragmented_neuron:  bool
                             Whether to heal the neuron or not.
                             N.B. Its better to heal neurons during
                             import to save time in this function.

    confidence_threshold:    tuple of floats
                             The confidence value used to threshold the synapses.
                             The first value (confidence_threshold[0]) will be used to threshold presynapses
                             The second value (confidence_threshold[1]) will be used to threshold postsynapses

    num_synapses_threshold:  int
                             The minimum number of synapses a partner must have
                             to be included in the permutation test

    Returns
    --------
    synapse_connections:     A dataframe detailing the presynaptic connections
    df:                      A dataframe to be populated by the permutation test function

    Examples
    --------

    """

    x = check_valid_neuron_input(x)

    if heal_fragmented_neuron is True:

        x = navis.heal_fragmented_neuron(x)

    # Getting the connector table of synapses where x.id is the source
    synapse_connections = navis.interfaces.neuprint.fetch_synapse_connections(
        source_criteria=x.id
    )
    # NOTE: the original called `synapse_connections.astype(object)` here and
    # discarded the result (astype is not in-place); the dead call was removed.
    synapse_connections = nbm.match_connectors_to_nodes(synapse_connections,
                                                        x,
                                                        synapse_type='pre')

    # Each connector should resolve to exactly one node.
    truth_list = [
        len(np.unique(i)) > 1
        for i in synapse_connections.node.values
    ]
    if synapse_connections[truth_list].shape[0] == 0:

        synapse_connections.node = [
            np.unique(k)[0] for k in synapse_connections.node.tolist()
        ]

    else:

        # NOTE(review): returning a string here (rather than raising) is the
        # original behaviour; callers apparently check for it.
        return "There are synapses associated with multiple nodes!!!!"

    # Single combined mask instead of chained boolean indexing
    # (df[m1][m2] relies on index alignment and emits a UserWarning).
    synapse_connections = synapse_connections[
        (synapse_connections.confidence_pre > confidence_threshold[0])
        & (synapse_connections.confidence_post > confidence_threshold[1])
    ].copy()

    # Partner -> synapse count, most frequent first.
    count = Counter(synapse_connections.bodyId_post.tolist())
    count = {
        k: v for k, v in sorted(count.items(), key=lambda item: item[1], reverse=True)
    }
    truth_list = [
        count[i] > num_synapses_threshold
        for i in synapse_connections.bodyId_post
    ]
    synapse_connections = synapse_connections[truth_list].copy()

    # Skeleton dataframe for the permutation test to fill in.
    df = pd.DataFrame()
    df["partner_neuron"] = synapse_connections.bodyId_post.unique().tolist()
    df["gT"] = ""
    df["significance_val"] = ""
    df["p_val"] = ""
    df["num_syn"] = [count[i] for i in df.partner_neuron]

    return (synapse_connections, df)
Code example #5
0
def _partner_compartments(partner_id, syn_con, partner_side, metric):
    """Map one partner's synapse rows (by index) to that partner's compartment labels."""
    partner = nvneu.fetch_skeletons(partner_id, heal=True)[0]
    # BUG FIX: split the *partner* skeleton. The original split `n` (the query
    # neuron) but then applied that split to the partner, labelling the
    # partner's nodes with compartments from the wrong neuron.
    partner_split = navis.split_axon_dendrite(partner, metric=metric)
    partner_copy = node_to_compartment_type(partner, partner_split)

    node2comp = dict(
        zip(partner_copy.nodes.node_id, partner_copy.nodes.compartment))
    sub = syn_con[syn_con[f'bodyId_{partner_side}'] == partner_id].copy()
    sub_matched = nbm.match_connectors_to_nodes(sub,
                                                partner_copy,
                                                synapse_type=partner_side)

    compartments = [node2comp[node] for node in sub_matched.node]
    return dict(zip(list(sub_matched.index), compartments))


def find_connection_types(n: navis.TreeNeuron,
                          split: navis.NeuronList,
                          syn_con: pd.DataFrame,
                          synapse_type: str = 'pre',
                          metric: str = 'flow_centrality',
                          disable_progress: bool = False):
    """
    Label each synaptic connection in `syn_con` with the compartment type
    (e.g. axon/dendrite) on both the `n` side and the partner side, and a
    combined 'connection_type' column ('pre-post' joined with '-').

    Parameters
    --------
    n:                 query neuron; `syn_con` connectors are matched onto it.
    split:             precomputed axon/dendrite split of `n`.
    syn_con:           synapse-connection table to annotate.
    synapse_type:      'pre' if `n` is the source of the connections,
                       'post' if it is the target.
    metric:            metric passed to navis.split_axon_dendrite for partners.
    disable_progress:  silence the tqdm progress bar.
    """

    syn_con = nbm.match_connectors_to_nodes(syn_con,
                                            n,
                                            synapse_type=synapse_type)

    n_copy = node_to_compartment_type(n, split)
    n2comp = dict(zip(n_copy.nodes.node_id, n_copy.nodes.compartment))

    if synapse_type == 'pre':
        # n is presynaptic: label n's side directly, fetch each postsynaptic
        # partner to label the other side.
        syn_con['pre_node_type'] = syn_con.node.map(n2comp)
        own_side, partner_side = 'pre', 'post'
    elif synapse_type == 'post':
        syn_con['post_node_type'] = syn_con.node.map(n2comp)
        own_side, partner_side = 'post', 'pre'
    else:
        raise ValueError(f"synapse_type must be 'pre' or 'post', got {synapse_type!r}")

    ind_to_compartment = {}
    partner_ids = syn_con[f'bodyId_{partner_side}'].unique()

    for partner_id in tqdm(partner_ids, disable=disable_progress):
        ind_to_compartment.update(
            _partner_compartments(partner_id, syn_con, partner_side, metric))

    syn_con[f'{partner_side}_node_type'] = list(
        syn_con.index.map(ind_to_compartment))

    syn_con['connection_type'] = syn_con[['pre_node_type',
                                          'post_node_type']].agg('-'.join,
                                                                 axis=1)

    return (syn_con)