# 示例#1 (Example #1)
# 0
def find_most_active_overlap(session_1, session_2, percentile=50):
    """Fraction of session 1's most active cells that were also highly
    active on session 2, among cells registered across both sessions.

    Parameters
    ----------
    session_1, session_2 : session indices into session_list (same animal).
    percentile : activity cutoff passed to find_most_active.

    Returns
    -------
    percent_overlap : float in [0, 1].
    """
    mouse = session_list[session_1]["Animal"]

    assert mouse == session_list[session_2]["Animal"], \
        "Mouse names don't match!"

    # Top-percentile cells on each session, in per-session numbering.
    active_s1 = find_most_active(session_1, percentile)
    active_s2 = find_most_active(session_2, percentile)

    cell_map = load_cellreg_results(mouse)
    cell_map = trim_match_map(cell_map, [session_1, session_2])

    # Session-1 identities of cells that were highly active on session 2,
    # intersected with session 1's own highly active cells.
    _, s2_rows = ismember(cell_map[:, 1], active_s2)
    overlap = set(cell_map[s2_rows, 0]) & set(active_s1)

    # Denominator: highly active session-1 cells that also registered
    # (index > -1 in every column) to session 2.
    _, s1_rows = ismember(cell_map[:, 0], active_s1)
    n_registered = np.sum((cell_map[s1_rows, :] > -1).all(axis=1))

    return len(overlap) / n_registered
def concatenate_sessions(sessions,
                         include_homecage=False,
                         dtype='traces',
                         global_cell_idx=None):
    """Concatenate neural data across sessions for cells registered in all
    of them.

    Parameters
    ----------
    sessions : iterable of session indices (all from the same animal).
    include_homecage : if True, load full (untrimmed) sessions; otherwise
        use load_and_trim and re-zero the timestamps per session.
    dtype : 'traces' (z-scored) or 'events'.
    global_cell_idx : optional row indices into the cell map, applied
        before trimming to the requested sessions.

    Returns
    -------
    neural_data : cells x total-time array (sessions stacked in time).
    all_days : per-sample session index.
    all_t : per-sample timestamp.
    all_freezing : per-sample freezing flag.
    """
    # Cell registration map restricted to the requested sessions.
    mouse = session_list[sessions[0]]['Animal']
    cell_map = cell_reg.load_cellreg_results(mouse)
    if global_cell_idx is not None:
        cell_map = cell_map[global_cell_idx]
    cell_map = cell_reg.trim_match_map(cell_map, sessions)

    data_chunks, t_chunks, day_chunks, freezing_chunks = [], [], [], []
    for day, session in enumerate(sessions):
        # This session's local indices for the registered cells.
        neurons = cell_map[:, day]

        if include_homecage:
            if dtype == 'traces':
                data, t = ca_traces.load_traces(session)
                data = zscore(data, axis=1)
            elif dtype == 'events':
                data, t = ca_events.load_events(session)
            else:
                raise ValueError('Invalid data type.')

            ff_session = ff.load_session(session)
            freezing = ff_session.imaging_freezing

            data = data[neurons]
        else:
            data, t = load_and_trim(session, dtype=dtype, neurons=neurons)
            freezing, _ = load_and_trim(session, dtype='freezing')
            # Re-zero time so each session starts at t = 0.
            t -= t.min()

        freezing_chunks.extend(freezing)
        day_chunks.extend(np.ones(t.size) * day)   # session index per sample
        t_chunks.extend(t)
        data_chunks.append(data)

    return (np.column_stack(data_chunks),
            np.asarray(day_chunks),
            np.asarray(t_chunks),
            np.asarray(freezing_chunks))
# 示例#3 (Example #3)
# 0
def compute_matrices_across_days(session_1,
                                 session_2,
                                 bin_length=10,
                                 neurons=None,
                                 n_clusters=5,
                                 cluster_here=0):
    """Compute time-sliced correlation matrices for two sessions over the
    cells registered across both, optionally clustering session 1's matrix.

    Parameters
    ----------
    session_1, session_2 : session indices (same animal).
    bin_length : slice length passed to do_sliced_correlation.
    neurons : optional session-1 cell list to restrict the map to.
    n_clusters : number of clusters; <= 0 skips clustering.
    cluster_here : time slice at which to cluster session 1's matrix.

    Returns
    -------
    (corr_matrices_1, corr_matrices_2, order, clusters); clusters is None
    when clustering is skipped.
    """
    mouse = session_list[session_1]["Animal"]
    assert mouse == session_list[session_2]["Animal"], "Mice don't match."

    cell_map = cell_reg.load_cellreg_results(mouse)
    map_idx = cell_reg.find_match_map_index((session_1, session_2))

    # Optionally restrict to the caller-specified session-1 cells.
    if neurons is not None:
        keep = cell_reg.find_cell_in_map(cell_map, map_idx[0], neurons)
        cell_map = cell_map[keep]

    # Keep only cells present in both sessions.
    cell_map = cell_reg.trim_match_map(cell_map, map_idx)
    cells_ref = cell_map[:, 0]
    cells_test = cell_map[:, 1]

    corr_matrices_1, t = do_sliced_correlation(session_1, bin_length,
                                               cells_ref)
    corr_matrices_2, _ = do_sliced_correlation(session_2, bin_length,
                                               cells_test)

    if n_clusters > 0:
        clusters, order = cluster_corr_matrices(corr_matrices_1, cluster_here,
                                                n_clusters, t)
    else:
        clusters = None
        order = range(len(cells_ref))

    return corr_matrices_1, corr_matrices_2, order, clusters
# 示例#4 (Example #4)
# 0
def cosine_distance_between_days(session_1, session_2, neurons=None):
    """Cosine distance between the pairwise trace-correlation matrices of
    two sessions, computed over cells registered across both.

    Parameters
    ----------
    session_1, session_2 : session indices (same animal).
    neurons : optional session-1 cell list to restrict the map to.

    Returns
    -------
    d : cosine distance between the flattened correlation matrices.
    """
    mouse = session_list[session_1]["Animal"]
    assert mouse == session_list[session_2]["Animal"], "Mice don't match."

    cell_map = cell_reg.load_cellreg_results(mouse)
    map_idx = cell_reg.find_match_map_index((session_1, session_2))

    # Optionally restrict to the caller-specified session-1 cells.
    if neurons is not None:
        keep = cell_reg.find_cell_in_map(cell_map, map_idx[0], neurons)
        cell_map = cell_map[keep]

    # Keep only cells present in both sessions.
    cell_map = cell_reg.trim_match_map(cell_map, map_idx)
    cells_ref = cell_map[:, 0]
    cells_test = cell_map[:, 1]

    matrix_1 = pairwise_correlate_traces(session_1, cells_ref)
    matrix_2 = pairwise_correlate_traces(session_2, cells_test)

    return distance.cosine(matrix_1.flatten(), matrix_2.flatten())
# 示例#5 (Example #5)
# 0
    # NOTE(review): this is the interior of a function whose `def` line is
    # outside the visible excerpt; `session_1`, `session_2`, `all_sessions`,
    # and `bin_length` are presumably its parameters — confirm upstream.

    # Commented-out scratch code for building a shuffle distribution of
    # cross-session decoder accuracy; consider deleting if unused.
    # shuffled = []
    # for i in np.arange(100):
    #     accuracy = cross_session_NB(s1,s2,shuffle=True)
    #     shuffled.append(accuracy)
    #
    # accuracy = cross_session_NB(s1,s2)

    # One row per source session; 3 columns — presumably up to three target
    # sessions per source (TODO: confirm len(session_2[i]) <= 3).
    scores_events = np.zeros((len(session_1), 3))
    pvals_events = np.zeros((len(session_1), 3))
    scores_traces = np.zeros((len(session_1), 3))
    pvals_traces = np.zeros((len(session_1), 3))
    #S = ShockSequence(s1)

    # For each source session, restrict to cells registered across
    # all_sessions[i], then decode into each paired target session.
    for i, fc in enumerate(session_1):
        match_map = load_cellreg_results(session_list[fc]['Animal'])
        trimmed = trim_match_map(match_map, all_sessions[i])
        # Cell indices in the first (source) session's own numbering.
        neurons = trimmed[:, 0]
        for j, ext in enumerate(session_2[i]):
            # Decode fc -> ext using calcium events as predictors.
            score, _, p_value = \
            cross_session_NB(fc, ext, bin_length=bin_length, predictor='events',
                              neurons=neurons)

            scores_events[i, j] = score
            pvals_events[i, j] = p_value

            # Same comparison using traces as predictors.
            score, _, p_value = \
            cross_session_NB(fc, ext, bin_length=bin_length, predictor='traces',
                             neurons=neurons)

            scores_traces[i, j] = score
            pvals_traces[i, j] = p_value