Exemplo n.º 1
0
def compare_decoder():
    """Compare four decoders (particle, kalman, linear, SVR) on one wt session.

    Plots the real trajectory plus each decoder's cross-validated prediction,
    labeling every curve with its mutual information against the real path.
    """
    import matplotlib.pyplot as plt
    data_file = files['wt'][2]
    lever = load_mat(data_file['response'])
    # resample the lever trajectory onto the spike timestamps (drop first sample)
    spline = InterpolatedUnivariateSpline(lever.axes[0], lever.values[0])
    y = spline(data_file['spike']['y'])[1:]
    X = data_file['spike']['data'][:, 1:]
    # state bounds: (position quantile range, velocity range, acceleration range)
    bounds: Bounds = (tuple(np.quantile(y, [0.001, 0.999])), (-2, 1), (-5, 5)
                      )  # type: ignore
    print("mutual info:")
    plt.plot(y, color=COLORS[0], alpha=0.5, label="trajectory")
    named_decoders = (
        ("particle", particle.decoder_factory(bounds)),
        ("kalman", kalman.decoder_factory(bounds)),
        ("linear", linear.decoder_factory(bounds)),
        ("svr", svr.decoder_factory(SVR('rbf', 3, 1E-7, cache_size=1000))),
    )
    path_hats = dict()
    powers = dict()
    for color, (name, decoder) in zip(COLORS[1:], named_decoders):
        path_hat, power = cross_predict(X, y, decoder)
        info = mutual_info(y, path_hat)
        path_hats[name] = path_hat
        powers[name] = power
        plt.plot(path_hat, color=color, alpha=0.5,
                 label="{}: {}".format(name, info))
    plt.legend()
Exemplo n.º 2
0
def cross_predict(X: "np.ndarray",
                  y: "np.ndarray",
                  predictor: "Decoder",
                  fold: int = 10,
                  section_mi: bool = True
                  ) -> "np.ndarray | Tuple[np.ndarray, np.ndarray]":
    """Calculate the prediction by taking 1 - 1 / {fold} as learning samples and predict the rest 1 / {fold} samples,
    do it {fold} times and concatenate the results.
    Args:
        X: all predictor samples, [feature x sample]
        y: [sample] real data to be predicted
        predictor: a function for prediction, takes [X_learn, y_learn, X_test] -> y_test_hat
        fold: number of folds
        section_mi: whether returns section by section mi
    Returns:
        the concatenated prediction [sample], plus an array of per-fold mutual
        information when section_mi is True (the default).
    """
    # note: the original annotated the return as plain np.ndarray, but the
    # default path returns a (prediction, per-fold-mi) tuple
    assert X.shape[1] == y.shape[
        0], f"X ({X.shape}) and y ({y.shape}) must have the same sample size"
    length = X.shape[1]
    # fold boundaries as [start, end) pairs covering all samples
    sections = np.c_[np.arange(fold), np.arange(1, fold + 1)] * length // fold
    end_all = sections[-1, -1]
    predicted = list()
    power = list()
    for start, end in sections:
        # train on everything outside [start, end), test on the held-out slice
        X_learn = np.c_[X[:, 0:start], X[:, end:end_all]]
        y_learn = np.r_[y[0:start], y[end:end_all]]
        X_test, y_test = X[:, start:end], y[start:end]
        hat = predictor(X_learn, y_learn, X_test)
        predicted.append(hat)
        if section_mi:
            power.append(mutual_info(y_test, hat))
    return ((np.hstack(predicted),
             np.array(power)) if section_mi else np.hstack(predicted))
Exemplo n.º 3
0
def null_dist(path_hat, y):
    """Build a 1000-sample permutation null of mutual info between a shuffled
    prediction and the real trajectory, save it, and plot its histogram."""
    null_mi = [
        mutual_info(np.random.permutation(path_hat), y)
        for _ in tqdm(range(1000))
    ]
    np.savez_compressed(join(res_folder, "svr_power_wt2_perm.npz"),
                        result=np.array(null_mi))
    plt.hist(null_mi, 50)
Exemplo n.º 4
0
 def pred(gamma, C):
     """Mutual info between y and the cross-validated SVR prediction for the
     given hyperparameters (gamma is given on a log10 scale)."""
     decoder = svr.predictor_factory(y,
                                     gamma=10**gamma,
                                     C=C,
                                     epsilon=1E-3)
     y_hat = cross_predict(X, y, decoder, section_mi=False)
     return mutual_info(y, y_hat)
Exemplo n.º 5
0
def decoder_power(data_file: File,
                  predictor_factory: Callable[[np.ndarray], Decoder]) -> float:
    """Decode the lever trajectory of one session and return the mutual info
    between the cross-validated prediction and the real trajectory."""
    lever = load_mat(data_file['response'])
    # resample the lever trace onto the spike timestamps, dropping sample 0
    spline = InterpolatedUnivariateSpline(lever.axes[0], lever.values[0])
    y = spline(data_file['spike']['y'])[1:]
    X = data_file['spike']['data'][:, 1:]
    lever_hat, _ = cross_predict(X, y, predictor_factory(y))
    return mutual_info(lever_hat, y)
Exemplo n.º 6
0
def test_mutual_info():
    """Check the MI estimator on correlated Gaussians against the analytic value."""
    rho = 0.6
    # mixing coefficient m such that [[1, m], [m, 1]] produces correlation rho
    mix = (1 - np.sqrt(1 - rho * rho)) / rho
    np.random.seed(12345)
    estimates = list()
    for _ in range(1000):
        a, b = np.dot([[1, mix], [mix, 1]], np.random.randn(2, 1000))
        estimates.append(ee.mutual_info(a[:, np.newaxis], b[:, np.newaxis]))
    # analytic MI (bits) of a bivariate Gaussian with correlation rho
    analytic = -(0.5 * np.log2(1 - rho**2))
    assert (abs(np.mean(estimates) - analytic) < 1E-2)
Exemplo n.º 7
0
def null_fit(data_file):
    """Permutation null: shuffle the trajectory 200 times, decode each shuffle
    with SVR, and save the resulting mutual-info distribution."""
    lever = load_mat(data_file['response'])
    spline = InterpolatedUnivariateSpline(lever.axes[0], lever.values[0])
    y = spline(data_file['spike']['y'])[1:]
    X = data_file['spike']['data'][:, 1:]
    shuffled_mis = list()
    for _ in range(200):
        y_perm = np.random.permutation(y.copy())
        y_hat_perm, _ = cross_predict(X, y_perm, svr.predictor_factory(y_perm))
        shuffled_mis.append(mutual_info(y_perm, y_hat_perm))
    # NOTE(review): the final path component is "" so the target path ends in a
    # separator — confirm the intended file name was not lost here
    np.savez_compressed(join(project_folder, "report", "measure", ""), res=shuffled_mis)
Exemplo n.º 8
0
def mutual(size, mean, cov, sample_no=100, ci=(0.025, 0.975)):
    # type: (int, np.ndarray, np.ndarray, int, Tuple[float, float]) -> Tuple[float, Tuple[float, float]]
    """Estimate MI between the first two dimensions of multivariate-normal
    draws, returning the mean estimate and an empirical confidence interval."""
    np.random.seed(12345)
    estimates = list()
    for _ in range(sample_no):
        first_two = mn(mean, cov, size).T[0:2, :]
        estimates.append(
            ee.mutual_info(*(col.reshape(-1, 1) for col in first_two)))
    estimates = np.sort(estimates)
    lo, hi = int(ci[0] * sample_no), int(ci[1] * sample_no)
    return np.mean(estimates), (estimates[lo], estimates[hi])
Exemplo n.º 9
0
def show_traces():
    """Plot the real (blue) and SVR-decoded (red) lever trajectory for one
    wt session and print their mutual information."""
    data_file = files['wt'][0]
    lever = load_mat(data_file['response'])
    values = devibrate(lever.values[0], sample_rate=lever.sample_rate)
    y = InterpolatedUnivariateSpline(lever.axes[0],
                                     values)(data_file['spike']['y'])[1:]
    X = data_file['spike']['data'][:, 1:]
    lever_hat, powers = cross_predict(X, y, svr.predictor_factory(y))
    # NOTE(review): the cross-validated prediction above is immediately
    # overwritten by this in-sample fit — looks like leftover debugging; confirm
    lever_hat = svr.predictor_factory(y)(X, y, X)
    plt.plot(y, color='blue')
    plt.plot(lever_hat, color='red')
    print("mutual info: ", mutual_info(y, lever_hat))
Exemplo n.º 10
0
def examine_saline(data_file):
    """Compare SVR decoding from all neurons vs. the top-20 most informative
    neurons, plotting trajectory, in-sample fit, and both CV predictions."""
    # NOTE(review): the argument is immediately shadowed by a hard-coded
    # session — parameter is effectively unused; confirm intended
    data_file = dredd_files['cno'][5]
    lever = load_mat(data_file['response'])
    values = devibrate(lever.values[0], sample_rate=lever.sample_rate)
    y = InterpolatedUnivariateSpline(lever.axes[0],
                                     values)(data_file['spike']['y'])[1:]
    X = data_file['spike']['data'][:, 1:]
    decoder = svr.predictor_factory(y, gamma=3E-7, C=11, epsilon=1E-3)
    # per-neuron decoding power: MI of a single-neuron CV prediction
    single_power = [
        mutual_info(y,
                    cross_predict(x[newaxis, :], y, decoder, section_mi=False))
        for x in X
    ]
    hat_0 = cross_predict(X, y, decoder, section_mi=False)
    # keep only the 20 neurons with the highest single-neuron power
    mask = np.greater_equal(single_power, sorted(single_power)[-20])
    hat_1 = cross_predict(X[mask, :], y, decoder, section_mi=False)
    plt.plot(y, color='blue')
    hat = decoder(X, y, X)
    plt.plot(hat, color='green')
    plt.plot(hat_0, color='red')
    plt.plot(hat_1, color='orange')
    print("hat_1: ", mutual_info(hat_1, y), " hat_0: ", mutual_info(hat_0, y))
Exemplo n.º 11
0
 def svr_power(data_file: File,
               neuron_no: int = 20) -> Tuple[float, List[float]]:
     """Decode the lever trajectory from the {neuron_no} most informative
     neurons; return (ensemble MI, per-neuron mean section MI)."""
     lever = load_mat(data_file['response'])
     values = devibrate(lever.values[0], sample_rate=lever.sample_rate)
     y = InterpolatedUnivariateSpline(lever.axes[0],
                                      values)(data_file['spike']['y'])[1:]
     X = data_file['spike']['data'][:, 1:].copy()
     decoder = svr.predictor_factory(y, gamma=3E-9, C=12, epsilon=1E-3)
     # mean per-fold MI of each neuron decoded on its own
     single_power = [
         cross_predict(x[newaxis, :], y, decoder,
                       section_mi=True)[1].mean() for x in X
     ]
     # select the top neuron_no neurons by single-neuron power
     mask = np.greater_equal(single_power, sorted(single_power)[-neuron_no])
     path_hat, _ = cross_predict(X[mask, :], y, decoder)
     return mutual_info(y, path_hat), single_power
Exemplo n.º 12
0
def run_amp_power(data_file: File) -> Tuple[float, float, float, float]:
    """Try to decode the max lever trajectory amplitude of each trial.
    Returns:
        pre_amp_power: mutual info between predicted (from neuron activity before motor onset)
            and real amplitude of trials in one session
        post_amp_power: mutual info between predicted (from neuron activity after motor onset)
            and real amplitude of trials in one session
        pre_speed_power: same as pre_amp_power but for peak trial speed
        post_speed_power: same as post_amp_power but for peak trial speed
    """
    VALIDATE_FOLD = 10
    lever = get_trials(data_file, motion_params)
    neuron = DataFrame.load(data_file['spike'])
    # convert onsets from lever samples to imaging frames (factor 5/256),
    # then back up 3 frames to capture pre-onset activity
    resampled_onsets = np.rint(lever.trial_anchors *
                               (5 / 256)).astype(np.int_) - 3
    # folded: [neuron x trial x frame], 6 frames around each onset
    folded = np.stack(
        [take_segment(trace, resampled_onsets, 6) for trace in neuron.values])
    mask, filtered = devibrate_trials(lever.values, motion_params['pre_time'])
    # also drop trials with no positive activity in any neuron/frame
    mask &= np.any(folded > 0, axis=(0, 2))
    amp = filtered[mask, 25:64].max(axis=1) - filtered[mask, 0:15].mean(axis=1)
    speed = np.diff(filtered[mask, 5:50], axis=1).max(axis=1)
    svr_rbf = SVR('rbf', 3, 1E-7, cache_size=1000)

    def _decode(features: np.ndarray, target: np.ndarray) -> np.ndarray:
        # cross-validated SVR prediction of target from [trial x feature] data
        return cross_predict(
            features.T, target,
            lambda x, y, y_t: svr_rbf.fit(x.T, y).predict(y_t.T),
            VALIDATE_FOLD, False)

    # pre-onset frames (0:3) and post-onset frames (3:), flattened per trial
    X_pre = folded[:, mask, 0:3].swapaxes(0, 1).reshape(mask.sum(), -1)
    pre_amp_hat = _decode(X_pre, amp)
    pre_v_hat = _decode(X_pre, speed)
    X_post = folded[:, mask, 3:].swapaxes(0, 1).reshape(mask.sum(), -1)
    post_amp_hat = _decode(X_post, amp)
    post_v_hat = _decode(X_post, speed)
    return (mutual_info(pre_amp_hat, amp), mutual_info(post_amp_hat, amp),
            mutual_info(pre_v_hat, speed), mutual_info(post_v_hat, speed))
Exemplo n.º 13
0
def neuron_info(spike_trajectory: Tuple[DataFrame, SparseRec],
                svr_params: Dict[str, float]) -> np.ndarray:
    """Give the prediction power of individual neurons on push trajectory predicted in a rbf SVR."""
    spike, trajectory = spike_trajectory
    y = trajectory.values
    X = spike.values
    model = SVR('rbf', **svr_params)
    # NOTE(review): each neuron is fit and evaluated on the same samples
    # (in-sample); a cross-validated split would avoid optimistic bias
    powers = list()
    for neuron_trace in X:
        column = neuron_trace.reshape(-1, 1)
        y_hat = model.fit(column, y).predict(column)
        powers.append(mutual_info(y_hat, y))
    return np.array(powers)
Exemplo n.º 14
0
def decode_power(spike_trajectory: Tuple[DataFrame, SparseRec],
                 y_hat: np.ndarray) -> float:
    """Mutual info between a decoded trajectory and the real one, with the
    real trace truncated to the prediction's length."""
    real = spike_trajectory[1].values[0:y_hat.shape[0]]
    return mutual_info(real, y_hat)