Example #1
0
                        "ce_activity_store.hdf5")

    # load and interpolate temperature stimulus
    # Use a context manager so the HDF5 handle is closed even if the read
    # raises (the original open/close pair leaked the handle on error).
    with h5py.File("stimFile.hdf5", 'r') as dfile:
        tsin = np.array(dfile['sine_L_H_temp'])
    x = np.arange(tsin.size)  # stored at 20 Hz !
    # resample from 20 Hz to the simulation frame rate
    # NOTE(review): linspace endpoint tsin.size lies past the last sample
    # index (tsin.size-1), so np.interp clamps the final values - presumably
    # intentional/harmless for a long sine stimulus; confirm.
    xinterp = np.linspace(0, tsin.size,
                          tsin.size * GlobalDefs.frame_rate // 20)
    temperature = np.interp(xinterp, x, tsin)

    # get activity data - corresponding to sine-wave
    # one (cell_responses, ids) pair per zebrafish network
    zf_results = [ana_zf.temperature_activity(mpath(base_path_zf, p),
                                              temperature, i)
                  for i, p in enumerate(paths_512_zf)]
    all_ids_zf = np.hstack([ids for _, ids in zf_results])
    all_cells_zf = np.hstack([cells for cells, _ in zf_results])
    # same collection for the C. elegans networks
    ce_results = [ana_ce.temperature_activity(mpath(base_path_ce, p),
                                              temperature, i)
                  for i, p in enumerate(paths_512_ce)]
    all_ids_ce = np.hstack([ids for _, ids in ce_results])
    all_cells_ce = np.hstack([cells for cells, _ in ce_results])
Example #2
0
                        "activity_store.hdf5")
    # normalization standards derived from the photo training data -
    # presumably mean/std pairs for input standardization; TODO confirm
    std_pt = c.GradientData.load_standards("photo_training_data.hdf5")

    # load and interpolate temperature stimulus (stored at 20 Hz)
    dfile = h5py.File("stimFile.hdf5", 'r')
    tsin = np.array(dfile['sine_L_H_temp'])
    dfile.close()
    # number of output samples after resampling 20 Hz -> frame rate
    n_frames = tsin.size * GlobalDefs.frame_rate // 20
    temperature = np.interp(np.linspace(0, tsin.size, n_frames),
                            np.arange(tsin.size), tsin)

    # get cell responses
    # zebrafish temperature networks: keep only the response matrices
    # (index 0); the unit ids returned alongside are not needed here
    all_cells_zf = np.hstack(
        [ana_zf.temperature_activity(mpath(base_path_zf, p),
                                     temperature, i)[0]
         for i, p in enumerate(paths_512_zf)])
    # photo-trained networks
    all_cells_pt = np.hstack(
        [get_cell_responses(mpath(base_path_pt, p), temperature)
         for p in paths_512_pt])

    # convolve activity with nuclear gcamp calcium kernel
    # time constants below presumably parameterize the kernel built further
    # down (formula not visible in this excerpt)
    tau_on = 1.4  # seconds
    tau_on *= GlobalDefs.frame_rate  # in frames
    tau_off = 2  # seconds
    tau_off *= GlobalDefs.frame_rate  # in frames
    kframes = np.arange(10 * GlobalDefs.frame_rate)  # 10 s long kernel
Example #3
0
    # Recode trace values into discrete categories (in place):
    #   < 1    -> NaN (excluded from the mean; NaN entries pass through the
    #             later comparisons unchanged since NaN compares False)
    #   [1, 2) -> 0
    #   >= 2   -> 30 (still > 1 after the second assignment)
    # NOTE(review): meaning of the 0/30 magic values not visible here - confirm
    traces[traces < 1] = np.nan
    traces[traces < 2] = 0
    traces[traces > 1] = 30
    # average across axis 0, ignoring the NaN-masked entries
    mag = np.nanmean(traces, 0)
    # p_move and p_bout are computed earlier in this function (above excerpt)
    return p_move, p_bout, mag


if __name__ == "__main__":
    save_folder = "./DataFigures/FigureS1/"
    if not os.path.exists(save_folder):
        os.makedirs(save_folder)
    sns.reset_orig()  # restore matplotlib defaults that seaborn overrode
    mpl.rcParams['pdf.fonttype'] = 42  # embed TrueType (Type 42) fonts in PDFs

    # Example evolution on one network
    p = mpath(base_path_zf, paths_512_zf[0])  # first zebrafish 512-unit net
    evol_p = p + "/evolve/"
    errors = np.load(evol_p + "generation_errors.npy")
    weights = np.load(evol_p + "generation_weights.npy")
    # Panel: Error progression
    fig, ax = pl.subplots()
    # mean +/- std of errors for each of the 50 generations
    ax.errorbar(np.arange(50),
                np.mean(errors, 1),
                np.std(errors, 1),
                linestyle='None',
                marker='o',
                color="C1")
    # re-plot the final generation (index 49), presumably to highlight it -
    # this call continues beyond the visible excerpt
    ax.errorbar(49,
                np.mean(errors, 1)[49],
                np.std(errors, 1)[49],
                linestyle='None',
    temperature = np.interp(xinterp, x, tsin)
    dfile.close()

    # generate calcium kernel: product of an exponential decay and a
    # saturating rise (difference-of-exponentials shape), normalized to sum 1
    tau_on = 1.4 * GlobalDefs.frame_rate  # rise time constant (1.4 s) in frames
    tau_off = 2 * GlobalDefs.frame_rate  # decay time constant (2 s) in frames
    kframes = np.arange(10 * GlobalDefs.frame_rate)  # kernel support: 10 s
    decay = 2 ** (-kframes / tau_off)
    rise = 1 - 2 ** (-kframes / tau_on)
    kernel = decay * rise
    kernel = kernel / kernel.sum()  # unit sum so convolution preserves scale

    # long-form records for a later dataframe/plot: one row per measurement
    complexity_dict = {"n_components": [], "net type": [], "trained": []}

    for i, p in enumerate(paths_512_zf):
        model_path = mpath(base_path_zf, p)
        # trained network: responses to the temperature stimulus
        cell_res = ana_zf.temperature_activity(model_path, temperature, i)[0]
        # NOTE(review): appears to modify cell_res in place - confirm
        convolve_cell_responses(cell_res, kernel)
        comp_trained = complexity(cell_res, var_cutoff)
        # naive network: presumably pre-training responses (False flag) - confirm
        cell_res = get_cell_responses_predictive(model_path, temperature,
                                                 std_zf, False)
        convolve_cell_responses(cell_res, kernel)
        comp_naive = complexity(cell_res, var_cutoff)
        print("ZF Temp network: Trained: {0}, Naive: {1}".format(
            comp_trained, comp_naive))
        # two rows per network: one trained, one naive
        complexity_dict["n_components"].append(comp_trained)
        complexity_dict["trained"].append(True)
        complexity_dict["n_components"].append(comp_naive)
        complexity_dict["trained"].append(False)
        complexity_dict["net type"] += ["ZF Temp"] * 2
Example #5
0
    # compute expected temperature mean and standard deviation - the same as during training
    # two trainer instances, one per training gradient range (22-37 and
    # 14-29); the other constructor arguments' meanings are not visible
    # here - confirm against CircleRLTrainer
    ex1 = CircleRLTrainer(None, 100, 22, 37, 26)
    tm1 = ex1.t_mean
    s1 = ex1.t_std
    ex2 = CircleRLTrainer(None, 100, 14, 29, 26)
    tm2 = ex2.t_mean
    s2 = ex2.t_std
    # average the per-gradient statistics into one normalization pair
    t_mean = (tm1 + tm2) / 2
    t_std = (s1 + s2) / 2

    # Plot gradient errors during training
    rewards_given = []
    grad_errors = []
    for p in paths_rl:
        try:
            l_file = h5py.File(mpath(base_path_rl, p) + "/losses.hdf5", 'r')
        except IOError:
            # skip networks for which no loss file was saved
            continue
        rewards_given.append(np.array(l_file["rewards_given"]))
        grad_errors.append(np.array(l_file["ep_avg_grad_error"]))
        l_file.close()
    # find max reward number
    max_reward = max([np.cumsum(rg)[-1] for rg in rewards_given])
    # common x-axis: cumulative rewards in steps of 5000
    ip_given = np.arange(max_reward, step=5000)
    # smooth each error trace (gaussian sigma=10 samples) and resample it
    # onto the common cumulative-reward axis so traces can be stacked
    grad_errors = np.vstack([np.interp(ip_given, np.cumsum(rg), gaussian_filter1d(ge, 10)) for (rg, ge)
                             in zip(rewards_given, grad_errors)])

    fig, ax = pl.subplots()
    # NOTE(review): sns.tsplot was removed in seaborn >= 0.9 - confirm the
    # pinned seaborn version, or migrate to sns.lineplot
    sns.tsplot(grad_errors, ip_given, ax=ax)
    # dashed reference lines; the specific values (786680 rewards, error 2.4)
    # presumably mark a comparison point - confirm their derivation
    ax.plot([786680, 786680], [2.0, 5.0], 'k--')
    ax.plot([786680, 1.7e7], [2.4, 2.4], 'k--')
    x = np.arange(tsin.size)  # stored at 20 Hz !
    xinterp = np.linspace(0, tsin.size,
                          tsin.size * GlobalDefs.frame_rate // 20)
    temperature = np.interp(xinterp, x, tsin)
    dfile.close()

    # load stack types
    # Context manager guarantees the file is closed even if the read or the
    # indexing raises (the original open/close pair leaked on error).
    with h5py.File("stack_types.hdf5", 'r') as dfile:
        # keep only entries passing the NaN mask computed earlier
        stack_types = np.array(dfile["stack_types"])[no_nan_aa]

    # get activity data
    # one (cell_responses, ids) pair per network path
    responses = [ana.temperature_activity(mpath(base_path, p),
                                          temperature, i)
                 for i, p in enumerate(paths_512)]
    all_cells = np.hstack([cells for cells, _ in responses])
    all_ids = np.hstack([ids for _, ids in responses])

    # convolve activity with nuclear gcamp calcium kernel
    # build the kernel: exponential decay times saturating rise, unit sum
    tau_on = 1.4 * GlobalDefs.frame_rate  # rise time constant (1.4 s), frames
    tau_off = 2 * GlobalDefs.frame_rate  # decay time constant (2 s), frames
    kframes = np.arange(10 * GlobalDefs.frame_rate)  # 10 s long kernel
    kernel = 2 ** (-kframes / tau_off) * (1 - 2 ** (-kframes / tau_on))
    kernel = kernel / kernel.sum()
    # convolve with our kernel