Example #1
def run_flat_gradient(model_path, drop_list=None):
    """
    Runs a simulation in a flat gradient (t_max == t_min) using the average
    weights of the last evolved generation
    :param model_path: The model path
    :param drop_list: Optional unit drop list
    :return: The simulation positions
    """
    mdata = c.ModelData(model_path)
    gpn = MoTypes(False).network_model()
    gpn.load(mdata.ModelDefinition, mdata.LastCheckpoint)
    flt_params = GlobalDefs.circle_sim_params.copy()
    flt_params["t_max"] = flt_params["t_min"]
    sim = MoTypes(False).rad_sim(gpn, std, **flt_params)
    sim.t_max = sim.t_min  # reset gradient to be flat
    sim.remove = drop_list
    evo_path = model_path + '/evolve/generation_weights.npy'
    evo_weights = np.load(evo_path)
    w = np.mean(evo_weights[-1, :, :], 0)
    sim.bf_weights = w
    return sim.run_simulation(GlobalDefs.n_steps, False)
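
The weight averaging above collapses the last evolved generation into a single bout-frequency weight vector. A minimal numpy-only sketch of just that step (the array shapes are invented for illustration):

import numpy as np

# hypothetical shape: (n_generations, n_individuals, n_weights)
evo_weights = np.random.randn(50, 512, 8)
# collapse the final generation into one weight vector, as above
w = np.mean(evo_weights[-1, :, :], 0)
assert w.shape == (8,)
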
Example #2
def compute_da_modulation(model_path, drop_list=None):
    with SimulationStore("sim_store.hdf5", std, MoTypes(False)) as sim_store:
        pos_ev = sim_store.get_sim_pos(model_path, "r", "bfevolve", drop_list)
    pos_flt = run_flat_gradient(model_path, drop_list)
    bs_ev = get_bout_starts(pos_ev)
    bs_flt = get_bout_starts(pos_flt)
    # get delta angle of each bout
    da_ev = get_bout_da(pos_ev, bs_ev)
    da_flt = get_bout_da(pos_flt, bs_flt)
    # get temperature at each bout start
    temp_ev = a.temp_convert(np.sqrt(np.sum(pos_ev[bs_ev.astype(bool), :2]**2, 1)), 'r')
    temp_flt = a.temp_convert(np.sqrt(np.sum(pos_flt[bs_flt.astype(bool), :2]**2, 1)), 'r')
    # get delta-temperature caused by each previous bout
    dt_ev = np.r_[0, np.diff(temp_ev)]
    dt_flt = np.r_[0, np.diff(temp_flt)]
    # only consider data above T_Preferred and away from the edge
    valid_ev = np.logical_and(temp_ev > GlobalDefs.tPreferred, temp_ev < GlobalDefs.circle_sim_params["t_max"]-1)
    valid_flt = np.logical_and(temp_flt > GlobalDefs.tPreferred, temp_flt < GlobalDefs.circle_sim_params["t_max"] - 1)
    da_ev = da_ev[valid_ev]
    da_flt = da_flt[valid_flt]
    dt_ev = dt_ev[valid_ev]
    dt_flt = dt_flt[valid_flt]
    # get turn magnitude for up and down gradient
    up_grad_ev = np.mean(np.abs(da_ev[dt_ev > 0.5]))
    dn_grad_ev = np.mean(np.abs(da_ev[dt_ev < -0.5]))
    up_grad_flt = np.mean(np.abs(da_flt[dt_flt > 0.5]))
    dn_grad_flt = np.mean(np.abs(da_flt[dt_flt < -0.5]))
    up_change = up_grad_ev / up_grad_flt
    dn_change = dn_grad_ev / dn_grad_flt
    return dn_change, up_change
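
The final ratios compare turn magnitudes between the evolved gradient simulation and its flat-gradient control. A self-contained sketch of the up-/down-gradient split (the 0.5 degree threshold is from the function; the data are invented):

import numpy as np

rng = np.random.default_rng(0)
da = rng.normal(0, 30, 1000)   # invented per-bout delta angles (degrees)
dt = rng.normal(0, 1, 1000)    # invented per-bout delta temperatures
# same 0.5 degree threshold as above to select clear up-/down-gradient bouts
up_mag = np.mean(np.abs(da[dt > 0.5]))
dn_mag = np.mean(np.abs(da[dt < -0.5]))
modulation = up_mag / dn_mag   # >1 means larger turns when moving up-gradient
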
Example #3
def get_cluster_assignments(mt: MoTypes, model_dir: str, regressors,
                            t_stimulus, std, droplist):
    """
    Creates a dictionary of cluster assignments for cells in t and m branch of a model
    :param mt: The model organism to use
    :param model_dir: The folder of the model checkpoint
    :param regressors: The cluster regressors
    :param t_stimulus: The temperature stimulus to use
    :param std: The standardizations
    :param droplist: Unit drop list
    :return: Dictionary with cluster assignments for units in the 't' and 'm' branches
    """
    md = c.ModelData(model_dir)
    ml = mt.network_model()
    ml.load(md.ModelDefinition, md.LastCheckpoint)
    # prepend lead-in to stimulus
    lead_in = np.full(ml.input_dims[2] - 1, np.mean(t_stimulus[:10]))
    temp = np.r_[lead_in, t_stimulus]
    act_dict = ml.unit_stimulus_responses(temp, None, None, std, droplist)
    mpool = get_pool()
    ares = {
        k: [
            mpool.apply_async(get_best_fit, (ad, regressors))
            for ad in act_dict[k]
        ]
        for k in ['t', 'm']
    }
    retval = {k: np.vstack([ar.get() for ar in ares[k]]) for k in ares}
    return retval
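
Prepending a constant lead-in lets the network's input history buffer fill before the stimulus proper starts. A sketch of just that step, with the input length as an assumption:

import numpy as np

input_len = 100                    # assumed network input history length
t_stimulus = 25 + np.sin(np.linspace(0, 10, 500))
lead_in = np.full(input_len - 1, np.mean(t_stimulus[:10]))
temp = np.r_[lead_in, t_stimulus]  # constant lead-in followed by the stimulus
assert temp.size == t_stimulus.size + input_len - 1
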
Example #4
def compute_da_coherence(model_path, drop_list=None):
    with SimulationStore("sim_store.hdf5", std_zf, MoTypes(False)) as sim_store:
        pos_ev = sim_store.get_sim_pos(model_path, "r", "bfevolve", drop_list)
    bs_ev = get_bout_starts(pos_ev)
    # get delta angle of each bout
    da_ev = np.rad2deg(get_bout_da(pos_ev, bs_ev))
    # convert into approximation of S, L and R behaviors
    bhv_ev = np.ones_like(da_ev)
    bhv_ev[da_ev < -10] = 2
    bhv_ev[da_ev > 10] = 3
    return a.turn_coherence(bhv_ev, 10), da_ev
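
The thresholding above discretizes continuous turn angles into three behavior codes. A numpy-only sketch (the ±10 degree cutoff is from the function; the input angles are invented):

import numpy as np

da = np.array([-25.0, 3.0, 14.0, -8.0, 40.0])  # invented delta angles in degrees
bhv = np.ones_like(da)   # default code 1 (straight)
bhv[da < -10] = 2        # turns past -10 degrees -> code 2
bhv[da > 10] = 3         # turns past +10 degrees -> code 3
print(bhv)               # [2. 1. 3. 1. 3.]
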
Example #5
def do_simulation(path):
    """
    Uses a model identified by path to run a naive and a trained simulation
    :param path: The model path
    :return:
        [0]: The facing angle bin centers
        [1]: The occupancy of the naive model
        [2]: The occupancy of the trained model
    """
    global std_pt
    bins = np.linspace(-np.pi, np.pi, 100)
    # bin-centers of the facing-angle bins (radians)
    bcenters = bins[:-1] + np.diff(bins) / 2
    # naive simulation
    mdata = c.ModelData(path)
    model_naive = c.ZfGpNetworkModel()
    model_naive.load(mdata.ModelDefinition, mdata.FirstCheckpoint)
    model_trained = c.ZfGpNetworkModel()
    model_trained.load(mdata.ModelDefinition, mdata.LastCheckpoint)
    sim = MoTypes(False).pt_sim(model_naive, std_pt, 100)
    pos_naive = sim.run_simulation(GlobalDefs.n_steps)
    h_naive = a.bin_simulation_pt(pos_naive, bins)
    sim = MoTypes(False).pt_sim(model_trained, std_pt, 100)
    pos_trained = sim.run_simulation(GlobalDefs.n_steps)
    h_trained = a.bin_simulation_pt(pos_trained, bins)
    return bcenters, h_naive, h_trained
Example #6
def bin_simulation_pt(pos, bins: np.ndarray):
    """
    Bin simulation result facing angles
    :param pos: Position array obtained from running simulation
    :param bins: Array containing bin edges
    :return: Relative occupancy
    """
    quantpos = MoTypes(False).pt_sim.facing_angle(pos[:, 0], pos[:, 1], pos[:, 2])
    # remap angles from -pi to pi
    quantpos[quantpos > np.pi] = quantpos[quantpos > np.pi] - 2 * np.pi
    quantpos[quantpos < -np.pi] = quantpos[quantpos < -np.pi] + 2 * np.pi
    h = np.histogram(quantpos, bins)[0].astype(float)
    h = h / h.sum()
    return h
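
The wrapping step maps angles outside (-pi, pi] back into range before histogramming. A self-contained version of that normalization on invented angles:

import numpy as np

angles = np.array([0.5, 3.5, -3.5, 6.0])  # invented, partly outside (-pi, pi]
angles[angles > np.pi] -= 2 * np.pi
angles[angles < -np.pi] += 2 * np.pi
h = np.histogram(angles, np.linspace(-np.pi, np.pi, 10))[0].astype(float)
h /= h.sum()  # relative occupancy, as returned above
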
Example #7
def compute_gradient_bout_frequency(model_path, drop_list=None):
    def bout_freq(pos: np.ndarray):
        r = np.sqrt(np.sum(pos[:, :2]**2, 1))  # radial position
        spd = np.r_[0, np.sqrt(np.sum(np.diff(pos[:, :2], axis=0) ** 2, 1))]  # speed
        bs = np.r_[0, np.diff(spd) > 0.00098]  # bout starts
        bins = np.linspace(0, 100, 6)
        bcenters = bins[:-1] + np.diff(bins)/2
        cnt_r = np.histogram(r, bins)[0]
        cnt_r_bs = np.histogram(r[bs > 0.1], bins)[0]
        bfreq = cnt_r_bs / cnt_r * GlobalDefs.frame_rate
        return bfreq, bcenters

    with SimulationStore("sim_store.hdf5", std, MoTypes(False)) as sim_store:
        pos_fixed = sim_store.get_sim_pos(model_path, "r", "trained", drop_list)
        pos_var = sim_store.get_sim_pos(model_path, "r", "bfevolve", drop_list)
    bf_fixed, bc = bout_freq(pos_fixed)
    bf_var, bc = bout_freq(pos_var)
    return bc, bf_fixed, bf_var
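
bout_freq estimates position-resolved bout frequency as (bout starts per radial bin) / (frames per radial bin) * frame rate. A standalone sketch with an assumed frame rate and invented bout flags:

import numpy as np

frame_rate = 100                         # assumed frames per second
rng = np.random.default_rng(1)
r = rng.uniform(0, 100, 10_000)          # radial position on every frame
bs = rng.random(10_000) < 0.01           # invented bout-start flags
bins = np.linspace(0, 100, 6)
cnt_r = np.histogram(r, bins)[0]         # frames spent in each radial bin
cnt_r_bs = np.histogram(r[bs], bins)[0]  # bout starts in each radial bin
bfreq = cnt_r_bs / cnt_r * frame_rate    # bouts per second by position
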
Example #8
def do_simulation(path, sim_type, run_ideal, drop_list=None):
    """
    Uses a model identified by path to run a naive and a trained and optionally an ideal and unit dropped simulation
    :param path: The model path
    :param sim_type: The simulation type to run
    :param run_ideal: If true, an ideal choice simulation will be run as well
    :param drop_list: If not None, should be a list that will be fed to det_drop to determine which units are kept (1)
        or dropped (0)
    :return:
        [0]: The occupancy bin centers in degrees C
        [1]: The occupancy of the naive model
        [2]: The occupancy of the trained model
        [3]: The occupancy of the ideal choice model if run_ideal=True, None otherwise
        [4]: The occupancy of a unit dropped model if drop_list is provided, None otherwise
    """
    bins = np.linspace(0, GlobalDefs.circle_sim_params["radius"], 100)
    # bin-centers, converted to degrees C below
    bcenters = bins[:-1]+np.diff(bins)/2
    bcenters = temp_convert(bcenters, sim_type)
    if sim_type == "l":
        simdir = "x"
    else:
        simdir = "r"
    with SimulationStore("sim_store.hdf5", std, MoTypes(False)) as sim_store:
        pos_naive = sim_store.get_sim_pos(path, sim_type, "naive")
        h_naive = bin_simulation(pos_naive, bins, simdir)
        pos_trained = sim_store.get_sim_pos(path, sim_type, "trained")
        h_trained = bin_simulation(pos_trained, bins, simdir)
        if run_ideal:
            pos_ideal = sim_store.get_sim_pos(path, sim_type, "ideal")
            h_ideal = bin_simulation(pos_ideal, bins, simdir)
        else:
            h_ideal = None
        if drop_list is not None:
            pos_drop = sim_store.get_sim_pos(path, sim_type, "trained", drop_list)
            h_drop = bin_simulation(pos_drop, bins, simdir)
        else:
            h_drop = None
    return bcenters, h_naive, h_trained, h_ideal, h_drop
Example #9
    error_file.create_dataset("test_losses", data=np.array(test_losses))
    error_file.create_dataset("test_eval", data=np.array(test_steps))
    error_file.close()


if __name__ == '__main__':
    # load training and test data
    tD_1 = GradientData.load("gd_training_data.hdf5")
    tD_2 = GradientData.load("gd_training_data_rev.hdf5")
    tD_2.copy_normalization(tD_1)
    train_list = [tD_1, tD_2]
    testData = GradientData.load("gd_test_data_radial.hdf5")
    # enforce same scaling on testData as on trainingData
    testData.copy_normalization(tD_1)

    ana = a.Analyzer(MoTypes(False), tD_1.standards, None,
                     "activity_store.hdf5")

    # load cell unit ids and cluster ids
    dfile = h5py.File("stimFile.hdf5", 'r')
    tsin = np.array(dfile['sine_L_H_temp'])
    x = np.arange(tsin.size)  # stored at 20 Hz !
    xinterp = np.linspace(0, tsin.size,
                          tsin.size * GlobalDefs.frame_rate // 20)
    temperature = np.interp(xinterp, x, tsin)
    dfile.close()
    all_ids = []
    for i, p in enumerate(paths_512):
        cell_res, ids = ana.temperature_activity(mpath(p), temperature, i)
        all_ids.append(ids)
    all_ids = np.hstack(all_ids)
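
The stimulus interpolation above upsamples a 20 Hz recording to the simulation frame rate; this pattern recurs in several snippets below. A self-contained version (the frame rate is an assumption standing in for GlobalDefs.frame_rate):

import numpy as np

frame_rate = 100                               # assumed GlobalDefs.frame_rate
tsin = np.sin(np.linspace(0, 2 * np.pi, 200))  # stand-in for 'sine_L_H_temp'
x = np.arange(tsin.size)                       # sample indices at 20 Hz
xinterp = np.linspace(0, tsin.size, tsin.size * frame_rate // 20)
temperature = np.interp(xinterp, x, tsin)      # upsampled to frame_rate
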
Example #10
               color="C1",
               n_boot=1000,
               condition="512 HU")
    epoch_times = np.linspace(0, test_time.max(), 10, endpoint=False)
    for e in epoch_times:
        ax.plot([e, e], [-.5, .1], 'k--', lw=0.25)
    ax.set_ylabel("log(Squared test error)")
    ax.set_xlabel("Training step")
    ax.set_xlim(-10000)
    ax.set_xticks([0, 100000, 200000, 300000, 400000])
    ax.legend()
    sns.despine(fig, ax)
    fig.savefig(save_folder + "pt_test_errors.pdf", type="pdf")

    std_zf = c.GradientData.load_standards("gd_training_data.hdf5")
    ana_zf = a.Analyzer(MoTypes(False), std_zf, "sim_store.hdf5",
                        "activity_store.hdf5")
    std_pt = c.GradientData.load_standards("photo_training_data.hdf5")

    # load and interpolate temperature stimulus
    dfile = h5py.File("stimFile.hdf5", 'r')
    tsin = np.array(dfile['sine_L_H_temp'])
    x = np.arange(tsin.size)  # stored at 20 Hz !
    xinterp = np.linspace(0, tsin.size,
                          tsin.size * GlobalDefs.frame_rate // 20)
    temperature = np.interp(xinterp, x, tsin)
    dfile.close()

    # get cell responses
    all_cells_zf = []
    for i, p in enumerate(paths_512_zf):
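Example #11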
 root.update()
 root.withdraw()
 print("Select model directory")
 model_dir = filedialog.askdirectory(title="Select directory with model checkpoints", initialdir="./model_data/")
 root.update()
 mdata = ModelData(model_dir)
 # load training data for scaling
 if mo_type == "z":
     std = GradientData.load_standards("gd_training_data.hdf5")
 else:
     std = GradientData.load_standards("ce_gd_training_data.hdf5")
 sim_type = ""
 while sim_type != "l" and sim_type != "r":
     sim_type = input("Please select either (l)inear or (r)adial simulation [l/r]:")
 if mo_type == "z":
     mot = MoTypes(False)
 else:
     mot = MoTypes(True)
 gpn_naive = mot.network_model()
 gpn_naive.load(mdata.ModelDefinition, mdata.FirstCheckpoint)
 gpn_trained = mot.network_model()
 gpn_trained.load(mdata.ModelDefinition, mdata.LastCheckpoint)
 if sim_type == "l":
     sim_type = "x"  # so we call run_simulation correctly later
     sim_naive = mot.lin_sim(gpn_naive, std, **GlobalDefs.lin_sim_params)
     sim_trained = mot.lin_sim(gpn_trained, std, **GlobalDefs.lin_sim_params)
 else:
     sim_naive = mot.rad_sim(gpn_naive, std, **GlobalDefs.circle_sim_params)
     sim_trained = mot.rad_sim(gpn_trained, std, **GlobalDefs.circle_sim_params)
 b_naive, h_naive = run_simulation(sim_naive, n_steps, False, sim_type)[1:]
 pos_trained, b_trained, h_trained = run_simulation(sim_trained, n_steps, False, sim_type)
Example #12
    """
    pca = PCA(20)
    pca.fit(cell_mat.T)
    cum_var = np.cumsum(pca.explained_variance_ratio_)
    return np.where(cum_var > v_c)[0][0] + 1  # +1 because component indices start at 0


if __name__ == "__main__":
    sns.reset_orig()
    mpl.rcParams['pdf.fonttype'] = 42

    var_cutoff = 0.99  # the fraction of variance to explain for complexity estimation

    std_zf = core.GradientData.load_standards("gd_training_data.hdf5")
    ana_zf = analysis.Analyzer(MoTypes(False), std_zf, "sim_store.hdf5",
                               "activity_store.hdf5")
    std_ce = core.GradientData.load_standards("ce_gd_training_data.hdf5")
    ana_ce = analysis.Analyzer(MoTypes(True), std_ce, "ce_sim_store.hdf5",
                               "ce_activity_store.hdf5")
    ana_th = analysis.Analyzer(MoTypes(False), std_zf, "sim_store_tanh.hdf5",
                               "activity_store_tanh.hdf5")
    std_pt = core.GradientData.load_standards("photo_training_data.hdf5")

    # compute expected temperature mean and standard deviation for RL nets, the same way as during training
    ex1 = CircleRLTrainer(None, 100, 22, 37, 26)
    tm1 = ex1.t_mean
    s1 = ex1.t_std
    ex2 = CircleRLTrainer(None, 100, 14, 29, 26)
    tm2 = ex2.t_mean
    s2 = ex2.t_std
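
The truncated function at the top of this example finds how many principal components are needed to explain a variance fraction v_c. A runnable sketch of the same computation on invented low-rank data:

import numpy as np
from sklearn.decomposition import PCA

rng = np.random.default_rng(0)
# invented low-rank data: 5 latent signals mixed into 50 "cells"
latents = rng.standard_normal((5, 1000))
cell_mat = rng.standard_normal((50, 5)) @ latents + 0.01 * rng.standard_normal((50, 1000))
pca = PCA(20)
pca.fit(cell_mat.T)  # samples x features, as in the function above
cum_var = np.cumsum(pca.explained_variance_ratio_)
n_components = np.where(cum_var > 0.99)[0][0] + 1  # ~5 for this construction
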
Example #13
    def sim_info(net_id):
        def bin_pos(all_pos):
            nonlocal sim_type
            nonlocal bins
            bin_centers = bins[:-1] + np.diff(bins) / 2
            if sim_type == "r":
                quantpos = np.sqrt(np.sum(all_pos[:, :2] ** 2, 1))
            else:
                quantpos = all_pos[:, 0]
            h = np.histogram(quantpos, bins)[0].astype(float)
            # normalize for radius if applicable
            if sim_type == "r":
                h /= bin_centers
            h /= h.sum()
            # convert bin_centers to temperature
            bin_centers = temp_convert(bin_centers, sim_type)
            return bin_centers, h

        def temp_error(all_pos):
            nonlocal sim_type
            if sim_type == "r":
                quantpos = np.sqrt(np.sum(all_pos[:, :2] ** 2, 1))
            else:
                quantpos = all_pos[:, 0]
            temp_pos = temp_convert(quantpos, sim_type)
            if sim_type == "r":
                # form a weighted average, considering points of larger radius less since just by
                # chance they will be visited more often
                weights = 1 / np.sqrt(np.sum(all_pos[:, :2]**2, 1))
                weights[np.isinf(weights)] = 0  # only occurs when (0, 0) was picked as starting point
                sum_of_weights = np.nansum(weights)
                weighted_sum = np.nansum(np.sqrt((temp_pos - GlobalDefs.tPreferred)**2) * weights)
                return weighted_sum / sum_of_weights
            return np.mean(np.sqrt((temp_pos - GlobalDefs.tPreferred)**2))

        nonlocal sim_type
        nonlocal fish
        nonlocal non_fish
        fish_remove = create_det_drop_list(net_id, clust_ids, all_ids, fish)
        nonfish_remove = create_det_drop_list(net_id, clust_ids, all_ids, non_fish)
        shuff_remove = create_det_drop_list(net_id, clust_ids, all_ids, fish, True)
        with SimulationStore("sim_store.hdf5", std, MoTypes(False)) as sim_store:
            pos_naive, db_naive = sim_store.get_sim_debug(mpath(paths_512[net_id]), sim_type, "naive")
            pos_trained, db_trained = sim_store.get_sim_debug(mpath(paths_512[net_id]), sim_type, "bfevolve")
            pos_fish, db_fish = sim_store.get_sim_debug(mpath(paths_512[net_id]), sim_type, "bfevolve", fish_remove)
            pos_nonfish, db_nonfish = sim_store.get_sim_debug(mpath(paths_512[net_id]), sim_type, "bfevolve",
                                                              nonfish_remove)
        with SimulationStore(None, std, MoTypes(False)) as sim_store:  # don't store shuffle
            pos_shuff, db_shuff = sim_store.get_sim_debug(mpath(paths_512[net_id]), sim_type, "bfevolve", shuff_remove)
        bins = np.linspace(0, GlobalDefs.circle_sim_params["radius"], 100)
        bc, h_naive = bin_pos(pos_naive)
        e_naive = temp_error(pos_naive)
        h_trained = bin_pos(pos_trained)[1]
        e_trained = temp_error(pos_trained)
        h_fish = bin_pos(pos_fish)[1]
        e_fish = temp_error(pos_fish)
        h_nonfish = bin_pos(pos_nonfish)[1]
        e_nonfish = temp_error(pos_nonfish)
        h_shuff = bin_pos(pos_shuff)[1]
        e_shuff = temp_error(pos_shuff)
        return bc, {"naive": (h_naive, db_naive, e_naive), "trained": (h_trained, db_trained, e_trained),
                    "fish": (h_fish, db_fish, e_fish), "nonfish": (h_nonfish, db_nonfish, e_nonfish),
                    "shuffle": (h_shuff, db_shuff, e_shuff)}
Example #14
import h5py
from global_defs import GlobalDefs
from scipy.signal import convolve

# file definitions
base_path = "./model_data/Adam_1e-4/sepInput_mixTrain/"

paths_512 = [f + '/' for f in os.listdir(base_path) if "_3m512_" in f]

if __name__ == "__main__":
    save_folder = "./DataFigures/Figure2/"
    if not os.path.exists(save_folder):
        os.makedirs(save_folder)
    sns.reset_orig()
    mpl.rcParams['pdf.fonttype'] = 42
    mo = MoTypes(False)
    std = c.GradientData.load_standards("gd_training_data.hdf5")
    ana = a.Analyzer(mo, std, "sim_store.hdf5", "activity_store.hdf5")

    # for fish-like clusters - their indices
    fast_on_like = 4
    slow_on_like = 5
    fast_off_like = 1
    slow_off_like = 3

    # for clusters we identify in fish - their indices
    int_off = 2

    # load and interpolate temperature stimulus
    dfile = h5py.File("stimFile.hdf5", 'r')
    tsin = np.array(dfile['sine_L_H_temp'])
Example #15
    error_file.create_dataset("test_rank_errors", data=np.array(test_errors))
    error_file.create_dataset("test_eval", data=np.array(test_steps))
    error_file.close()


if __name__ == '__main__':
    # load training and test data
    tD_1 = GradientData.load("ce_gd_training_data.hdf5")
    tD_2 = GradientData.load("ce_gd_training_data_rev.hdf5")
    tD_2.copy_normalization(tD_1)
    train_list = [tD_1, tD_2]
    testData = GradientData.load("ce_gd_test_data_radial.hdf5")
    # enforce same scaling on testData as on trainingData
    testData.copy_normalization(tD_1)

    ana = a.Analyzer(MoTypes(True), tD_1.standards, None,
                     "ce_activity_store.hdf5")

    # load cell unit ids and cluster ids
    dfile = h5py.File("stimFile.hdf5", 'r')
    tsin = np.array(dfile['sine_L_H_temp'])
    x = np.arange(tsin.size)  # stored at 20 Hz !
    xinterp = np.linspace(0, tsin.size,
                          tsin.size * GlobalDefs.frame_rate // 20)
    temperature = np.interp(xinterp, x, tsin)
    dfile.close()
    all_ids = []
    for i, p in enumerate(paths_512):
        cell_res, ids = ana.temperature_activity(mpath(p), temperature, i)
        all_ids.append(ids)
    all_ids = np.hstack(all_ids)
Example #16
# file definitions
base_path_zf = "./model_data/Adam_1e-4/sepInput_mixTrain/"
paths_512_zf = [f + '/' for f in os.listdir(base_path_zf) if "_3m512_" in f]

base_path_ce = "./model_data/CE_Adam_1e-4/"
paths_512_ce = [f + '/' for f in os.listdir(base_path_ce) if "_3m512_" in f]

if __name__ == "__main__":
    save_folder = "./DataFigures/FigureS5/"
    if not os.path.exists(save_folder):
        os.makedirs(save_folder)
    sns.reset_orig()
    mpl.rcParams['pdf.fonttype'] = 42

    std_zf = c.GradientData.load_standards("gd_training_data.hdf5")
    ana_zf = a.Analyzer(MoTypes(False), std_zf, "sim_store.hdf5",
                        "activity_store.hdf5")
    std_ce = c.GradientData.load_standards("ce_gd_training_data.hdf5")
    ana_ce = a.Analyzer(MoTypes(True), std_ce, "ce_sim_store.hdf5",
                        "ce_activity_store.hdf5")

    # load activity clusters from file
    clfile = h5py.File("cluster_info.hdf5", "r")
    clust_ids_zf = np.array(clfile["clust_ids"])
    clfile.close()
    clfile = h5py.File("ce_cluster_info.hdf5", "r")
    clust_ids_ce = np.array(clfile["clust_ids"])
    clfile.close()

    # load and interpolate temperature stimulus
    dfile = h5py.File("stimFile.hdf5", 'r')
Example #17
    sns.tsplot(bf_trained, centers, n_boot=1000, color="C1", err_style="ci_band", condition="Generation 0")
    sns.tsplot(bf_part, centers, n_boot=1000, color=(.5, .5, .5), err_style="ci_band", condition="Generation 8")
    sns.tsplot(bf_evolved, centers, n_boot=1000, color="C0", err_style="ci_band", condition="Generation 50")
    ax.set_xlim(23, 36)
    ax.set_xticks([25, 30, 35])
    ax.set_yticks([0.5, 0.75, 1, 1.25])
    ax.set_xlabel("Temperature [C]")
    ax.set_ylabel("Swim frequency [Hz]")
    ax.legend()
    sns.despine(fig, ax)
    fig.savefig(save_folder + "gradient_swim_frequency.pdf", type="pdf")

    # fourth panel - gradient distribution naive, trained, evolved
    bns = np.linspace(0, GlobalDefs.circle_sim_params["radius"], 100)
    centers = a.temp_convert(bns[:-1] + np.diff(bns)/2, "r")
    ana = a.Analyzer(MoTypes(False), std, "sim_store.hdf5", None)
    naive = np.empty((len(paths_512), centers.size))
    trained = np.empty_like(naive)
    evolved = np.empty_like(naive)
    naive_errors = []
    trained_errors = []
    evolved_errors = []
    for i, p in enumerate(paths_512):
        pos_n = ana.run_simulation(mpath(p), "r", "naive")
        naive_errors.append(a.temp_error(pos_n, 'r'))
        naive[i, :] = a.bin_simulation(pos_n, bns, "r")
        pos_t = ana.run_simulation(mpath(p), "r", "trained")
        trained_errors.append(a.temp_error(pos_t, 'r'))
        trained[i, :] = a.bin_simulation(pos_t, bns, "r")
        pos_e = ana.run_simulation(mpath(p), "r", "bfevolve")
        evolved_errors.append(a.temp_error(pos_e, 'r'))
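Example #18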
    row_matches = np.full(corr_mat.shape[0], -1)
    for ix, cm in enumerate(col_matches):
        if cm < 0:
            continue
        row_matches[cm] = ix
    return {ix: col_names[row_matches[ix]] if row_matches[ix] != -1 else ix for ix in range(corr_mat.shape[0])}
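
The fragment above inverts a column-to-row assignment into a row-indexed lookup, falling back to the row index itself when a row was never matched. A self-contained sketch with invented labels and matches:

import numpy as np

col_names = ["Fast ON", "Slow ON", "Fast OFF", "Slow OFF"]  # invented labels
col_matches = np.array([2, -1, 0, 1])  # column i matched to row col_matches[i]
n_rows = 4
row_matches = np.full(n_rows, -1)
for ix, cm in enumerate(col_matches):
    if cm < 0:
        continue  # unmatched column
    row_matches[cm] = ix
named = {ix: col_names[row_matches[ix]] if row_matches[ix] != -1 else ix
         for ix in range(n_rows)}
print(named)  # {0: 'Fast OFF', 1: 'Slow OFF', 2: 'Fast ON', 3: 3}
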


if __name__ == "__main__":
    save_folder = "./DataFigures/ZF_ANN_Correspondence/"
    if not os.path.exists(save_folder):
        os.makedirs(save_folder)
    sns.reset_orig()
    mpl.rcParams['pdf.fonttype'] = 42

    mo = MoTypes(False)
    std = c.GradientData.load_standards("gd_training_data.hdf5")
    ana = a.Analyzer(mo, std, "sim_store.hdf5", "activity_store.hdf5")

    # load zebrafish region results and create Rh56 regressor matrix for FastON, SlowON, FastOFF, SlowOFF
    result_labels = ["Rh6"]
    region_results = {}  # type: Dict[str, RegionResults]
    analysis_file = h5py.File('regiondata.hdf5', 'r')
    for rl in result_labels:
        region_results[rl] = pickle.loads(np.array(analysis_file[rl]))
    analysis_file.close()
    rh_56_calcium = region_results["Rh6"].regressors[:, :-1]
    # the names of these regressors according to Haesemeyer et al., 2018
    reg_names = ["Fast ON", "Slow ON", "Fast OFF", "Slow OFF"]

    # load and interpolate temperature stimulus
Example #19
     print("No standards found attempting to load full training data")
     std = GradientData.load("gd_training_data.hdf5").standards
 # plot radial sim results
 plot_sim("r")
 # load and interpolate temperature stimulus
 dfile = h5py.File("stimFile.hdf5", 'r')
 tsin = np.array(dfile['sine_L_H_temp'])
 x = np.arange(tsin.size)  # stored at 20 Hz !
 xinterp = np.linspace(0, tsin.size, tsin.size * GlobalDefs.frame_rate // 20)
 temperature = np.interp(xinterp, x, tsin)
 dfile.close()
 # for our 512 unit network extract all temperature responses and corresponding IDs
 all_cells = []
 all_ids = []
 for i, d in enumerate(paths_512):
     with ActivityStore("activity_store.hdf5", std, MoTypes(False)) as act_store:
         cell_res, ids = act_store.get_cell_responses(mpath(d), temperature, i)
     all_cells.append(cell_res)
     all_ids.append(ids)
 all_cells = np.hstack(all_cells)
 all_ids = np.hstack(all_ids)
 # convolve all activity with the MTA derived nuclear Gcamp6s calcium kernel
 # we want to put network activity "on same footing" as imaging data
 tau_on = 1.4  # seconds
 tau_on *= GlobalDefs.frame_rate  # in frames
 tau_off = 2  # seconds
 tau_off *= GlobalDefs.frame_rate  # in frames
 kframes = np.arange(10 * GlobalDefs.frame_rate)  # 10 s long kernel
 kernel = 2**(-kframes / tau_off) * (1 - 2**(-kframes / tau_on))
 kernel = kernel / kernel.sum()
 # convolve with our kernel
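
The snippet breaks off just before the convolution itself; a sketch of applying the double-exponential calcium kernel defined above to one invented activity trace (frame rate assumed, causal 'full' convolution truncated to the trace length):

import numpy as np
from scipy.signal import convolve

frame_rate = 100                      # assumed frames per second
tau_on = 1.4 * frame_rate             # rise time constant in frames
tau_off = 2 * frame_rate              # decay time constant in frames
kframes = np.arange(10 * frame_rate)  # 10 s kernel support
kernel = 2**(-kframes / tau_off) * (1 - 2**(-kframes / tau_on))
kernel /= kernel.sum()
activity = np.random.rand(5000)       # invented unit activity trace
ca_trace = convolve(activity, kernel, mode='full')[:activity.size]
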
Example #20
               color="C1",
               n_boot=1000,
               condition="512 HU")
    epoch_times = np.linspace(0, test_time.max(), 10, endpoint=False)
    for e in epoch_times:
        ax.plot([e, e], [-1.2, .4], 'k--', lw=0.25)
    ax.set_ylabel("log(Squared test error)")
    ax.set_xlabel("Training step")
    ax.set_xlim(-10000)
    ax.set_xticks([0, 250000, 500000])
    ax.legend()
    sns.despine(fig, ax)
    fig.savefig(save_folder + "ce_test_errors.pdf", type="pdf")

    std_zf = c.GradientData.load_standards("gd_training_data.hdf5")
    ana_zf = a.Analyzer(MoTypes(False), std_zf, "sim_store.hdf5",
                        "activity_store.hdf5")
    std_ce = c.GradientData.load_standards("ce_gd_training_data.hdf5")
    ana_ce = a.Analyzer(MoTypes(True), std_ce, "ce_sim_store.hdf5",
                        "ce_activity_store.hdf5")

    # load and interpolate temperature stimulus
    dfile = h5py.File("stimFile.hdf5", 'r')
    tsin = np.array(dfile['sine_L_H_temp'])
    x = np.arange(tsin.size)  # stored at 20 Hz !
    xinterp = np.linspace(0, tsin.size,
                          tsin.size * GlobalDefs.frame_rate // 20)
    temperature = np.interp(xinterp, x, tsin)
    dfile.close()

    # get activity data - corresponding to sine-wave
Example #21
def plot_sim_debug(path, sim_type, drop_list=None):
    """
    Runs indicated simulation on fully trained network, retrieves debug information and plots parameter correlations
    :param path: The model path
    :param sim_type: Either "r"adial or "l"inear
    :param drop_list: Optional list of vectors that indicate which units should be kept (1) or dropped (0)
    :return:
        [0]: The simulation positions
        [1]: The debug dict
    """
    with SimulationStore("sim_store.hdf5", std, MoTypes(False)) as sim_store:
        all_pos, db_dict = sim_store.get_sim_debug(path, sim_type, "trained", drop_list)
    ct = db_dict["curr_temp"]
    val = np.logical_not(np.isnan(ct))
    ct = ct[val]
    pred = db_dict["pred_temp"][val, :]
    selb = db_dict["sel_behav"][val]
    tru = db_dict["true_temp"][val, :]
    btypes = ["N", "S", "L", "R"]
    # plot counts of different behavior types
    fig, ax = pl.subplots()
    sns.countplot(selb, order=btypes)
    sns.despine(fig, ax)
    # for each behavior type, plot scatter of prediction vs. current temperature
    fig, axes = pl.subplots(2, 2)
    axes = axes.ravel()
    for i in range(4):
        axes[i].scatter(ct, pred[:, i], s=2)
        axes[i].set_xlabel("Current temperature")
        axes[i].set_ylabel("{0} prediction".format(btypes[i]))
        axes[i].set_title("r = {0:.2g}".format(np.corrcoef(ct, pred[:, i])[0, 1]))
    sns.despine(fig)
    fig.tight_layout()
    # for each behavior type, plot scatter of prediction vs. true outcome
    fig, axes = pl.subplots(2, 2)
    axes = axes.ravel()
    for i in range(4):
        axes[i].scatter(tru[:, i], pred[:, i], s=2)
        axes[i].set_xlabel("{0} tru outcome".format(btypes[i]))
        axes[i].set_ylabel("{0} prediction".format(btypes[i]))
        axes[i].set_title("r = {0:.2g}".format(np.corrcoef(tru[:, i], pred[:, i])[0, 1]))
    sns.despine(fig)
    fig.tight_layout()
    # Plot average rank errors binned by current temperature
    rerbins = 10
    avg_rank_errors = np.zeros(rerbins)
    ctb = np.linspace(ct.min(), ct.max(), rerbins+1)
    bincents = ctb[:-1] + np.diff(ctb)/2
    for i in range(rerbins):
        in_bin = np.logical_and(ct >= ctb[i], ct < ctb[i+1])
        pib = pred[in_bin, :]
        tib = tru[in_bin, :]
        errsum = 0
        for j in range(pib.shape[0]):
            p_ranks = np.unique(pib[j, :], return_inverse=True)[1]
            t_ranks = np.unique(tib[j, :], return_inverse=True)[1]
            errsum += np.sum(np.abs(p_ranks - t_ranks))
        avg_rank_errors[i] = errsum / pib.shape[0]
    fig, ax = pl.subplots()
    ax.plot(bincents, avg_rank_errors, 'o')
    ax.set_title("Avg. rank errors by temperature")
    ax.set_xlabel("Binned start temperature")
    ax.set_ylabel("Average rank error")
    sns.despine(fig, ax)
    return all_pos, db_dict
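
The rank-error loop above converts each prediction/outcome quadruple into ranks via np.unique's inverse indices and sums the absolute rank differences. A minimal standalone version on invented values:

import numpy as np

pred = np.array([0.1, 0.7, 0.3, 0.2])  # invented behavior predictions
tru = np.array([0.2, 0.9, 0.1, 0.3])   # invented true outcomes
p_ranks = np.unique(pred, return_inverse=True)[1]  # rank 0 = smallest value
t_ranks = np.unique(tru, return_inverse=True)[1]
rank_error = np.sum(np.abs(p_ranks - t_ranks))
print(p_ranks, t_ranks, rank_error)    # [0 3 2 1] [1 3 0 2] 4
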
Example #22
    right = all_triggered_units[3]['t']
    units_turn = [l + r for (l, r) in zip(left, right)]
    return units_straight, units_turn


if __name__ == "__main__":
    save_folder = "./DataFigures/FigureS2/"
    if not os.path.exists(save_folder):
        os.makedirs(save_folder)
    sns.reset_orig()
    mpl.rcParams['pdf.fonttype'] = 42

    std_05Hz = c.GradientData.load_standards("gd_05Hz_training_data.hdf5")
    std_2Hz = c.GradientData.load_standards("gd_2Hz_training_Data.hdf5")
    std_zf = c.GradientData.load_standards("gd_training_data.hdf5")
    ana_zf = a.Analyzer(MoTypes(False), std_zf, "sim_store.hdf5", "activity_store.hdf5")

    # load cluster data from file
    clfile = h5py.File("cluster_info.hdf5", "r")
    clust_ids_zf = np.array(clfile["clust_ids"])
    clfile.close()

    # load and interpolate temperature stimulus
    dfile = h5py.File("stimFile.hdf5", 'r')
    tsin = np.array(dfile['sine_L_H_temp'])
    x = np.arange(tsin.size)  # stored at 20 Hz !
    xinterp = np.linspace(0, tsin.size, tsin.size * GlobalDefs.frame_rate // 20)
    temperature = np.interp(xinterp, x, tsin)
    dfile.close()

    # get activity data
Example #23
    fname = base_path_zf + path + "fl_tbranch_retrain/losses.hdf5"
    lossfile = h5py.File(fname, "r")
    rank_errors_t = np.array(lossfile["test_losses"])
    timepoints = np.array(lossfile["test_eval"])
    return timepoints, rank_errors_t, rank_errors_non_t


if __name__ == "__main__":
    save_folder = "./DataFigures/Figure3/"
    if not os.path.exists(save_folder):
        os.makedirs(save_folder)
    sns.reset_orig()
    mpl.rcParams['pdf.fonttype'] = 42

    std_zf = c.GradientData.load_standards("gd_training_data.hdf5")
    ana_zf = a.Analyzer(MoTypes(False), std_zf, "sim_store.hdf5",
                        "activity_store.hdf5")

    # for fish network clusters - their indices mapped to plot colors consistent with Figure 2
    pal = sns.color_palette()  # the default matplotlib color cycle
    plot_cols_zf = {
        0: (0.6, 0.6, 0.6),
        1: pal[2],
        2: (102 / 255, 45 / 255, 145 / 255),
        3: pal[0],
        4: pal[3],
        5: pal[1],
        6: (0.6, 0.6, 0.6),
        7: (0.6, 0.6, 0.6),
        "naive": (0.0, 0.0, 0.0),
        "trained": (0.9, 0.9, 0.9),