Example #1
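# excerpt: the __init__ method of the model class; assumes numpy (np) and
# the project's utils module are imported in the enclosing file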
    def __init__(self, params, comm=None):
        self.params = params
        self.comm = comm
        self.n_speeds = params["n_speeds"]
        self.n_cycles = params["n_cycles"]
        self.n_directions = params["n_theta"]
        self.n_iterations_total = (
            self.params["n_theta"]
            * self.params["n_speeds"]
            * self.params["n_cycles"]
            * self.params["n_stim_per_direction"]
        )
        self.selected_conns = None
        self.n_time_steps = int(round(self.params["t_sim"] / self.params["dt_rate"]))

        # distribute units among processors
        if comm is not None:
            self.pc_id, self.n_proc = comm.rank, comm.size
            my_units = utils.distribute_n(params["n_exc"], self.n_proc, self.pc_id)
            self.my_units = range(my_units[0], my_units[1])
        else:
            self.my_units = range(self.params["n_exc"])
            self.pc_id, self.n_proc = 0, 1

        try:
            self.tuning_prop = np.loadtxt(self.params["tuning_prop_means_fn"])
        except IOError:
            print "Tuning properties file not found: %s\n Will create new ones" % self.params["tuning_prop_means_fn"]
            self.tuning_prop = utils.set_tuning_prop(self.params, mode="hexgrid", cell_type="exc")
            np.savetxt(self.params["tuning_prop_means_fn"], self.tuning_prop)

        if comm is not None:
            comm.barrier()

        self.initial_value = 1e-2  # should be around 1 / n_units per HC, i.e. 1. / (params['N_theta'] * params['N_V'])
        self.eps = 0.1 * self.initial_value
        self.normalize = False  # normalize input within a 'hypercolumn'

        all_conns = []
        # build all n_exc * (n_exc - 1) ordered pairs, then distribute them among processors
        for i in xrange(params["n_exc"]):
            for j in xrange(params["n_exc"]):
                if i != j:
                    all_conns.append((i, j))
        self.my_conns = utils.distribute_list(all_conns, self.n_proc, self.pc_id)

        # setup data structures
        self.my_conns = np.array(self.my_conns)
        np.savetxt("delme_my_conns_%d.txt" % self.pc_id, self.my_conns, fmt="%d\t%d")
        self.pre_ids = np.unique(self.my_conns[:, 0])
        self.post_ids = np.unique(self.my_conns[:, 1])
        self.gid_idx_map_pre = {}
        self.gid_idx_map_post = {}
        for i, gid in enumerate(self.pre_ids):
            self.gid_idx_map_pre[gid] = i
        for i, gid in enumerate(self.post_ids):
            self.gid_idx_map_post[gid] = i
        self.my_selected_conns = []
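
The constructor above leans on utils.distribute_n and utils.distribute_list (which the later examples reuse) to split work across MPI ranks, but neither helper is shown. Below is a minimal sketch of what they might look like, assuming a contiguous block split; the actual implementations in bcpnn-mt's utils.py may differ.

# Hypothetical reimplementation -- not taken from bcpnn-mt.
def distribute_n(n, n_proc, pc_id):
    """Return the (start, stop) interval of the n units owned by rank pc_id."""
    base, rem = divmod(n, n_proc)
    start = pc_id * base + min(pc_id, rem)   # ranks below `rem` take one extra unit
    stop = start + base + (1 if pc_id < rem else 0)
    return start, stop


def distribute_list(lst, n_proc, pc_id):
    """Return the contiguous slice of lst owned by rank pc_id."""
    start, stop = distribute_n(len(lst), n_proc, pc_id)
    return lst[start:stop]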
Example #2
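# excerpt: main-script section; assumes params, utils, times, full_system,
# tau_dict and the compute_* helpers are defined earlier in the script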
if full_system: 
    # distribute the connections
    all_conns = []
    for i in xrange(n_cells):
        for j in xrange(n_cells):
            if i != j:
                all_conns.append((i, j))

    my_conns = utils.distribute_list(all_conns, n_proc, pc_id)
    output_fn = params['bcpnntrace_folder'] + 'pij_%d.dat' % (pc_id)
    input_fn_base = params['input_rate_fn_base']
    compute_my_pijs(my_conns, output_fn, tau_dict, input_fn_base)
    times.append(time.time())
    t_comp = times[-1] - times[0]
    print 'Computation time: %d sec = %.1f min' % (t_comp, t_comp / 60.)

else:
    my_conns = [(25, 81)]
#    my_conns = [(145, 300)]
    compute_pre_post_traces(my_conns)

    # traces computed above by Bcpnn.get_spiking_weight_and_bias
    zi_debug = np.loadtxt(params['ztrace_fn_base'] + "%d.dat" % my_conns[0][0])
    zj_debug = np.loadtxt(params['ztrace_fn_base'] + "%d.dat" % my_conns[0][1])
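
In the full-system branch each rank writes its partial result to its own pij_<pc_id>.dat file; nothing in the excerpt merges them. A hedged sketch of a post-run merge step follows, assuming the per-rank files are plain-text arrays with an identical column layout (the file format is an assumption, not taken from bcpnn-mt).

import numpy as np

# Hypothetical post-processing step -- file naming follows the pij_%d.dat
# pattern used above; the column layout is assumed, not verified.
def merge_pij_files(folder, n_proc):
    parts = [np.atleast_2d(np.loadtxt(folder + 'pij_%d.dat' % rank))
             for rank in xrange(n_proc)]
    return np.concatenate(parts, axis=0)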
Example #3
File: Bcpnn.py    Project: MinaKh/bcpnn-mt
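# excerpt requirements: numpy as np, NeuroTools.signals as nts, the project's
# utils module, and get_spiking_weight_and_bias from this file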
def bcpnn_offline(params, connection_matrix, sim_cnt=0, pc_id=0, n_proc=1, save_all=False):
    """
    Arguments:
        params: parameter dictionary
        connection_matrix: two-dim numpy array storing cell-to-cell connections (only non-zero elements will be processed)
                            or
                           file name
        sim_cnt: int for recording to file

    This function does basically the same thing as the script bcpnn_offline.py
    """
    if isinstance(connection_matrix, str):
        connection_matrix = np.load(connection_matrix)
    non_zeros = connection_matrix.nonzero()
    conns = zip(non_zeros[0], non_zeros[1])
    my_conns = utils.distribute_list(conns, n_proc, pc_id)

    n, m = connection_matrix.shape
    for i in xrange(len(my_conns)):
        pre_id = my_conns[i][0]
        post_id = my_conns[i][1]

        # extract the spike times from the file where all cells belonging to one minicolumn are stored
        # pre
        mc_index_pre = pre_id / params['n_exc_per_mc']
        fn_pre = params['exc_spiketimes_fn_base'] + str(pre_id) + '.ras'
        spklist_pre = nts.load_spikelist(fn_pre, range(params['n_exc_per_mc']), t_start=0, t_stop=params['t_sim'])
        spiketimes_pre = spklist_pre[pre_id % params['n_exc_per_mc']].spike_times # TODO: check: + 1 for NeuroTools 
        pre_trace = utils.convert_spiketrain_to_trace(spiketimes_pre, params['t_sim'] + 1) # + 1 is to handle spikes in the last time step

        # post
        mc_index_post = post_id / params['n_exc_per_mc']
        fn_post = params['exc_spiketimes_fn_base'] + str(post_id) + '.ras'
        spklist_post = nts.load_spikelist(fn_post, range(params['n_exc_per_mc']), t_start=0, t_stop=params['t_sim'])
        spiketimes_post = spklist_post[post_id % params['n_exc_per_mc']].spike_times  # TODO: check: + 1 for NeuroTools
        post_trace = utils.convert_spiketrain_to_trace(spiketimes_post, params['t_sim'] + 1)

        # compute the BCPNN traces and the resulting weight and bias time courses
        wij, bias, pi, pj, pij, ei, ej, eij, zi, zj = get_spiking_weight_and_bias(pre_trace, post_trace)

        # update
        dw = (wij.max() - wij.min()) * params['dw_scale']
        print "DEBUG, updating weight[%d, %d] by %.1e to %.1e" % (pre_id, post_id, dw, connection_matrix[pre_id, post_id] + dw)
        connection_matrix[pre_id, post_id] += dw
        bias[post_id] = bias.max()
        
        if save_all:
            # save
            output_fn = params['weights_fn_base'] + "%d_%d.npy" % (pre_id, post_id)
            np.save(output_fn, wij)

            output_fn = params['bias_fn_base'] + "%d.npy" % (post_id)
            np.save(output_fn, bias)

            output_fn = params['ztrace_fn_base'] + "%d.npy" % pre_id
            np.save(output_fn, zi)
            output_fn = params['ztrace_fn_base'] + "%d.npy" % post_id
            np.save(output_fn, zj)

            output_fn = params['etrace_fn_base'] + "%d.npy" % pre_id
            np.save(output_fn, ei)
            output_fn = params['etrace_fn_base'] + "%d.npy" % post_id
            np.save(output_fn, ej)
            output_fn = params['etrace_fn_base'] + "%d_%d.npy" % (pre_id, post_id)
            np.save(output_fn, eij)

            output_fn = params['ptrace_fn_base'] + "%d.npy" % pre_id
            np.save(output_fn, pi)
            output_fn = params['ptrace_fn_base'] + "%d.npy" % post_id
            np.save(output_fn, pj)
            output_fn = params['ptrace_fn_base'] + "%d_%d.npy" % (pre_id, post_id)
            np.save(output_fn, pij)

    print "debug", params['conn_mat_ee_fn_base'] + str(sim_cnt+1) + '.npy'
    np.savetxt(params['conn_mat_ee_fn_base'] + str(sim_cnt+1) + '.npy', connection_matrix)
    print "debug", params['bias_values_fn_base'] + str(sim_cnt+1) + '.npy'
    np.savetxt(params['bias_values_fn_base'] + str(sim_cnt+1) + '.npy', bias)

    return connection_matrix, bias
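
utils.convert_spiketrain_to_trace is called above to turn a list of spike times into a continuous trace, but its body is not shown. Below is a minimal sketch under the assumption of a 1 ms grid and a binary 0/1 trace; the real helper may use a different resolution, dtype, or spike amplitude.

import numpy as np

# Hypothetical sketch -- not the actual bcpnn-mt implementation.
def convert_spiketrain_to_trace(spiketimes, t_max):
    """Binary trace with one 1 ms bin per step; each spike sets its bin to 1."""
    trace = np.zeros(int(round(t_max)))
    for t in spiketimes:
        trace[int(t)] = 1.   # t_max = t_sim + 1 leaves room for a spike at t == t_sim
    return trace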