def w_conc_gen(self, n):
    """Produce the next conceptor-filtered recurrent weight matrix.

    Projects a freshly generated n-neuron recurrent weight matrix
    through the conceptor at the current index, then advances the
    index so the next call uses the following conceptor.
    """
    result = np.dot(conceptors[self.count], gen_w_rec(n))
    self.count += 1
    return result
# Angular frequencies chosen so the signals complete 10 and 20 full
# cycles over the signal length t_len (i.e. they are periodic on [0, t_len]).
sin_per = (2 * np.pi * 10) / t_len
cos_per = (2 * np.pi * 20) / t_len
# Target patterns to be loaded into the reservoir.
sigs = [
    lambda t: np.sin(sin_per*t),
    lambda t: 0.5*np.cos(cos_per*t),
]
n_sigs = len(sigs)
# One aperture value per signal (all 10) — presumably conceptor apertures; confirm downstream use.
apert = np.ones(n_sigs) * 10
# Per-signal recordings: neuron rates, the driving pattern, and initial states.
rate_data = np.zeros((n_sigs, t_steps, n_neurons))
pat_data = np.zeros((n_sigs, t_steps))
init_x = np.zeros((n_sigs, n_neurons))
# Shared recurrent weight matrix for the reservoir.
w_rec = gen_w_rec(n_neurons)
#neuron_type = nengo.LIFRate()
neuron_type = TanhWithBias(seed=SEED)
for i_s, sig in enumerate(sigs):
    # get the rate data
    with nengo.Network() as rate_acc:
        in_sig = nengo.Node(sig)
        # Reservoir ensemble; seeded so each signal sees identical neurons.
        sig_reserv = nengo.Ensemble(n_neurons, sig_dims, neuron_type=neuron_type, seed=SEED)
        # Recurrent connection applies w_rec directly between neurons with no
        # synaptic filtering (synapse=0 -> one-timestep delay only).
        nengo.Connection(sig_reserv.neurons, sig_reserv.neurons, transform=w_rec, synapse=0)
        # Drive the reservoir with the raw pattern (no synapse: no filtering/delay).
        nengo.Connection(in_sig, sig_reserv, synapse=None)
        # Record unfiltered neuron activity and the input pattern.
        p_rate = nengo.Probe(sig_reserv.neurons, synapse=None)
        p_pat = nengo.Probe(in_sig)
    # NOTE(review): the simulator body continues beyond this chunk.
    with nengo.Simulator(rate_acc) as rate_sim:
# TODO: make sure these are actually periodic
# Target patterns to be loaded into the reservoir.
sigs = [
    lambda t: np.sin(20*t),
    lambda t: 0.5*np.cos(80*t),
    lambda t: funky_sig(t)
]
n_sigs = len(sigs)
# One aperture value per signal (all 10) — presumably conceptor apertures; confirm downstream use.
apert = np.ones(n_sigs) * 10
# Total signal duration and the integer number of simulation steps it spans.
t_len = t_period*t_scale
t_steps = int(t_len / dt)
# Per-signal recordings, one row per simulation step.
# FIX: arrays were sized with the float duration t_len (a TypeError as an
# array dimension); the integer step count t_steps is the intended size.
rate_data = np.zeros((n_sigs, t_steps))
pat_data = np.zeros((n_sigs, t_steps))
# Shared recurrent weight matrix for the reservoir.
w_rec_base = gen_w_rec(n_neurons)

def make_div_net(origin, w_func):
    """Build a network of per-signal ensembles bidirectionally coupled to `origin`.

    Each of the n_sigs ensembles receives the base recurrent projection from
    `origin` and feeds back through a weight matrix produced by `w_func`.
    Returns the enclosing Network and the list of created ensembles.
    """
    sig_ens = []
    # FIX: must instantiate the Network — `with nengo.Network as d_n:` used the
    # class itself as a context manager, which fails at runtime.
    with nengo.Network() as d_n:
        for s_i in range(n_sigs):
            sig_ens.append(nengo.Ensemble(n_neurons, 1, seed=SEED))
            # Drive the new ensemble from the origin with the shared weights...
            nengo.Connection(origin.neurons, sig_ens[-1].neurons, transform=w_rec_base)
            # ...and feed back through the caller-supplied weight generator.
            nengo.Connection(sig_ens[-1].neurons, origin.neurons, transform=w_func(n_neurons))
    return d_n, sig_ens

for i_s, sig in enumerate(sigs):
    # get the rate data
    # NOTE(review): the loop body continues beyond this chunk.
    with nengo.Network() as rate_acc:
        in_sig = nengo.Node(sig)