def training_schedule(self, training_time):
    """Build the input schedule used to train the associative memory.

    Each node in ``self.G`` contributes one presentation: its ID vector on
    the address channel and its HRR vector on the stored channel, each shown
    for ``training_time`` seconds.

    Returns (sim_time, address_func, stored_func), where the two functions
    are time-indexed input signals and ``sim_time`` is their total duration.
    """
    # One presentation slot per vector, all of equal length.
    # NOTE(review): assumes self.num_vectors == number of nodes in self.G.
    presentation_times = [training_time] * self.num_vectors

    addr_generators = []
    data_generators = []
    for node in self.G:
        addr_generators.append(nf.output(100, True, self.id_vectors[node].v, False))
        data_generators.append(nf.output(100, True, self.hrr_vectors[node].v, False))

    address_func = nf.make_f(addr_generators, presentation_times)
    stored_func = nf.make_f(data_generators, [training_time] * self.num_vectors)

    return (sum(presentation_times), address_func, stored_func)
def edge_testing_schedule(self, testing_time, num_tests, node_order=None):
    """Build the input schedule used to test edge traversal.

    If ``node_order`` is given, one random outgoing edge is sampled for each
    listed node (indices into ``list(self.G)``); otherwise ``num_tests`` edges
    are drawn with replacement from the whole graph.

    Each test presents ``source_hrr (*) ~edge_vector`` for ``testing_time``
    seconds; the matching correct/input vectors are those of the edge target.

    Returns (sim_time, testing_func, correct_vectors, input_vectors).
    """
    if node_order is not None:
        nodes = list(self.G)
        nodes = [nodes[i] for i in node_order]
        # One uniformly-random outgoing edge per requested node.
        edges = [random.choice(list(self.G.edges_iter(n, data=True)))
                 for n in nodes]
    else:
        all_edges = list(self.G.edges_iter(data=True))
        # Sample with replacement.
        edges = [random.choice(all_edges) for _ in xrange(num_tests)]

    # Expected result of each traversal is the target node's stored vector.
    correct_vectors = [self.hrr_vectors[v].v for u, v, d in edges]
    input_vectors = [self.id_vectors[v].v for u, v, d in edges]

    # Unbind the edge vector from the source node's HRR to form the probe.
    testing_vectors = [
        self.hrr_vectors[u].convolve(~self.edge_vectors[d['index']])
        for u, v, d in edges]
    testing_vectors = [tv.v for tv in testing_vectors]

    testing_gens = [nf.output(100, True, tv, False) for tv in testing_vectors]

    # FIX: size the schedule by the actual number of sampled edges.  With
    # node_order set, len(edges) == len(node_order), which need not equal
    # num_tests; using num_tests here mismatched generators and times.
    testing_times = [testing_time] * len(edges)

    testing_func = nf.make_f(testing_gens, testing_times)
    sim_time = sum(testing_times)

    return (sim_time, testing_func, correct_vectors, input_vectors)
# NOTE(review): this chunk is a fragment — it starts mid-function and the
# final list literal is cut off before its closing bracket.  The names it
# reads (nf, np, ortho, ortho2, training_vector, noisy_vector, ttms,
# testing_time, dim) are defined outside this view.

# --- Build input for phase 1: six sweeps, each linearly interpolating over
# one testing period between an orthogonal vector and the training vector
# (or its noisy variant), in both directions.  The lambda rescales elapsed
# time (ms) into a [0, 1] interpolation parameter.
gens1 = [
    nf.interpolator(1, ortho, training_vector, lambda x: np.true_divide(x, ttms)),
    nf.interpolator(1, training_vector, ortho, lambda x: np.true_divide(x, ttms)),
    nf.interpolator(1, ortho, noisy_vector, lambda x: np.true_divide(x, ttms)),
    nf.interpolator(1, noisy_vector, ortho, lambda x: np.true_divide(x, ttms)),
    nf.interpolator(1, ortho2, training_vector, lambda x: np.true_divide(x, ttms)),
    nf.interpolator(1, training_vector, ortho2, lambda x: np.true_divide(x, ttms)),
]
times1 = [testing_time] * 6
phase1_input = nf.make_f(gens1, times1)

# --- Build input for phase 2: hold the training vector, then silence, then
# the same interpolation sweeps as phase 1.
gens2 = [
    nf.output(100, True, training_vector, False),
    nf.output(100, True, np.zeros(dim), False),
    nf.interpolator(1, ortho, training_vector, lambda x: np.true_divide(x, ttms)),
    nf.interpolator(1, training_vector, ortho, lambda x: np.true_divide(x, ttms)),
    nf.interpolator(1, ortho, noisy_vector, lambda x: np.true_divide(x, ttms)),
    nf.interpolator(1, noisy_vector, ortho, lambda x: np.true_divide(x, ttms)),
    nf.interpolator(1, ortho2, training_vector, lambda x: np.true_divide(x, ttms)),
    # NOTE(review): the gens2 list continues past the end of this chunk.
# NOTE(review): fragment — starts mid-function (sim, dim, vector_n,
# cleanup_nperv, cleanup_n, ensemble_n, max_rates, intercepts, make_input,
# nf, hrr, np, nengo are defined outside this view) and is cut off inside
# the `post` Ensemble constructor call.

# Short warm-up run so the simulator builds/solves the first connection.
sim.run(.01)
# NOTE(review): reaches into a private attribute of the built model — this
# couples the script to a specific nengo version's internals.
pre_decoders = sim.model.connections[0]._decoders

#Make up our own encoders
vocab = hrr.Vocabulary(dim, max_similarity=0.05)
cleanup_encoders = []
for i in range(vector_n):
    cleanup_encoders.append(vocab.parse("x" + str(i)).v)
cleanup_encoders = np.array(cleanup_encoders)

times, gens = make_input(cleanup_encoders)
# One encoder row per cleanup neuron: cleanup_nperv neurons share each vector.
cleanup_encoders = np.repeat(cleanup_encoders, cleanup_nperv, 0)

# --- Make Input Function
sim_length = sum(times)
func = nf.make_f(gens, times)

# ----- Make Nodes -----
inn = nengo.Node(output=func)
# NOTE(review): max_rates/intercepts are presumably 1-element sequences
# repeated per neuron — confirm against the surrounding file.
cleanup = nengo.Ensemble(label='cleanup', neurons=nengo.LIF(cleanup_n),
                         dimensions=dim,
                         max_rates=max_rates * cleanup_n,
                         intercepts=intercepts * cleanup_n,
                         encoders=cleanup_encoders)
post = nengo.Ensemble(
    label='post', neurons=nengo.LIF(ensemble_n), dimensions=dim,
    # NOTE(review): the constructor call is cut off here in this chunk.
def build_and_run(seed, dim, DperE, NperD, cleanup_n, address_vectors,
                  stored_vectors, testing_vectors, training_time, testing_time,
                  cleanup_params, ensemble_params, oja_learning_rate, oja_scale,
                  pre_tau, post_tau, pes_learning_rate, **kwargs):
    """Build a cleanup-memory learning network in nengo and simulate it.

    Training: each address vector is presented for ``training_time`` seconds
    alongside its stored vector, with OJA learning on the input side and PES
    learning on the output side.  Testing: each testing vector is then
    presented for ``testing_time`` seconds with PES learning gated off.

    Returns ``locals()`` so the caller can pick out whichever probe handles
    and simulation objects it needs.  NOTE(review): because of this, every
    local variable name here is part of the de-facto return contract.
    NOTE(review): pre_tau/post_tau are accepted but unused in this body.
    """
    random.seed(seed)

    # Round dim down to a whole number of ensembles, then size the neurons.
    num_ensembles = int(dim / DperE)
    dim = num_ensembles * DperE
    NperE = NperD * DperE
    total_n = NperE * num_ensembles

    # NOTE(review): presumably 1-element sequences repeated per neuron —
    # this mutates the caller's dicts in place; confirm callers expect that.
    ensemble_params['max_rates'] *= NperE
    ensemble_params['intercepts'] *= NperE
    cleanup_params['max_rates'] *= cleanup_n
    cleanup_params['intercepts'] *= cleanup_n

    # Input schedules: training presentations first, then testing ones.
    address_gens = [nf.output(100, True, av, False) for av in address_vectors]
    address_gens += [nf.output(100, True, tv, False) for tv in testing_vectors]
    stored_gens = [nf.output(100, True, sv, False) for sv in stored_vectors]
    stored_gens += [nf.output(100, True, tv, False) for tv in testing_vectors]

    # NOTE(review): stored_times is sized by len(address_vectors); assumes
    # len(stored_vectors) == len(address_vectors) — confirm.
    address_times = [training_time] * len(address_vectors) + [testing_time] * len(testing_vectors)
    stored_times = [training_time] * len(address_vectors) + [testing_time] * len(testing_vectors)

    address_func = nf.make_f(address_gens, address_times)
    stored_func = nf.make_f(stored_gens, stored_times)

    sim_time = sum(address_times)
    # Moment at which training ends and learning should be frozen.
    end_time = len(address_vectors) * training_time

    print "Building..."

    model = nengo.Model("Learn cleanup", seed=seed)

    # ----- Make Input -----
    address_input = nengo.Node(output=address_func)
    stored_input = nengo.Node(output=stored_func)

    # ----- Build neural part -----
    #cleanup = build_training_cleanup(dim, num_vectors, neurons_per_vector, intercept=intercept)
    cleanup = nengo.Ensemble(label='cleanup', neurons=nengo.LIF(cleanup_n),
                             dimensions=dim, **cleanup_params)

    pre_ensembles, pre_decoders, pre_connections = \
        build_cleanup_oja(model, address_input, cleanup, DperE, NperD,
                          num_ensembles, ensemble_params, oja_learning_rate,
                          oja_scale, end_time=end_time)

    output_ensembles, error_ensembles = build_cleanup_pes(cleanup,
                                                          stored_input,
                                                          DperE, NperD,
                                                          num_ensembles,
                                                          pes_learning_rate)

    # Strongly inhibit the error ensembles after training ends, which stops
    # PES weight updates during the testing phase.
    # NOTE(review): the node callable's argument is presumably simulation
    # time for an input-less Node — confirm against this nengo version.
    gate = nengo.Node(output=lambda x: [1.0] if x > end_time else [0.0])
    for ens in error_ensembles:
        nengo.Connection(gate, ens.neurons, transform=-10 * np.ones((NperE, 1)))

    # ----- Build probes -----
    address_input_p = nengo.Probe(address_input, 'output')
    stored_input_p = nengo.Probe(stored_input, 'output')
    pre_probes = [nengo.Probe(ens, 'decoded_output', filter=0.1)
                  for ens in pre_ensembles]
    cleanup_s = nengo.Probe(cleanup, 'spikes')
    output_probes = [nengo.Probe(ens, 'decoded_output', filter=0.1)
                     for ens in output_ensembles]

    # ----- Run and get data-----
    print "Simulating..."
    sim = nengo.Simulator(model, dt=0.001)
    sim.run(sim_time)

    return locals()
def _simulate(fname, seed, dim, DperE, NperD, oja_scale, oja_learning_rate,
              pre_tau, post_tau, pes_learning_rate, cleanup_params,
              ensemble_params, cleanup_n, testing_time, training_time,
              encoder_similarity):
    """Set up an OJA-learning experiment on a single cleanup ensemble.

    Builds a training HRR vector plus orthogonal and noisy companions,
    constructs two phases of input (pre-training sweeps, then training
    followed by the same sweeps), and derives cleanup encoders biased
    towards the training vector by ``encoder_similarity``.

    NOTE(review): this chunk ends right after the encoders are built; the
    remainder of the function (model wiring, simulation, use of fname and
    the learning-rate/params arguments) is outside this view.
    """
    random.seed(seed)

    # Number of HRR noise terms convolved into the noisy probe vector.
    hrr_num = 1

    # Round dim down to a whole number of ensembles.
    num_ensembles = int(dim / DperE)
    dim = num_ensembles * DperE
    NperE = NperD * DperE

    ttms = testing_time * 1000  # in ms

    print "Building..."
    model = nengo.Model("Network Array OJA", seed=seed)

    training_vector = np.array(hrr.HRR(dim).v)
    # Two distinct vectors orthogonal to the training vector.
    ortho = nf.ortho_vector(training_vector)
    ortho2 = nf.ortho_vector(training_vector)

    hrr_noise = nf.make_hrr_noise(dim, hrr_num)
    noisy_vector = hrr_noise(training_vector)
    print "HRR sim: ", np.dot(noisy_vector, training_vector)

    # --- Build input for phase 1: six interpolation sweeps between the
    # training vector (or its noisy variant) and orthogonal vectors, in both
    # directions.  The lambda maps elapsed ms onto a [0, 1] parameter.
    gens1 = [
        nf.interpolator(1, ortho, training_vector, lambda x: np.true_divide(x, ttms)),
        nf.interpolator(1, training_vector, ortho, lambda x: np.true_divide(x, ttms)),
        nf.interpolator(1, ortho, noisy_vector, lambda x: np.true_divide(x, ttms)),
        nf.interpolator(1, noisy_vector, ortho, lambda x: np.true_divide(x, ttms)),
        nf.interpolator(1, ortho2, training_vector, lambda x: np.true_divide(x, ttms)),
        nf.interpolator(1, training_vector, ortho2, lambda x: np.true_divide(x, ttms)),
    ]
    times1 = [testing_time] * 6
    phase1_input = nf.make_f(gens1, times1)

    # --- Build input for phase 2: hold the training vector (90% of
    # training_time), then silence (10%), then repeat the six sweeps.
    gens2 = [
        nf.output(100, True, training_vector, False),
        nf.output(100, True, np.zeros(dim), False),
        nf.interpolator(1, ortho, training_vector, lambda x: np.true_divide(x, ttms)),
        nf.interpolator(1, training_vector, ortho, lambda x: np.true_divide(x, ttms)),
        nf.interpolator(1, ortho, noisy_vector, lambda x: np.true_divide(x, ttms)),
        nf.interpolator(1, noisy_vector, ortho, lambda x: np.true_divide(x, ttms)),
        nf.interpolator(1, ortho2, training_vector, lambda x: np.true_divide(x, ttms)),
        nf.interpolator(1, training_vector, ortho2, lambda x: np.true_divide(x, ttms)),
    ]
    times2 = [0.9 * training_time, 0.1 * training_time] + [testing_time] * 6
    phase2_input = nf.make_f(gens2, times2)

    # One orthogonal basis vector per cleanup neuron.
    orthos = [ortho]
    if cleanup_n > 1:
        orthos.append(ortho2)
    if cleanup_n > 2:
        orthos.extend([nf.ortho_vector(training_vector)
                       for i in range(max(cleanup_n - 2, 0))])

    # Encoders: unit-norm blends p*training + (1-p)*orthogonal, so every
    # neuron starts with similarity p to the training vector.
    p = encoder_similarity
    encoders = [p * training_vector + (1 - p) * o for o in orthos]
    encoders = np.array([enc / np.linalg.norm(enc) for enc in encoders])
    print [np.dot(enc, training_vector) for enc in encoders]
# NOTE(review): fragment — starts mid-function (sim, dim, vector_n,
# cleanup_nperv, cleanup_n, ensemble_n, max_rates, intercepts, make_input,
# nf, hrr, np, nengo are defined outside this view) and references `pre`,
# which is also defined outside this view.

# Short warm-up run so the simulator builds/solves the first connection.
sim.run(.01)
# NOTE(review): reaches into a private attribute of the built model — this
# couples the script to a specific nengo version's internals.
pre_decoders = sim.model.connections[0]._decoders

#Make up our own encoders
vocab = hrr.Vocabulary(dim, max_similarity=0.05)
cleanup_encoders = []
for i in range(vector_n):
    cleanup_encoders.append(vocab.parse("x" + str(i)).v)
cleanup_encoders = np.array(cleanup_encoders)

times, gens = make_input(cleanup_encoders)
# One encoder row per cleanup neuron: cleanup_nperv neurons share each vector.
cleanup_encoders = np.repeat(cleanup_encoders, cleanup_nperv, 0)

# --- Make Input Function
sim_length = sum(times)
func = nf.make_f(gens, times)

# ----- Make Nodes -----
inn = nengo.Node(output=func)
# NOTE(review): max_rates/intercepts are presumably 1-element sequences
# repeated per neuron — confirm against the surrounding file.
cleanup = nengo.Ensemble(label='cleanup', neurons=nengo.LIF(cleanup_n),
                         dimensions=dim,
                         max_rates=max_rates * cleanup_n,
                         intercepts=intercepts * cleanup_n,
                         encoders=cleanup_encoders)
post = nengo.Ensemble(label='post', neurons=nengo.LIF(ensemble_n),
                      dimensions=dim,
                      #max_rates=max_rates * ensemble_n,
                      intercepts=[-.1] * ensemble_n)
error = nengo.Ensemble(label='Error', neurons=nengo.LIF(ensemble_n),
                       dimensions=dim)

# ----- Make Connections -----
nengo.Connection(inn, pre)
def build_and_run(seed, dim, DperE, NperD, cleanup_n, address_vectors,
                  stored_vectors, testing_vectors, training_time, testing_time,
                  cleanup_params, ensemble_params, oja_learning_rate, oja_scale,
                  pre_tau, post_tau, pes_learning_rate, **kwargs):
    """Build and simulate a cleanup memory trained with OJA + PES learning.

    Presents each address/stored vector pair for ``training_time`` seconds
    (learning enabled), then each testing vector for ``testing_time`` seconds
    with the PES error signal inhibited.

    Returns ``locals()``, so callers read probes and the simulator out of the
    returned dict.  NOTE(review): this makes every local name part of the
    de-facto return contract.  pre_tau/post_tau are accepted but unused here.
    """
    random.seed(seed)

    # Round dim down to a whole number of DperE-dimensional ensembles.
    num_ensembles = int(dim / DperE)
    dim = num_ensembles * DperE
    NperE = NperD * DperE
    total_n = NperE * num_ensembles

    # NOTE(review): repeats (presumably 1-element) sequences per neuron and
    # mutates the caller's param dicts in place — confirm callers expect it.
    ensemble_params['max_rates'] *= NperE
    ensemble_params['intercepts'] *= NperE
    cleanup_params['max_rates'] *= cleanup_n
    cleanup_params['intercepts'] *= cleanup_n

    # Input schedules: training presentations first, then testing ones.
    address_gens = [nf.output(100, True, av, False) for av in address_vectors]
    address_gens += [nf.output(100, True, tv, False) for tv in testing_vectors]
    stored_gens = [nf.output(100, True, sv, False) for sv in stored_vectors]
    stored_gens += [nf.output(100, True, tv, False) for tv in testing_vectors]

    # NOTE(review): stored_times is sized by len(address_vectors); assumes
    # len(stored_vectors) == len(address_vectors) — confirm.
    address_times = [training_time] * len(
        address_vectors) + [testing_time] * len(testing_vectors)
    stored_times = [training_time] * len(
        address_vectors) + [testing_time] * len(testing_vectors)

    address_func = nf.make_f(address_gens, address_times)
    stored_func = nf.make_f(stored_gens, stored_times)

    sim_time = sum(address_times)
    # Time at which training presentations end and learning is frozen.
    end_time = len(address_vectors) * training_time

    print "Building..."

    model = nengo.Model("Learn cleanup", seed=seed)

    # ----- Make Input -----
    address_input = nengo.Node(output=address_func)
    stored_input = nengo.Node(output=stored_func)

    # ----- Build neural part -----
    #cleanup = build_training_cleanup(dim, num_vectors, neurons_per_vector, intercept=intercept)
    cleanup = nengo.Ensemble(label='cleanup', neurons=nengo.LIF(cleanup_n),
                             dimensions=dim, **cleanup_params)

    pre_ensembles, pre_decoders, pre_connections = \
        build_cleanup_oja(model, address_input, cleanup, DperE, NperD,
                          num_ensembles, ensemble_params, oja_learning_rate,
                          oja_scale, end_time=end_time)

    output_ensembles, error_ensembles = build_cleanup_pes(
        cleanup, stored_input, DperE, NperD, num_ensembles, pes_learning_rate)

    # Inhibit the error ensembles after training ends, which stops PES
    # weight updates during the testing phase.
    # NOTE(review): the node callable's argument is presumably simulation
    # time for an input-less Node — confirm against this nengo version.
    gate = nengo.Node(output=lambda x: [1.0] if x > end_time else [0.0])
    for ens in error_ensembles:
        nengo.Connection(gate, ens.neurons, transform=-10 * np.ones((NperE, 1)))

    # ----- Build probes -----
    address_input_p = nengo.Probe(address_input, 'output')
    stored_input_p = nengo.Probe(stored_input, 'output')
    pre_probes = [
        nengo.Probe(ens, 'decoded_output', filter=0.1) for ens in pre_ensembles
    ]
    cleanup_s = nengo.Probe(cleanup, 'spikes')
    output_probes = [
        nengo.Probe(ens, 'decoded_output', filter=0.1)
        for ens in output_ensembles
    ]

    # ----- Run and get data-----
    print "Simulating..."
    sim = nengo.Simulator(model, dt=0.001)
    sim.run(sim_time)

    return locals()