def test_learningrule_attr(seed):
    """Test learning_rule attribute on Connection"""
    def check_rule(rule, conn, rule_type):
        # Each LearningRule must point back at its connection and its type.
        assert rule.connection is conn and rule.learning_rule_type is rule_type

    with nengo.Network(seed=seed):
        a, b, e = [nengo.Ensemble(10, 2) for i in range(3)]
        T = np.ones((10, 10))

        # A single rule yields a single LearningRule instance.
        r1 = PES()
        c1 = nengo.Connection(a.neurons, b.neurons, learning_rule_type=r1)
        check_rule(c1.learning_rule, c1, r1)

        # A list of rules yields a list of LearningRule instances, in order.
        r2 = [PES(), BCM()]
        c2 = nengo.Connection(
            a.neurons, b.neurons, learning_rule_type=r2, transform=T)
        assert isinstance(c2.learning_rule, list)
        for rule, rule_type in zip(c2.learning_rule, r2):
            check_rule(rule, c2, rule_type)

        # A dict of rules yields a dict of LearningRule instances.
        r3 = dict(oja=Oja(), bcm=BCM())
        c3 = nengo.Connection(
            a.neurons, b.neurons, learning_rule_type=r3, transform=T)
        assert isinstance(c3.learning_rule, dict)
        assert set(c3.learning_rule) == set(r3)  # assert same keys
        for key in r3:
            check_rule(c3.learning_rule[key], c3, r3[key])
def test_learningruletypeparam():
    """LearningRuleTypeParam must be one or many learning rules."""
    class Test(object):
        lrp = LearningRuleTypeParam('lrp', default=None)

    inst = Test()
    assert inst.lrp is None

    # A single learning-rule instance is accepted as-is.
    inst.lrp = Oja()
    assert isinstance(inst.lrp, Oja)

    # So is a list of learning-rule instances.
    inst.lrp = [Oja(), Oja()]
    for lr in inst.lrp:
        assert isinstance(lr, Oja)

    # Non-LR no good
    with pytest.raises(ValueError):
        inst.lrp = 'a'

    # All elements in list must be LR
    with pytest.raises(ValueError):
        inst.lrp = [Oja(), 'a', Oja()]
learning_rule_type={"pes": nengo.PES()}) nengo.Connection(err, conn.learning_rule["pes"]) # Case 3: neurons -> ens conn = nengo.Connection(ens1.neurons, ens2, transform=np.ones((1, ens1.n_neurons)), learning_rule_type={"pes": nengo.PES()}) nengo.Connection(err, conn.learning_rule["pes"]) with Simulator(net) as sim: sim.run(0.01) @pytest.mark.parametrize('rule_type, solver', [ (BCM(learning_rate=1e-8), False), (Oja(learning_rate=1e-5), False), ([Oja(learning_rate=1e-5), BCM(learning_rate=1e-8)], False), ([Oja(learning_rate=1e-5), BCM(learning_rate=1e-8)], True), ]) def test_unsupervised(Simulator, rule_type, solver, seed, rng, plt): n = 200 m = nengo.Network(seed=seed) with m: u = nengo.Node(WhiteSignal(0.5, high=10), size_out=2) a = nengo.Ensemble(n, dimensions=2) b = nengo.Ensemble(n + 1, dimensions=2) nengo.Connection(u, a)
# NOTE(review): this chunk was whitespace-mangled (collapsed onto one line);
# formatting below is reconstructed from the visible tokens only.
# The call below is the tail of the preceding test function whose `def` line
# lies outside this chunk — presumably the neurons -> neurons PES variant
# (pre_neurons=True, post_neurons=True); confirm against the full file.
_test_pes(Simulator, nengo.LIF, plt, seed,
          pre_neurons=True, post_neurons=True, n=n,
          transform=initial_weights)


def test_pes_neuron_ens(Simulator, plt, seed, rng):
    """PES learning on a neurons -> ensemble connection.

    Starts from small random initial transform weights so that the
    learning rule, not the initial decoding, must do the work.
    """
    n = 200
    initial_weights = rng.uniform(high=1e-4, size=(2, n))
    _test_pes(Simulator, nengo.LIF, plt, seed,
              pre_neurons=True, post_neurons=False, n=n,
              transform=initial_weights)


@pytest.mark.parametrize('learning_rule_type', [
    BCM(learning_rate=1e-8),
    Oja(learning_rate=1e-5),
    [Oja(learning_rate=1e-5), BCM(learning_rate=1e-8)]])
def test_unsupervised(Simulator, learning_rule_type, seed, rng, plt):
    # NOTE(review): this definition is cut off at the chunk boundary; only
    # the visible network-setup prefix is reconstructed here.
    n = 200
    m = nengo.Network(seed=seed)
    with m:
        u = nengo.Node(WhiteSignal(0.5, high=5), size_out=2)
        a = nengo.Ensemble(n, dimensions=2)
        b = nengo.Ensemble(n, dimensions=2)
        initial_weights = rng.uniform(
            high=1e-3, size=(a.n_neurons, b.n_neurons))
        nengo.Connection(u, a)
not pre_neurons and not post_neurons and weight_solver ) with Simulator(net) as sim: assert ( any(op.tag == "PES:encode" for op in sim.model.operators) == apply_encoders ) sim.step() @pytest.mark.parametrize( "rule_type, solver", [ (BCM(learning_rate=1e-8), False), (Oja(learning_rate=1e-5), False), ([Oja(learning_rate=1e-5), BCM(learning_rate=1e-8)], False), ([Oja(learning_rate=1e-5), BCM(learning_rate=1e-8)], True), ], ) def test_unsupervised(Simulator, rule_type, solver, seed, rng, plt, allclose): n = 200 m = nengo.Network(seed=seed) with m: u = nengo.Node(WhiteSignal(0.5, high=10), size_out=2) a = nengo.Ensemble(n, dimensions=2) b = nengo.Ensemble(n + 1, dimensions=2) nengo.Connection(u, a) if solver:
train_test_proportion = train_images.shape[ 0 ] / test_images.shape[ 0 ] # inp = pyip.inputNum( prompt=f"Number of samples (current {args.train_samples}):", blank=True ) # if inp: # args.train_samples = inp # inp = pyip.inputNum( prompt=f"Number of neurons (current {args.neurons}):", blank=True ) # if inp: # args.neurons = inp # set parameters args.gain = 1e7 args.voltage = 1e-1 args.noisy = 0.01 if args.learning_rule == "Oja": lr_train = Oja( learning_rate=args.learning_rate, beta=args.beta ) if args.learning_rule == "mOja": lr_train = mOja( voltage=args.voltage, beta=args.beta, gain=args.gain, noisy=args.noisy ) simulation_loops = 10 random.seed = args.seed presentation_time = 0.35 if args.digits: train_images = np.array( [ x for i, x in enumerate( train_images ) if train_labels[ i ] in args.digits ] ) test_images = np.array( [ x for i, x in enumerate( test_images ) if test_labels[ i ] in args.digits ] ) train_labels = np.array(