def build_network():
    u_spec = leabra.UnitSpec(act_thr=0.5, act_gain=100, act_sd=0.005,
                             g_bar_e=1.0, g_bar_l=0.1, g_bar_i=1.0,
                             e_rev_e=1.0, e_rev_l=0.3, e_rev_i=0.25,
                             avg_l_min=0.2, avg_l_init=0.4, avg_l_gain=2.5,
                             adapt_on=False)

    # layers
    input0_layer = leabra.Layer(4, unit_spec=u_spec, name='input0_layer')
    input1_layer = leabra.Layer(4, unit_spec=u_spec, name='input1_layer')
    output_spec = leabra.LayerSpec(lay_inhib=False)
    output_layer = leabra.Layer(4, spec=output_spec, unit_spec=u_spec,
                                name='output_layer')

    # connections
    conn_spec0 = leabra.ConnectionSpec(proj='1to1', lrule=None,
                                       rnd_mean=0.5, rnd_var=0.0,
                                       wt_scale_abs=1.0, wt_scale_rel=1.0)
    conn_spec1 = leabra.ConnectionSpec(proj='1to1', lrule=None,
                                       rnd_mean=0.5, rnd_var=0.0,
                                       wt_scale_abs=2.0, wt_scale_rel=1.0)
    conn0 = leabra.Connection(input0_layer, output_layer, spec=conn_spec0)
    conn1 = leabra.Connection(input1_layer, output_layer, spec=conn_spec1)

    # network
    network = leabra.Network(layers=[input0_layer, input1_layer, output_layer],
                             connections=[conn0, conn1])
    network.set_inputs({'input0_layer': [0.5, 0.95, 0.0, 0.25],
                        'input1_layer': [0.0, 0.5, 0.95, 0.75]})

    return network
def build_leabra_network(n_input, n_output, n_hidden, hidden_sizes=None,
                         training_flag=None, quarter_size=50):
    # specifications
    learning_rule = 'leabra' if training_flag is True else None
    unit_spec = leabra.UnitSpec(adapt_on=True, noisy_act=True)
    layer_spec = leabra.LayerSpec(lay_inhib=True)
    conn_spec = leabra.ConnectionSpec(proj='full', rnd_type='uniform',
                                      rnd_mean=0.75, rnd_var=0.2,
                                      lrule=learning_rule)

    # input/output layers
    input_layer = leabra.Layer(n_input, spec=layer_spec, unit_spec=unit_spec,
                               name='input_layer')
    output_layer = leabra.Layer(n_output, spec=layer_spec, unit_spec=unit_spec,
                                name='output_layer')

    # creating the required number of hidden layers and connections
    layers = [input_layer]
    connections = []
    if isinstance(hidden_sizes, numbers.Number):
        hidden_sizes = [hidden_sizes] * n_hidden
    for i in range(n_hidden):
        if hidden_sizes is not None:
            hidden_size = hidden_sizes[i]
        else:
            hidden_size = n_input
        hidden_layer = leabra.Layer(hidden_size, spec=layer_spec, unit_spec=unit_spec,
                                    name='hidden_layer_{}'.format(i))
        hidden_conn = leabra.Connection(layers[-1], hidden_layer, spec=conn_spec)
        layers.append(hidden_layer)
        connections.append(hidden_conn)

    last_conn = leabra.Connection(layers[-1], output_layer, spec=conn_spec)
    connections.append(last_conn)
    layers.append(output_layer)

    network_spec = leabra.NetworkSpec(quarter_size=quarter_size)
    network = leabra.Network(layers=layers, connections=connections, spec=network_spec)

    return network
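# Hedged usage sketch (not part of the original file): exercises
# build_leabra_network() above using only calls that already appear in these
# tests (set_inputs, set_outputs, trial). The layer names match the builder;
# the sizes, target pattern, and trial count are illustrative assumptions.
def _example_train_leabra_network(n_trials=20):
    net = build_leabra_network(n_input=4, n_output=2, n_hidden=1,
                               training_flag=True, quarter_size=50)
    net.set_inputs({'input_layer': [1.0, 1.0, 0.0, 0.0]})
    net.set_outputs({'output_layer': [1.0, 0.0]})
    errors = []
    for _ in range(n_trials):
        # trial() is assumed to return the trial's sum-squared error,
        # as in test_simple_pattern_learning below.
        errors.append(net.trial())
    return errors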
def test_sig_inv():
    conn_spec = leabra.ConnectionSpec()
    assert conn_spec.sig_inv(-1.0) == 0.0
    assert conn_spec.sig_inv(0.0) == 0.0
    assert conn_spec.sig_inv(0.5) == 0.5
    assert conn_spec.sig_inv(1.0) == 1.0
    assert conn_spec.sig_inv(2.0) == 1.0
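# Hedged reference sketch (assumption, not the library's code) of the inverse
# of the Leabra weight contrast-enhancement sigmoid sig(w) = 1 / (1 + (off * (1-w)/w)**gain).
# The parameter names `gain` and `off` and their defaults come from the standard
# Leabra equations, not from this repository. With these defaults the function
# has a fixed point at 0.5 and clamps its input to [0, 1], which is exactly
# what the assertions above check.
def _sig_inv_sketch(y, gain=6.0, off=1.0):
    y = min(max(y, 0.0), 1.0)          # clamp, matching the boundary asserts
    if y == 0.0 or y == 1.0:
        return y
    return 1.0 / (1.0 + ((1.0 - y) / y) ** (1.0 / gain) / off)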
def build_network(inhib, fixed_lrn_factor=None):
    if fixed_lrn_factor is not None:
        class UnitSpecFixedLrnFactor(leabra.UnitSpec):
            def avg_l_lrn(self, unit):
                return fixed_lrn_factor
        unitspec_class = UnitSpecFixedLrnFactor
    else:
        unitspec_class = leabra.UnitSpec

    u_spec = unitspec_class(act_thr=0.5, act_gain=100, act_sd=0.005,
                            g_bar_e=1.0, g_bar_i=1.0, g_bar_l=0.1,
                            e_rev_e=1.0, e_rev_i=0.25, e_rev_l=0.3,
                            avg_l_min=0.2, avg_l_init=0.4, avg_l_gain=2.5,
                            adapt_on=False)

    input_layer = leabra.Layer(1, unit_spec=u_spec, genre=leabra.INPUT,
                               name='input_layer')
    # `log_names` is not a local here: it is expected to be defined at module
    # level in the original test file (see the tuple used in build_network(n)).
    for unit in input_layer.units:
        unit.log_names = log_names
        unit.logs = {name: [] for name in unit.log_names}

    output_spec = leabra.LayerSpec(lay_inhib=inhib, g_i=1.8, ff=1.0, fb=1.0,
                                   fb_dt=1 / 1.4, ff0=0.1)  # FIXME fb_tau
    output_layer = leabra.Layer(1, spec=output_spec, unit_spec=u_spec,
                                genre=leabra.OUTPUT, name='output_layer')
    for unit in output_layer.units:
        unit.log_names = log_names
        unit.logs = {name: [] for name in unit.log_names}

    conspec = leabra.ConnectionSpec(proj='full', lrule='leabra', lrate=0.04,
                                    m_lrn=1.0, rnd_mean=0.5, rnd_var=0.0)
    conn = leabra.Connection(input_layer, output_layer, spec=conspec)

    network = leabra.Network(layers=[input_layer, output_layer], connections=[conn])
    network.set_inputs({'input_layer': [0.95]})
    network.set_outputs({'output_layer': [0.95]})

    return network
def build_network(n_input, n_output, n_hidden):
    # specifications
    unit_spec = leabra.UnitSpec(adapt_on=True, noisy_act=True)
    inout_layer_spec = leabra.LayerSpec(lay_inhib=True, g_i=2.0, ff=1, fb=0.5)
    hidden_layer_spec = leabra.LayerSpec(lay_inhib=True, g_i=1.8, ff=1, fb=1)
    conn_spec = leabra.ConnectionSpec(proj='full', lrule='leabra', lrate=0.04,
                                      rnd_type='uniform', rnd_mean=0.50, rnd_var=0.25)

    # input/output layers
    input_layer = leabra.Layer(n_input, spec=inout_layer_spec, unit_spec=unit_spec,
                               genre=leabra.INPUT, name='input_layer')
    output_layer = leabra.Layer(n_output, spec=inout_layer_spec, unit_spec=unit_spec,
                                genre=leabra.OUTPUT, name='output_layer')

    # creating the required number of hidden layers and connections
    layers = [input_layer]
    connections = []
    for i in range(n_hidden):
        hidden_layer = leabra.Layer(n_input, spec=hidden_layer_spec, unit_spec=unit_spec,
                                    genre=leabra.HIDDEN, name='hidden_layer_{}'.format(i))
        hidden_conn = leabra.Connection(layers[-1], hidden_layer, spec=conn_spec)
        layers.append(hidden_layer)
        connections.append(hidden_conn)

    last_conn = leabra.Connection(layers[-1], output_layer, spec=conn_spec)
    connections.append(last_conn)
    layers.append(output_layer)

    network_spec = leabra.NetworkSpec(quarter_size=25)
    network = leabra.Network(layers=layers, connections=connections, spec=network_spec)

    return network
def test_emergent_layer(self):
    """Test quantitative equivalence with emergent on a basic layer inhibition project."""
    emergent_data = data.parse_unit('layer_fffb.dat')

    unit_spec = leabra.UnitSpec(adapt_on=True, noisy_act=True, g_bar_e=0.3,
                                g_bar_l=0.3, g_bar_i=1.0, act_thr=0.5,
                                act_gain=40, act_sd=0.01)
    layer_spec = leabra.LayerSpec(g_i=0.4, ff=1.0, fb=0.5)
    connection_spec = leabra.ConnectionSpec(proj='1to1', rnd_mean=1.0, rnd_var=0.0)

    src_layer = leabra.Layer(10, spec=layer_spec, unit_spec=unit_spec)
    dst_layer = leabra.Layer(10, spec=layer_spec, unit_spec=unit_spec)
    connection0 = leabra.Connection(src_layer, dst_layer, spec=connection_spec)
    # set manually, because we do not use a Network here, which would normally
    # initialize this value.
    connection0.wt_scale_rel_eff = 1.0

    input_pattern = 5 * [1.0, 0.0]
    for i in range(200):
        if 10 <= i < 160:
            src_layer.force_activity(input_pattern)
        else:
            src_layer.force_activity(10 * [0.0])
        src_layer.cycle('minus')
        connection0.cycle()
        dst_layer.cycle('minus')

    self.assertTrue(quantitative_match(dst_layer.units[0].logs, emergent_data,
                                       rtol=2e-05, atol=0))
def test_emergent_layer(self):
    """Test quantitative equivalence with emergent on a basic layer inhibition project."""
    emergent_data = data.parse_unit('layer_1.txt')

    unit_spec0 = leabra.UnitSpec(adapt_on=True, noisy_act=True)
    layer_spec0 = leabra.LayerSpec(g_i=0.4, ff=1.0, fb=0.5)
    connection_spec0 = leabra.ConnectionSpec(proj='1to1', rnd_mean=1.0, rnd_var=0.0)

    src_layer = leabra.Layer(10, spec=layer_spec0, unit_spec=unit_spec0)
    dst_layer = leabra.Layer(10, spec=layer_spec0, unit_spec=unit_spec0)
    connection0 = leabra.Connection(src_layer, dst_layer, spec=connection_spec0)

    input_pattern = 5 * [1.0, 0.0]
    for i in range(200):
        if 10 <= i < 160:
            src_layer.force_activity(input_pattern)
        else:
            src_layer.force_activity(10 * [0.0])
        src_layer.cycle()
        connection0.cycle()
        dst_layer.cycle()

    check = True
    for name in dst_layer.units[0].logs.keys():
        for t, (py, em) in enumerate(zip(dst_layer.units[0].logs[name],
                                         emergent_data[name])):
            if not np.allclose(py, em, rtol=1e-05, atol=1e-07):
                print('{}:{} [py] {:.10f} != {:.10f} [emergent]'.format(name, t, py, em))
                check = False

    self.assertTrue(check)
def test_simple_usage(self):
    """Test the basic Network API"""
    input_layer = leabra.Layer(4, name='input_layer')
    output_spec = leabra.LayerSpec(g_i=1.5, ff=1, fb=0.5, fb_dt=1 / 1.4, ff0=0.1)
    output_layer = leabra.Layer(2, spec=output_spec, name='output_layer')
    conspec = leabra.ConnectionSpec(proj='full', lrule='leabra')
    conn = leabra.Connection(input_layer, output_layer, spec=conspec)

    network = leabra.Network(layers=[input_layer, output_layer], connections=[conn])
    network.set_inputs({'input_layer': [1.0, 1.0, 0.0, 0.0]})
    network.set_outputs({'output_layer': [1.0, 0.0]})

    for _ in range(20):
        network.trial()

    self.assertTrue(True)  # smoke test: only checks that no exception was raised
def build_network(n):
    log_names = ('net', 'I_net', 'v_m', 'act', 'v_m_eq', 'adapt',
                 'avg_ss', 'avg_s', 'avg_s_eff', 'avg_m', 'avg_l')
    u_spec = leabra.UnitSpec(act_thr=0.5, act_gain=100, act_sd=0.005,
                             g_bar_e=1.0, g_bar_l=0.1, g_bar_i=1.0,
                             e_rev_e=1.0, e_rev_l=0.3, e_rev_i=0.25,
                             avg_l_min=0.2, avg_l_init=0.4, avg_l_gain=2.5,
                             adapt_on=False)

    # layers
    layer_spec = leabra.LayerSpec(lay_inhib=False)
    input_layer = leabra.Layer(n, spec=layer_spec, unit_spec=u_spec,
                               genre=leabra.INPUT, name='input_layer')
    hidden_layer = leabra.Layer(n, spec=layer_spec, unit_spec=u_spec,
                                genre=leabra.HIDDEN, name='hidden_layer')
    output_layer = leabra.Layer(n, spec=layer_spec, unit_spec=u_spec,
                                genre=leabra.OUTPUT, name='output_layer')
    for layer in [input_layer, hidden_layer, output_layer]:
        for unit in layer.units:
            unit.log_names = log_names
            unit.logs = {name: [] for name in unit.log_names}

    # connections
    weights = read_weights(os.path.join(os.path.dirname(__file__),
                                        'emergent_projects/leabra_std{}.wts'.format(n)))
    inphid_conn_spec = leabra.ConnectionSpec(proj='full', lrule='leabra', lrate=0.04,
                                             rnd_mean=0.5, rnd_var=0.0,
                                             wt_scale_abs=1.0, wt_scale_rel=1.0)
    hidout_conn_spec = leabra.ConnectionSpec(proj='full', lrule='leabra', lrate=0.04,
                                             rnd_mean=0.5, rnd_var=0.0,
                                             wt_scale_abs=1.0, wt_scale_rel=1.0)
    inphid_conn = leabra.Connection(input_layer, hidden_layer, spec=inphid_conn_spec)
    inphid_conn.weights = weights[('Input', 'Hidden')]
    hidout_conn = leabra.Connection(hidden_layer, output_layer, spec=hidout_conn_spec)
    hidout_conn.weights = weights[('Hidden', 'Output')]

    # network
    network = leabra.Network(layers=[input_layer, hidden_layer, output_layer],
                             connections=[inphid_conn, hidout_conn])
    n_sqrt = int(round(np.sqrt(n)))
    network.set_inputs({'input_layer': [0.95] * n_sqrt + [0.0] * (n - n_sqrt)})
    network.set_outputs({'output_layer': [0.0] * (n - n_sqrt) + [0.95] * n_sqrt})  # FIXME 0.95 -> 1.0

    return network
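# Hedged usage sketch (not part of the original file): run a few trials on the
# network built above and return its per-trial sum-squared errors. It assumes a
# matching 'emergent_projects/leabra_std{n}.wts' file exists for the chosen n
# (n=25 is an illustrative guess), and that trial() returns the sse as in
# test_simple_pattern_learning below.
def _example_run_std_network(n=25, n_trials=5):
    net = build_network(n)
    return [net.trial() for _ in range(n_trials)]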
def test_simple_pattern_learning(self):
    """Quantitative test on the pair of neurons scenario"""
    check = True
    for inhib in [False, True]:
        if inhib:
            emergent_data = data.parse_weights('neuron_pair_inhib.txt')
        else:
            emergent_data = data.parse_weights('neuron_pair.txt')

        u_spec = leabra.UnitSpec(act_thr=0.5, act_gain=100, act_sd=0.01,
                                 g_bar_e=1.0, g_bar_i=1.0, g_bar_l=0.1,
                                 e_rev_e=1.0, e_rev_i=0.25, e_rev_l=0.3,
                                 avg_l_min=0.2, avg_l_init=0.155, avg_l_max=1.5,
                                 adapt_on=False)
        input_layer = leabra.Layer(1, unit_spec=u_spec, name='input_layer')

        g_i = 1.5 if inhib else 0.0
        output_spec = leabra.LayerSpec(g_i=g_i, ff=1.0, fb=0.5, fb_dt=1 / 1.4, ff0=0.1)
        output_layer = leabra.Layer(1, spec=output_spec, unit_spec=u_spec,
                                    name='output_layer')
        for u in output_layer.units:
            u.avg_l_lrn = 1.0

        conspec = leabra.ConnectionSpec(proj='full', lrule='leabra', lrate=0.04,
                                        m_lrn=0.0, rnd_mean=0.5, rnd_var=0.0)
        conn = leabra.Connection(input_layer, output_layer, spec=conspec)

        network = leabra.Network(layers=[input_layer, output_layer], connections=[conn])
        network.set_inputs({'input_layer': [0.95]})
        network.set_outputs({'output_layer': [0.95]})

        logs = {'wt': [], 'sse': []}
        for t in range(50):
            logs['wt'].append(conn.links[0].wt)
            sse = network.trial()
            logs['sse'].append(sse)

        for name in ['wt', 'sse']:
            for t, (py, em) in enumerate(zip(logs[name], emergent_data[name])):
                if not np.allclose(py, em, rtol=0, atol=1e-05):
                    print('{}:{:2d} [py] {:.10f} != {:.10f} [emergent] ({}inhib) diff={:g}'
                          .format(name, t, py, em, '' if inhib else 'no ', py - em))
                    check = False

    self.assertTrue(check)