def setUpClass(cls):
    """
    Compile the network for this test.

    Builds three projections with identical tridiagonal connectivity
    (each post-synaptic neuron receives from the pre-synaptic neurons at
    rank-1, rank and rank+1, where those ranks exist) that differ only in
    their delays: none, a uniform delay of 2, and non-uniform random delays.
    """
    def my_diagonal(pre, post, weight, delay=None):
        """
        Tridiagonal connector usable by Projection.connect_with_func.

        :param pre: pre-synaptic population.
        :param post: post-synaptic population.
        :param weight: scalar weight assigned to every synapse.
        :param delay: None for no delay (0), a scalar for a uniform delay,
            or a random-distribution object exposing ``get_values(n)``
            (e.g. DiscreteUniform) for per-synapse delays.
        :return: the filled CSR connectivity structure.
        """
        synapses = CSR()
        for post_rk in post.ranks:
            # Keep only the neighbouring ranks that exist in the pre population
            # (handles the borders of the 1D population).
            pre_ranks = [rk for rk in (post_rk - 1, post_rk, post_rk + 1)
                         if rk in pre.ranks]
            nb_synapses = len(pre_ranks)
            if delay is None:
                delays = [0] * nb_synapses
            elif hasattr(delay, "get_values"):
                # Random distribution: draw one delay per synapse.
                delays = delay.get_values(nb_synapses)
            else:
                # Scalar: same delay for every synapse of this row.
                delays = [delay] * nb_synapses
            synapses.add(post_rk, pre_ranks,
                         [weight] * nb_synapses, delays)
        return synapses

    neuron = Neuron(equations="r = 1")
    neuron2 = Neuron(equations="r = sum(exc)")

    pop1 = Population(5, neuron)
    pop2 = Population(5, neuron2)

    # No delay.
    proj1 = Projection(pre=pop1, post=pop2, target="exc")
    proj1.connect_with_func(method=my_diagonal, weight=0.1)

    # Uniform delay.
    proj2 = Projection(pre=pop1, post=pop2, target="exc2")
    proj2.connect_with_func(method=my_diagonal, weight=0.1, delay=2)

    # Non-uniform (random) delays.
    proj3 = Projection(pre=pop1, post=pop2, target="exc3")
    proj3.connect_with_func(method=my_diagonal, weight=0.1,
                            delay=DiscreteUniform(1, 5))

    cls.test_net = Network()
    cls.test_net.add([pop1, pop2, proj1, proj2, proj3])
    cls.test_net.compile(silent=True)

    # Expose the compiled projections to the individual test methods.
    cls.test_proj1 = cls.test_net.get(proj1)
    cls.test_proj2 = cls.test_net.get(proj2)
    cls.test_proj3 = cls.test_net.get(proj3)
# A difference of Gaussians is used as weight wPos = Gaussian2D(1.0, [13, 13], [3, 3]) wNeg = Gaussian2D(2.0, [13, 13], [1.0, 1.0]) wDoG = (positive(wPos - wNeg) / np.sum(positive(wPos - wNeg)))[:, :, None] #V4L23_V4L4SUR = Convolution(V4L23, V4L4, target='S_SUR')####################################NEW #V4L23_V4L4SUR.connect_filter(weights=wDoG, subsampling=ssList24, keep_last_dimension=True)##NEW ## Connection from V4 L2/3 to FEF visual (excitatory) # The auxiliary population is used to pool the down-sampled V4L23 Population. # Afterwards it could be up-sampled again. The combination of the two is # currently not possible in ANNarchy ssList2v = ssList24[9::params['V4L4_shape'][-1]] V4L23_AuxE = Pooling(V4L23, AuxE, target='exc', operation='max') V4L23_AuxE.connect_pooling(extent=(1, 1) + params['PFC_shape']) AuxE_FEFv = Projection(AuxE, FEFv, target='exc', synapse=StandardSynapse) AuxE_FEFv.connect_with_func(con_scale, factor=2, delays=params['FEFv_delay']) ## Connections from FEF visual to FEF visuo-motoric(excitatory and suppressive) # A lowered Gaussian is used to simulate the combined responses G = Gaussian2D(1.0, changed['RFsizev_vm'], changed['RFsigmav_vm']) v_vm_shape = (params['FEFvm_shape'][-1], 1, 1) wvvm = np.tile((G - params['vSv2'])[None, :, :], v_vm_shape) wvvm *= params['dogScalingFactor_FEFvm']**np.arange(6)[:, None, None] # The plus sign(+) is needed, so that wvvm will not be overwritten FEFv_FEFvmE = Convolution(FEFv, FEFvm, target='E_v') FEFv_FEFvmE.connect_filters(weights=positive(+wvvm)) FEFv_FEFvmS = Convolution(FEFv, FEFvm, target='S_v') FEFv_FEFvmS.connect_filters(weights=positive(-wvvm)) ## Connection from FEF visuo-motoric to V4 L4 (amplification) # The auxiliary population is used to pool FEFvm activities over different