def setUpClass(cls):
        """
        Define and compile the network for this test.
        """
        input_neuron = Neuron(parameters="r=0.0")

        output_neuron = Neuron(equations="""
                r = sum(p1) + sum(p2)
            """)

        syn_max = Synapse(psp="pre.r * w", operation="max")

        syn_min = Synapse(psp="pre.r * w", operation="min")

        syn_mean = Synapse(psp="pre.r * w", operation="mean")

        pop1 = Population((3, 3), neuron=input_neuron)
        pop2 = Population(4, neuron=output_neuron)

        proj1 = Projection(pop1, pop2, target="p1", synapse=syn_max)
        proj1.connect_all_to_all(weights=1.0)
        proj2 = Projection(pop1, pop2, target="p2", synapse=syn_min)
        proj2.connect_all_to_all(weights=1.0)
        proj3 = Projection(pop1, pop2, target="p3", synapse=syn_mean)
        proj3.connect_all_to_all(weights=1.0)

        cls.test_net = Network()
        cls.test_net.add([pop1, pop2, proj1, proj2, proj3])
        cls.test_net.compile(silent=True)

        cls.net_pop1 = cls.test_net.get(pop1)
        cls.net_pop2 = cls.test_net.get(pop2)
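
    # A minimal sketch of how this fixture might be exercised (hypothetical test
    # method; assumes numpy is imported alongside ANNarchy):
    def test_max_min_psp(self):
        # assign a known pattern to the 3x3 input population
        self.net_pop1.r = numpy.arange(9.0).reshape(3, 3)
        self.test_net.simulate(2)
        # with w = 1.0, sum(p1) applies the "max" operation (8.0) and sum(p2)
        # the "min" operation (0.0), so every output neuron should read 8.0
        numpy.testing.assert_allclose(self.net_pop2.r, 8.0)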
Example #2
    def setUpClass(self):
        """
        Compile the network for this test
        """
        neuron = Neuron(
            equations = "r = transfer_function(sum(exc), 0.0)",
            functions = "transfer_function(x, t) = if x > t: if x > 2*t : (x - 2*t)^2 else: x - t else: 0."
        )

        neuron2 = Neuron(
            equations = "r = glob_pos(sum(exc))"
        )

        synapse = Synapse(
            equations="w += hebb(pre.r, post.r)",
            functions="hebb(x, y) = x * y"
        )

        pop = Population(10, neuron)
        pop2 = Population(10, neuron2)
        proj = Projection(pop, pop, 'exc', synapse).connect_all_to_all(1.0)

        self.test_net = Network()
        self.test_net.add([pop, pop2, proj])
        self.test_net.compile(silent=True)

        self.net_pop = self.test_net.get(pop)
        self.net_proj = self.test_net.get(proj)
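
    # A minimal sketch (hypothetical test method; numpy assumed to be imported):
    # with all rates initialised to zero, sum(exc) is 0, transfer_function(0, 0)
    # returns 0 and the Hebbian increment hebb(0, 0) is 0, so rates and weights
    # should keep their initial values (r = 0, w = 1) after a few steps.
    def test_initial_state(self):
        self.test_net.simulate(2)
        numpy.testing.assert_allclose(self.net_pop.r, 0.0)
        numpy.testing.assert_allclose(self.net_proj.w, 1.0)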
    def setUpClass(self):
        """
        Compile the network for this test
        """
        neuron = Neuron(parameters="""
                r=0
            """)

        cov = Synapse(parameters="""
                tau = 5000.0
            """,
                      equations="""
                tau * dw/dt = (pre.r - mean(pre.r) ) * (post.r - mean(post.r) )
            """)

        pre = Population(6, neuron)
        post = Population(1, neuron)
        proj = Projection(pre, post, "exc",
                          synapse=cov).connect_all_to_all(weights=1.0)

        self.test_net = Network()
        self.test_net.add([pre, post, proj])

        self.test_net.compile(silent=True)

        self.net_pop = self.test_net.get(post)
Example #4
    def setUpClass(cls):
        """
        Compile the network for this test
        """
        simple_neuron = Neuron(
            parameters="r=1.0"
        ) 

        eq_set = Synapse(
            equations="""
                glob_var = 0.1 : projection
                semi_glob_var = 0.2 : postsynaptic
                w = t + glob_var + semi_glob_var
            """
        )
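
        # Locality of the variables above, per their flags: "projection" gives a
        # single value shared by every synapse of the projection, "postsynaptic"
        # gives one value per post-synaptic neuron (shared across its dendrite),
        # and unflagged variables such as w remain local to each synapse.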

        pop0 = Population(3, simple_neuron)
        pop1 = Population(1, simple_neuron)

        proj = Projection(pop0, pop1, "exc", eq_set)
        proj.connect_all_to_all(weights=0.0)

        cls.test_net = Network()
        cls.test_net.add([pop0, pop1, proj])
        cls.test_net.compile(silent=True)
Example #5
    def setUpClass(self):
        """
        Compile the network for this test
        """
        neuron = Neuron(parameters="tau = 10", equations="r += 1/tau * t")

        neuron2 = Neuron(parameters="tau = 10: population",
                         equations="r += 1/tau * t: init = 1.0")

        Oja = Synapse(parameters="""
                tau = 5000.0 : postsynaptic
                alpha = 8.0
            """,
                      equations="""
                tau * dw/dt = pre.r * post.r - alpha * post.r^2 * w
            """)

        pop1 = Population(5, neuron)
        pop2 = Population(8, neuron2)

        proj = Projection(pre=pop1, post=pop2, target="exc", synapse=Oja)

        proj.connect_all_to_all(weights=1.0)

        self.test_net = Network()
        self.test_net.add([pop1, pop2, proj])
        self.test_net.compile(silent=True)

        self.net_proj = self.test_net.get(proj)
    def setUpClass(cls):
        """
        Compile the network for this test.

        The input_neuron will generate a sequence of values:

            r_t = [-1, 0, 2, 5, 9, 14, 20, ...]

        once as a global variable (glob_r) and once as a local variable (r).
        """
        input_neuron = Neuron(equations="""
                glob_r = glob_r + t : init = -1, population
                r = r + t : init = -1
            """)

        neuron2 = Neuron(equations="""
                r = sum(ff)
            """)

        synapse_glob = Synapse(psp="pre.glob_r * w")

        pop1 = Population((3), input_neuron)
        pop2 = Population((3), neuron2)

        # A projection with uniform delay
        proj = Projection(pre=pop1, post=pop2, target="ff")
        proj.connect_one_to_one(weights=1.0, delays=10.0)

        # A projection with uniform delay, reading the global pre-synaptic variable
        proj2 = Projection(pre=pop1,
                           post=pop2,
                           target="ff_glob",
                           synapse=synapse_glob)
        proj2.connect_one_to_one(weights=1.0, delays=10.0)

        # Build up network
        cls.test_net = Network()
        cls.test_net.add([pop1, pop2, proj, proj2])
        cls.test_net.compile(silent=True)

        # Store references for easier usage in test cases
        cls.net_proj = cls.test_net.get(proj)
        cls.net_proj2 = cls.test_net.get(proj2)
        cls.net_pop1 = cls.test_net.get(pop1)
        cls.net_pop2 = cls.test_net.get(pop2)
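
    # A minimal sketch (hypothetical test method; numpy assumed, and the delay
    # buffers assumed to be filled with the initial pre-synaptic value):
    def test_delayed_sum(self):
        # with a uniform delay of 10 ms and dt = 1 ms, the first steps of pop2
        # should still see the initial pre-synaptic value of -1
        self.test_net.simulate(5)
        numpy.testing.assert_allclose(self.net_pop2.r, -1.0)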
Example #7
    name="Scaled Neuron",
    description="Like Linear Neuron with multiplicative attention modulation.")

#####################################################
##########  Synapse models   ########################
#####################################################
PostCovariance = Synapse(
    parameters="""
        tau = 15000.0 : projection
        tau_alpha = 1.0 : projection
        regularization_threshold = 3.5 : projection
        threshold_post = 0.0 : projection
        threshold_pre = 0.15 : projection
        alpha_factor = 15.0 : projection
    """,
    psp="w * pre.r",
    equations="""
        tau_alpha * dalpha/dt  + alpha =  pos(post.mp - regularization_threshold) * alpha_factor
        trace = (pre.r - mean(pre.r) - threshold_pre) * pos(post.r - mean(post.r) - threshold_post)
        delta = (trace - alpha*pos(post.r - mean(post.r) - threshold_post) * pos(post.r - mean(post.r) - threshold_post)*w)
        tau * dw/dt = delta : min=0
   """,
    name="Covariance learning rule",
    description=
    "Synaptic plasticity based on covariance, with an additional regularization term."
)
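
# Reading the rule above: "trace" is a thresholded covariance between the pre-
# and post-synaptic rates, while "alpha" rises whenever post.mp exceeds
# regularization_threshold (scaled by alpha_factor) and drives an Oja-like decay
# of w, so the weights (clipped at min=0) stay regularized instead of growing
# without bound.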

ReversedSynapse = Synapse(
    parameters="""
    """,
    psp="""
        w * pos(reversal - pre.r)
Example #8
    """,
                      equations="""
        tau * dmp/dt + mp = (sum(exc) - sum(inh) + baseline + noise * Uniform(-1.0,1.0)) * (1 + sum(att))
        r = pos(mp)
    """)

#####################################################
##########  Synapse models   ########################
#####################################################
PostCovariance = Synapse(parameters="""
        tau = 15000.0 : projection
        tau_alpha = 1.0 : projection
        regularization_threshold = 3.5 : projection
        threshold_post = 0.0 : projection
        threshold_pre = 0.15 : projection
        alpha_factor = 15.0 : projection
    """,
                         equations="""
        tau_alpha * dalpha/dt  + alpha =  pos(post.mp - regularization_threshold) * alpha_factor
        trace = (pre.r - mean(pre.r) - threshold_pre) * pos(post.r - mean(post.r) - threshold_post)
        delta = (trace - alpha*pos(post.r - mean(post.r) - threshold_post) * pos(post.r - mean(post.r) - threshold_post)*w)
        tau * dw/dt = delta : min=0
   """)

ReversedSynapse = Synapse(parameters="""
    """,
                          psp="""
        w * pos(reversal - pre.r)
    """)

# DA_typ = 1 ==> D1 type,  DA_typ = -1 ==> D2 type
DAPostCovarianceNoThreshold = Synapse(parameters="""

FEFm_Neuron = Neuron(
    equations="""
        svm = sum(vm)
        tau * dr /dt = -r + vFEFvm_m*sum(vm) - vSvm*max(svm) - vSFix*sum(fix) : min=minVis
        decision = if (r>Theta): id else: -1
    """,
    name="FEFm Neuron",
    description=
    "Neuron with excitation and suppression as inputs. If the rate exceeds a threshold, the id of the neuron can be read out.",
    extra_values=params)

##########################################
##########  SYNAPSE DEFINITION   #########
##########################################
StandardSynapse = Synapse(
    psp="w * pre.r",
    name="Standard",
    description=
    "Standard synapse, without plasticity which calculates the psp as a multiplication of weight and pre-synaptic rate."
)

##########################################
######### POPULATION DEFINITION  #########
##########################################
#Input_Pop = Population(params['resVisual'], FEFFix_Neuron, name='Image')
V1 = Population(params['V1_shape'], V1_Neuron, name='V1')
V4L4 = Population(params['V4L4_shape'], V4L4_Neuron, name='V4L4')
V4L23 = Population(params['V4L23_shape'], V4L23_Neuron, name='V4L23')
FEFv = Population(params['FEF_shape'], FEFv_Neuron, name='FEFv')
FEFvm = Population(params['FEFvm_shape'], FEFvm_Neuron, name='FEFvm')
FEFm = Population(params['FEF_shape'],
                  FEFm_Neuron,
                  name='FEFm',