Example #1
    def setUpClass(cls):
        """
        Define and compile the network for this test.
        """
        input_neuron = Neuron(parameters="r=0.0")

        output_neuron = Neuron(equations="""
                r = sum(p1) + sum(p2)
            """)

        syn_max = Synapse(psp="pre.r * w", operation="max")

        syn_min = Synapse(psp="pre.r * w", operation="min")

        syn_mean = Synapse(psp="pre.r * w", operation="mean")

        pop1 = Population((3, 3), neuron=input_neuron)
        pop2 = Population(4, neuron=output_neuron)

        proj1 = Projection(pop1, pop2, target="p1", synapse=syn_max)
        proj1.connect_all_to_all(weights=1.0)
        proj2 = Projection(pop1, pop2, target="p2", synapse=syn_min)
        proj2.connect_all_to_all(weights=1.0)
        proj3 = Projection(pop1, pop2, target="p3", synapse=syn_mean)
        proj3.connect_all_to_all(weights=1.0)

        cls.test_net = Network()
        cls.test_net.add([pop1, pop2, proj1, proj2, proj3])
        cls.test_net.compile(silent=True)

        cls.net_pop1 = cls.test_net.get(pop1)
        cls.net_pop2 = cls.test_net.get(pop2)
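    # A minimal usage sketch (illustrative, not part of the original test):
    # drive the input population with known rates and check the "max" and
    # "min" operations against numpy (assumed to be imported as np).
    def test_max_min_psp_sketch(self):
        self.net_pop1.r = np.arange(9.0).reshape(3, 3)
        self.test_net.simulate(1)
        # each output neuron receives max(pre.r) + min(pre.r) = 8.0 + 0.0
        self.assertTrue(np.allclose(self.net_pop2.r, 8.0))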
Example #2
    def setUpClass(self):
        """
        Compile the network for this test
        """
        neuron = Neuron(
            equations = "r = transfer_function(sum(exc), 0.0)",
            functions = "transfer_function(x, t) = if x > t: if x > 2*t : (x - 2*t)^2 else: x - t else: 0."
        )

        # glob_pos() is assumed to be registered as a global function
        # (e.g. via add_function()) elsewhere in the test module.
        neuron2 = Neuron(
            equations = "r = glob_pos(sum(exc))"
        )

        synapse = Synapse(
            equations="w += hebb(pre.r, post.r)",
            functions="hebb(x, y) = x * y"
        )

        pop = Population(10, neuron)
        pop2 = Population(10, neuron2)
        proj = Projection(pop, pop, 'exc', synapse).connect_all_to_all(1.0)

        self.test_net = Network()
        self.test_net.add([pop, pop2, proj])
        self.test_net.compile(silent=True)

        self.net_pop = self.test_net.get(pop)
        self.net_proj = self.test_net.get(proj)
Example #3
    def setUpClass(cls):
        """
        Compile the network for this test.

        The input_neuron will generate a sequence of values:

            r_t = [-1, 0, 2, 5, 9, 14, 20, ...]
        """
        input_neuron = Neuron(equations="""
                r = r + t : init = -1
            """)

        neuron2 = Neuron(equations="""
                r = sum(ff)
            """)

        pop1 = Population((3), input_neuron)
        pop2 = Population((3), neuron2)

        # A projection with non-uniform delay
        proj = Projection(pop1, pop2, target="ff")
        proj.connect_one_to_one(weights=1.0, delays=Uniform(1, 5))

        # Build up network
        cls.test_net = Network()
        cls.test_net.add([pop1, pop2, proj])
        cls.test_net.compile(silent=True)

        # Store references for easier usage in test cases
        cls.net_proj = cls.test_net.get(proj)
        cls.net_pop1 = cls.test_net.get(pop1)
        cls.net_pop2 = cls.test_net.get(pop2)
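    # A minimal usage sketch (illustrative, not part of the original test):
    # pop1 follows the non-decreasing sequence documented above, so the
    # delayed sum received by pop2 can never exceed the current input; the
    # exact values depend on the randomly drawn delays in [1, 5] ms.
    def test_delayed_input_sketch(self):
        self.test_net.simulate(10)
        self.assertTrue((self.net_pop2.r <= self.net_pop1.r).all())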
Example #4
    def setUpClass(cls):
        """
        Compile the network for this test
        """
        neuron = Neuron(equations="r = 1")

        neuron2 = Neuron(equations="r = sum(exc)")

        pop1 = Population((3, 3), neuron)
        pop2 = Population((3, 3), neuron2)

        proj1 = Projection(pre=pop1, post=pop2, target="exc")
        proj2 = Projection(pre=pop1, post=pop2, target="exc")
        proj3 = Projection(pre=pop1, post=pop2, target="exc")

        proj1.connect_one_to_one(weights=0.1)
        proj2.connect_all_to_all(weights=0.1)
        proj3.connect_fixed_number_pre(3, weights=0.1)

        cls.test_net = Network()
        cls.test_net.add([pop1, pop2, proj1, proj2, proj3])
        cls.test_net.compile(silent=True)

        cls.test_proj1 = cls.test_net.get(proj1)
        cls.test_proj2 = cls.test_net.get(proj2)
        cls.test_proj3 = cls.test_net.get(proj3)
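    # A minimal usage sketch (illustrative, not part of the original test):
    # the three patterns should yield 1, 9 and 3 incoming synapses per
    # post-synaptic neuron, assuming Dendrite.size reports the number of
    # synapses as in the ANNarchy API.
    def test_pattern_size_sketch(self):
        self.assertTrue(all(d.size == 1 for d in self.test_proj1.dendrites))
        self.assertTrue(all(d.size == 9 for d in self.test_proj2.dendrites))
        self.assertTrue(all(d.size == 3 for d in self.test_proj3.dendrites))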
Example #5
    def setUpClass(self):
        """
        Compile the network for this test
        """
        neuron = Neuron(parameters="tau = 10", equations="r += 1/tau * t")

        neuron2 = Neuron(parameters="tau = 10: population",
                         equations="r += 1/tau * t: init = 1.0")

        Oja = Synapse(parameters="""
                tau = 5000.0 : postsynaptic
                alpha = 8.0
            """,
                      equations="""
                tau * dw/dt = pre.r * post.r - alpha * post.r^2 * w
            """)

        pop1 = Population(5, neuron)
        pop2 = Population(8, neuron2)

        proj = Projection(pre=pop1, post=pop2, target="exc", synapse=Oja)

        proj.connect_all_to_all(weights=1.0)

        self.test_net = Network()
        self.test_net.add([pop1, pop2, proj])
        self.test_net.compile(silent=True)

        self.net_proj = self.test_net.get(proj)
Example #6
    def setUpClass(cls):
        """
        Compile the network for this test
        """
        # Neuron definitions commonly used across the test cases
        local_eq = Neuron(
            equations="""
                noise = Uniform(0,1)
                r = t
            """
        )

        global_eq = Neuron(
            equations="""
                noise = Uniform(0,1) : population
                glob_r = t : population
                r = t
            """
        )

        mixed_eq = Neuron(
            parameters="glob_par = 1.0: population",
            equations="""
                r = t + glob_par
            """
        )

        bound_eq = Neuron(
            parameters="""
                min_r=1.0: population
                max_r=3.0: population
            """,
            equations="""
                r = t : min=min_r, max=max_r
            """
        )

        tc_loc_up_pop = Population(3, local_eq)
        tc_glob_up_pop = Population(3, global_eq)
        tc_mixed_up_pop = Population(3, mixed_eq)
        tc_bound_up_pop = Population(3, bound_eq)

        m = Monitor(tc_bound_up_pop, 'r')

        cls.test_net = Network()
        cls.test_net.add([tc_loc_up_pop, tc_glob_up_pop,
                          tc_mixed_up_pop, tc_bound_up_pop, m])
        cls.test_net.compile(silent=True)

        cls.net_loc_pop = cls.test_net.get(tc_loc_up_pop)
        cls.net_glob_pop = cls.test_net.get(tc_glob_up_pop)
        cls.net_mix_pop = cls.test_net.get(tc_mixed_up_pop)
        cls.net_bound_pop = cls.test_net.get(tc_bound_up_pop)
        cls.net_m = cls.test_net.get(m)
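    # A minimal usage sketch (illustrative, not part of the original test):
    # r = t is clamped to [min_r, max_r] = [1.0, 3.0], so every value
    # recorded by the monitor should stay inside these bounds.
    def test_bounds_sketch(self):
        self.test_net.simulate(10)
        rec = self.net_m.get('r')
        self.assertTrue((rec >= 1.0).all() and (rec <= 3.0).all())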
Example #7
    def setUpClass(self):
        """
        Compile the network for this test
        """
        neuron = Neuron(parameters="""
                r=0
            """)

        cov = Synapse(parameters="""
                tau = 5000.0
            """,
                      equations="""
                tau * dw/dt = (pre.r - mean(pre.r) ) * (post.r - mean(post.r) )
            """)

        pre = Population(6, neuron)
        post = Population(1, neuron)
        proj = Projection(pre, post, "exc",
                          synapse=cov).connect_all_to_all(weights=1.0)

        self.test_net = Network()
        self.test_net.add([pre, post, proj])

        self.test_net.compile(silent=True)

        self.net_pop = self.test_net.get(post)
Example #8
    def setUpClass(cls):
        """
        Compile the network for this test
        """
        simple_neuron = Neuron(
            parameters="r=1.0"
        ) 

        eq_set = Synapse(
            equations="""
                glob_var = 0.1 : projection
                semi_glob_var = 0.2 : postsynaptic
                w = t + glob_var + semi_glob_var
            """
        )

        pop0 = Population(3, simple_neuron)
        pop1 = Population(1, simple_neuron)

        proj = Projection(pop0, pop1, "exc", eq_set)
        proj.connect_all_to_all(weights=0.0)

        cls.test_net = Network()
        cls.test_net.add([pop0, pop1, proj])
        cls.test_net.compile(silent=True)
Example #9
    def setUpClass(cls):
        """
        Compile the network for this test.

        The input_neuron will generate a sequence of values:

            r_t = [-1, 0, 2, 5, 9, 14, 20, ...]

        one time as global (glob_r) and one time as local variable (r).
        """
        input_neuron = Neuron(equations="""
                glob_r = glob_r + t : init = -1, population
                r = r + t : init = -1
            """)

        neuron2 = Neuron(equations="""
                r = sum(ff)
            """)

        synapse_glob = Synapse(psp="pre.glob_r * w")

        pop1 = Population((3), input_neuron)
        pop2 = Population((3), neuron2)

        # A projection with uniform delay (default synapse, local pre.r)
        proj = Projection(pre=pop1, post=pop2, target="ff")
        proj.connect_one_to_one(weights=1.0, delays=10.0)

        # A projection with uniform delay (psp reads the global variable glob_r)
        proj2 = Projection(pre=pop1,
                           post=pop2,
                           target="ff_glob",
                           synapse=synapse_glob)
        proj2.connect_one_to_one(weights=1.0, delays=10.0)

        # Build up network
        cls.test_net = Network()
        cls.test_net.add([pop1, pop2, proj, proj2])
        cls.test_net.compile(silent=True)

        # Store references for easier usage in test cases
        cls.net_proj = cls.test_net.get(proj)
        cls.net_proj2 = cls.test_net.get(proj2)
        cls.net_pop1 = cls.test_net.get(pop1)
        cls.net_pop2 = cls.test_net.get(pop2)
Example #10
    def setUpClass(self):
        """
        Compile the network for this test. Adapted from the example
        in the documentation.
        """

        SimpleSpike = Neuron(equations="mp=g_exc", spike="mp >= 1.0", reset="")

        inp = Population(1, neuron=Neuron(equations="r=sin(t)"))
        out = Population(1, neuron=SimpleSpike)
        m = Monitor(out, "mp")

        proj = CurrentInjection(inp, out, 'exc')
        proj.connect_current()

        self.test_net = Network()
        self.test_net.add([inp, out, proj, m])
        self.test_net.compile(silent=True)

        self.output = self.test_net.get(out)
        self.m = self.test_net.get(m)
Example #11
    def setUpClass(cls):
        """
        Build up the network
        """
        simple_emit = Neuron(spike="t==1")
        simple_recv = Neuron(equations="""
                g_exc1 = 0
                g_exc2 = 0
                g_exc3 = 0
            """,
                             spike="g_exc1>30")

        # simple in/out populations
        in_pop = Population(5, neuron=simple_emit)
        out_pop = Population(2, neuron=simple_recv)

        # create the projections for the test cases (TC)
        # TC: no delay
        proj = Projection(pre=in_pop, post=out_pop, target="exc1")
        proj.connect_all_to_all(weights=1.0, storage_format="csr")
        # TC: uniform delay
        proj_u = Projection(pre=in_pop, post=out_pop, target="exc2")
        proj_u.connect_all_to_all(weights=1.0,
                                  delays=2.0,
                                  storage_format="csr")
        # TC: non-uniform delay
        proj_nu = Projection(pre=in_pop, post=out_pop, target="exc3")
        proj_nu.connect_all_to_all(weights=1.0, delays=Uniform(2, 10))

        # Monitor to record the currents
        m = Monitor(out_pop, ["g_exc1", "g_exc2", "g_exc3"])

        # build the network and store the required object instances
        net = Network()
        net.add([in_pop, out_pop, proj, proj_u, proj_nu, m])
        cls.test_net = net
        cls.test_net.compile(silent=True)
        cls.test_g_exc_m = net.get(m)
        cls.test_proj = net.get(proj_nu)
Example #12
    def setUpClass(cls):
        """
        Compile the network for this test
        """
        neuron = Neuron(parameters="r=0.0")

        out1 = Neuron(equations="""
                r =  sum(one2one)
            """)

        out2 = Neuron(equations="""
                r =  sum(all2all) + sum(fnp)
            """)

        pop1 = Population((17, 17), neuron)
        pop2 = Population((17, 17), out1)
        pop3 = Population(4, out2)

        proj = Projection(pre=pop1, post=pop2, target="one2one")
        proj.connect_one_to_one(
            weights=0.0,
            force_multiple_weights=True)  # weights set in the test

        proj2 = Projection(pre=pop1, post=pop3, target="all2all")
        proj2.connect_all_to_all(weights=Uniform(0, 1))

        proj3 = Projection(pre=pop1, post=pop3, target="fnp")
        proj3.connect_fixed_number_pre(5, weights=Uniform(0, 1))

        cls.test_net = Network()
        cls.test_net.add([pop1, pop2, pop3, proj, proj2, proj3])
        cls.test_net.compile(silent=True)

        cls.net_pop1 = cls.test_net.get(pop1)
        cls.net_pop2 = cls.test_net.get(pop2)
        cls.net_pop3 = cls.test_net.get(pop3)
        cls.net_proj = cls.test_net.get(proj)
        cls.net_proj2 = cls.test_net.get(proj2)
        cls.net_proj3 = cls.test_net.get(proj3)
Example #13
    def setUpClass(self):
        """
        Compile the network for this test
        """
        neuron = Neuron(parameters="""
                r=0
            """,
                        equations="""
                mean_r = mean(r)
                max_r = max(r)
                min_r = min(r)
                l1 = norm1(r)
                l2 = norm2(r)
            """)

        pop = Population(6, neuron)

        self.test_net = Network()
        self.test_net.add([pop])
        self.test_net.compile(silent=True)

        self.net_pop = self.test_net.get(pop)
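    # A minimal usage sketch (illustrative, not part of the original test):
    # set known rates, advance one step and compare the global operations
    # with their numpy counterparts (norm1 being the L1-norm); numpy is
    # assumed to be imported as np.
    def test_global_ops_sketch(self):
        values = np.array([-2.0, -1.0, 0.0, 1.0, 2.0, 3.0])
        self.net_pop.r = values
        self.test_net.simulate(1)
        self.assertAlmostEqual(self.net_pop.mean_r[0], values.mean())
        self.assertAlmostEqual(self.net_pop.l1[0], np.abs(values).sum())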
Example #14
    def setUpClass(self):
        """
        Compile the network for this test
        """
        BuiltinFuncs = Neuron(parameters="""
                base = 2.0
            """,
                              equations="""
                r = modulo(t,3)
                pr = power(base,3)
                clip_below = clip(-2, -1, 1)
                clip_within = clip(0, -1, 1)
                clip_above = clip(2, -1, 1)
            """)

        pop1 = Population(1, BuiltinFuncs)
        mon = Monitor(pop1,
                      ['r', 'pr', 'clip_below', 'clip_within', 'clip_above'])

        self.test_net = Network()
        self.test_net.add([pop1, mon])
        self.test_net.compile(silent=True)

        self.test_mon = self.test_net.get(mon)
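    # A minimal usage sketch (illustrative, not part of the original test):
    # after a few steps the recorded clip() results should be the constants
    # -1, 0 and 1, and power(base, 3) should equal 8.0; numpy is assumed to
    # be imported as np.
    def test_builtin_funcs_sketch(self):
        self.test_net.simulate(5)
        rec = self.test_mon.get()
        self.assertTrue((rec['clip_below'] == -1.0).all())
        self.assertTrue((rec['clip_within'] == 0.0).all())
        self.assertTrue((rec['clip_above'] == 1.0).all())
        self.assertTrue(np.allclose(rec['pr'], 8.0))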
Example #15
# Note: this snippet starts mid-script; numpy and the ANNarchy imports below
# are assumed, and the constants D, eta and N are defined earlier in the
# original (truncated) script.
import numpy as np
from ANNarchy import Neuron, Population, Projection, setup

J = 15.0 * np.sqrt(D)
tau = 10.0
tau_syn = 0.1
v_th = 100.0
T = 50.0
dt = 0.001
setup(method='explicit', dt=dt)

# theta neuron definition
Theta = Neuron(parameters=f"""
        tau = {tau}  : population
        eta = {eta}
        J = {J} : population
        tau_s = {tau_syn} : population
    """,
               equations="""
        v_old = v_new
        tau * dv/dt = 1.0 - cos(v_old) + (1.0 + cos(v_old))*(eta + J*g_syn*tau) : init=6.2832, min=0.0
        tau_s * dg_syn/dt = g_exc*tau_s - g_syn
        v_tmp = v/(2*pi) : int
        v_new = (v/(2*pi) - v_tmp)*2*pi
    """,
               spike="(v_new > pi)*(v_old < pi)")

# population setup
pop1 = Population(N, neuron=Theta, name="ThetaPop1")
pop1.eta = eta + D * np.tan(
    (np.pi / 2) * (2 * np.arange(1, N + 1) - N - 1) / (N + 1))

# projection setup
proj = Projection(pre=pop1, post=pop1, target='exc', name='fb')
proj.connect_all_to_all(100.0 / N, allow_self_connections=False)
Example #16
# Note: this snippet starts mid-file; the imports below are assumed,
# following the import pattern of Example #18.
import numpy as np
from ANNarchy import setup, Constant, Neuron
from parameters import params
from changed_val import changed

np.random.seed()
setup(num_threads=params['num_threads'])

# Model parameters
baseline_dopa = Constant('baseline_dopa', params['baseline_dopa'])
reversal = Constant('reversal', changed['reversal_SNr'])

#####################################################
##########  Neuron models   #########################
#####################################################
LinearNeuron = Neuron(
    parameters="""
        tau = 10.0 : population
        phi = 0.0 : population
        B = 0.0
    """,
    equations="""
        tau * dmp/dt = -mp + sum(exc) - sum(inh) + B + phi * Uniform(-1.0,1.0)
        r = pos(mp)
    """,
    name="Linear Neuron",
    description=
    "Regular rate-coded neuron with excitatory and inhibitory inputs plus baseline and noise."
)

DopamineNeuron = Neuron(
    parameters="""
        tau = 10.0 : population
        alpha = 0 : population
        B = 0.0
    """,
    equations="""
        aux = if (sum(exc)>0): pos(1.0-B-sum(inh)) else: -10 * sum(inh)
Example #17
# -*- coding: utf-8 -*-

from ANNarchy import Neuron

# The following devices provide current (rate) injection into spiking populations,
# following the hybrid-network examples: https://annarchy.readthedocs.io/en/latest/manual/Hybrid.html

CurrentInjector = Neuron(equations="""
    r = amplitude
""",
                         parameters="""
    amplitude = 0.0
""")

DCCurrentInjector = CurrentInjector

ACCurrentInjector = Neuron(equations="""
    r = amplitude * sin(omega*t + phase) + offset
""",
                           parameters="""
    omega = 0.0
    amplitude = 1.0
    phase = 0.0
    offset = 0.0
""")
Example #18
from ANNarchy import Neuron, Population, Projection, setup, Synapse, Uniform, Constant
from ANNarchy.extensions.convolution import Pooling, Convolution
from parameters import params
from functions import rangeX, Gaussian2D, positive
from Connections import con_scale
from changed_val import changed

setup(num_threads=params['num_threads'])

minVis = Constant('minVis', 0)

##########################################
##########  NEURON DEFINITION   ##########
##########################################
## Input neuron: has to be set to a value; it does not change over time.
Inp_Neuron = Neuron(parameters="r = 0.0")

## Basic auxiliary neuron transmitting an unmodified input
Aux_Neuron = Neuron(equations="""r = sum(exc)""")

## Neuron of V1 population: Applies the power rule to the given baseline input
# See Eq 4.29
V1_Neuron = Neuron(parameters="""
        pV1C = 'pV1C' : population
        tau_up = 1.0 : population
        tau_down = 20.0 : population
        baseline = 0.0
        noise = 'noise_V1' : population
        frequency = 1/15. : population
    """,
                   equations="""
Example #19
from ANNarchy import Neuron

Izhikevich_Hamker = Neuron(parameters="""
    a = 0.02
    b = 0.2
    c = -72.0
    d = 6.0
    n0 = 140.
    n1 = 5.0
    n2 = 0.04
    I = 0.0
    tau_refrac = 10.0
    tau_ampa = 10.0
    tau_gaba = 10.0
    E_ampa = 0.0
    E_gaba = -90.0
    tau_syn = 1.0
    C = 1.0
    v_th = 30.0
""",
                           equations="""
    I_syn_ex = - g_ampa*(v-E_ampa)
    I_syn_in = - g_gaba*(v-E_gaba)
    I_syn = I_syn_ex + I_syn_in - g_base*v
    dg_base/dt = -g_base/tau_syn : init = 0
    dg_ampa/dt = -g_ampa/tau_ampa : init = 0
    dg_gaba/dt = -g_gaba/tau_gaba : init = 0
    dv/dt = n2*v*v+n1*v+n0 - u/C  + I + I_syn : init = -72.0
    du/dt = a*(b*(v)-u) : init = -14.4
""",
                           spike="""
    v>=v_th
""",
                           reset="""
    v = c
    u = u+d
""",
                           refractory="""tau_refrac""")
Example #20
from ANNarchy import Neuron, Population, Projection, setup, Synapse, Uniform, Constant
from ANNarchy.extensions.convolution import Pooling, Convolution
from parameters import params
from functions import rangeX, Gaussian2D, positive
from Connections import con_scale
from changed_val import changed
setup(num_threads=params['num_threads'])

minVis = Constant('minVis', 0)

##########################################
##########  NEURON DEFINITION   ##########
##########################################
## Input neuron: has to be set to a value; it does not change over time.
FEFFix_Neuron = Neuron(parameters="r = 0.0",
                       name="FEFfix Neuron",
                       description="Input neuron, rate has to be specified.")

## Basic auxiliary neuron transmitting an unmodified input
Aux_Neuron = Neuron(
    equations="""r = sum(exc)""",
    name="Auxiliary Neuron",
    description="Basic auxiliary neuron transmitting an unmodified input.")

## Neuron of V1 population: Applies the power rule to the given baseline input
# See Eq 4.29
V1_Neuron = Neuron(
    parameters="""
        pV1C = 'pV1C' : population
        tau_up = 1.0 : population
        tau_down = 20.0 : population
Example #21
    def setUpClass(cls):
        """
        Compile the network for this test
        """
        def my_diagonal(pre, post, weight):
            synapses = CSR()
            for post_rk in post.ranks:
                pre_ranks = []
                if post_rk - 1 in pre.ranks:
                    pre_ranks.append(post_rk - 1)
                if post_rk in pre.ranks:
                    pre_ranks.append(post_rk)
                if post_rk + 1 in pre.ranks:
                    pre_ranks.append(post_rk + 1)

                synapses.add(post_rk, pre_ranks, [weight] * len(pre_ranks),
                             [0] * len(pre_ranks))

            return synapses

        def my_diagonal_with_uniform_delay(pre, post, weight, delay):
            synapses = CSR()
            for post_rk in post.ranks:
                pre_ranks = []
                if post_rk - 1 in pre.ranks:
                    pre_ranks.append(post_rk - 1)
                if post_rk in pre.ranks:
                    pre_ranks.append(post_rk)
                if post_rk + 1 in pre.ranks:
                    pre_ranks.append(post_rk + 1)

                synapses.add(post_rk, pre_ranks, [weight] * len(pre_ranks),
                             [delay] * len(pre_ranks))

            return synapses

        def my_diagonal_with_non_uniform_delay(pre, post, weight, delay):
            synapses = CSR()
            for post_rk in post.ranks:
                pre_ranks = []
                if post_rk - 1 in pre.ranks:
                    pre_ranks.append(post_rk - 1)
                if post_rk in pre.ranks:
                    pre_ranks.append(post_rk)
                if post_rk + 1 in pre.ranks:
                    pre_ranks.append(post_rk + 1)

                synapses.add(post_rk, pre_ranks, [weight] * len(pre_ranks),
                             delay.get_values(len(pre_ranks)))

            return synapses

        neuron = Neuron(equations="r = 1")

        neuron2 = Neuron(equations="r = sum(exc)")

        pop1 = Population(5, neuron)
        pop2 = Population(5, neuron2)

        proj1 = Projection(pre=pop1, post=pop2, target="exc")
        proj1.connect_with_func(method=my_diagonal, weight=0.1)

        proj2 = Projection(pre=pop1, post=pop2, target="exc2")
        proj2.connect_with_func(method=my_diagonal_with_uniform_delay,
                                weight=0.1,
                                delay=2)

        proj3 = Projection(pre=pop1, post=pop2, target="exc3")
        proj3.connect_with_func(method=my_diagonal_with_non_uniform_delay,
                                weight=0.1,
                                delay=DiscreteUniform(1, 5))

        cls.test_net = Network()
        cls.test_net.add([pop1, pop2, proj1, proj2, proj3])
        cls.test_net.compile(silent=True)

        cls.test_proj1 = cls.test_net.get(proj1)
        cls.test_proj2 = cls.test_net.get(proj2)
        cls.test_proj3 = cls.test_net.get(proj3)
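    # A minimal usage sketch (illustrative, not part of the original test):
    # for the diagonal pattern above, the border neurons of the 5-neuron
    # population receive 2 synapses and the inner ones 3, assuming
    # Dendrite.size reports the number of synapses.
    def test_diagonal_size_sketch(self):
        sizes = [d.size for d in self.test_proj1.dendrites]
        self.assertEqual(sizes, [2, 3, 3, 3, 2])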
Example #22
# Note: this snippet starts mid-file; the ANNarchy and 'params' imports are
# assumed, following the import pattern of Example #18.
from ANNarchy import setup, Constant, Neuron
from parameters import params
from changed_val import changed
import numpy as np

np.random.seed()
setup(num_threads=params['num_threads'])

# Model parameters
baseline_dopa = Constant('baseline_dopa', params['baseline_dopa'])
reversal = Constant('reversal', changed['reversal_SNr'])

#####################################################
##########  Neuron models   #########################
#####################################################
LinearNeuron = Neuron(parameters="""
        tau = 10.0 : population
        noise = 0.0 : population
        baseline = 0.0
    """,
                      equations="""
        tau * dmp/dt + mp = sum(exc) - sum(inh) + baseline + noise * Uniform(-1.0,1.0)
        r = pos(mp) : max=0.15
    """)

DopamineNeuron = Neuron(parameters="""
        tau = 10.0 : population
        firing = 0 : population
        baseline = 0.0
    """,
                        equations="""
        test = sum(inh)
        aux = if (sum(exc)>0): pos(1.0-baseline-sum(inh)) else: -10 * sum(inh)
        tau * dmp/dt + mp =  firing * aux + baseline
        r = pos(mp)