Example #1
    def setUpClass(self):
        """
        Compile the network for this test
        """
        neuron = Neuron(
            equations = "r = transfer_function(sum(exc), 0.0)",
            functions = "transfer_function(x, t) = if x > t: if x > 2*t : (x - 2*t)^2 else: x - t else: 0."
        )

        neuron2 = Neuron(
            equations = "r = glob_pos(sum(exc))"
        )

        synapse = Synapse(
            equations="w += hebb(pre.r, post.r)",
            functions="hebb(x, y) = x * y"
        )

        pop = Population(10, neuron)
        pop2 = Population(10, neuron2)
        proj = Projection(pop, pop, 'exc', synapse).connect_all_to_all(1.0)

        self.test_net = Network()
        self.test_net.add([pop, pop2, proj])
        self.test_net.compile(silent=True)

        self.net_pop = self.test_net.get(pop)
        self.net_proj = self.test_net.get(proj)
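
Note: the nested one-line conditional in transfer_function is easier to read when transcribed into plain Python. A minimal sketch; the threshold value 2.0 matches the assertions in Example #23:

def transfer_function(x, t):
    # Plain-Python transcription of the neuron's custom function above.
    if x > t:
        if x > 2 * t:
            return (x - 2 * t) ** 2
        return x - t
    return 0.0

# With t = 2.0, inputs [0, 1, 2, 3] map to [0, 0, 0, 1], as asserted
# in the test cases of Example #23.
assert [transfer_function(x, 2.0) for x in [0.0, 1.0, 2.0, 3.0]] == [0.0, 0.0, 0.0, 1.0]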
Example #2
    def setUpClass(self):
        """
        Compile the network for this test
        """
        neuron = Neuron(parameters="""
                r=0
            """)

        cov = Synapse(parameters="""
                tau = 5000.0
            """,
                      equations="""
                tau * dw/dt = (pre.r - mean(pre.r) ) * (post.r - mean(post.r) )
            """)

        pre = Population(6, neuron)
        post = Population(1, neuron)
        proj = Projection(pre, post, "exc",
                          synapse=cov).connect_all_to_all(weights=1.0)

        self.test_net = Network()
        self.test_net.add([pre, post, proj])

        self.test_net.compile(silent=True)

        self.net_pop = self.test_net.get(post)
Example #3
    def setUpClass(self):
        """
        Compile the network for this test
        """
        neuron = Neuron(parameters="tau = 10", equations="r += 1/tau * t")

        neuron2 = Neuron(parameters="tau = 10: population",
                         equations="r += 1/tau * t: init = 1.0")

        Oja = Synapse(parameters="""
                tau = 5000.0 : postsynaptic
                alpha = 8.0
            """,
                      equations="""
                tau * dw/dt = pre.r * post.r - alpha * post.r^2 * w
            """)

        pop1 = Population(5, neuron)
        pop2 = Population(8, neuron2)

        proj = Projection(pre=pop1, post=pop2, target="exc", synapse=Oja)

        proj.connect_all_to_all(weights=1.0)

        self.test_net = Network()
        self.test_net.add([pop1, pop2, proj])
        self.test_net.compile(silent=True)

        self.net_proj = self.test_net.get(proj)
Example #4
    def setUpClass(self):
        """
        Compile the network for this test
        """
        self.test_net = Network()
        self.test_net.add([pop1, pop2, proj])
        self.test_net.compile(silent=True, debug_build=True, clean=True)
Example #5
class test_CurrentInjection(unittest.TestCase):
    """
    Test the implementation of the specialized projection
    'CurrentInjection'. Based on the example in the documentation.
    """
    @classmethod
    def setUpClass(self):
        """
        Compile the network for this test, adapted from the example
        in the documentation.
        """

        SimpleSpike = Neuron(equations="mp=g_exc", spike="mp >= 1.0", reset="")

        inp = Population(1, neuron=Neuron(equations="r=sin(t)"))
        out = Population(1, neuron=SimpleSpike)
        m = Monitor(out, "mp")

        proj = CurrentInjection(inp, out, 'exc')
        proj.connect_current()

        self.test_net = Network()
        self.test_net.add([inp, out, proj, m])
        self.test_net.compile(silent=True)

        self.output = self.test_net.get(out)
        self.m = self.test_net.get(m)

    def setUp(self):
        """
        Automatically called before each test method to reset the network.
        """
        self.test_net.reset()

    def test_compile(self):
        """
        Enforce compilation of the network.
        """
        pass

    def test_run_one_loop(self):
        self.test_net.simulate(11)

        rec_data = self.m.get("mp")[:, 0]
        # there is 1 dt delay between the input and output
        target = [0] + [sin(x) for x in range(10)]

        self.assertTrue(np.allclose(rec_data, target))
Example #6
    def setUpClass(cls):
        """
        Define and compile the network for this test.
        """
        input_neuron = Neuron(parameters="r=0.0")

        output_neuron = Neuron(equations="""
                r = sum(p1) + sum(p2)
            """)

        syn_max = Synapse(psp="pre.r * w", operation="max")

        syn_min = Synapse(psp="pre.r * w", operation="min")

        syn_mean = Synapse(psp="pre.r * w", operation="mean")

        pop1 = Population((3, 3), neuron=input_neuron)
        pop2 = Population(4, neuron=output_neuron)

        proj1 = Projection(pop1, pop2, target="p1", synapse=syn_max)
        proj1.connect_all_to_all(weights=1.0)
        proj2 = Projection(pop1, pop2, target="p2", synapse=syn_min)
        proj2.connect_all_to_all(weights=1.0)
        proj3 = Projection(pop1, pop2, target="p3", synapse=syn_mean)
        proj3.connect_all_to_all(weights=1.0)

        cls.test_net = Network()
        cls.test_net.add([pop1, pop2, proj1, proj2, proj3])
        cls.test_net.compile(silent=True)

        cls.net_pop1 = cls.test_net.get(pop1)
        cls.net_pop2 = cls.test_net.get(pop2)
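
Note: the operation argument replaces the default summation of the weighted pre-synaptic inputs. A NumPy sketch of what the three projections should deliver, assuming all weights are 1.0; note that r of output_neuron only combines the p1 and p2 targets:

import numpy as np

pre_r = np.random.random((3, 3)).flatten()  # input rates, one per pre neuron
w = np.ones_like(pre_r)

psp = pre_r * w
p1 = psp.max()    # operation="max"
p2 = psp.min()    # operation="min"
p3 = psp.mean()   # operation="mean", computed but not used in r
expected_r = p1 + p2  # r = sum(p1) + sum(p2) for every neuron in pop2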
Example #7
    def setUpClass(cls):
        """
        Compile the network for this test.

        The input_neuron will generate a sequence of values:

            r_t = [-1, 0, 2, 5, 9, 14, 20, ...]
        """
        input_neuron = Neuron(equations="""
                r = r + t : init = -1
            """)

        neuron2 = Neuron(equations="""
                r = sum(ff)
            """)

        pop1 = Population((3), input_neuron)
        pop2 = Population((3), neuron2)

        # A projection with non-uniform delay
        proj = Projection(pop1, pop2, target="ff")
        proj.connect_one_to_one(weights=1.0, delays=Uniform(1, 5))

        # Build up network
        cls.test_net = Network()
        cls.test_net.add([pop1, pop2, proj])
        cls.test_net.compile(silent=True)

        # Store references for easier usage in test cases
        cls.net_proj = cls.test_net.get(proj)
        cls.net_pop1 = cls.test_net.get(pop1)
        cls.net_pop2 = cls.test_net.get(pop2)
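
Note: the sequence in the docstring can be verified directly, assuming r is updated once per step starting at t = 0:

# r = r + t with init = -1 yields the sequence from the docstring.
r, seq = -1, []
for t in range(7):
    r = r + t
    seq.append(r)
assert seq == [-1, 0, 2, 5, 9, 14, 20]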
Example #8
    def setUpClass(cls):
        """
        Compile the network for this test
        """
        simple_neuron = Neuron(
            parameters="r=1.0"
        ) 

        eq_set = Synapse(
            equations="""
                glob_var = 0.1 : projection
                semi_glob_var = 0.2 : postsynaptic
                w = t + glob_var + semi_glob_var
            """
        )

        pop0 = Population(3, simple_neuron)
        pop1 = Population(1, simple_neuron)

        proj = Projection(pop0, pop1, "exc", eq_set)
        proj.connect_all_to_all(weights=0.0)

        cls.test_net = Network()
        cls.test_net.add([pop0, pop1, proj])
        cls.test_net.compile(silent=True)
Example #9
    def setUpClass(cls):
        """
        Compile the network for this test
        """
        neuron = Neuron(equations="r = 1")

        neuron2 = Neuron(equations="r = sum(exc)")

        pop1 = Population((3, 3), neuron)
        pop2 = Population((3, 3), neuron2)

        proj1 = Projection(pre=pop1, post=pop2, target="exc")
        proj2 = Projection(pre=pop1, post=pop2, target="exc")
        proj3 = Projection(pre=pop1, post=pop2, target="exc")

        proj1.connect_one_to_one(weights=0.1)
        proj2.connect_all_to_all(weights=0.1)
        proj3.connect_fixed_number_pre(3, weights=0.1)

        cls.test_net = Network()
        cls.test_net.add([pop1, pop2, proj1, proj2, proj3])
        cls.test_net.compile(silent=True)

        cls.test_proj1 = cls.test_net.get(proj1)
        cls.test_proj2 = cls.test_net.get(proj2)
        cls.test_proj3 = cls.test_net.get(proj3)
Example #10
    def setUpClass(self):
        """
        Compile the network for this test
        """
        self.test_net = Network()
        self.test_net.add([pop1, pop2, proj])
        self.test_net.compile(silent=True, debug_build=True, clean=True)
Example #11
class test_CSRConnectivity(unittest.TestCase):
    """
    This class tests the functionality of the connectivity patterns within *Projections*.
    """
    @classmethod
    def setUpClass(self):
        """
        Compile the network for this test
        """
        self.test_net = Network()
        self.test_net.add([pop1, pop2, proj])
        self.test_net.compile(silent=True, debug_build=True, clean=True)

    def setUp(self):
        """
        In our *setUp()* function we call *reset()* to reset the network before every test.
        """
        self.test_net.reset()

    def test_all_to_all(self):
        """
        Tests the *all_to_all* connectivity pattern, in which every pre-synaptic neuron
        is connected to every post-synaptic neuron.

        We test correctness of ranks and weight values.
        """
        tmp = self.test_net.get(proj)

        #self.assertEqual(tmp.dendrite(3).rank, [0, 1, 2, 3, 4, 5, 6, 7, 8])
        self.assertTrue(numpy.allclose(tmp.dendrite(3).w, numpy.ones((8, 1)) * 0.1))
Example #12
class test_SynapticAccess(unittest.TestCase):
    """
    ANNarchy supports several global operations, which are always applied
    to variables of *Population* objects.

    This particular test focuses on their usage in synaptic learning rules
    (for instance, covariance).
    """
    @classmethod
    def setUpClass(self):
        """
        Compile the network for this test
        """
        neuron = Neuron(parameters="""
                r=0
            """)

        cov = Synapse(parameters="""
                tau = 5000.0
            """,
                      equations="""
                tau * dw/dt = (pre.r - mean(pre.r) ) * (post.r - mean(post.r) )
            """)

        pre = Population(6, neuron)
        post = Population(1, neuron)
        proj = Projection(pre, post, "exc",
                          synapse=cov).connect_all_to_all(weights=1.0)

        self.test_net = Network()
        self.test_net.add([pre, post, proj])

        self.test_net.compile(silent=True)

        self.net_pop = self.test_net.get(post)

    @classmethod
    def tearDownClass(cls):
        del cls.test_net

    def test_compile(self):
        """
        Enforce compilation of the network.
        """
        pass
Example #13
class test_CSRConnectivity(unittest.TestCase):
    """
    This class tests the functionality of the connectivity patterns within *Projections*.
    """
    @classmethod
    def setUpClass(self):
        """
        Compile the network for this test
        """
        self.test_net = Network()
        self.test_net.add([pop1, pop2, proj])
        self.test_net.compile(silent=True, debug_build=True, clean=True)

    def setUp(self):
        """
        In our *setUp()* function we call *reset()* to reset the network before every test.
        """
        self.test_net.reset()

    def test_all_to_all(self):
        """
        Tests the *all_to_all* connectivity pattern, in which every pre-synaptic neuron
        is connected to every post-synaptic neuron.

        We test correctness of ranks and weight values.
        """
        tmp = self.test_net.get(proj)

        #self.assertEqual(tmp.dendrite(3).rank, [0, 1, 2, 3, 4, 5, 6, 7, 8])
        self.assertTrue(
            numpy.allclose(tmp.dendrite(3).w,
                           numpy.ones((8, 1)) * 0.1))
Example #14
    def setUpClass(cls):
        """
        Compile the network for this test
        """
        # neuron definitions commonly used in the test cases
        local_eq = Neuron(
            equations="""
                noise = Uniform(0,1)
                r = t
            """
        )

        global_eq = Neuron(
            equations="""
                noise = Uniform(0,1) : population
                glob_r = t : population
                r = t
            """
        )

        mixed_eq = Neuron(
            parameters="glob_par = 1.0: population",
            equations="""
                r = t + glob_par
            """
        )

        bound_eq = Neuron(
            parameters="""
                min_r=1.0: population
                max_r=3.0: population
            """,
            equations="""
                r = t : min=min_r, max=max_r
            """
        )

        tc_loc_up_pop = Population(3, local_eq)
        tc_glob_up_pop = Population(3, global_eq)
        tc_mixed_up_pop = Population(3, mixed_eq)
        tc_bound_up_pop = Population(3, bound_eq)

        m = Monitor(tc_bound_up_pop, 'r')

        cls.test_net = Network()
        cls.test_net.add([tc_loc_up_pop, tc_glob_up_pop,
                          tc_mixed_up_pop, tc_bound_up_pop, m])
        cls.test_net.compile(silent=True)

        cls.net_loc_pop = cls.test_net.get(tc_loc_up_pop)
        cls.net_glob_pop = cls.test_net.get(tc_glob_up_pop)
        cls.net_mix_pop = cls.test_net.get(tc_mixed_up_pop)
        cls.net_bound_pop = cls.test_net.get(tc_bound_up_pop)
        cls.net_m = cls.test_net.get(m)
Example #15
    def setUpClass(self):
        """
        Compile the network for this test, adapted from the example
        in the documentation.
        """

        SimpleSpike = Neuron(equations="mp=g_exc", spike="mp >= 1.0", reset="")

        inp = Population(1, neuron=Neuron(equations="r=sin(t)"))
        out = Population(1, neuron=SimpleSpike)
        m = Monitor(out, "mp")

        proj = CurrentInjection(inp, out, 'exc')
        proj.connect_current()

        self.test_net = Network()
        self.test_net.add([inp, out, proj, m])
        self.test_net.compile(silent=True)

        self.output = self.test_net.get(out)
        self.m = self.test_net.get(m)
Example #16
    def setUpClass(self):
        """
        Compile the network for this test
        """
        neuron = Neuron(parameters="""
                r=0
            """,
                        equations="""
                mean_r = mean(r)
                max_r = max(r)
                min_r = min(r)
                l1 = norm1(r)
                l2 = norm2(r)
            """)

        pop = Population(6, neuron)

        self.test_net = Network()
        self.test_net.add([pop])
        self.test_net.compile(silent=True)

        self.net_pop = self.test_net.get(pop)
Example #17
    def setUpClass(self):
        """
        Compile the network for this test
        """
        BuiltinFuncs = Neuron(parameters="""
                base = 2.0
            """,
                              equations="""
                r = modulo(t,3)
                pr = power(base,3)
                clip_below = clip(-2, -1, 1)
                clip_within = clip(0, -1, 1)
                clip_above = clip(2, -1, 1)
            """)

        pop1 = Population(1, BuiltinFuncs)
        mon = Monitor(pop1,
                      ['r', 'pr', 'clip_below', 'clip_within', 'clip_above'])

        self.test_net = Network()
        self.test_net.add([pop1, mon])
        self.test_net.compile(silent=True)

        self.test_mon = self.test_net.get(mon)
Example #18
    def setUpClass(cls):
        """
        Compile the network for this test.

        The input_neuron will generate a sequence of values:

            r_t = [-1, 0, 2, 5, 9, 14, 20, ...]

        once as a global variable (glob_r) and once as a local variable (r).
        """
        input_neuron = Neuron(equations="""
                glob_r = glob_r + t : init = -1, population
                r = r + t : init = -1
            """)

        neuron2 = Neuron(equations="""
                r = sum(ff)
            """)

        synapse_glob = Synapse(psp="pre.glob_r * w")

        pop1 = Population((3), input_neuron)
        pop2 = Population((3), neuron2)

        # A projection with uniform delay
        proj = Projection(pre=pop1, post=pop2, target="ff")
        proj.connect_one_to_one(weights=1.0, delays=10.0)

        # A projection with uniform delay
        proj2 = Projection(pre=pop1,
                           post=pop2,
                           target="ff_glob",
                           synapse=synapse_glob)
        proj2.connect_one_to_one(weights=1.0, delays=10.0)

        # Build up network
        cls.test_net = Network()
        cls.test_net.add([pop1, pop2, proj, proj2])
        cls.test_net.compile(silent=True)

        # Store references for easier usage in test cases
        cls.net_proj = cls.test_net.get(proj)
        cls.net_proj2 = cls.test_net.get(proj2)
        cls.net_pop1 = cls.test_net.get(pop1)
        cls.net_pop2 = cls.test_net.get(pop2)
Example #19
    def setUpClass(cls):
        """
        Compile the network for this test
        """
        neuron = Neuron(parameters="r=0.0")

        out1 = Neuron(equations="""
                r =  sum(one2one)
            """)

        out2 = Neuron(equations="""
                r =  sum(all2all) + sum(fnp)
            """)

        pop1 = Population((17, 17), neuron)
        pop2 = Population((17, 17), out1)
        pop3 = Population(4, out2)

        proj = Projection(pre=pop1, post=pop2, target="one2one")
        proj.connect_one_to_one(
            weights=0.0,
            force_multiple_weights=True)  # weights set in the test

        proj2 = Projection(pre=pop1, post=pop3, target="all2all")
        proj2.connect_all_to_all(weights=Uniform(0, 1))

        proj3 = Projection(pre=pop1, post=pop3, target="fnp")
        proj3.connect_fixed_number_pre(5, weights=Uniform(0, 1))

        cls.test_net = Network()
        cls.test_net.add([pop1, pop2, pop3, proj, proj2, proj3])
        cls.test_net.compile(silent=True)

        cls.net_pop1 = cls.test_net.get(pop1)
        cls.net_pop2 = cls.test_net.get(pop2)
        cls.net_pop3 = cls.test_net.get(pop3)
        cls.net_proj = cls.test_net.get(proj)
        cls.net_proj2 = cls.test_net.get(proj2)
        cls.net_proj3 = cls.test_net.get(proj3)
Example #20
    def setUpClass(cls):
        """
        Build up the network
        """
        simple_emit = Neuron(spike="t==1")
        simple_recv = Neuron(equations="""
                g_exc1 = 0
                g_exc2 = 0
                g_exc3 = 0
            """,
                             spike="g_exc1>30")

        # simple in/out populations
        in_pop = Population(5, neuron=simple_emit)
        out_pop = Population(2, neuron=simple_recv)

        # create the projections for the test cases (TC)
        # TC: no delay
        proj = Projection(pre=in_pop, post=out_pop, target="exc1")
        proj.connect_all_to_all(weights=1.0, storage_format="csr")
        # TC: uniform delay
        proj_u = Projection(pre=in_pop, post=out_pop, target="exc2")
        proj_u.connect_all_to_all(weights=1.0,
                                  delays=2.0,
                                  storage_format="csr")
        # TC: non-uniform delay
        proj_nu = Projection(pre=in_pop, post=out_pop, target="exc3")
        proj_nu.connect_all_to_all(weights=1.0, delays=Uniform(2, 10))

        # Monitor to record the currents
        m = Monitor(out_pop, ["g_exc1", "g_exc2", "g_exc3"])

        # build network and store required object
        # instances
        net = Network()
        net.add([in_pop, out_pop, proj, proj_u, proj_nu, m])
        cls.test_net = net
        cls.test_net.compile(silent=True)
        cls.test_g_exc_m = net.get(m)
        cls.test_proj = net.get(proj_nu)
Example #21
def grid_search_annarchy(param_grid: dict, param_map: dict, dt: float, simulation_time: float,
                         inputs: dict, outputs: dict, sampling_step_size: Optional[float] = None,
                         permute_grid: bool = False, circuit=None, **kwargs) -> DataFrame:
    """Function that runs multiple parametrizations of the same circuit in parallel and returns a combined output.

    Parameters
    ----------
    param_grid
        Key-value pairs for each circuit parameter that should be altered over different circuit parametrizations.
    param_map
        Key-value pairs that map the keys of param_grid to concrete circuit variables.
    dt
        Simulation step-size in s.
    simulation_time
        Simulation time in s.
    inputs
        Inputs as provided to the `run` method of `:class:ComputeGraph`.
    outputs
        Outputs as provided to the `run` method of `:class:ComputeGraph`.
    sampling_step_size
        Sampling step-size as provided to the `run` method of `:class:ComputeGraph`.
    permute_grid
        If true, all combinations of the provided param_grid values will be realized. If false, the param_grid values
        will be traversed pairwise.
    circuit
        Instance of ANNarchy network.
    kwargs
        Additional keyword arguments passed to the `:class:ComputeGraph` initialization.

    Returns
    -------
    DataFrame
        Simulation results stored in a multi-index data frame where each index level refers to one of the parameters of
        param_grid.

    """

    from ANNarchy import Population, Projection, Network, TimedArray, Monitor, ANNarchyException

    # linearize parameter grid if necessary
    if type(param_grid) is dict:
        param_grid = linearize_grid(param_grid, permute_grid)

    # create annarchy net if necessary
    if circuit is None:
        circuit = Network(everything=True)

    # assign parameter updates to each circuit and combine them to unconnected network
    circuit_names = []
    param_info = []
    param_split = "__"
    val_split = "--"
    comb = "_"
    populations, projections = {}, {}
    for n in range(param_grid.shape[0]):

        # copy and re-parametrize populations
        try:
            for p in circuit.get_populations():
                name = f'net{n}/{p.name}'
                p_new = Population(geometry=p.geometry, neuron=p.neuron_type, name=name,
                                   stop_condition=p.stop_condition, storage_order=p._storage_order,
                                   copied=False)
                p_new = adapt_pop(p_new, param_grid.iloc[n, :], param_map)
                populations[name] = p_new

                # add input to population
                for node, inp in inputs.items():
                    if node in name:
                        inp_name = f'{name}_inp'
                        inp = TimedArray(rates=inp, name=inp_name)
                        proj = Projection(pre=inp, post=p_new, target='exc')
                        proj.connect_one_to_one(1.0)
                        populations[inp_name] = inp
                        projections[inp_name] = proj
        except ANNarchyException:
            pass

        # copy and re-parametrize projections
        try:
            for c in circuit.get_projections():
                source = c.pre if type(c.pre) is str else c.pre.name
                target = c.post if type(c.post) is str else c.post.name
                source = f'net{n}/{source}'
                target = f'net{n}/{target}'
                name = f'{source}/{target}/{c.name}'
                c_new = Projection(pre=source, post=target, target=c.target, synapse=c.synapse_type, name=name,
                                   copied=False)
                c_new._store_connectivity(c._connection_method, c._connection_args, c._connection_delay, c._storage_format)
                c_new = adapt_proj(c_new, param_grid.iloc[n, :], param_map)
                projections[name] = c_new
        except ANNarchyException:
            pass

        # collect parameter and circuit name infos
        circuit_names.append(f'net{n}')
        param_names = list(param_grid.columns.values)
        param_info_tmp = [f"{param_names[i]}{val_split}{val}" for i, val in enumerate(param_grid.iloc[n, :])]
        param_info.append(param_split.join(param_info_tmp))

    net = Network()
    for p in populations.values():
        net.add(p)
    for c in projections.values():
        net.add(c)

    # adjust output of simulation to combined network
    nodes = [p.name for p in circuit.get_populations()]
    out_names, var_names, out_lens, monitors, monitor_names = [], [], [], [], []
    for out_key, out in outputs.copy().items():
        out_names_tmp, out_lens_tmp = [], []
        if out[0] in nodes:
            for i, name in enumerate(param_info):
                out_tmp = list(out)
                out_tmp[0] = f'{circuit_names[i]}/{out_tmp[0]}'
                p = net.get_population(out_tmp[0])
                monitors.append(Monitor(p, variables=out_tmp[-1], period=sampling_step_size, start=True,
                                        net_id=net.id))
                monitor_names.append(f'{name}{param_split}out_var{val_split}{out_key}{comb}{out[0]}')
                var_names.append(out_tmp[-1])
                out_names_tmp.append(f'{out_key}{comb}{out[0]}')
                out_lens_tmp.append(p.geometry[0])
        elif out[0] == 'all':
            for node in nodes:
                for i, name in enumerate(param_info):
                    out_tmp = list(out)
                    out_tmp[0] = f'{circuit_names[i]}/{node}'
                    p = net.get_population(out_tmp[0])
                    monitors.append(Monitor(p, variables=out_tmp[-1], period=sampling_step_size, start=True,
                                            net_id=net.id))
                    monitor_names.append(f'{name}{param_split}out_var{val_split}{out_key}{comb}{node}')
                    var_names.append(out_tmp[-1])
                    out_names_tmp.append(f'{out_key}{comb}{node}')
                    out_lens_tmp.append(p.geometry[0])
        else:
            node_found = False
            for node in nodes:
                if out[0] in node:
                    node_found = True
                    for i, name in enumerate(param_info):
                        out_tmp = list(out)
                        out_tmp[0] = f'{circuit_names[i]}/{node}'
                        p = net.get_population(out_tmp[0])
                        monitors.append(Monitor(p, variables=out_tmp[-1], period=sampling_step_size, start=True,
                                                net_id=net.id))
                        monitor_names.append(f'{name}{param_split}out_var{val_split}{out_key}{comb}{node}')
                        var_names.append(out_tmp[-1])
                        out_names_tmp.append(f'{out_key}{comb}{node}')
                        out_lens_tmp.append(p.geometry[0])
            if not node_found:
                raise ValueError(f'Invalid output identifier in output: {out_key}. '
                                 f'Node {out[0]} is not part of this network')
        out_names += list(set(out_names_tmp))
        out_lens += list(set(out_lens_tmp))
    #net.add(monitors)

    # simulate the circuits behavior
    net.compile()
    net.simulate(duration=simulation_time)

    # transform output into pyrates-compatible data format
    results = pyrates_from_annarchy(monitors, vars=list(set(var_names)),
                                    monitor_names=monitor_names, **kwargs)

    # transform results into long-form dataframe with changed parameters as columns
    multi_idx = [param_grid[key].values for key in param_grid.keys()]
    n_iters = len(multi_idx[0])
    outs = []
    for out_name, out_len in zip(out_names, out_lens):
        outs += [f'{out_name}_n{i}' for i in range(out_len)] * n_iters
    multi_idx_final = []
    for idx in multi_idx:
        for val in idx:
            for out_len in out_lens:
                multi_idx_final += [val]*len(out_names)*out_len
    index = MultiIndex.from_arrays([multi_idx_final, outs], names=list(param_grid.keys()) + ["out_var"])
    index = MultiIndex.from_tuples(list(set(index)), names=list(param_grid.keys()) + ["out_var"])
    results_final = DataFrame(columns=index, data=np.zeros_like(results.values), index=results.index)
    for col in results.keys():
        params = col.split(param_split)
        indices = [None] * len(results_final.columns.names)
        for param in params:
            var, val = param.split(val_split)[:2]
            idx = list(results_final.columns.names).index(var)
            try:
                indices[idx] = float(val)
            except ValueError:
                indices[idx] = val
        results_final.loc[:, tuple(indices)] = results[col].values

    return results_final
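
A hypothetical call might look as follows. The network `net`, population name 'E', parameter 'tau' and the exact param_map layout are assumptions: param_map's interpretation is delegated to the helpers adapt_pop/adapt_proj, which are not shown here. The outputs format ('node', 'variable') follows the parsing logic in the function body above:

# Sketch of a grid search over two values of a neuron parameter 'tau'
# on a population named 'E' of some pre-built ANNarchy network `net`.
param_grid = {'tau': [10.0, 20.0]}
param_map = {'tau': {'vars': ['tau'], 'nodes': ['E']}}  # hypothetical layout

results = grid_search_annarchy(
    param_grid, param_map,
    dt=1e-4,                    # step size in s
    simulation_time=1.0,        # simulated time in s
    inputs={},                  # no external input
    outputs={'r': ('E', 'r')},  # record variable 'r' of population 'E'
    permute_grid=True,
    circuit=net)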
Example #22
class test_BuiltinFunctions(unittest.TestCase):
    """
    Test the correct evaluation of builtin functions
    """
    @classmethod
    def setUpClass(self):
        """
        Compile the network for this test
        """
        BuiltinFuncs = Neuron(parameters="""
                base = 2.0
            """,
                              equations="""
                r = modulo(t,3)
                pr = power(base,3)
                clip_below = clip(-2, -1, 1)
                clip_within = clip(0, -1, 1)
                clip_above = clip(2, -1, 1)
            """)

        pop1 = Population(1, BuiltinFuncs)
        mon = Monitor(pop1,
                      ['r', 'pr', 'clip_below', 'clip_within', 'clip_above'])

        self.test_net = Network()
        self.test_net.add([pop1, mon])
        self.test_net.compile(silent=True)

        self.test_mon = self.test_net.get(mon)

    @classmethod
    def tearDownClass(cls):
        """
        All tests of this class are done. We can destroy the network.
        """
        del cls.test_net

    def setUp(self):
        """
        Automatically called before each test method to reset the network.
        """
        self.test_net.reset()

    def tearDown(self):
        """
        Since all tests are independent, after every test we call the *get()* method on every monitor to clear all recordings.
        """
        self.test_mon.get()

    def test_modulo(self):
        """
        Test modulo function.
        """
        self.test_net.simulate(10)
        data_m = self.test_mon.get('r')
        self.assertTrue(
            np.allclose(data_m, [[0.0], [1.0], [2.0], [0.0], [1.0], [2.0],
                                 [0.0], [1.0], [2.0], [0.0]]))

    def test_integer_power(self):
        """
        Test integer power function.
        """
        self.test_net.simulate(1)
        data_m = self.test_mon.get('pr')
        self.assertTrue(np.allclose(data_m, [[8.0]]))

    def test_clip_below(self):
        """
        The clip(x, a, b) method ensures that x is within the range [a, b]. This test validates that x = -2 is clipped to -1.
        """
        data_clip_below = self.test_mon.get('clip_below')
        self.assertTrue(np.allclose(data_clip_below, [[-1.0]]))

    def test_clip_within(self):
        """
        The clip(x, a, b) method ensures that x is within the range [a, b]. This test validates that x = 0 is left unchanged.
        """
        data_clip_within = self.test_mon.get('clip_within')
        self.assertTrue(np.allclose(data_clip_within, [[0.0]]))

    def test_clip_above(self):
        """
        The clip(x, a, b) method ensures that x is within the range [a, b]. This test validates that x = 2 is clipped to 1.
        """
        data_clip_above = self.test_mon.get('clip_above')
        self.assertTrue(np.allclose(data_clip_above, [[1.0]]))
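
Note: the expected values in these tests mirror the corresponding NumPy operations. A small cross-check, using only the values asserted above; a sketch, not ANNarchy's generated code:

import numpy as np

assert np.allclose(np.mod(np.arange(10), 3),
                   [0, 1, 2, 0, 1, 2, 0, 1, 2, 0])  # modulo(t, 3)
assert np.isclose(np.power(2.0, 3), 8.0)            # power(base, 3)
assert np.clip(-2, -1, 1) == -1                     # clip below the range
assert np.clip(0, -1, 1) == 0                       # clip within the range
assert np.clip(2, -1, 1) == 1                       # clip above the range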
Example #23
class test_CustomFunc(unittest.TestCase):
    """
    This class tests the definition of custom functions, which can be
    defined on three levels:

        * globally
        * within neurons
        * within synapses
    """
    @classmethod
    def setUpClass(self):
        """
        Compile the network for this test
        """
        neuron = Neuron(
            equations = "r = transfer_function(sum(exc), 0.0)",
            functions = "transfer_function(x, t) = if x > t: if x > 2*t : (x - 2*t)^2 else: x - t else: 0."
        )

        neuron2 = Neuron(
            equations = "r = glob_pos(sum(exc))"
        )

        synapse = Synapse(
            equations="w += hebb(pre.r, post.r)",
            functions="hebb(x, y) = x * y"
        )

        pop = Population(10, neuron)
        pop2 = Population(10, neuron2)
        proj = Projection(pop, pop, 'exc', synapse).connect_all_to_all(1.0)

        self.test_net = Network()
        self.test_net.add([pop, pop2, proj])
        self.test_net.compile(silent=True)

        self.net_pop = self.test_net.get(pop)
        self.net_proj = self.test_net.get(proj)

    @classmethod
    def tearDownClass(cls):
        del cls.test_net

    def setUp(self):
        """
        In our *setUp()* function we call *reset()* to reset the network.
        """
        self.test_net.reset()

    def test_neuron(self):
        """
        Custom func defined within a neuron object, providing numpy.array data.
        """
        self.assertTrue(numpy.allclose(self.net_pop.transfer_function(numpy.array([0., 1., 2., 3.]), numpy.array([2., 2., 2., 2.])), [0, 0, 0, 1]))
        
    def test_neuron2(self):
        """
        Custom func defined within a neuron object, providing simple lists.
        """        
        self.assertTrue(numpy.allclose(self.net_pop.transfer_function([0., 1., 2., 3.], [2., 2., 2., 2.]), [0, 0, 0, 1]))

    def test_synapse(self):
        """
        Custom func defined within a synapse object, providing numpy.array data.
        """
        self.assertTrue(numpy.allclose(self.net_proj.hebb(numpy.array([0., 1., 2., 3.]), numpy.array([0., 1., 2., 3.])), [0, 1, 4, 9]))

    def test_synapse2(self):
        """
        Custom func defined within a synapse object, providing simple lists.
        """
        self.assertTrue(numpy.allclose(self.net_proj.hebb([0., 1., 2., 3.], [0., 1., 2., 3.]), [0, 1, 4, 9]))
Example #24
class test_GlobalOps_1D(unittest.TestCase):
    """
    ANNarchy supports several global operations, which are always applied
    to variables of *Population* objects. Currently, the following methods
    are supported:

        * mean()
        * max()
        * min()
        * norm1()
        * norm2()

    They are used in the equations of our neuron definition.
    This particular test focuses on a one-dimensional *Population*.
    """
    @classmethod
    def setUpClass(self):
        """
        Compile the network for this test
        """
        neuron = Neuron(parameters="""
                r=0
            """,
                        equations="""
                mean_r = mean(r)
                max_r = max(r)
                min_r = min(r)
                l1 = norm1(r)
                l2 = norm2(r)
            """)

        pop = Population(6, neuron)

        self.test_net = Network()
        self.test_net.add([pop])
        self.test_net.compile(silent=True)

        self.net_pop = self.test_net.get(pop)

    @classmethod
    def tearDownClass(cls):
        del cls.test_net

    def setUp(self):
        """
        In our *setUp()* function we set the variable *r*.
        We also call *simulate()* to calculate mean/max/min.
        """
        # reset() sets all variables to their init values (default 0),
        # which is meaningless for mean/max/min, so we assign more
        # meaningful values here.
        self.net_pop.r = [2.0, 1.0, 0.0, -5.0, -3.0, -1.0]

        # 1st step: calculate mean/max/min and store in intermediate
        #           variables
        # 2nd step: write intermediate variables to accessible variables.
        self.test_net.simulate(2)

    def tearDown(self):
        """
        After each test we call *reset()* to reset the network.
        """
        self.test_net.reset()

    def test_get_mean_r(self):
        """
        Tests the result of *mean(r)* for *pop*.
        """
        self.assertTrue(numpy.allclose(self.net_pop.mean_r, -1.0))

    def test_get_max_r(self):
        """
        Tests the result of *max(r)* for *pop*.
        """
        self.assertTrue(numpy.allclose(self.net_pop.max_r, 2.0))

    def test_get_min_r(self):
        """
        Tests the result of *min(r)* for *pop*.
        """
        self.assertTrue(numpy.allclose(self.net_pop.min_r, -5.0))

    def test_get_l1_norm(self):
        """
        Tests the result of *norm1(r)* (L1 norm) for *pop*.
        """
        self.assertTrue(numpy.allclose(self.net_pop.l1, 12.0))

    def test_get_l2_norm(self):
        """
        Tests the result of *norm2(r)* (L2 norm) for *pop*.
        """
        # compute control value
        l2norm = numpy.linalg.norm(self.net_pop.r, ord=2)

        # test
        self.assertTrue(numpy.allclose(self.net_pop.l2, l2norm))
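
Note: the control values used above follow directly from the initial rates. A NumPy hand check:

import numpy as np

r = np.array([2.0, 1.0, 0.0, -5.0, -3.0, -1.0])
assert np.isclose(r.mean(), -1.0)               # mean(r)
assert np.isclose(r.max(), 2.0)                 # max(r)
assert np.isclose(r.min(), -5.0)                # min(r)
assert np.isclose(np.abs(r).sum(), 12.0)        # norm1(r), the L1 norm
assert np.isclose(np.linalg.norm(r, ord=2),
                  np.sqrt((r ** 2).sum()))      # norm2(r), the L2 norm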
Example #25
class test_GlobalOps_1D_Large(unittest.TestCase):
    @classmethod
    def setUpClass(self):
        """
        Compile the network for this test
        """
        neuron = Neuron(parameters="""
                r=0
            """,
                        equations="""
                mean_r = mean(r)
                max_r = max(r)
                min_r = min(r)
                l1 = norm1(r)
                l2 = norm2(r)
            """)

        pop = Population(500, neuron)

        self.test_net = Network()
        self.test_net.add([pop])
        self.test_net.compile(silent=True)

        self.net_pop = self.test_net.get(pop)

    @classmethod
    def tearDownClass(cls):
        del cls.test_net

    def tearDown(self):
        """
        After each test we call *reset()* to reset the network.
        """
        self.test_net.reset()

    def test_mean_r(self):
        """
        """
        rand_val = numpy.random.random(500)
        self.net_pop.r = rand_val
        self.test_net.simulate(2)

        self.assertTrue(
            numpy.allclose(self.net_pop.mean_r, numpy.mean(rand_val)))

    def test_min_r(self):
        """
        """
        rand_val = numpy.random.random(500)
        self.net_pop.r = rand_val
        self.test_net.simulate(2)

        self.assertTrue(
            numpy.allclose(self.net_pop.min_r, numpy.amin(rand_val)))

    def test_max_r(self):
        """
        """
        rand_val = numpy.random.random(500)
        self.net_pop.r = rand_val
        self.test_net.simulate(2)

        self.assertTrue(
            numpy.allclose(self.net_pop.max_r, numpy.amax(rand_val)))
Example #26
    def setUpClass(cls):
        """
        Compile the network for this test
        """
        def my_diagonal(pre, post, weight):
            synapses = CSR()
            for post_rk in post.ranks:
                pre_ranks = []
                if post_rk - 1 in pre.ranks:
                    pre_ranks.append(post_rk - 1)
                if post_rk in pre.ranks:
                    pre_ranks.append(post_rk)
                if post_rk + 1 in pre.ranks:
                    pre_ranks.append(post_rk + 1)

                synapses.add(post_rk, pre_ranks, [weight] * len(pre_ranks),
                             [0] * len(pre_ranks))

            return synapses

        def my_diagonal_with_uniform_delay(pre, post, weight, delay):
            synapses = CSR()
            for post_rk in post.ranks:
                pre_ranks = []
                if post_rk - 1 in pre.ranks:
                    pre_ranks.append(post_rk - 1)
                if post_rk in pre.ranks:
                    pre_ranks.append(post_rk)
                if post_rk + 1 in pre.ranks:
                    pre_ranks.append(post_rk + 1)

                synapses.add(post_rk, pre_ranks, [weight] * len(pre_ranks),
                             [delay] * len(pre_ranks))

            return synapses

        def my_diagonal_with_non_uniform_delay(pre, post, weight, delay):
            synapses = CSR()
            for post_rk in post.ranks:
                pre_ranks = []
                if post_rk - 1 in pre.ranks:
                    pre_ranks.append(post_rk - 1)
                if post_rk in pre.ranks:
                    pre_ranks.append(post_rk)
                if post_rk + 1 in pre.ranks:
                    pre_ranks.append(post_rk + 1)

                synapses.add(post_rk, pre_ranks, [weight] * len(pre_ranks),
                             delay.get_values(len(pre_ranks)))

            return synapses

        neuron = Neuron(equations="r = 1")

        neuron2 = Neuron(equations="r = sum(exc)")

        pop1 = Population(5, neuron)
        pop2 = Population(5, neuron2)

        proj1 = Projection(pre=pop1, post=pop2, target="exc")
        proj1.connect_with_func(method=my_diagonal, weight=0.1)

        proj2 = Projection(pre=pop1, post=pop2, target="exc2")
        proj2.connect_with_func(method=my_diagonal_with_uniform_delay,
                                weight=0.1,
                                delay=2)

        proj3 = Projection(pre=pop1, post=pop2, target="exc3")
        proj3.connect_with_func(method=my_diagonal_with_non_uniform_delay,
                                weight=0.1,
                                delay=DiscreteUniform(1, 5))

        cls.test_net = Network()
        cls.test_net.add([pop1, pop2, proj1, proj2, proj3])
        cls.test_net.compile(silent=True)

        cls.test_proj1 = cls.test_net.get(proj1)
        cls.test_proj2 = cls.test_net.get(proj2)
        cls.test_proj3 = cls.test_net.get(proj3)
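
Note: for the 5-neuron populations above, my_diagonal builds a tri-diagonal pattern. The expected pre-synaptic ranks per dendrite, written out by hand:

# Expected pre-ranks produced by my_diagonal for pop2's five dendrites:
expected_pre_ranks = {
    0: [0, 1],
    1: [0, 1, 2],
    2: [1, 2, 3],
    3: [2, 3, 4],
    4: [3, 4],
}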
Example #27
#inp_e = Projection(pre=I_e, post=E, target='exc')
#inp_i = Projection(pre=I_i, post=I, target='exc')
#inp_e.connect_one_to_one(1.0)
#inp_i.connect_one_to_one(1.0)
E.i_offset = 5.0
I.i_offset = 2.0

# monitoring
obs_e = Monitor(E, variables=['spike', 'v'], start=True)
obs_i = Monitor(I, variables=['spike', 'v'], start=True)

# simulation
############

# annarchy simulation
net = Network(everything=True)
net.compile()
net.simulate(duration=T)

# conversion to pyrates
rate_e = pyrates_from_annarchy(monitors=[net.get(obs_e)],
                               vars=['spike'],
                               pop_average=True)
rate_i = pyrates_from_annarchy(monitors=[net.get(obs_i)],
                               vars=['spike'],
                               pop_average=True)
v_e = pyrates_from_annarchy(monitors=[net.get(obs_e)],
                            vars=['v'],
                            pop_average=False)
v_i = pyrates_from_annarchy(monitors=[net.get(obs_i)],
                            vars=['v'],
                            pop_average=False)
Example #28
class test_Dendrite(unittest.TestCase):
    """
    This class tests the *Dendrite* object, which gathers all synapses
    belonging to a post-synaptic neuron in a *Projection*:

        * access to parameters
        * the *rank* method
        * the *size* method
    """
    @classmethod
    def setUpClass(self):
        """
        Compile the network for this test
        """
        neuron = Neuron(parameters="tau = 10", equations="r += 1/tau * t")

        neuron2 = Neuron(parameters="tau = 10: population",
                         equations="r += 1/tau * t: init = 1.0")

        Oja = Synapse(parameters="""
                tau = 5000.0 : postsynaptic
                alpha = 8.0
            """,
                      equations="""
                tau * dw/dt = pre.r * post.r - alpha * post.r^2 * w
            """)

        pop1 = Population(5, neuron)
        pop2 = Population(8, neuron2)

        proj = Projection(pre=pop1, post=pop2, target="exc", synapse=Oja)

        proj.connect_all_to_all(weights=1.0)

        self.test_net = Network()
        self.test_net.add([pop1, pop2, proj])
        self.test_net.compile(silent=True)

        self.net_proj = self.test_net.get(proj)

    @classmethod
    def tearDownClass(cls):
        del cls.test_net

    def setUp(self):
        """
        In our *setUp()* function we call *reset()* to reset the network.
        """
        self.test_net.reset()

    def test_none(self):
        """
        Accessing a non-existent *Dendrite* should raise an error,
        which is tested here.
        """
        from ANNarchy.core.Global import ANNarchyException
        with self.assertRaises(ANNarchyException) as cm:
            d = self.net_proj.dendrite(14)
        # self.assertEqual(cm.exception.code, 1)

    def test_pre_ranks(self):
        """
        Tests the *pre_ranks* method, which returns the ranks of the
        pre-synaptic neurons belonging to the accessed *Dendrite*.
        """
        self.assertEqual(self.net_proj.dendrite(5).pre_ranks, [0, 1, 2, 3, 4])

    def test_dendrite_size(self):
        """
        Tests the *size* method, which returns the number of pre-synaptic
        neurons belonging to the accessed *Dendrite*.
        """
        self.assertEqual(self.net_proj.dendrite(3).size, 5)

    def test_get_dendrite_tau(self):
        """
        Tests the direct access of the parameter *tau* of a *Dendrite*.
        """
        self.assertTrue(numpy.allclose(self.net_proj.dendrite(1).tau, 5000.0))

    def test_get_dendrite_alpha(self):
        """
        Tests the direct access of the parameter *alpha* of a *Dendrite*.
        """
        self.assertTrue(
            numpy.allclose(
                self.net_proj.dendrite(0).alpha, [8.0, 8.0, 8.0, 8.0, 8.0]))

    def test_get_dendrite_weights(self):
        """
        Tests the direct access of the parameter *w* (weights) of a *Dendrite*.
        """
        self.assertTrue(
            numpy.allclose(
                self.net_proj.dendrite(7).w, [1.0, 1.0, 1.0, 1.0, 1.0]))

    def test_set_tau(self):
        """
        Tests the setting of the parameter *tau* for the whole *Projection* through a single value.
        """
        self.net_proj.tau = 6000.0
        self.assertTrue(numpy.allclose(self.net_proj.dendrite(0).tau, 6000.0))

    def test_set_tau_2(self):
        """
        Tests the setting of the parameter *tau* for a single dendrite with a single value.
        """
        old_value = self.net_proj.tau
        old_value[1] = 7000.0

        self.net_proj.dendrite(1).tau = 7000.0
        self.assertTrue(numpy.allclose(self.net_proj.dendrite(1).tau, 7000.0))
        self.assertTrue(numpy.allclose(self.net_proj.tau, old_value))

    def test_set_alpha(self):
        """
        Tests the setting of the parameter *alpha* of a *Dendrite*.
        """
        self.net_proj.dendrite(4).alpha = 9.0
        self.assertTrue(
            numpy.allclose(
                self.net_proj.dendrite(4).alpha, [9.0, 9.0, 9.0, 9.0, 9.0]))

    def test_set_alpha_2(self):
        """
        Tests the setting of the parameter *alpha* of a specific synapse in a *Dendrite*.
        """
        self.net_proj.dendrite(4)[1].alpha = 10.0
        self.assertTrue(
            numpy.allclose(
                self.net_proj.dendrite(4).alpha, [9.0, 10.0, 9.0, 9.0, 9.0]))

    def test_set_weights(self):
        """
        Tests the setting of the parameter *w* (weights) of a *Dendrite*.
        """
        self.net_proj.dendrite(6).w = 2.0
        self.assertTrue(
            numpy.allclose(
                self.net_proj.dendrite(6).w, [2.0, 2.0, 2.0, 2.0, 2.0]))

    def test_set_weights_2(self):
        """
        Tests the setting of the parameter *w* (weights) of a specific synapse in a *Dendrite*.
        """
        self.net_proj.dendrite(6)[2].w = 3.0
        self.assertTrue(
            numpy.allclose(
                self.net_proj.dendrite(6).w, [2.0, 2.0, 3.0, 2.0, 2.0]))

    def test_set_with_dict(self):
        """
        Test the setting of attributes using a dictionary.
        """
        new_value = self.net_proj.tau
        new_value[1] = 7000.0

        update = dict({'tau': 7000})
        self.net_proj.dendrite(1).set(update)
        self.assertTrue(numpy.allclose(self.net_proj.tau, new_value))

    def test_get_by_name(self):
        """
        Test the retrieval of an attribute by the name.
        """
        val = self.net_proj.dendrite(1).get('tau')
        self.assertTrue(numpy.allclose(val, 5000.0))