Example #1
    def make_vertices(self, model, n_steps):
        """Create the vertices to be simulated on the machine."""
        # Create the system region
        self.system_region = SystemRegion(model.machine_timestep,
                                          self.period is not None, n_steps)

        # Get all the outgoing signals to determine the size out and to build
        # a list of keys.
        sigs_conns = model.get_signals_from_object(self)
        if len(sigs_conns) == 0:
            return netlistspec([])

        keys = list()
        self.transmission_parameters = list()
        for sig, transmission_params in sigs_conns[OutputPort.standard]:
            # Add the keys for this connection
            transform, sig_keys = get_transform_keys(sig, transmission_params)
            keys.extend(sig_keys)
            self.transmission_parameters.append((transmission_params,
                                                 transform))
        size_out = len(keys)

        # Build the keys region
        self.keys_region = regions.KeyspacesRegion(
            keys, [regions.KeyField({"cluster": "cluster"})],
            partitioned_by_atom=True
        )

        # Create the output region
        self.output_region = regions.MatrixRegion(
            np.zeros((n_steps, size_out)),
            sliced_dimension=regions.MatrixPartitioning.columns
        )

        self.regions = [self.system_region, self.keys_region,
                        self.output_region]

        # Partition by output dimension to create vertices
        transmit_constraint = partition.Constraint(10)
        sdram_constraint = partition.Constraint(8*2**20)  # Max 8MiB
        constraints = {
            transmit_constraint: lambda s: s.stop - s.start,
            sdram_constraint: (
                lambda s: regions.utils.sizeof_regions(self.regions, s)),
        }
        for sl in partition.partition(slice(0, size_out), constraints):
            # Determine the resources
            resources = {
                Cores: 1,
                SDRAM: regions.utils.sizeof_regions(self.regions, sl),
            }
            vsl = VertexSlice(sl, self._label, get_application("value_source"),
                              resources)
            self.vertices.append(vsl)

        # Return the vertices and callback methods
        return netlistspec(self.vertices, self.load_to_machine,
                           self.before_simulation)
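
The constraints dictionary above maps each partition.Constraint to a function that estimates how much of that resource a candidate slice of output dimensions would use; partition.partition then splits slice(0, size_out) until every estimate fits. A minimal, self-contained sketch of that idea (a toy splitter, not the real nengo_spinnaker partitioner; the splitting policy and the {maximum: usage_fn} mapping below are assumptions made for illustration):

# Toy sketch only: split a slice into ever more pieces until every usage
# function in `constraints` (a simplified {maximum: usage_fn} mapping) is
# satisfied for every piece.
def toy_partition(full_slice, constraints):
    n_items = full_slice.stop - full_slice.start
    for n_pieces in range(1, n_items + 1):
        step = -(-n_items // n_pieces)  # ceiling division
        pieces = [slice(full_slice.start + i,
                        min(full_slice.start + i + step, full_slice.stop))
                  for i in range(0, n_items, step)]
        if all(usage(sl) <= maximum
               for maximum, usage in constraints.items()
               for sl in pieces):
            return pieces
    raise ValueError("even single-item slices break a constraint")

# e.g. at most 10 output dimensions per vertex, as for transmit_constraint
pieces = toy_partition(slice(0, 25), {10: lambda s: s.stop - s.start})
assert [(s.start, s.stop) for s in pieces] == [(0, 9), (9, 18), (18, 25)]
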
    def make_vertices(self, model, n_steps):  # TODO remove n_steps
        """Construct the data which can be loaded into the memory of a
        SpiNNaker machine.
        """
        # Extract all the filters from the incoming connections to build the
        # filter regions.
        signals_conns = model.get_signals_to_object(self)[InputPort.standard]
        self.filter_region, self.filter_routing_region = make_filter_regions(
            signals_conns, model.dt, True, model.keyspaces.filter_routing_tag)

        # Use a matrix region to record into (slightly unpleasant)
        self.recording_region = regions.MatrixRegion(
            np.zeros((self.size_in, n_steps), dtype=np.uint32)
        )

        # This isn't partitioned, so we just compute the SDRAM requirement and
        # return a new vertex.
        self.system_region = SystemRegion(model.machine_timestep, self.size_in)

        self.regions = [None] * 15
        self.regions[0] = self.system_region
        self.regions[1] = self.filter_region
        self.regions[2] = self.filter_routing_region
        self.regions[14] = self.recording_region  # **YUCK**
        resources = {
            Cores: 1,
            SDRAM: regions.utils.sizeof_regions(self.regions, None)
        }

        self.vertex = Vertex(get_application("value_sink"), resources)

        # Return the spec
        return netlistspec(self.vertex, self.load_to_machine,
                           after_simulation_function=self.after_simulation)
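
As a rough check on the recording cost above: the backing matrix is uint32 with shape (size_in, n_steps), so its raw payload alone is 4 * size_in * n_steps bytes (the region itself may add a header and padding on top). A quick sketch with assumed example sizes:

import numpy as np

# Assumed example sizes, purely illustrative.
size_in, n_steps = 8, 1000
recording = np.zeros((size_in, n_steps), dtype=np.uint32)

# 4 bytes per uint32 sample: 8 * 1000 * 4 = 32000 bytes (about 31 KiB)
assert recording.nbytes == 4 * size_in * n_steps == 32000
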
Example #3
    def make_vertices(self, model, n_steps):
        """Make vertices for the filter."""
        # Get the complete matrix to be applied by the filter
        out_signals = model.get_signals_from_object(self)

        # Get the filter and filter routing regions
        filter_region, filter_routing_region = make_filter_regions(
            model.get_signals_to_object(self)[InputPort.standard],
            model.dt,
            True,
            model.keyspaces.filter_routing_tag,
            width=self.size_in)
        self._routing_region = filter_routing_region

        # Generate the vertices
        vertices = flatinsertionlist()

        for group in self.groups:
            vertices.append(
                group.make_vertices(out_signals, model.machine_timestep,
                                    filter_region, filter_routing_region))

        # Return the netlist specification
        return netlistspec(vertices=vertices,
                           load_function=self.load_to_machine)
    def make_vertices(self, model, *args, **kwargs):
        """Create vertices that will simulate the SDPTransmitter."""
        # Build the system region
        self._sys_region = SystemRegion(model.machine_timestep,
                                        self.size_in, 1)

        # Build the filter regions
        in_sigs = model.get_signals_to_object(self)[InputPort.standard]
        self._filter_region, self._routing_region = make_filter_regions(
            in_sigs, model.dt, True, model.keyspaces.filter_routing_tag)

        # Get the resources
        resources = {
            Cores: 1,
            SDRAM: region_utils.sizeof_regions(
                [self._sys_region, self._filter_region, self._routing_region],
                None
            )
        }

        # Create the vertex
        self._vertex = Vertex(get_application("tx"), resources)

        # Return the netlist specification
        return netlistspec(self._vertex,
                           load_function=self.load_to_machine)
Example #5
    def test_removes_sinkless_filters(self):
        """Test that making a netlist correctly filters out passthrough Nodes
        with no outgoing connections.
        """
        # Create the first operator
        object_a = mock.Mock(name="object A")
        vertex_a = mock.Mock(name="vertex A")
        load_fn_a = mock.Mock(name="load function A")
        pre_fn_a = mock.Mock(name="pre function A")
        post_fn_a = mock.Mock(name="post function A")

        operator_a = mock.Mock(name="operator A")
        operator_a.make_vertices.return_value = \
            netlistspec(vertex_a, load_fn_a, pre_fn_a, post_fn_a)

        # Create the second operator
        object_b = mock.Mock(name="object B")
        operator_b = operators.Filter(16)  # Shouldn't need building

        # Create the model, add the items and add an entry to the connection
        # map.
        model = Model()
        model.object_operators[object_a] = operator_a
        model.object_operators[object_b] = operator_b
        model.connection_map.add_connection(operator_a, None,
                                            SignalParameters(), None,
                                            operator_b, None, None)
        netlist = model.make_netlist(1)

        # The netlist should contain vertex a and no nets
        assert netlist.nets == list()
        assert netlist.vertices == [vertex_a]
    def make_vertices(self, model, n_steps):
        """Make vertices for the filter."""
        # Get the complete matrix to be applied by the filter
        out_signals = model.get_signals_from_object(self)

        # Get the filter and filter routing regions
        filter_region, filter_routing_region = make_filter_regions(
            model.get_signals_to_object(self)[InputPort.standard],
            model.dt, True,
            model.keyspaces.filter_routing_tag,
            width=self.size_in
        )
        self._routing_region = filter_routing_region

        # Generate the vertices
        vertices = flatinsertionlist()

        for group in self.groups:
            vertices.append(
                group.make_vertices(out_signals,
                                    model.machine_timestep,
                                    filter_region,
                                    filter_routing_region)
            )

        # Return the netlist specification
        return netlistspec(vertices=vertices,
                           load_function=self.load_to_machine)
    def make_vertices(self, model, n_steps):  # TODO remove n_steps
        """Construct the data which can be loaded into the memory of a
        SpiNNaker machine.
        """
        # Extract all the filters from the incoming connections to build the
        # filter regions.
        signals_conns = model.get_signals_to_object(self)[InputPort.standard]
        self.filter_region, self.filter_routing_region = make_filter_regions(
            signals_conns, model.dt, True, model.keyspaces.filter_routing_tag)

        # Use a matrix region to record into (slightly unpleasant)
        self.recording_region = regions.MatrixRegion(
            np.zeros((self.size_in, n_steps), dtype=np.uint32))

        # This isn't partitioned, so we just compute the SDRAM requirement and
        # return a new vertex.
        self.system_region = SystemRegion(model.machine_timestep, self.size_in)

        self.regions = [None] * 15
        self.regions[0] = self.system_region
        self.regions[1] = self.filter_region
        self.regions[2] = self.filter_routing_region
        self.regions[14] = self.recording_region  # **YUCK**
        resources = {
            Cores: 1,
            SDRAM: regions.utils.sizeof_regions(self.regions, None)
        }

        self.vertex = Vertex(get_application("value_sink"), resources)

        # Return the spec
        return netlistspec(self.vertex,
                           self.load_to_machine,
                           after_simulation_function=self.after_simulation)
    def make_vertices(self, model, n_steps):  # TODO remove n_steps
        """Construct the data which can be loaded into the memory of a
        SpiNNaker machine.
        """
        # Extract all the filters from the incoming connections to build the
        # filter regions.
        signals_conns = model.get_signals_to_object(self)[InputPort.standard]
        filter_region, filter_routing_region = make_filter_regions(
            signals_conns, model.dt, True, model.keyspaces.filter_routing_tag)
        self._routing_region = filter_routing_region

        # Make sufficient vertices to ensure that each has a size_in of no
        # more than max_width.
        n_vertices = (
            (self.size_in // self.max_width) +
            (1 if self.size_in % self.max_width else 0)
        )
        self.vertices = tuple(
            ValueSinkVertex(model.machine_timestep, n_steps, sl, filter_region,
                            filter_routing_region) for sl in
            divide_slice(slice(0, self.size_in), n_vertices)
        )

        # Return the spec
        return netlistspec(self.vertices, self.load_to_machine,
                           after_simulation_function=self.after_simulation)
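
The n_vertices expression above is a ceiling division of size_in by max_width, and divide_slice then cuts the full input slice into that many pieces. A small sketch of the same arithmetic; split_slice below is a hypothetical stand-in for illustration, not the library's divide_slice:

def ceil_div(a, b):
    # Equivalent to (a // b) + (1 if a % b else 0)
    return -(-a // b)


def split_slice(sl, n_pieces):
    # Hypothetical stand-in for divide_slice: yield n_pieces sub-slices.
    per_piece = ceil_div(sl.stop - sl.start, n_pieces)
    for start in range(sl.start, sl.stop, per_piece):
        yield slice(start, min(start + per_piece, sl.stop))


# e.g. size_in=70, max_width=32 -> 3 vertices, each no wider than max_width
size_in, max_width = 70, 32
n_vertices = ceil_div(size_in, max_width)
pieces = list(split_slice(slice(0, size_in), n_vertices))
assert n_vertices == 3
assert all((s.stop - s.start) <= max_width for s in pieces)
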
    def make_vertices(self, model, *args, **kwargs):
        """Create vertices that will simulate the SDPTransmitter."""
        # Build the system region
        self._sys_region = SystemRegion(model.machine_timestep, self.size_in,
                                        1)

        # Build the filter regions
        in_sigs = model.get_signals_to_object(self)[InputPort.standard]
        self._filter_region, self._routing_region = make_filter_regions(
            in_sigs, model.dt, True, model.keyspaces.filter_routing_tag)

        # Get the resources
        resources = {
            Cores: 1,
            SDRAM: region_utils.sizeof_regions(
                [self._sys_region, self._filter_region, self._routing_region],
                None)
        }

        # Create the vertex
        self._vertex = Vertex(self._label, get_application("tx"), resources)

        # Return the netlist specification
        return netlistspec(
            (self._vertex, ),  # Tuple is required
            load_function=self.load_to_machine)
    def test_removes_sinkless_filters(self):
        """Test that making a netlist correctly filters out passthrough Nodes
        with no outgoing connections.
        """
        # Create the first operator
        object_a = mock.Mock(name="object A")
        vertex_a = mock.Mock(name="vertex A")
        load_fn_a = mock.Mock(name="load function A")
        pre_fn_a = mock.Mock(name="pre function A")
        post_fn_a = mock.Mock(name="post function A")

        operator_a = mock.Mock(name="operator A")
        operator_a.make_vertices.return_value = \
            netlistspec(vertex_a, load_fn_a, pre_fn_a, post_fn_a)

        # Create the second operator
        object_b = mock.Mock(name="object B")
        operator_b = operators.Filter(16)  # Shouldn't need building

        # Create the model, add the items and add an entry to the connection
        # map.
        model = Model()
        model.object_operators[object_a] = operator_a
        model.object_operators[object_b] = operator_b
        model.connection_map.add_connection(
            operator_a, None, SignalParameters(), None,
            operator_b, None, None
        )
        netlist = model.make_netlist(1)

        # The netlist should contain vertex a and no nets
        assert netlist.nets == list()
        assert netlist.vertices == [vertex_a]
Example #11
    def test_extra_operators_and_signals(self):
        """Test the operators in the extra_operators list are included when
        building netlists.
        """
        # Create the first operator
        vertex_a = mock.Mock(name="vertex A")
        load_fn_a = mock.Mock(name="load function A")
        pre_fn_a = mock.Mock(name="pre function A")
        post_fn_a = mock.Mock(name="post function A")

        operator_a = mock.Mock(name="operator A", spec_set=["make_vertices"])
        operator_a.make_vertices.return_value = \
            netlistspec((vertex_a, ), load_fn_a, pre_fn_a, post_fn_a)

        # Create the second operator
        vertex_b = mock.Mock(name="vertex B")
        load_fn_b = mock.Mock(name="load function B")

        operator_b = mock.Mock(name="operator B", spec_set=["make_vertices"])
        operator_b.make_vertices.return_value = \
            netlistspec((vertex_b, ), load_fn_b)

        # Create the model, add the items and then generate the netlist
        model = Model()
        model.extra_operators = [operator_a, operator_b]
        netlist = model.make_netlist()

        # Check that the make_vertices functions were called
        operator_a.make_vertices.assert_called_once_with(model)
        operator_b.make_vertices.assert_called_once_with(model)

        # Check that the netlist is as expected
        assert len(netlist.nets) == 0

        assert netlist.operator_vertices == {
            operator_a: (vertex_a, ),
            operator_b: (vertex_b, ),
        }
        assert netlist.keyspaces is model.keyspaces
        assert len(netlist.constraints) == 0
        assert set(netlist.load_functions) == set([load_fn_a, load_fn_b])
        assert netlist.before_simulation_functions == [pre_fn_a]
        assert netlist.after_simulation_functions == [post_fn_a]
    def test_extra_operators_and_signals(self):
        """Test the operators in the extra_operators list are included when
        building netlists.
        """
        # Create the first operator
        vertex_a = mock.Mock(name="vertex A")
        load_fn_a = mock.Mock(name="load function A")
        pre_fn_a = mock.Mock(name="pre function A")
        post_fn_a = mock.Mock(name="post function A")

        operator_a = mock.Mock(name="operator A", spec_set=["make_vertices"])
        operator_a.make_vertices.return_value = \
            netlistspec((vertex_a, ), load_fn_a, pre_fn_a, post_fn_a)

        # Create the second operator
        vertex_b = mock.Mock(name="vertex B")
        load_fn_b = mock.Mock(name="load function B")

        operator_b = mock.Mock(name="operator B", spec_set=["make_vertices"])
        operator_b.make_vertices.return_value = \
            netlistspec((vertex_b, ), load_fn_b)

        # Create the model, add the items and then generate the netlist
        model = Model()
        model.extra_operators = [operator_a, operator_b]
        netlist = model.make_netlist()

        # Check that the make_vertices functions were called
        operator_a.make_vertices.assert_called_once_with(model)
        operator_b.make_vertices.assert_called_once_with(model)

        # Check that the netlist is as expected
        assert len(netlist.nets) == 0

        assert netlist.operator_vertices == {
            operator_a: (vertex_a, ),
            operator_b: (vertex_b, ),
        }
        assert netlist.keyspaces is model.keyspaces
        assert len(netlist.constraints) == 0
        assert set(netlist.load_functions) == set([load_fn_a, load_fn_b])
        assert netlist.before_simulation_functions == [pre_fn_a]
        assert netlist.after_simulation_functions == [post_fn_a]
    def make_vertices(self, model, *args, **kwargs):
        """Create vertices that will simulate the SDPReceiver."""
        # NOTE This approach will result in more routes being created than are
        # actually necessary; the way to avoid this is to modify how the
        # builder deals with signals when creating netlists.

        # Get all outgoing signals and their associated transmission parameters
        for signal, transmission_params in \
                model.get_signals_from_object(self)[OutputPort.standard]:
            # Get the transform, and from this the keys
            transform = transmission_params.transform
            keys = [signal.keyspace(index=i) for i in
                    range(transform.shape[0])]

            # Create a vertex for this connection (assuming its size out <= 64)
            if len(keys) > 64:
                raise NotImplementedError(
                    "Connection is too wide to transmit to SpiNNaker. "
                    "Consider breaking the connection up or making the "
                    "originating node a function of time Node."
                )

            # Create the regions for the system
            sys_region = SystemRegion(model.machine_timestep, len(keys))
            keys_region = KeyspacesRegion(keys,
                                          [KeyField({"cluster": "cluster"})])

            # Get the resources
            resources = {
                Cores: 1,
                SDRAM: region_utils.sizeof_regions([sys_region, keys_region],
                                                   None)
            }

            # Create the vertex
            v = self.connection_vertices[transmission_params] = \
                Vertex(get_application("rx"), resources)
            self._sys_regions[v] = sys_region
            self._key_regions[v] = keys_region

        # Return the netlist specification
        return netlistspec(list(self.connection_vertices.values()),
                           load_function=self.load_to_machine)
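
For the width check above: one routing key is generated per row of the transform, i.e. one per output dimension, and anything wider than 64 is rejected, plausibly because 64 four-byte values fill a single 256-byte SDP payload (that rationale is an assumption, not stated in the code). A tiny illustration:

import numpy as np

# Assumed example: a connection with a 3-dimensional output.
transform = np.zeros((3, 5))
keys = ["key[{}]".format(i) for i in range(transform.shape[0])]

assert len(keys) == 3          # one key per output dimension (row)
assert len(keys) <= 64         # the width limit enforced above
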
Example #14
    def make_vertices(self, model, *args, **kwargs):
        """Create vertices that will simulate the SDPReceiver."""
        # NOTE This approach will result in more routes being created than are
        # actually necessary; the way to avoid this is to modify how the
        # builder deals with signals when creating netlists.

        # Get all outgoing signals and their associated transmission parameters
        for signal, transmission_params in \
                model.get_signals_from_object(self)[OutputPort.standard]:
            # Get the transform, and from this the keys
            transform = transmission_params.full_transform(slice_out=False)
            keys = [(signal, {"index": i}) for i in
                    range(transform.shape[0])]

            # Create a vertex for this connection (assuming its size out <= 64)
            if len(keys) > 64:
                raise NotImplementedError(
                    "Connection is too wide to transmit to SpiNNaker. "
                    "Consider breaking the connection up or making the "
                    "originating node a function of time Node."
                )

            # Create the regions for the system
            sys_region = SystemRegion(model.machine_timestep, len(keys))
            keys_region = KeyspacesRegion(keys,
                                          [KeyField({"cluster": "cluster"})])

            # Get the resources
            resources = {
                Cores: 1,
                SDRAM: region_utils.sizeof_regions([sys_region, keys_region],
                                                   None)
            }

            # Create the vertex
            v = self.connection_vertices[transmission_params] = \
                Vertex(self._label, get_application("rx"), resources)
            self._sys_regions[v] = sys_region
            self._key_regions[v] = keys_region

        # Return the netlist specification
        return netlistspec(list(self.connection_vertices.values()),
                           load_function=self.load_to_machine)
    def make_vertices(self, model, n_steps):  # TODO remove n_steps
        """Construct the data which can be loaded into the memory of a
        SpiNNaker machine.
        """
        # Extract all the filters from the incoming connections to build the
        # filter regions.
        signals_conns = model.get_signals_to_object(self)[InputPort.standard]
        filter_region, filter_routing_region = make_filter_regions(
            signals_conns, model.dt, True, model.keyspaces.filter_routing_tag)
        self._routing_region = filter_routing_region

        # Make sufficient vertices to ensure that each has a size_in of no
        # more than max_width.
        n_vertices = ((self.size_in // self.max_width) +
                      (1 if self.size_in % self.max_width else 0))
        self.vertices = tuple(
            ValueSinkVertex(model.machine_timestep, n_steps, sl, filter_region,
                            filter_routing_region)
            for sl in divide_slice(slice(0, self.size_in), n_vertices))

        # Return the spec
        return netlistspec(self.vertices,
                           self.load_to_machine,
                           after_simulation_function=self.after_simulation)
    def make_vertices(self, model, n_steps):
        """Construct the data which can be loaded into the memory of a
        SpiNNaker machine.
        """
        # Build encoders, gain and bias regions
        params = model.params[self.ensemble]
        ens_regions = dict()

        # Convert the encoders combined with the gain to S1615 before creating
        # the region.
        encoders_with_gain = params.scaled_encoders
        ens_regions[EnsembleRegions.encoders] = regions.MatrixRegion(
            tp.np_to_fix(encoders_with_gain),
            sliced_dimension=regions.MatrixPartitioning.rows)

        # Combine the direct input with the bias before converting to S1615 and
        # creating the region.
        bias_with_di = params.bias + np.dot(encoders_with_gain,
                                            self.direct_input)
        assert bias_with_di.ndim == 1
        ens_regions[EnsembleRegions.bias] = regions.MatrixRegion(
            tp.np_to_fix(bias_with_di),
            sliced_dimension=regions.MatrixPartitioning.rows)

        # Convert the gains to S1615 before creating the region
        ens_regions[EnsembleRegions.gain] = regions.MatrixRegion(
            tp.np_to_fix(params.gain),
            sliced_dimension=regions.MatrixPartitioning.rows)

        # Extract all the filters from the incoming connections
        incoming = model.get_signals_to_object(self)

        (ens_regions[EnsembleRegions.input_filters],
         ens_regions[EnsembleRegions.input_routing]) = make_filter_regions(
            incoming[InputPort.standard], model.dt, True,
            model.keyspaces.filter_routing_tag,
            width=self.ensemble.size_in
        )
        (ens_regions[EnsembleRegions.inhibition_filters],
         ens_regions[EnsembleRegions.inhibition_routing]) = \
            make_filter_regions(
                incoming[EnsembleInputPort.global_inhibition], model.dt, True,
                model.keyspaces.filter_routing_tag, width=1
            )

        # Extract all the decoders for the outgoing connections and build the
        # regions for the decoders and the regions for the output keys.
        outgoing = model.get_signals_from_object(self)
        if OutputPort.standard in outgoing:
            decoders, output_keys = \
                get_decoders_and_keys(outgoing[OutputPort.standard], True)
        else:
            decoders = np.array([])
            output_keys = list()
        size_out = decoders.shape[0]

        ens_regions[EnsembleRegions.decoders] = regions.MatrixRegion(
            tp.np_to_fix(decoders / model.dt),
            sliced_dimension=regions.MatrixPartitioning.rows)
        ens_regions[EnsembleRegions.keys] = regions.KeyspacesRegion(
            output_keys,
            fields=[regions.KeyField({'cluster': 'cluster'})],
            partitioned_by_atom=True
        )

        # The population length region stores information about groups of
        # co-operating cores.
        ens_regions[EnsembleRegions.population_length] = \
            regions.ListRegion("I")

        # The ensemble region contains basic information about the ensemble
        ens_regions[EnsembleRegions.ensemble] = EnsembleRegion(
            model.machine_timestep, self.ensemble.size_in)

        # The neuron region contains information specific to the neuron type
        ens_regions[EnsembleRegions.neuron] = LIFRegion(
            model.dt, self.ensemble.neuron_type.tau_rc,
            self.ensemble.neuron_type.tau_ref
        )

        # Manage profiling
        n_profiler_samples = 0
        self.profiled = getconfig(model.config, self.ensemble, "profile",
                                  False)
        if self.profiled:
            # Try and get number of samples from config
            n_profiler_samples = getconfig(model.config, self.ensemble,
                                           "profile_num_samples")

            # If it's not specified, calculate a sensible default
            if n_profiler_samples is None:
                n_profiler_samples = (len(EnsembleSlice.profiler_tag_names) *
                                      n_steps * 2)

        # Create profiler region
        ens_regions[EnsembleRegions.profiler] = regions.Profiler(
            n_profiler_samples)
        ens_regions[EnsembleRegions.ensemble].n_profiler_samples = \
            n_profiler_samples

        # Manage probes
        for probe in self.local_probes:
            if probe.attr in ("output", "spikes"):
                self.record_spikes = True
            elif probe.attr == "voltage":
                self.record_voltages = True
            else:
                raise NotImplementedError(
                    "Cannot probe {} on Ensembles".format(probe.attr)
                )

        # Set the flags
        ens_regions[EnsembleRegions.ensemble].record_spikes = \
            self.record_spikes
        ens_regions[EnsembleRegions.ensemble].record_voltages = \
            self.record_voltages

        # Create the probe recording regions
        ens_regions[EnsembleRegions.spikes] = regions.SpikeRecordingRegion(
            n_steps if self.record_spikes else 0)
        ens_regions[EnsembleRegions.voltages] = regions.VoltageRecordingRegion(
            n_steps if self.record_voltages else 0)

        # Create constraints against which to partition; initially assume that
        # we can devote 16 cores to every problem.
        sdram_constraint = partition.Constraint(128 * 2**20,
                                                0.9)  # 90% of 128MiB
        dtcm_constraint = partition.Constraint(16 * 64 * 2**10,
                                               0.9)  # 90% of 16 cores DTCM

        # The number of cycles available is the 200MHz clock multiplied by the
        # machine timestep, i.e. 200 * the machine timestep in microseconds.
        cycles = 200 * model.machine_timestep
        cpu_constraint = partition.Constraint(cycles * 16,
                                              0.8)  # 80% of 16 cores compute

        # Form the constraints dictionary
        def _make_constraint(f, size_in, size_out, **kwargs):
            """Wrap a usage computation method to work with the partitioner."""
            def f_(vertex_slice):
                # Calculate the number of neurons
                n_neurons = vertex_slice.stop - vertex_slice.start

                # Call the original method
                return f(size_in, size_out, n_neurons, **kwargs)
            return f_

        partition_constraints = {
            sdram_constraint: _make_constraint(_lif_sdram_usage,
                                               self.ensemble.size_in,
                                               size_out),
            dtcm_constraint: _make_constraint(_lif_dtcm_usage,
                                              self.ensemble.size_in, size_out),
            cpu_constraint: _make_constraint(_lif_cpu_usage,
                                             self.ensemble.size_in, size_out),
        }

        # Partition the ensemble to create clusters of co-operating cores
        self.clusters = list()
        vertices = list()
        constraints = list()
        for sl in partition.partition(slice(0, self.ensemble.n_neurons),
                                      partition_constraints):
            # For each slice we create a cluster of co-operating cores.  We
            # instantiate the cluster and then ask it to produce vertices which
            # will be added to the netlist.
            cluster = EnsembleCluster(sl, self.ensemble.size_in, size_out,
                                      ens_regions)
            self.clusters.append(cluster)

            # Get the vertices for the cluster
            cluster_vertices = cluster.make_vertices(cycles)
            vertices.extend(cluster_vertices)

            # Create a constraint which forces these vertices to be present on
            # the same chip
            constraints.append(SameChipConstraint(cluster_vertices))

        # Return the vertices and callback methods
        return netlistspec(vertices, self.load_to_machine,
                           after_simulation_function=self.after_simulation,
                           constraints=constraints)
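
The _make_constraint helper above freezes size_in and size_out so that the partitioner can call the wrapped function with nothing but a neuron slice. A minimal usage sketch; fake_sdram_usage is a made-up stand-in, not the real _lif_sdram_usage formula:

def _make_constraint(f, size_in, size_out, **kwargs):
    # Wrap a usage function so it can be called with just a vertex slice.
    def f_(vertex_slice):
        n_neurons = vertex_slice.stop - vertex_slice.start
        return f(size_in, size_out, n_neurons, **kwargs)
    return f_


def fake_sdram_usage(size_in, size_out, n_neurons):
    # Hypothetical cost model: one word per neuron per input/output dimension
    return 4 * n_neurons * (size_in + size_out)


usage_for_slice = _make_constraint(fake_sdram_usage, size_in=16, size_out=8)
assert usage_for_slice(slice(0, 100)) == 4 * 100 * (16 + 8)  # 9600 bytes
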
    def make_vertices(self, model, n_steps):
        """Make vertices for the filter."""
        # Get the outgoing transforms and keys
        sigs = model.get_signals_from_object(self)
        if OutputPort.standard in sigs:
            outgoing = sigs[OutputPort.standard]
            transform, output_keys, sigs_pars_slices = \
                get_transforms_and_keys(outgoing)
        else:
            transform = np.array([[]])
            output_keys = list()
            sigs_pars_slices = list()

        size_out = len(output_keys)

        # Calculate how many cores and chips to use.
        if self.n_cores_per_chip is None or self.n_chips is None:
            # The number of cores is largely a function of the input size; we
            # try to ensure that each core receives at most 32 packets per
            # timestep.
            n_cores_per_chip = int(min(16, np.ceil(self.size_in / 32.0)))

            # The number of chips is now determined by the size in (columns in
            # the transform matrix), the size out (rows in the transform
            # matrix) and the number of cores per chip.
            n_chips = self.n_chips or 1
            n_cores = n_chips * n_cores_per_chip

            while True:
                rows_per_core = int(
                    np.ceil(float(size_out) / (n_cores * n_chips)))
                load_per_core = rows_per_core * self.size_in

                # The 8,000 limit caps the number of matrix elements each core
                # must process per timestep. This is a heuristic.
                if load_per_core <= 8000 or n_chips > 9:
                    # The load per core is acceptable or we're using way too
                    # many chips
                    break

                if n_cores < 16:
                    # Increase the number of cores per chip if we can
                    n_cores += 1
                else:
                    # Otherwise increase the number of chips
                    n_chips += 1

            # Store the result
            self.n_cores_per_chip = n_cores
            self.n_chips = n_chips

        # Slice the input space into the given number of subspaces; this
        # slicing is repeated on each chip.
        input_slices = list(
            divide_slice(slice(0, self.size_in), self.n_cores_per_chip))

        # Slice the output space into the given number of subspaces; these
        # slices are spread across all of the chips.
        output_slices = divide_slice(slice(0, size_out),
                                     self.n_cores_per_chip * self.n_chips)

        # Construct the output keys and transform regions; the output keys are
        # partitioned by atom, and the transform is sliced by rows.
        self.output_keys_region = regions.KeyspacesRegion(
            output_keys,
            fields=[regions.KeyField({'cluster': 'cluster'})],
            partitioned_by_atom=True)
        self.transform_region = regions.MatrixRegion(
            np_to_fix(transform),
            sliced_dimension=regions.MatrixPartitioning.rows)

        # Construct the system region
        self.system_region = SystemRegion(self.size_in, model.machine_timestep)

        # Get the incoming filters
        incoming = model.get_signals_to_object(self)
        self.filters_region, self.routing_region = make_filter_regions(
            incoming[InputPort.standard],
            model.dt,
            True,
            model.keyspaces.filter_routing_tag,
            width=self.size_in)

        # Make the vertices and constraints
        iter_output_slices = iter(output_slices)
        cons = list()  # List of constraints

        # For each chip that we'll be using
        for _ in range(self.n_chips):
            chip_vertices = list()

            # Each core is given an input slice and an output slice.  The same
            # set of input slices is used per chip, but we iterate through the
            # whole list of output slices.
            for in_slice, out_slice in zip(input_slices, iter_output_slices):
                # Determine the amount of SDRAM required (the 24 additional
                # bytes are for the application pointer table).  We also
                # include this core's contribution to a shared SDRAM vector.
                sdram = (24 + 4 * (in_slice.stop - in_slice.start) +
                         self.system_region.sizeof() +
                         self.filters_region.sizeof_padded() +
                         self.routing_region.sizeof_padded() +
                         self.output_keys_region.sizeof_padded(out_slice) +
                         self.transform_region.sizeof_padded(out_slice))

                # Create the vertex and include in the list of vertices
                v = ParallelFilterSlice(in_slice, out_slice, {
                    Cores: 1,
                    SDRAM: sdram
                }, sigs_pars_slices)
                chip_vertices.append(v)
                self.vertices.append(v)

            # Create a constraint which will force all of the vertices to
            # exist on the same chip.
            cons.append(SameChipConstraint(chip_vertices))

        # Return the spec
        return netlistspec(self.vertices,
                           self.load_to_machine,
                           constraints=cons)
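
To make the core/chip sizing loop above concrete, here is a worked replay of its arithmetic under assumed sizes (size_in = 256, size_out = 1024, with no pre-set core or chip counts); it mirrors the loop above rather than re-deriving it:

import numpy as np

# Assumed example sizes, purely illustrative.
size_in, size_out = 256, 1024

n_cores_per_chip = int(min(16, np.ceil(size_in / 32.0)))  # 8 cores to start
n_chips = 1
n_cores = n_chips * n_cores_per_chip

while True:
    rows_per_core = int(np.ceil(float(size_out) / (n_cores * n_chips)))
    load_per_core = rows_per_core * size_in
    if load_per_core <= 8000 or n_chips > 9:
        break
    if n_cores < 16:
        n_cores += 1
    else:
        n_chips += 1

# The loop grows n_cores from 8 to 16, then n_chips from 1 to 3, stopping when
# rows_per_core = ceil(1024 / (16 * 3)) = 22 and load = 22 * 256 = 5632 <= 8000.
assert (n_cores, n_chips, load_per_core) == (16, 3, 5632)
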
Example #18
    def test_multiple_source_vertices(self):
        """Test that each of the vertices associated with a source is correctly
        included in the sources of a net.
        """
        class MyVertexSlice(VertexSlice):
            def __init__(self, *args, **kwargs):
                super(MyVertexSlice, self).__init__(*args, **kwargs)
                self.args = None

            def transmits_signal(self, signal_parameters,
                                 transmission_parameters):
                self.args = (signal_parameters, transmission_parameters)
                return False

        # Create the first operator
        vertex_a0 = VertexSlice(slice(0, 1))
        vertex_a1 = VertexSlice(slice(1, 2))
        vertex_a2 = MyVertexSlice(slice(2, 3))
        load_fn_a = mock.Mock(name="load function A")
        pre_fn_a = mock.Mock(name="pre function A")
        post_fn_a = mock.Mock(name="post function A")

        object_a = mock.Mock(name="object A")
        operator_a = mock.Mock(name="operator A", spec_set=["make_vertices"])
        operator_a.make_vertices.return_value = \
            netlistspec([vertex_a0, vertex_a1, vertex_a2],
                        load_fn_a, pre_fn_a, post_fn_a)

        # Create the second operator
        vertex_b = Vertex()
        load_fn_b = mock.Mock(name="load function B")

        object_b = mock.Mock(name="object B")
        operator_b = mock.Mock(name="operator B", spec_set=["make_vertices"])
        operator_b.make_vertices.return_value = \
            netlistspec((vertex_b, ), load_fn_b)

        # Create a signal between the operators
        keyspace = mock.Mock(name="keyspace")
        keyspace.length = 32
        signal_ab_parameters = SignalParameters(keyspace=keyspace, weight=43)

        # Create the model, add the items and then generate the netlist
        model = Model()
        model.object_operators[object_a] = operator_a
        model.object_operators[object_b] = operator_b
        model.connection_map.add_connection(operator_a, None,
                                            signal_ab_parameters, None,
                                            operator_b, None, None)
        netlist = model.make_netlist()

        # Check that the netlist is as expected
        assert netlist.operator_vertices == {
            operator_a: (vertex_a0, vertex_a1, vertex_a2),
            operator_b: (vertex_b, ),
        }
        assert len(netlist.nets) == 1
        for net in itervalues(netlist.nets):
            assert net.sources == [vertex_a0, vertex_a1]
            assert net.sinks == [vertex_b]

        assert len(netlist.constraints) == 0

        # Check that `transmits_signal` was called correctly
        sig, tp = vertex_a2.args
        assert sig.keyspace is keyspace
        assert tp is None
    def test_multiple_sink_vertices(self):
        """Test that each of the vertices associated with a sink is correctly
        included in the sinks of a net.
        """
        # Create the first operator
        vertex_a = mock.Mock(name="vertex A")
        load_fn_a = mock.Mock(name="load function A")
        pre_fn_a = mock.Mock(name="pre function A")
        post_fn_a = mock.Mock(name="post function A")

        object_a = mock.Mock(name="object A")
        operator_a = mock.Mock(name="operator A", spec_set=["make_vertices"])
        operator_a.make_vertices.return_value = \
            netlistspec((vertex_a, ), load_fn_a, pre_fn_a, post_fn_a)

        # Create the second operator
        vertex_b0 = mock.Mock(name="vertex B0")
        vertex_b1 = mock.Mock(name="vertex B1")
        load_fn_b = mock.Mock(name="load function B")

        object_b = mock.Mock(name="object B")
        operator_b = mock.Mock(name="operator B", spec_set=["make_vertices"])
        operator_b.make_vertices.return_value = \
            netlistspec([vertex_b0, vertex_b1], load_fn_b)

        # Create a third operator, which won't accept the signal
        vertex_c = mock.Mock(name="vertex C")
        vertex_c.accepts_signal.side_effect = lambda _, __: False

        object_c = mock.Mock(name="object C")
        operator_c = mock.Mock(name="operator C", spec_set=["make_vertices"])
        operator_c.make_vertices.return_value = netlistspec((vertex_c, ))

        # Create a signal between the operators
        keyspace = mock.Mock(name="keyspace")
        keyspace.length = 32
        signal_ab_parameters = SignalParameters(keyspace=keyspace, weight=3)

        # Create the model, add the items and then generate the netlist
        model = Model()
        model.object_operators[object_a] = operator_a
        model.object_operators[object_b] = operator_b
        model.object_operators[object_c] = operator_c
        model.connection_map.add_connection(
            operator_a, None, signal_ab_parameters, None,
            operator_b, None, None
        )
        model.connection_map.add_connection(
            operator_a, None, signal_ab_parameters, None,
            operator_c, None, None
        )
        netlist = model.make_netlist()

        # Check that the "accepts_signal" method of vertex_c was called with
        # reasonable arguments
        assert vertex_c.accepts_signal.called

        # Check that the netlist is as expected
        assert netlist.operator_vertices == {
            operator_a: (vertex_a, ),
            operator_b: (vertex_b0, vertex_b1),
            operator_c: (vertex_c, ),
        }
        assert len(netlist.nets) == 1
        for net in itervalues(netlist.nets):
            assert net.sources == [vertex_a]
            assert net.sinks == [vertex_b0, vertex_b1]
            assert net.weight == signal_ab_parameters.weight

        assert len(netlist.constraints) == 0
    def make_vertices(self, model, n_steps):
        """Make vertices for the filter."""
        # Get the outgoing transforms and keys
        sigs = model.get_signals_from_object(self)
        if OutputPort.standard in sigs:
            outgoing = sigs[OutputPort.standard]
            transform, output_keys, sigs_pars_slices = \
                get_transforms_and_keys(outgoing)
        else:
            transform = np.array([[]])
            output_keys = list()
            sigs_pars_slices = list()

        size_out = len(output_keys)

        # Calculate how many cores and chips to use.
        if self.n_cores_per_chip is None or self.n_chips is None:
            # The number of cores is largely a function of the input size; we
            # try to ensure that each core receives at most 32 packets per
            # timestep.
            n_cores_per_chip = int(min(16, np.ceil(self.size_in / 32.0)))

            # The number of chips is now determined by the size in (columns in
            # the transform matrix), the size out (rows in the transform
            # matrix) and the number of cores per chip.
            n_chips = self.n_chips or 1
            n_cores = n_chips * n_cores_per_chip

            while True:
                rows_per_core = int(np.ceil(float(size_out) /
                                            (n_cores * n_chips)))
                load_per_core = rows_per_core * self.size_in

                # The 8,000 limit caps the number of matrix elements each core
                # must process per timestep. This is a heuristic.
                if load_per_core <= 8000 or n_chips > 9:
                    # The load per core is acceptable or we're using way too
                    # many chips
                    break

                if n_cores < 16:
                    # Increase the number of cores per chip if we can
                    n_cores += 1
                else:
                    # Otherwise increase the number of chips
                    n_chips += 1

            # Store the result
            self.n_cores_per_chip = n_cores
            self.n_chips = n_chips

        # Slice the input space into the given number of subspaces; this
        # slicing is repeated on each chip.
        input_slices = list(divide_slice(slice(0, self.size_in),
                                         self.n_cores_per_chip))

        # Slice the output space into the given number of subspaces; these
        # slices are spread across all of the chips.
        output_slices = divide_slice(slice(0, size_out),
                                     self.n_cores_per_chip * self.n_chips)

        # Construct the output keys and transform regions; the output keys are
        # partitioned by atom, and the transform is sliced by rows.
        self.output_keys_region = regions.KeyspacesRegion(
            output_keys, fields=[regions.KeyField({'cluster': 'cluster'})],
            partitioned_by_atom=True
        )
        self.transform_region = regions.MatrixRegion(
            np_to_fix(transform),
            sliced_dimension=regions.MatrixPartitioning.rows
        )

        # Construct the system region
        self.system_region = SystemRegion(self.size_in, model.machine_timestep)

        # Get the incoming filters
        incoming = model.get_signals_to_object(self)
        self.filters_region, self.routing_region = make_filter_regions(
            incoming[InputPort.standard], model.dt, True,
            model.keyspaces.filter_routing_tag, width=self.size_in
        )

        # Make the vertices and constraints
        iter_output_slices = iter(output_slices)
        cons = list()  # List of constraints

        # For each chip that we'll be using
        for _ in range(self.n_chips):
            chip_vertices = list()

            # Each core is given an input slice and an output slice.  The same
            # set of input slices is used per chip, but we iterate through the
            # whole list of output slices.
            for in_slice, out_slice in zip(input_slices,
                                           iter_output_slices):
                # Determine the amount of SDRAM required (the 24 additional
                # bytes are for the application pointer table).  We also
                # include this core's contribution to a shared SDRAM vector.
                sdram = (24 + 4*(in_slice.stop - in_slice.start) +
                         self.system_region.sizeof() +
                         self.filters_region.sizeof_padded() +
                         self.routing_region.sizeof_padded() +
                         self.output_keys_region.sizeof_padded(out_slice) +
                         self.transform_region.sizeof_padded(out_slice))

                # Create the vertex and include in the list of vertices
                v = ParallelFilterSlice(in_slice, out_slice,
                                        {Cores: 1, SDRAM: sdram},
                                        sigs_pars_slices)
                chip_vertices.append(v)
                self.vertices.append(v)

            # Create a constraint which will force all of the vertices to
            # exist on the same chip.
            cons.append(SameChipConstraint(chip_vertices))

        # Return the spec
        return netlistspec(self.vertices, self.load_to_machine,
                           constraints=cons)
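
The per-vertex SDRAM figure above is the 24-byte application pointer table, plus four bytes per input element for the core's share of the input vector, plus the (padded) sizes of the fixed and sliced regions. A small arithmetic sketch with assumed region sizes; the real figures come from sizeof() / sizeof_padded() on the regions built above:

# Assumed, purely illustrative numbers.
in_slice = slice(0, 128)

pointer_table = 24                                   # application pointer table
input_vector = 4 * (in_slice.stop - in_slice.start)  # 4 bytes per element = 512

system = 16         # assumed system region size
filters = 64        # assumed padded filter region size
routing = 32        # assumed padded routing region size
output_keys = 256   # assumed padded keys size for this output slice
transform = 2048    # assumed padded transform size for this output slice

sdram = (pointer_table + input_vector +
         system + filters + routing + output_keys + transform)
assert sdram == 2952
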
    def test_single_vertices(self):
        """Test that operators which produce single vertices work correctly and
        that all functions and signals are correctly collected and included in
        the final netlist.
        """
        # Create the first operator
        vertex_a = mock.Mock(name="vertex A")
        load_fn_a = mock.Mock(name="load function A")
        pre_fn_a = mock.Mock(name="pre function A")
        post_fn_a = mock.Mock(name="post function A")
        constraint_a = mock.Mock(name="Constraint B")

        object_a = mock.Mock(name="object A")
        operator_a = mock.Mock(name="operator A", spec_set=["make_vertices"])
        operator_a.make_vertices.return_value = \
            netlistspec((vertex_a, ), load_fn_a, pre_fn_a, post_fn_a,
                        constraint_a)

        # Create the second operator
        vertex_b = mock.Mock(name="vertex B")
        load_fn_b = mock.Mock(name="load function B")
        constraint_b = mock.Mock(name="Constraint B")

        object_b = mock.Mock(name="object B")
        operator_b = mock.Mock(name="operator B", spec_set=["make_vertices"])
        operator_b.make_vertices.return_value = \
            netlistspec((vertex_b, ), load_fn_b, constraints=[constraint_b])

        # Create a signal between the operators
        keyspace = mock.Mock(name="keyspace")
        keyspace.length = 32
        signal_ab_parameters = SignalParameters(keyspace=keyspace, weight=43)

        # Create the model, add the items and then generate the netlist
        model = Model()
        model.object_operators[object_a] = operator_a
        model.object_operators[object_b] = operator_b
        model.connection_map.add_connection(
            operator_a, None, signal_ab_parameters, None,
            operator_b, None, None
        )
        netlist = model.make_netlist()

        # Check that the make_vertices functions were called
        operator_a.make_vertices.assert_called_once_with(model)
        operator_b.make_vertices.assert_called_once_with(model)

        # Check that the netlist is as expected
        assert len(netlist.nets) == 1
        for net in itervalues(netlist.nets):
            assert net.sources == [vertex_a]
            assert net.sinks == [vertex_b]
            assert net.weight == signal_ab_parameters.weight

        assert netlist.operator_vertices == {
            operator_a: (vertex_a, ),
            operator_b: (vertex_b, ),
        }

        assert netlist.keyspaces is model.keyspaces
        assert set(netlist.constraints) == set([constraint_a, constraint_b])
        assert set(netlist.load_functions) == set([load_fn_a, load_fn_b])
        assert netlist.before_simulation_functions == [pre_fn_a]
        assert netlist.after_simulation_functions == [post_fn_a]
Example #22
    def test_multiple_sink_vertices(self):
        """Test that each of the vertices associated with a sink is correctly
        included in the sinks of a net.
        """
        # Create the first operator
        vertex_a = mock.Mock(name="vertex A")
        load_fn_a = mock.Mock(name="load function A")
        pre_fn_a = mock.Mock(name="pre function A")
        post_fn_a = mock.Mock(name="post function A")

        object_a = mock.Mock(name="object A")
        operator_a = mock.Mock(name="operator A", spec_set=["make_vertices"])
        operator_a.make_vertices.return_value = \
            netlistspec((vertex_a, ), load_fn_a, pre_fn_a, post_fn_a)

        # Create the second operator
        vertex_b0 = mock.Mock(name="vertex B0")
        vertex_b1 = mock.Mock(name="vertex B1")
        load_fn_b = mock.Mock(name="load function B")

        object_b = mock.Mock(name="object B")
        operator_b = mock.Mock(name="operator B", spec_set=["make_vertices"])
        operator_b.make_vertices.return_value = \
            netlistspec([vertex_b0, vertex_b1], load_fn_b)

        # Create a third operator, which won't accept the signal
        vertex_c = mock.Mock(name="vertex C")
        vertex_c.accepts_signal.side_effect = lambda _, __: False

        object_c = mock.Mock(name="object C")
        operator_c = mock.Mock(name="operator C", spec_set=["make_vertices"])
        operator_c.make_vertices.return_value = netlistspec((vertex_c, ))

        # Create a signal between the operators
        keyspace = mock.Mock(name="keyspace")
        keyspace.length = 32
        signal_ab_parameters = SignalParameters(keyspace=keyspace, weight=3)

        # Create the model, add the items and then generate the netlist
        model = Model()
        model.object_operators[object_a] = operator_a
        model.object_operators[object_b] = operator_b
        model.object_operators[object_c] = operator_c
        model.connection_map.add_connection(operator_a, None,
                                            signal_ab_parameters, None,
                                            operator_b, None, None)
        model.connection_map.add_connection(operator_a, None,
                                            signal_ab_parameters, None,
                                            operator_c, None, None)
        netlist = model.make_netlist()

        # Check that the "accepts_signal" method of vertex_c was called with
        # reasonable arguments
        assert vertex_c.accepts_signal.called

        # Check that the netlist is as expected
        assert netlist.operator_vertices == {
            operator_a: (vertex_a, ),
            operator_b: (vertex_b0, vertex_b1),
            operator_c: (vertex_c, ),
        }
        assert len(netlist.nets) == 1
        for net in itervalues(netlist.nets):
            assert net.sources == [vertex_a]
            assert net.sinks == [vertex_b0, vertex_b1]
            assert net.weight == signal_ab_parameters.weight

        assert len(netlist.constraints) == 0
Example #23
    def make_vertices(self, model, n_steps):
        """Construct the data which can be loaded into the memory of a
        SpiNNaker machine.
        """
        # Build encoders, gain and bias regions
        params = model.params[self.ensemble]
        ens_regions = dict()

        # Convert the encoders combined with the gain to S1615 before creating
        # the region.
        encoders_with_gain = params.scaled_encoders
        ens_regions[EnsembleRegions.encoders] = regions.MatrixRegion(
            tp.np_to_fix(encoders_with_gain),
            sliced_dimension=regions.MatrixPartitioning.rows)

        # Combine the direct input with the bias before converting to S1615 and
        # creating the region.
        bias_with_di = params.bias + np.dot(encoders_with_gain,
                                            self.direct_input)
        assert bias_with_di.ndim == 1
        ens_regions[EnsembleRegions.bias] = regions.MatrixRegion(
            tp.np_to_fix(bias_with_di),
            sliced_dimension=regions.MatrixPartitioning.rows)

        # Convert the gains to S1615 before creating the region
        ens_regions[EnsembleRegions.gain] = regions.MatrixRegion(
            tp.np_to_fix(params.gain),
            sliced_dimension=regions.MatrixPartitioning.rows)

        # Extract all the filters from the incoming connections
        incoming = model.get_signals_to_object(self)

        (ens_regions[EnsembleRegions.input_filters],
         ens_regions[EnsembleRegions.input_routing]) = make_filter_regions(
             incoming[InputPort.standard],
             model.dt,
             True,
             model.keyspaces.filter_routing_tag,
             width=self.ensemble.size_in)
        (ens_regions[EnsembleRegions.inhibition_filters],
         ens_regions[EnsembleRegions.inhibition_routing]) = \
            make_filter_regions(
                incoming[EnsembleInputPort.global_inhibition], model.dt, True,
                model.keyspaces.filter_routing_tag, width=1
            )

        # Extract all the decoders for the outgoing connections and build the
        # regions for the decoders and the regions for the output keys.
        outgoing = model.get_signals_from_object(self)
        if OutputPort.standard in outgoing:
            decoders, output_keys = \
                get_decoders_and_keys(outgoing[OutputPort.standard], True)
        else:
            decoders = np.array([])
            output_keys = list()
        size_out = decoders.shape[0]

        ens_regions[EnsembleRegions.decoders] = regions.MatrixRegion(
            tp.np_to_fix(decoders / model.dt),
            sliced_dimension=regions.MatrixPartitioning.rows)
        ens_regions[EnsembleRegions.keys] = regions.KeyspacesRegion(
            output_keys,
            fields=[regions.KeyField({'cluster': 'cluster'})],
            partitioned_by_atom=True)

        # The population length region stores information about groups of
        # co-operating cores.
        ens_regions[EnsembleRegions.population_length] = \
            regions.ListRegion("I")

        # The ensemble region contains basic information about the ensemble
        ens_regions[EnsembleRegions.ensemble] = EnsembleRegion(
            model.machine_timestep, self.ensemble.size_in)

        # The neuron region contains information specific to the neuron type
        ens_regions[EnsembleRegions.neuron] = LIFRegion(
            model.dt, self.ensemble.neuron_type.tau_rc,
            self.ensemble.neuron_type.tau_ref)

        # Manage profiling
        n_profiler_samples = 0
        self.profiled = getconfig(model.config, self.ensemble, "profile",
                                  False)
        if self.profiled:
            # Try and get number of samples from config
            n_profiler_samples = getconfig(model.config, self.ensemble,
                                           "profile_num_samples")

            # If it's not specified, calculate a sensible default
            if n_profiler_samples is None:
                n_profiler_samples = (len(EnsembleSlice.profiler_tag_names) *
                                      n_steps * 2)
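                # For example (hypothetical numbers), with four profiler tags
                # and 10000 simulation steps this default reserves
                # 4 * 10000 * 2 = 80000 samples; the factor of two presumably
                # allows an entry and an exit sample per tag per step.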

        # Create profiler region
        ens_regions[EnsembleRegions.profiler] = regions.Profiler(
            n_profiler_samples)
        ens_regions[EnsembleRegions.ensemble].n_profiler_samples = \
            n_profiler_samples

        # Manage probes
        for probe in self.local_probes:
            if probe.attr in ("output", "spikes"):
                self.record_spikes = True
            elif probe.attr == "voltage":
                self.record_voltages = True
            else:
                raise NotImplementedError(
                    "Cannot probe {} on Ensembles".format(probe.attr))

        # Set the flags
        ens_regions[EnsembleRegions.ensemble].record_spikes = \
            self.record_spikes
        ens_regions[EnsembleRegions.ensemble].record_voltages = \
            self.record_voltages

        # Create the probe recording regions
        ens_regions[EnsembleRegions.spikes] = regions.SpikeRecordingRegion(
            n_steps if self.record_spikes else 0)
        ens_regions[EnsembleRegions.voltages] = regions.VoltageRecordingRegion(
            n_steps if self.record_voltages else 0)

        # Create constraints against which to partition; initially assume that
        # we can devote 16 cores to every problem.
        sdram_constraint = partition.Constraint(128 * 2**20,
                                                0.9)  # 90% of 128MiB
        dtcm_constraint = partition.Constraint(16 * 64 * 2**10,
                                               0.9)  # 90% of 16 cores' DTCM

        # The number of cycles available per core is the 200 MHz clock rate
        # multiplied by the machine timestep, i.e. 200 cycles per microsecond
        # times the machine timestep in microseconds.
        cycles = 200 * model.machine_timestep
        cpu_constraint = partition.Constraint(cycles * 16,
                                              0.8)  # 80% of 16 cores compute
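        # For example, with a (hypothetical) 1000 us machine timestep each core
        # has 200 * 1000 = 200000 cycles per timestep, so the constraint above
        # allows at most 0.8 * 16 * 200000 = 2560000 cycles per timestep across
        # the assumed 16 cores.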

        # Form the constraints dictionary
        def _make_constraint(f, size_in, size_out, **kwargs):
            """Wrap a usage computation method to work with the partitioner."""
            def f_(vertex_slice):
                # Calculate the number of neurons
                n_neurons = vertex_slice.stop - vertex_slice.start

                # Call the original method
                return f(size_in, size_out, n_neurons, **kwargs)

            return f_

        partition_constraints = {
            sdram_constraint:
            _make_constraint(_lif_sdram_usage, self.ensemble.size_in,
                             size_out),
            dtcm_constraint:
            _make_constraint(_lif_dtcm_usage, self.ensemble.size_in, size_out),
            cpu_constraint:
            _make_constraint(_lif_cpu_usage, self.ensemble.size_in, size_out),
        }
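        # Each wrapped function is expected to be called by the partitioner
        # with a slice of neurons, e.g. (hypothetical usage)
        #     partition_constraints[sdram_constraint](slice(0, 128))
        # which evaluates _lif_sdram_usage(self.ensemble.size_in, size_out, 128).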

        # Partition the ensemble to create clusters of co-operating cores
        self.clusters = list()
        vertices = list()
        constraints = list()
        for sl in partition.partition(slice(0, self.ensemble.n_neurons),
                                      partition_constraints):
            # For each slice we create a cluster of co-operating cores.  We
            # instantiate the cluster and then ask it to produce vertices which
            # will be added to the netlist.
            cluster = EnsembleCluster(sl, self.ensemble.size_in, size_out,
                                      ens_regions)
            self.clusters.append(cluster)

            # Get the vertices for the cluster
            cluster_vertices = cluster.make_vertices(cycles)
            vertices.extend(cluster_vertices)

            # Create a constraint which forces these vertices to be present on
            # the same chip
            constraints.append(SameChipConstraint(cluster_vertices))
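            # (Presumably the vertices of a cluster must share a chip so that
            # they can co-operate through that chip's shared SDRAM; this note
            # is an explanatory assumption, not taken from the original code.)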

        # Return the vertices and callback methods
        return netlistspec(vertices,
                           self.load_to_machine,
                           after_simulation_function=self.after_simulation,
                           constraints=constraints)
    def test_multiple_source_vertices(self):
        """Test that each of the vertices associated with a source is correctly
        included in the sources of a net.
        """
        class MyVertexSlice(VertexSlice):
            def __init__(self, *args, **kwargs):
                super(MyVertexSlice, self).__init__(*args, **kwargs)
                self.args = None

            def transmits_signal(self, signal_parameters,
                                 transmission_parameters):
                self.args = (signal_parameters, transmission_parameters)
                return False

        # Create the first operator
        vertex_a0 = VertexSlice(slice(0, 1))
        vertex_a1 = VertexSlice(slice(1, 2))
        vertex_a2 = MyVertexSlice(slice(2, 3))
        load_fn_a = mock.Mock(name="load function A")
        pre_fn_a = mock.Mock(name="pre function A")
        post_fn_a = mock.Mock(name="post function A")

        object_a = mock.Mock(name="object A")
        operator_a = mock.Mock(name="operator A", spec_set=["make_vertices"])
        operator_a.make_vertices.return_value = \
            netlistspec([vertex_a0, vertex_a1, vertex_a2],
                        load_fn_a, pre_fn_a, post_fn_a)

        # Create the second operator
        vertex_b = Vertex()
        load_fn_b = mock.Mock(name="load function B")

        object_b = mock.Mock(name="object B")
        operator_b = mock.Mock(name="operator B", spec_set=["make_vertices"])
        operator_b.make_vertices.return_value = \
            netlistspec((vertex_b, ), load_fn_b)

        # Create a signal between the operators
        keyspace = mock.Mock(name="keyspace")
        keyspace.length = 32
        signal_ab_parameters = SignalParameters(keyspace=keyspace, weight=43)

        # Create the model, add the items and then generate the netlist
        model = Model()
        model.object_operators[object_a] = operator_a
        model.object_operators[object_b] = operator_b
        model.connection_map.add_connection(
            operator_a, None, signal_ab_parameters, None,
            operator_b, None, None
        )
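        # (The positional None arguments presumably stand for the source port,
        # transmission parameters, sink port and reception parameters; only the
        # signal parameters matter for this test. This reading of the signature
        # is an assumption.)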
        netlist = model.make_netlist()

        # Check that the netlist is as expected
        assert netlist.operator_vertices == {
            operator_a: (vertex_a0, vertex_a1, vertex_a2),
            operator_b: (vertex_b, ),
        }
        assert len(netlist.nets) == 1
        for net in itervalues(netlist.nets):
            assert net.sources == [vertex_a0, vertex_a1]
            assert net.sinks == [vertex_b]

        assert len(netlist.constraints) == 0

        # Check that `transmits_signal` was called correctly
        sig, tp = vertex_a2.args
        assert sig.keyspace is keyspace
        assert tp is None
Example #25
0
    def test_single_vertices(self):
        """Test that operators which produce single vertices work correctly and
        that all functions and signals are correctly collected and included in
        the final netlist.
        """
        # Create the first operator
        vertex_a = mock.Mock(name="vertex A")
        load_fn_a = mock.Mock(name="load function A")
        pre_fn_a = mock.Mock(name="pre function A")
        post_fn_a = mock.Mock(name="post function A")
        constraint_a = mock.Mock(name="Constraint A")

        object_a = mock.Mock(name="object A")
        operator_a = mock.Mock(name="operator A", spec_set=["make_vertices"])
        operator_a.make_vertices.return_value = \
            netlistspec((vertex_a, ), load_fn_a, pre_fn_a, post_fn_a,
                        constraint_a)

        # Create the second operator
        vertex_b = mock.Mock(name="vertex B")
        load_fn_b = mock.Mock(name="load function B")
        constraint_b = mock.Mock(name="Constraint B")

        object_b = mock.Mock(name="object B")
        operator_b = mock.Mock(name="operator B", spec_set=["make_vertices"])
        operator_b.make_vertices.return_value = \
            netlistspec((vertex_b, ), load_fn_b, constraints=[constraint_b])

        # Create a signal between the operators
        keyspace = mock.Mock(name="keyspace")
        keyspace.length = 32
        signal_ab_parameters = SignalParameters(keyspace=keyspace, weight=43)

        # Create the model, add the items and then generate the netlist
        model = Model()
        model.object_operators[object_a] = operator_a
        model.object_operators[object_b] = operator_b
        model.connection_map.add_connection(operator_a, None,
                                            signal_ab_parameters, None,
                                            operator_b, None, None)
        netlist = model.make_netlist()

        # Check that the make_vertices functions were called
        operator_a.make_vertices.assert_called_once_with(model)
        operator_b.make_vertices.assert_called_once_with(model)

        # Check that the netlist is as expected
        assert len(netlist.nets) == 1
        for net in itervalues(netlist.nets):
            assert net.sources == [vertex_a]
            assert net.sinks == [vertex_b]
            assert net.weight == signal_ab_parameters.weight

        assert netlist.operator_vertices == {
            operator_a: (vertex_a, ),
            operator_b: (vertex_b, ),
        }

        assert netlist.keyspaces is model.keyspaces
        assert set(netlist.constraints) == set([constraint_a, constraint_b])
        assert set(netlist.load_functions) == set([load_fn_a, load_fn_b])
        assert netlist.before_simulation_functions == [pre_fn_a]
        assert netlist.after_simulation_functions == [post_fn_a]