Exemplo n.º 1
    def make_vertices(self, cycles):
        """Partition the neurons onto multiple cores.

        Parameters
        ----------
        cycles : int
            Compute budget (CPU cycles) available per core; 80% of this is
            used as the CPU partitioning constraint.

        Returns
        -------
        list
            The `EnsembleSlice` vertices created for this cluster (also
            appended to ``self.vertices``).
        """
        # Make reduced constraints to partition against, we don't partition
        # against SDRAM as we're already sure that there is sufficient SDRAM
        # (and if there isn't we can't possibly fit all the vertices on a
        # single chip).
        dtcm_constraint = partition.Constraint(64 * 2**10, 0.9)  # 90% of DTCM
        cpu_constraint = partition.Constraint(cycles, 0.8)  # 80% of compute

        # Get the number of neurons in this cluster
        n_neurons = self.neuron_slice.stop - self.neuron_slice.start

        # Form the constraints dictionary
        def _make_constraint(f, size_in, **kwargs):
            """Wrap a usage computation method to work with the partitioner."""
            def f_(neuron_slice, output_slice):
                # Calculate the number of neurons
                n_neurons = neuron_slice.stop - neuron_slice.start

                # Calculate the number of outgoing dimensions
                size_out = output_slice.stop - output_slice.start

                # Call the original method
                return f(size_in, size_out, n_neurons, **kwargs)
            return f_

        constraints = {
            dtcm_constraint: _make_constraint(_lif_dtcm_usage, self.size_in,
                                              n_neurons_in_cluster=n_neurons),
            cpu_constraint: _make_constraint(_lif_cpu_usage, self.size_in,
                                             n_neurons_in_cluster=n_neurons),
        }

        # Partition the slice of neurons that we have
        self.neuron_slices = list()
        output_slices = list()
        for neurons, outputs in partition.partition_multiple(
                (self.neuron_slice, slice(self.size_out)), constraints):
            self.neuron_slices.append(neurons)
            output_slices.append(outputs)

        n_slices = len(self.neuron_slices)
        # NOTE(review): presumably 16 is the number of usable cores per chip
        # — confirm against the target hardware. The message was previously a
        # trailing comment, so failures raised a bare AssertionError.
        assert n_slices <= 16, "Too many cores in the cluster"

        # Also partition the input space
        input_slices = partition.divide_slice(slice(0, self.size_in),
                                              n_slices)

        # Zip these together to create the vertices
        all_slices = zip(input_slices, output_slices)
        for i, (in_slice, out_slice) in enumerate(all_slices):
            # Create the vertex
            vertex = EnsembleSlice(i, self.neuron_slices, in_slice,
                                   out_slice, self.regions)

            # Add to the list of vertices
            self.vertices.append(vertex)

        # Return all the vertices
        return self.vertices
Exemplo n.º 2
    def test_no_partitioning(self):
        """Both slice spellings yield a single, uncut partition."""
        # A generous constraint: capacity 100 with a 0.9 target.
        constraint = pac.Constraint(100, 0.9)

        # Usage is the combined slice length plus a fixed overhead of 10.
        def usage(*slices):
            total = 10
            for sl in slices:
                total += sl.stop - sl.start
            return total

        constraints = {constraint: usage}

        # Both spellings of the initial slices must give the same single
        # partition covering everything.
        explicit = list(pac.partition_multiple((slice(0, 40), slice(0, 30)),
                                               constraints))
        implicit = list(pac.partition_multiple((slice(40), slice(30)),
                                               constraints))
        expected = [(slice(0, 40), slice(0, 30))]
        assert explicit == implicit == expected
    def test_no_partitioning(self):
        """Slices that fit within the constraint are returned whole."""
        constraint = pac.Constraint(100, 0.9)

        # Usage: total length of the supplied slices plus a constant 10.
        def cons(*slices):
            return 10 + sum(sl.stop - sl.start for sl in slices)

        constraints = {constraint: cons}

        # Run the partitioner on both equivalent slice spellings.
        results = [
            list(pac.partition_multiple(initial, constraints))
            for initial in ((slice(0, 40), slice(0, 30)),
                            (slice(40), slice(30)))
        ]

        # Each run produces exactly one partition covering everything.
        assert results[0] == results[1] == [(slice(0, 40), slice(0, 30))]
Exemplo n.º 4
    def test_unpartitionable(self):
        """A fixed cost above the limit can never be partitioned away."""
        constraint = pac.Constraint(50)

        # Every sub-slice costs at least 50, so no split can ever fit.
        def overhead_heavy(*slices):
            return 50 + sum(sl.stop - sl.start for sl in slices)

        constraints = {constraint: overhead_heavy}

        # The partitioner must give up with an explicit error.
        with pytest.raises(pac.UnpartitionableError):
            list(pac.partition_multiple((slice(10), slice(2)), constraints))
    def test_unpartitionable(self):
        """Partitioning raises when the constant overhead exceeds the limit."""
        limit = pac.Constraint(50)

        # Accumulate the slice lengths, then add the prohibitive overhead.
        def cons(*slices):
            total = 0
            for sl in slices:
                total += sl.stop - sl.start
            return total + 50

        with pytest.raises(pac.UnpartitionableError):
            list(pac.partition_multiple((slice(10), slice(2)), {limit: cons}))
    def test_single_partition_step(self):
        """Over-budget slices are split exactly once, into equal halves."""
        constraint = pac.Constraint(50)

        # Usage is simply the combined length of the supplied slices.
        def total_length(*slices):
            return sum(sl.stop - sl.start for sl in slices)

        constraints = {constraint: total_length}

        # 80 + 20 = 100 > 50, so one halving step is required.
        expected = [(slice(0, 40), slice(0, 10)),
                    (slice(40, 80), slice(10, 20))]
        actual = list(pac.partition_multiple((slice(80), slice(20)),
                                             constraints))
        assert actual == expected
Exemplo n.º 7
    def test_single_partition_step(self):
        """An over-budget pair of slices is divided into two halves."""
        constraint = pac.Constraint(50)

        # Cost model: combined slice length, no overhead.
        def combined_length(*slices):
            return sum(sl.stop - sl.start for sl in slices)

        result = list(pac.partition_multiple((slice(80), slice(20)),
                                             {constraint: combined_length}))

        # Both input slices are bisected in lockstep.
        assert result == [
            (slice(0, 40), slice(0, 10)),
            (slice(40, 80), slice(10, 20)),
        ]
Exemplo n.º 8
    def make_vertices(self, cycles):
        """Partition the neurons onto multiple cores.

        Parameters
        ----------
        cycles : int
            Compute budget (CPU cycles) available per core; 80% of this is
            used as the CPU partitioning constraint.

        Returns
        -------
        list
            The `EnsembleSlice` vertices created for this cluster (also
            appended to ``self.vertices``).
        """
        # Make reduced constraints to partition against, we don't partition
        # against SDRAM as we're already sure that there is sufficient SDRAM
        # (and if there isn't we can't possibly fit all the vertices on a
        # single chip).
        dtcm_constraint = partition.Constraint(64 * 2**10, 0.9)  # 90% of DTCM
        cpu_constraint = partition.Constraint(cycles, 0.8)  # 80% of compute

        # Get the number of neurons in this cluster
        n_neurons = self.neuron_slice.stop - self.neuron_slice.start

        # Form the constraints dictionary
        def _make_constraint(f, size_in, **kwargs):
            """Wrap a usage computation method to work with the partitioner."""
            def f_(neuron_slice, output_slice):
                # Calculate the number of neurons
                n_neurons = neuron_slice.stop - neuron_slice.start

                # Calculate the number of outgoing dimensions
                size_out = output_slice.stop - output_slice.start

                # Call the original method
                return f(size_in, size_out, n_neurons, **kwargs)

            return f_

        constraints = {
            dtcm_constraint:
            _make_constraint(_lif_dtcm_usage,
                             self.size_in,
                             n_neurons_in_cluster=n_neurons),
            cpu_constraint:
            _make_constraint(_lif_cpu_usage,
                             self.size_in,
                             n_neurons_in_cluster=n_neurons),
        }

        # Partition the slice of neurons that we have
        self.neuron_slices = list()
        output_slices = list()
        for neurons, outputs in partition.partition_multiple(
            (self.neuron_slice, slice(self.size_out)), constraints):
            self.neuron_slices.append(neurons)
            output_slices.append(outputs)

        n_slices = len(self.neuron_slices)
        # NOTE(review): presumably 16 is the number of usable cores per chip
        # — confirm against the target hardware. The message was previously a
        # trailing comment, so failures raised a bare AssertionError.
        assert n_slices <= 16, "Too many cores in the cluster"

        # Also partition the input space
        input_slices = partition.divide_slice(slice(0, self.size_in), n_slices)

        # Zip these together to create the vertices
        all_slices = zip(input_slices, output_slices)
        for i, (in_slice, out_slice) in enumerate(all_slices):
            # Create the vertex
            vertex = EnsembleSlice(i, self.neuron_slices, in_slice, out_slice,
                                   self.regions)

            # Add to the list of vertices
            self.vertices.append(vertex)

        # Return all the vertices
        return self.vertices