Example #1
def test_reverse():
    o = OrderedSet()
    o.add(1)
    o.add(2)
    o.add(3)
    a = list(reversed(o))
    assert a == [3, 2, 1]
Example #2
    def _get_all_possible_recordable_variables(self):
        variables = OrderedSet()
        if isinstance(self._population._vertex, AbstractSpikeRecordable):
            variables.add(SPIKES)
        if isinstance(self._population._vertex, AbstractNeuronRecordable):
            variables.update(
                self._population._vertex.get_recordable_variables())
        return variables
Example #3
def test_update():
    o = OrderedSet()
    o.add(1)
    o.add(2)
    o.add(3)
    o.update([3, 4, 5])
    for item in (5, 4, 3, 2, 1):
        assert o.pop() == item
    with pytest.raises(KeyError):
        o.pop()
Example #4
def create_vertices_groups(vertices, same_group_as_function):
    groups = list()
    for vertex in vertices:
        same_chip_as_vertices = same_group_as_function(vertex)
        if same_chip_as_vertices:
            same_chip_as_vertices = OrderedSet(same_chip_as_vertices)
            same_chip_as_vertices.add(vertex)
            # Singletons are not interesting and are added later if needed
            if len(same_chip_as_vertices) > 1:
                add_set(groups, same_chip_as_vertices)
    return groups
Example #5
    def get_all_possible_recordable_variables(self):
        """ All variables that could be recorded.

        :rtype: set(str)
        """
        variables = OrderedSet()
        if isinstance(self.__vertex, AbstractSpikeRecordable):
            variables.add(SPIKES)
        if isinstance(self.__vertex, AbstractNeuronRecordable):
            variables.update(self.__vertex.get_recordable_variables())
        return variables
Example #6
def test_obscure_stuff():
    o = OrderedSet()
    o.add(1)
    o.add(2)
    o.add(3)
    assert [x for x in reversed(o)] == [3, 2, 1]
    o2 = OrderedSet(o)
    assert [x for x in o2] == [1, 2, 3]
    assert o == o2
    o2 |= [4]
    assert o != o2
    assert repr(OrderedSet()) == "OrderedSet()"
Example #7
    def _find_one_to_one_vertices(vertex, graph):
        """ Find vertices which have one to one connections with the given\
            vertex, and where their constraints don't force them onto\
            different chips.

        :param MachineGraph graph:
            the graph to look for other one to one vertices
        :param MachineVertex vertex:
            the vertex to use as a basis for one to one connections
        :return: set of one to one vertices
        :rtype: set(MachineVertex)
        """
        # Virtual vertices can't be forced on other chips
        if isinstance(vertex, AbstractVirtual):
            return []
        found_vertices = OrderedSet()
        vertices_seen = {vertex}

        # look for one to ones leaving this vertex
        outgoing = graph.get_edges_starting_at_vertex(vertex)
        vertices_to_try = deque(
            edge.post_vertex for edge in outgoing
            if edge.post_vertex not in vertices_seen)
        while vertices_to_try:
            next_vertex = vertices_to_try.pop()
            if next_vertex not in vertices_seen and \
                    not isinstance(next_vertex, AbstractVirtual):
                vertices_seen.add(next_vertex)
                if is_single(graph.get_edges_ending_at_vertex(next_vertex)):
                    found_vertices.add(next_vertex)
                    outgoing = graph.get_edges_starting_at_vertex(next_vertex)
                    vertices_to_try.extend(
                        edge.post_vertex for edge in outgoing
                        if edge.post_vertex not in vertices_seen)

        # look for one to ones entering this vertex
        incoming = graph.get_edges_ending_at_vertex(vertex)
        vertices_to_try = deque(
            edge.pre_vertex for edge in incoming
            if edge.pre_vertex not in vertices_seen)
        while vertices_to_try:
            next_vertex = vertices_to_try.pop()
            if next_vertex not in vertices_seen:
                vertices_seen.add(next_vertex)
                if is_single(graph.get_edges_starting_at_vertex(next_vertex)):
                    found_vertices.add(next_vertex)
                    incoming = graph.get_edges_ending_at_vertex(next_vertex)
                    vertices_to_try.extend(
                        edge.pre_vertex for edge in incoming
                        if edge.pre_vertex not in vertices_seen)

        found_vertices.update(get_vertices_on_same_chip(vertex, graph))
        return found_vertices
Example #8
def test_peek():
    o = OrderedSet()
    o.add(1)
    o.add(2)
    o.add(3)
    p1 = o.peek()
    p2 = o.pop()
    assert p1 == 3
    assert p1 == p2
    p3 = o.peek(last=False)
    assert p3 == 1
    p4 = o.pop(last=False)
    assert p4 == p3
Example #9
    def _sort_vertices_for_one_to_one_connection(self, machine_graph,
                                                 same_chip_vertex_groups):
        """

        :param machine_graph: the graph to place
        :return: list of sorted vertices
        """
        sorted_vertices = list()
        found_list = set()

        # order vertices based on constraint priority
        vertices = sort_vertices_by_known_constraints(machine_graph.vertices)

        for vertex in vertices:
            if vertex not in found_list:

                # vertices that are one to one connected with vertex and are
                # not forced off chip
                connected_vertices = self._find_one_to_one_vertices(
                    vertex, machine_graph)

                # create a list of the connected vertices that haven't
                # already been seen before
                new_list = OrderedSet()
                for found_vertex in connected_vertices:
                    if found_vertex not in found_list:
                        new_list.add(found_vertex)

                # look for vertices that have same-chip constraints but were
                # not found by the one-to-one connection search
                same_chip_vertices = list()
                for found_vertex in new_list:
                    for same_chip_constrained_vertex in \
                            same_chip_vertex_groups[found_vertex]:
                        if same_chip_constrained_vertex not in new_list:
                            same_chip_vertices.append(
                                same_chip_constrained_vertex)

                # add these newly found vertices to the list
                new_list.update(same_chip_vertices)

                sorted_vertices.append(new_list)
                found_list.update(new_list)

        # locate vertices which have no output or input, and add them for
        # placement
        for vertex in vertices:
            if vertex not in found_list:
                sorted_vertices.append([vertex])
        return sorted_vertices
Example #10
    def _get_all_recording_variables(self):
        possibles = self._get_all_possible_recordable_variables()
        variables = OrderedSet()
        for possible in possibles:
            if possible == SPIKES:
                if isinstance(self._population._vertex,
                              AbstractSpikeRecordable) \
                        and self._population._vertex.is_recording_spikes():
                    variables.add(possible)
            elif isinstance(self._population._vertex,
                            AbstractNeuronRecordable) and \
                    self._population._vertex.is_recording(possible):
                variables.add(possible)
        return variables
Example #11
    def get_all_recording_variables(self):
        """ All variables that have been set to record.

        :rtype: set(str)
        """
        possibles = self.get_all_possible_recordable_variables()
        variables = OrderedSet()
        for possible in possibles:
            if possible == SPIKES:
                if isinstance(self.__vertex, AbstractSpikeRecordable) \
                        and self.__vertex.is_recording_spikes():
                    variables.add(possible)
            elif isinstance(self.__vertex, AbstractNeuronRecordable) \
                    and self.__vertex.is_recording(possible):
                variables.add(possible)
        return variables
Example #12
def get_vertices_on_same_chip(vertex, graph):
    """ Get the vertices that must be on the same chip as the given vertex

    :param AbstractVertex vertex: The vertex to search with
    :param Graph graph: The graph containing the vertex
    :rtype: set(AbstractVertex)
    """
    # Virtual vertices can't be forced on different chips
    if isinstance(vertex, AbstractVirtual):
        return []
    same_chip_as_vertices = OrderedSet()
    for constraint in vertex.constraints:
        if isinstance(constraint, SameChipAsConstraint):
            same_chip_as_vertices.add(constraint.vertex)

    same_chip_as_vertices.update(
        edge.post_vertex for edge in graph.get_edges_starting_at_vertex(vertex)
        if edge.traffic_type == EdgeTrafficType.SDRAM)
    return same_chip_as_vertices
Example #13
def test_repr():
    o = OrderedSet()
    o.add(12)
    o.add(78)
    o.add(56)
    o.add(34)
    o.add(90)
    s = "{}".format(o)
    assert s == "OrderedSet([12, 78, 56, 34, 90])"
Example #14
    def _get_data_for_vertices_locked(self, vertices, progress=None):
        receivers = OrderedSet()
        if self._uses_advanced_monitors:

            # locate receivers
            for vertex in vertices:
                placement = self._placements.get_placement_of_vertex(vertex)
                receivers.add(
                    funs.locate_extra_monitor_mc_receiver(
                        self._machine, placement.x, placement.y,
                        self._extra_monitor_cores_to_ethernet_connection_map))

            # set time out
            for receiver in receivers:
                receiver.set_cores_for_data_extraction(
                    transceiver=self._transceiver,
                    placements=self._placements,
                    extra_monitor_cores_for_router_timeout=(
                        self._extra_monitor_cores))

        # get data
        for vertex in vertices:
            placement = self._placements.get_placement_of_vertex(vertex)
            for recording_region_id in vertex.get_recorded_region_ids():
                self.get_data_for_vertex(placement, recording_region_id)
                if progress is not None:
                    progress.update()

        # revert time out
        if self._uses_advanced_monitors:
            for receiver in receivers:
                receiver.unset_cores_for_data_extraction(
                    transceiver=self._transceiver,
                    placements=self._placements,
                    extra_monitor_cores_for_router_timeout=(
                        self._extra_monitor_cores))
Example #15
def test_pop():
    o = OrderedSet()
    o.add(12)
    o.add(78)
    o.add(56)
    o.add(34)
    o.add(90)
    for item in [90, 34, 56, 78, 12]:
        assert o.pop() == item
    with pytest.raises(KeyError):  # @UndefinedVariable
        o.pop()
Example #16
def test_containment():
    o = OrderedSet()
    o.add(12)
    o.add(78)
    o.add(56)
    o.add(34)
    o.add(90)
    for item in [12, 78, 56, 34, 90]:
        assert item in o
    for item in [123, 456, 789]:
        assert item not in o
Example #17
def test_ordered_ness():
    o = OrderedSet()
    o.add(12)
    o.add(78)
    o.add(56)
    o.add(34)
    o.add(90)
    assert len(o) == 5
    assert list(o) == [12, 78, 56, 34, 90]
    result = []
    for item in o:
        result.append(item)
    assert result == [12, 78, 56, 34, 90]
Example #18
class AbstractEdgePartition(ConstrainedObject, metaclass=AbstractBase):
    """ A collection of edges which start at a single vertex which have the\
        same semantics and so can share a single key or block of SDRAM\
        (depending on edge type).
    """

    __slots__ = [
        # The partition identifier
        "_identifier",
        # The edges in the partition
        "_edges",
        # The type of edges to accept
        "_allowed_edge_types",
        # The weight of traffic going down this partition
        "_traffic_weight",
        # The label of the graph
        "_label",
        # class name
        "_class_name",
        # Safety code generated by the graph when added to that graph
        "_graph_code"
    ]

    def __init__(self, identifier, allowed_edge_types, constraints, label,
                 traffic_weight, class_name):
        """
        :param str identifier: The identifier of the partition
        :param allowed_edge_types: The types of edges allowed
        :type allowed_edge_types: type or tuple(type, ...)
        :param iterable(AbstractConstraint) constraints:
            Any initial constraints
        :param str label: An optional label of the partition
        :param int traffic_weight:
            The weight of traffic going down this partition
        """
        super().__init__(constraints)
        self._label = label
        self._identifier = identifier
        self._edges = OrderedSet()
        self._allowed_edge_types = allowed_edge_types
        self._traffic_weight = traffic_weight
        self._class_name = class_name
        self._graph_code = None

    @property
    def label(self):
        """ The label of the edge partition.

        :rtype: str
        """
        return self._label

    def add_edge(self, edge, graph_code):
        """ Add an edge to the edge partition.

        .. note::
            This method should only be called by the ``add_edge`` method of
            the graph that owns the partition. Calling it from anywhere else,
            even with the correct graph_code, will lead to unsupported
            inconsistency.

        :param AbstractEdge edge: the edge to add
        :param int graph_code:
            A code to check the correct graph is calling this method
        :raises PacmanInvalidParameterException:
            If the edge does not belong in this edge partition
        """
        if graph_code != self._graph_code:
            raise PacmanConfigurationException(
                "Only one graph should add edges")
        if self._graph_code is None:
            raise PacmanConfigurationException(
                "Only Graphs can add edges to partitions")

        # Check for an incompatible edge
        if not isinstance(edge, self._allowed_edge_types):
            raise PacmanInvalidParameterException(
                "edge", str(edge.__class__),
                "Edges of this graph must be one of the following types:"
                " {}".format(self._allowed_edge_types))
        self._edges.add(edge)

    def register_graph_code(self, graph_code):
        """
        Allows the graph to register its code when the partition is added
        """
        if self._graph_code is not None:
            raise PacmanConfigurationException(
                "Illegal attempt to add partition {} to a second "
                "graph".format(self))
        self._graph_code = graph_code

    @property
    def identifier(self):
        """ The identifier of this edge partition.

        :rtype: str
        """
        return self._identifier

    @property
    def edges(self):
        """ The edges in this edge partition.

        .. note::
            The order in which the edges are added is preserved for when they
            are requested later. If not, please talk to the software team.

        :rtype: iterable(AbstractEdge)
        """
        return self._edges

    @property
    def n_edges(self):
        """ The number of edges in the edge partition.

        :rtype: int
        """
        return len(self._edges)

    @property
    def traffic_weight(self):
        """ The weight of the traffic in this edge partition compared to\
            other partitions.

        :rtype: int
        """
        return self._traffic_weight

    def __repr__(self):
        edges = ""
        for edge in self._edges:
            if edge.label is not None:
                edges += edge.label + ","
            else:
                edges += str(edge) + ","
        return _REPR_TEMPLATE.format(self._class_name, self._identifier, edges,
                                     self.constraints, self.label)

    def __str__(self):
        return self.__repr__()

    def __contains__(self, edge):
        """ Check if the edge is contained within this partition

        :param AbstractEdge edge: the edge to search for.
        :rtype: bool
        """
        return edge in self._edges

    @abstractmethod
    def clone_without_edges(self):
        """ Make a copy of this edge partition without any of the edges in it

        This follows the design pattern that only the graph adds edges to
        partitions already added to the graph

        :return: The copied edge partition but excluding edges
        """

    @abstractproperty
    def pre_vertices(self):
        """
Example #19
class ApplicationEdge(AbstractEdge):
    """ A simple implementation of an application edge.
    """

    __slots__ = [
        # The vertex at the start of the edge
        "_pre_vertex",

        # The vertex at the end of the edge
        "_post_vertex",

        # Machine edge type
        "_machine_edge_type",

        # The label
        "_label",

        # Ordered set of associated machine edges
        "__machine_edges"
    ]

    def __init__(self,
                 pre_vertex,
                 post_vertex,
                 label=None,
                 machine_edge_type=MachineEdge):
        """
        :param ApplicationVertex pre_vertex:
            The application vertex at the start of the edge.
        :param ApplicationVertex post_vertex:
            The application vertex at the end of the edge.
        :param label: The name of the edge.
        :type label: str or None
        :param machine_edge_type:
            The type of machine edges made from this app edge. If ``None``,
            standard machine edges will be made.
        :type machine_edge_type: type(MachineEdge)
        """
        self._label = label
        self._pre_vertex = pre_vertex
        self._post_vertex = post_vertex
        if not issubclass(machine_edge_type, MachineEdge):
            raise ValueError(
                "machine_edge_type must be a kind of machine edge")
        self._machine_edge_type = machine_edge_type
        self.__machine_edges = OrderedSet()

    @property
    @overrides(AbstractEdge.label)
    def label(self):
        return self._label

    @property
    @overrides(AbstractEdge.pre_vertex)
    def pre_vertex(self):
        return self._pre_vertex

    @property
    @overrides(AbstractEdge.post_vertex)
    def post_vertex(self):
        return self._post_vertex

    @property
    def machine_edges(self):
        """ The machine

        :rtype: iterable(MachineEdge)
        """
        return self.__machine_edges

    def remember_associated_machine_edge(self, machine_edge):
        """ Adds the Machine Edge to the iterable returned by machine_edges

        :param MachineEdge machine_edge: A pointer to a machine_edge.
            This edge may not be fully initialised
        """
        self.__machine_edges.add(machine_edge)

    def forget_machine_edges(self):
        """ Clear the collection of machine edges created by this application
            edge.
        """
        self.__machine_edges = OrderedSet()
Example #20
class CoreTracker(object):
    """ Represents the number of cores and sdram left to allocate
    """

    __slots__ = [

        # The number of cores available after preallocation
        "_n_cores",

        # cores available including ones needed for preallocation
        "_cores",

        # keep list of counts of the cores per n_cores_available
        "_cores_counter",
    ]

    def __init__(self, chip, preallocated_resources, cores_counter):
        """
        :param ~spinn_machine.Chip chip:
            chip whose resources can be allocated
        :param preallocated_resources:
        :type preallocated_resources: PreAllocatedResourceContainer or None
        """
        self._cores = OrderedSet()
        for processor in chip.processors:
            if not processor.is_monitor:
                self._cores.add(processor.processor_id)
        self._n_cores = len(self._cores)
        if preallocated_resources:
            if chip.ip_address:
                self._n_cores -= preallocated_resources.cores_ethernet
            else:
                self._n_cores -= preallocated_resources.cores_all
        if chip.virtual:
            self._cores_counter = None
        else:
            self._cores_counter = cores_counter
        if self._cores_counter:
            self._cores_counter[self._n_cores] += 1

    @property
    def n_cores_available(self):
        return self._n_cores

    def is_core_available(self, p):
        if p is None:
            return self.is_available
        else:
            return p in self._cores

    def available_core(self):
        return self._cores.peek()

    @property
    def is_available(self):
        return self._n_cores > 0

    def allocate(self, p):
        if p is None:
            p = self._cores.pop()
        else:
            self._cores.remove(p)
        if self._cores_counter:
            self._cores_counter[self._n_cores] -= 1
        self._n_cores -= 1
        if self._cores_counter:
            self._cores_counter[self._n_cores] += 1

        if self._n_cores <= 0:
            self._cores = OrderedSet()
        return p

    def deallocate(self, p):
        self._cores.add(p)
        if self._cores_counter:
            self._cores_counter[self._n_cores] -= 1
        self._n_cores += 1
        if self._cores_counter:
            self._cores_counter[self._n_cores] += 1
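A rough sketch of how the tracker above might be driven. The stub Processor and Chip types here are simplified stand-ins (assumptions, not the real spinn_machine classes), and an empty Counter is used only as a placeholder for the shared cores counter:

from collections import namedtuple, Counter

Processor = namedtuple("Processor", "processor_id is_monitor")
Chip = namedtuple("Chip", "processors ip_address virtual")

# 18 cores, with core 0 acting as the monitor, on a non-virtual chip
chip = Chip(processors=[Processor(p, p == 0) for p in range(18)],
            ip_address=None, virtual=False)
tracker = CoreTracker(chip, preallocated_resources=None,
                      cores_counter=Counter())

assert tracker.n_cores_available == 17
p = tracker.allocate(None)            # take any free core
assert not tracker.is_core_available(p)
tracker.deallocate(p)                 # and give it back
assert tracker.is_core_available(p)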
Example #21
class Graph(ConstrainedObject, AbstractGraph):
    """ A graph implementation that specifies the allowed types of the\
        vertices and edges.
    """

    __slots__ = [
        # The classes of vertex that are allowed in this graph
        "_allowed_vertex_types",
        # The classes of edges that are allowed in this graph
        "_allowed_edge_types",
        # The classes of outgoing edge partition that are allowed in this
        # graph
        "_allowed_partition_types",
        # The vertices of the graph
        "_vertices",
        # The outgoing edge partitions of the graph by name
        "_outgoing_edge_partitions_by_name",
        # The outgoing edges by pre-vertex
        "_outgoing_edges",
        # The incoming edges by post-vertex
        "_incoming_edges",
        # map between incoming edges and their associated partitions
        "_incoming_edges_by_partition_name",
        # The outgoing edge partitions by pre-vertex
        "_outgoing_edge_partitions_by_pre_vertex",
        # the outgoing partitions by edge
        "_outgoing_edge_partition_by_edge",
        # The label of the graph
        "_label"]

    def __init__(self, allowed_vertex_types, allowed_edge_types,
                 allowed_partition_types, label):
        """
        :param allowed_vertex_types:\
            A single or tuple of types of vertex to be allowed in the graph
        :param allowed_edge_types:\
            A single or tuple of types of edges to be allowed in the graph
        :param allowed_partition_types:\
            A single or tuple of types of partitions to be allowed in the graph
        :param label: The label on the graph, or None
        """
        super(Graph, self).__init__(None)
        self._allowed_vertex_types = allowed_vertex_types
        self._allowed_edge_types = allowed_edge_types
        self._allowed_partition_types = allowed_partition_types

        self._vertices = OrderedSet()
        self._outgoing_edge_partitions_by_name = OrderedDict()
        self._outgoing_edges = DefaultOrderedDict(OrderedSet)
        self._incoming_edges = DefaultOrderedDict(OrderedSet)
        self._incoming_edges_by_partition_name = DefaultOrderedDict(list)
        self._outgoing_edge_partitions_by_pre_vertex = \
            DefaultOrderedDict(OrderedSet)
        self._outgoing_edge_partition_by_edge = OrderedDict()
        self._label = label

    @property
    @overrides(AbstractGraph.label)
    def label(self):
        return self._label

    @overrides(AbstractGraph.add_vertex)
    def add_vertex(self, vertex):
        if not isinstance(vertex, self._allowed_vertex_types):
            raise PacmanInvalidParameterException(
                "vertex", vertex.__class__,
                "Vertices of this graph must be one of the following types:"
                " {}".format(self._allowed_vertex_types))
        self._vertices.add(vertex)

    @overrides(AbstractGraph.add_edge)
    def add_edge(self, edge, outgoing_edge_partition_name):
        # verify that the edge is one suitable for this graph
        if not isinstance(edge, self._allowed_edge_types):
            raise PacmanInvalidParameterException(
                "edge", edge.__class__,
                "Edges of this graph must be one of the following types:"
                " {}".format(self._allowed_edge_types))

        if edge.pre_vertex not in self._vertices:
            raise PacmanInvalidParameterException(
                "edge", edge.pre_vertex, "pre-vertex must be known in graph")
        if edge.post_vertex not in self._vertices:
            raise PacmanInvalidParameterException(
                "edge", edge.post_vertex, "post-vertex must be known in graph")

        # Add the edge to the partition
        partition = None
        if ((edge.pre_vertex, outgoing_edge_partition_name) not in
                self._outgoing_edge_partitions_by_name):
            partition = OutgoingEdgePartition(
                outgoing_edge_partition_name, self._allowed_edge_types)
            self._outgoing_edge_partitions_by_pre_vertex[
                edge.pre_vertex].add(partition)
            self._outgoing_edge_partitions_by_name[
                edge.pre_vertex, outgoing_edge_partition_name] = partition
        else:
            partition = self._outgoing_edge_partitions_by_name[
                edge.pre_vertex, outgoing_edge_partition_name]
        partition.add_edge(edge)

        # Add the edge to the indices
        self._outgoing_edges[edge.pre_vertex].add(edge)
        self._incoming_edges_by_partition_name[
            (edge.post_vertex, outgoing_edge_partition_name)].append(edge)
        self._incoming_edges[edge.post_vertex].add(edge)
        self._outgoing_edge_partition_by_edge[edge] = partition

    @overrides(AbstractGraph.add_outgoing_edge_partition)
    def add_outgoing_edge_partition(self, outgoing_edge_partition):

        # verify that this partition is suitable for this graph
        if not isinstance(
                outgoing_edge_partition, self._allowed_partition_types):
            raise PacmanInvalidParameterException(
                "outgoing_edge_partition", outgoing_edge_partition.__class__,
                "Partitions of this graph must be one of the following types:"
                " {}".format(self._allowed_partition_types))

        # check this partition doesn't already exist
        if ((outgoing_edge_partition.pre_vertex,
                outgoing_edge_partition.identifier) in
                self._outgoing_edge_partitions_by_name):
            raise PacmanAlreadyExistsException(
                "{}".format(OutgoingEdgePartition.__class__),
                (outgoing_edge_partition.pre_vertex,
                 outgoing_edge_partition.identifier))

        self._outgoing_edge_partitions_by_pre_vertex[
            outgoing_edge_partition.pre_vertex].add(outgoing_edge_partition)
        self._outgoing_edge_partitions_by_name[
            outgoing_edge_partition.pre_vertex,
            outgoing_edge_partition.identifier] = outgoing_edge_partition

    @property
    @overrides(AbstractGraph.vertices)
    def vertices(self):
        return self._vertices

    @property
    @overrides(AbstractGraph.n_vertices)
    def n_vertices(self):
        return len(self._vertices)

    @property
    @overrides(AbstractGraph.edges)
    def edges(self):
        return [
            edge
            for partition in self._outgoing_edge_partitions_by_name.values()
            for edge in partition.edges]

    @property
    @overrides(AbstractGraph.outgoing_edge_partitions)
    def outgoing_edge_partitions(self):
        return self._outgoing_edge_partitions_by_name.values()

    @property
    @overrides(AbstractGraph.n_outgoing_edge_partitions)
    def n_outgoing_edge_partitions(self):
        return len(self._outgoing_edge_partitions_by_name)

    @overrides(AbstractGraph.get_outgoing_partition_for_edge)
    def get_outgoing_partition_for_edge(self, edge):
        return self._outgoing_edge_partition_by_edge[edge]

    @overrides(AbstractGraph.get_edges_starting_at_vertex)
    def get_edges_starting_at_vertex(self, vertex):
        return self._outgoing_edges[vertex]

    @overrides(AbstractGraph.get_edges_ending_at_vertex)
    def get_edges_ending_at_vertex(self, vertex):
        if vertex not in self._incoming_edges:
            return []
        return self._incoming_edges[vertex]

    @overrides(AbstractGraph.get_edges_ending_at_vertex_with_partition_name)
    def get_edges_ending_at_vertex_with_partition_name(
            self, vertex, partition_name):
        key = (vertex, partition_name)
        if key not in self._incoming_edges_by_partition_name:
            return []
        return self._incoming_edges_by_partition_name[key]

    @overrides(AbstractGraph.get_outgoing_edge_partitions_starting_at_vertex)
    def get_outgoing_edge_partitions_starting_at_vertex(self, vertex):
        return self._outgoing_edge_partitions_by_pre_vertex[vertex]

    @overrides(AbstractGraph.get_outgoing_edge_partition_starting_at_vertex)
    def get_outgoing_edge_partition_starting_at_vertex(
            self, vertex, outgoing_edge_partition_name):
        return self._outgoing_edge_partitions_by_name.get(
            (vertex, outgoing_edge_partition_name), None)
Example #22
def test_set_ness():
    o = OrderedSet()
    assert len(o) == 0
    o.add(123)
    assert len(o) == 1
    o.add(123)
    assert len(o) == 1
    o.add(456)
    assert len(o) == 2
    o.add(456)
    assert len(o) == 2
    o.add(123)
    assert len(o) == 2
    assert o == set([123, 456])
    assert o == set([456, 123])
    assert o == [123, 456]
    assert o == [456, 123]
    o.remove(123)
    assert len(o) == 1
    o.remove(456)
    assert len(o) == 0
    with pytest.raises(KeyError):  # @UndefinedVariable
        o.remove(789)
    o.discard(789)
    assert len(o) == 0
    o.add(789)
    assert len(o) == 1
    assert 789 in o
    o.discard(789)
    assert 789 not in o
    assert len(o) == 0
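The tests above exercise most of the OrderedSet behaviour: insertion order, peek and pop from either end, set-style equality against sets and lists, and remove/discard. As an illustration only (not the spinn_utilities implementation), that behaviour can be sketched on top of an OrderedDict:

from collections import OrderedDict

class _TinyOrderedSet(object):
    """ Illustration only: members are stored as keys of an OrderedDict so
        that insertion order is preserved while membership tests stay O(1).
    """

    def __init__(self, iterable=None):
        self._map = OrderedDict()
        if iterable is not None:
            self.update(iterable)

    def add(self, value):
        # re-adding an existing value keeps its original position
        self._map.setdefault(value, None)

    def update(self, iterable):
        for value in iterable:
            self.add(value)

    def remove(self, value):
        # raises KeyError if the value is absent, as test_set_ness expects
        del self._map[value]

    def discard(self, value):
        self._map.pop(value, None)

    def pop(self, last=True):
        # popitem raises KeyError on an empty set, as test_pop expects
        key, _ = self._map.popitem(last=last)
        return key

    def peek(self, last=True):
        # look at one end without removing anything
        return next(reversed(self._map) if last else iter(self._map))

    def __contains__(self, value):
        return value in self._map

    def __iter__(self):
        return iter(self._map)

    def __reversed__(self):
        return reversed(self._map)

    def __len__(self):
        return len(self._map)

    def __eq__(self, other):
        # like the real OrderedSet, equality ignores order
        return set(self._map) == set(other)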
Example #23
def get_same_size_vertex_groups(vertices):
    """ Get a dictionary of vertex to vertex that must be partitioned the same\
        size
    """

    # Dict of vertex to list of vertices with same size
    # (repeated lists expected)
    same_size_vertices = OrderedDict()

    for vertex in vertices:

        # Find all vertices that have a same size constraint associated with
        #  this vertex
        same_size_as_vertices = list()
        for constraint in vertex.constraints:
            if isinstance(constraint, SameAtomsAsVertexConstraint):
                if vertex.n_atoms != constraint.vertex.n_atoms:
                    raise PacmanPartitionException(
                        "Vertices {} ({} atoms) and {} ({} atoms) must be of"
                        " the same size to partition them together".format(
                            vertex.label, vertex.n_atoms,
                            constraint.vertex.label,
                            constraint.vertex.n_atoms))
                same_size_as_vertices.append(constraint.vertex)

        if not same_size_as_vertices:
            same_size_vertices[vertex] = {vertex}
            continue

        # Go through all the vertices that want to have the same size
        # as the top level vertex
        for same_size_vertex in same_size_as_vertices:

            # Neither vertex has been seen
            if (same_size_vertex not in same_size_vertices and
                    vertex not in same_size_vertices):

                # add both to a new group
                group = OrderedSet([vertex, same_size_vertex])
                same_size_vertices[vertex] = group
                same_size_vertices[same_size_vertex] = group

            # Both vertices have been seen elsewhere
            elif (same_size_vertex in same_size_vertices and
                    vertex in same_size_vertices):

                # merge their groups
                group_1 = same_size_vertices[vertex]
                group_2 = same_size_vertices[same_size_vertex]
                group_1.update(group_2)
                for vert in group_1:
                    same_size_vertices[vert] = group_1

            # The current vertex has been seen elsewhere
            elif vertex in same_size_vertices:

                # add the new vertex to the existing group
                group = same_size_vertices[vertex]
                group.add(same_size_vertex)
                same_size_vertices[same_size_vertex] = group

            # The other vertex has been seen elsewhere
            elif same_size_vertex in same_size_vertices:

                #  so add this vertex to the existing group
                group = same_size_vertices[same_size_vertex]
                group.add(vertex)
                same_size_vertices[vertex] = group

    return same_size_vertices
Example #24
def get_same_size_vertex_groups(vertices):
    """ Get a dictionary of vertex to vertex that must be partitioned the same\
        size.

    :param iterable(ApplicationVertex) vertices:
    :rtype: dict(ApplicationVertex, set(ApplicationVertex))
    """

    # Dict of vertex to list of vertices with same size
    # (repeated lists expected)
    same_size_vertices = dict()

    for vertex in vertices:

        # Find all vertices that have a same size constraint associated with
        #  this vertex
        same_size_as_vertices = list()
        for constraint in vertex.constraints:
            if isinstance(constraint, SameAtomsAsVertexConstraint):
                if vertex.n_atoms != constraint.vertex.n_atoms:
                    raise PacmanPartitionException(
                        VERTICES_NEED_TO_BE_SAME_SIZE_ERROR.format(
                            vertex.label, vertex.n_atoms,
                            constraint.vertex.label,
                            constraint.vertex.n_atoms))
                same_size_as_vertices.append(constraint.vertex)

        if not same_size_as_vertices:
            same_size_vertices[vertex] = {vertex}
            continue

        # Go through all the vertices that want to have the same size
        # as the top level vertex
        for same_size_vertex in same_size_as_vertices:

            # Neither vertex has been seen
            if (same_size_vertex not in same_size_vertices
                    and vertex not in same_size_vertices):

                # add both to a new group
                group = OrderedSet([vertex, same_size_vertex])
                same_size_vertices[vertex] = group
                same_size_vertices[same_size_vertex] = group

            # Both vertices have been seen elsewhere
            elif (same_size_vertex in same_size_vertices
                  and vertex in same_size_vertices):

                # merge their groups
                group_1 = same_size_vertices[vertex]
                group_2 = same_size_vertices[same_size_vertex]
                group_1.update(group_2)
                for vert in group_1:
                    same_size_vertices[vert] = group_1

            # The current vertex has been seen elsewhere
            elif vertex in same_size_vertices:

                # add the new vertex to the existing group
                group = same_size_vertices[vertex]
                group.add(same_size_vertex)
                same_size_vertices[same_size_vertex] = group

            # The other vertex has been seen elsewhere
            elif same_size_vertex in same_size_vertices:

                #  so add this vertex to the existing group
                group = same_size_vertices[same_size_vertex]
                group.add(vertex)
                same_size_vertices[vertex] = group

    return same_size_vertices
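The merging logic in the two versions above behaves like an incremental union of groups. The toy run below uses plain strings and a hypothetical same_as mapping instead of vertices and constraints, just to show how the four branches combine groups:

same_as = {"B": ["A"], "C": ["B"], "E": ["D"]}   # hypothetical constraints
groups = {}
for vertex in ["A", "B", "C", "D", "E"]:
    partners = same_as.get(vertex, [])
    if not partners:
        groups.setdefault(vertex, {vertex})
        continue
    for partner in partners:
        if partner not in groups and vertex not in groups:
            group = {vertex, partner}                    # neither seen: new group
            groups[vertex] = group
            groups[partner] = group
        elif partner in groups and vertex in groups:
            merged = groups[vertex] | groups[partner]    # both seen: merge groups
            for member in merged:
                groups[member] = merged
        elif vertex in groups:
            groups[vertex].add(partner)                  # extend this vertex's group
            groups[partner] = groups[vertex]
        else:
            groups[partner].add(vertex)                  # extend the partner's group
            groups[vertex] = groups[partner]

assert groups["A"] == {"A", "B", "C"}
assert groups["D"] == groups["E"] == {"D", "E"}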
Example #25
class SDRAMSplitter(AbstractDependentSplitter):
    """ sdram splitter
    """

    N_VERTS = 3

    __slots__ = [
        "_partition_type",
        "_pre_vertices",
        "_pre_slices",
        "_post_slice",
        "_post_vertex",
        "_app_edge"]

    def __init__(self, partition_type, other_splitter):
        super().__init__(other_splitter, "")
        self._partition_type = partition_type
        self._pre_vertices = OrderedSet()
        self._post_vertex = None
        self._pre_slices = OrderedSet()
        self._post_slice = None
        self._app_edge = None

    def _get_new_map(self, edge_types, vertices):
        """ builds map of machine vertex to edge type

        :param edge_types: the type of edges to add to the dict.

        :return: dict of vertex as key, edge types as list in value
        """
        result = OrderedDict()
        for vertex in vertices:
            result[vertex] = edge_types
        return result

    @overrides(AbstractDependentSplitter.get_out_going_vertices)
    def get_out_going_vertices(self, edge, outgoing_edge_partition):
        if edge == self._app_edge:
            return {}
        return self._get_new_map([SDRAMMachineEdge], self._pre_vertices)

    @overrides(AbstractDependentSplitter.get_in_coming_vertices)
    def get_in_coming_vertices(
            self, edge, outgoing_edge_partition, src_machine_vertex):
        if edge == self._app_edge:
            return {}
        return self._get_new_map([SDRAMMachineEdge], [self._post_vertex])

    @inject_items({"app_graph": "ApplicationGraph"})
    @overrides(
        AbstractDependentSplitter.create_machine_vertices,
        additional_arguments=["app_graph"])
    def create_machine_vertices(
            self, resource_tracker, machine_graph, app_graph):

        # slices
        self._post_slice = Slice(
            0, int(self._governed_app_vertex.n_atoms / self.N_VERTS))

        for count in range(1, self.N_VERTS):
            self._pre_slices.add(Slice(
                self._post_slice.n_atoms * count,
                self._post_slice.n_atoms * count + self._post_slice.n_atoms))

        # mac verts
        self._post_vertex = (
            SDRAMMachineVertex(
                vertex_slice=self._post_slice, label=None,
                constraints=None, app_vertex=self._governed_app_vertex,
                sdram_cost=self._governed_app_vertex.fixed_sdram_value))
        resource_tracker.allocate_constrained_resources(
            self._post_vertex.resources_required,
            self._governed_app_vertex.constraints)
        machine_graph.add_vertex(self._post_vertex)

        for vertex_slice in self._pre_slices:
            pre_vertex = (
                SDRAMMachineVertex(
                    vertex_slice=vertex_slice, label=None,
                    constraints=None, app_vertex=self._governed_app_vertex,
                    sdram_cost=self._governed_app_vertex.fixed_sdram_value))
            self._pre_vertices.add(pre_vertex)

            # allocate res
            resource_tracker.allocate_constrained_resources(
                pre_vertex.resources_required,
                self._governed_app_vertex.constraints)

            # add to mac graph
            machine_graph.add_vertex(pre_vertex)

        # add outgoing edge partition to mac graph
        if self._other_splitter is not None:
            total_pre_verts = list()
            total_pre_verts.extend(self._pre_vertices)
            for incoming_edge in app_graph.get_edges_ending_at_vertex(
                    self._governed_app_vertex):
                if (incoming_edge.pre_vertex.splitter ==
                        self._other_splitter):
                    outgoing_edge_partition = (
                        app_graph.get_outgoing_partition_for_edge(
                            incoming_edge))
                    total_pre_verts.extend(
                        self._other_splitter.get_out_going_vertices(
                            incoming_edge, outgoing_edge_partition))
            machine_graph.add_outgoing_edge_partition(self._partition_type(
                identifier="sdram", pre_vertices=total_pre_verts,
                label="sdram"))

        # add edge between the two verts app and mac
        self._app_edge = ApplicationEdge(
            self._governed_app_vertex, self._governed_app_vertex)
        app_graph.add_edge(self._app_edge, "sdram_app")

        # mac add
        for pre_vertex in self._pre_vertices:
            edge = SDRAMMachineEdge(
                pre_vertex, self._post_vertex, label="sdram",
                app_edge=self._app_edge)
            machine_graph.add_edge(edge, "sdram")

        return [self._post_vertex] + list(self._pre_vertices)

    @overrides(AbstractDependentSplitter.get_out_going_slices)
    def get_out_going_slices(self):
        return self._post_vertex, True

    @overrides(AbstractDependentSplitter.get_in_coming_slices)
    def get_in_coming_slices(self):
        return self._pre_vertices, True

    @overrides(AbstractDependentSplitter.machine_vertices_for_recording)
    def machine_vertices_for_recording(self, variable_to_record):
        return [self._post_vertex] + list(self._pre_vertices)

    @overrides(AbstractDependentSplitter.reset_called)
    def reset_called(self):
        pass
Example #26
class ConstrainedObject(object):
    """ An implementation of an object which holds constraints.
    """

    __slots__ = [
        # The constraints of the object
        "_constraints"
    ]

    def __init__(self, constraints=None):
        """
        :param iterable(AbstractConstraint) constraints:
            Any initial constraints
        """

        # safety point for diamond inheritance
        if not hasattr(self, '_constraints') or self._constraints is None:
            self._constraints = OrderedSet()

        # add new constraints to the set
        self.add_constraints(constraints)

    def add_constraint(self, constraint):
        """ Add a new constraint to the collection of constraints

        :param AbstractConstraint constraint: constraint to add
        :raise PacmanInvalidParameterException:
            If the constraint is not valid
        """
        if constraint is None:
            raise PacmanInvalidParameterException("constraint", constraint,
                                                  "must not be None")
        if not isinstance(constraint, AbstractConstraint):
            raise PacmanInvalidParameterException(
                "constraint", constraint,
                "Must be a " + _get_class_name(AbstractConstraint))

        try:
            self._constraints.add(constraint)
        except Exception:  # pylint: disable=broad-except
            self._constraints = OrderedSet()
            self._constraints.add(constraint)

    def add_constraints(self, constraints):
        """ Add an iterable of constraints to the collection of constraints

        :param iterable(AbstractConstraint) constraints: the constraints to add
        :raise PacmanInvalidParameterException:
            If one of the constraints is not valid
        """
        if constraints is not None:
            for next_constraint in constraints:
                self.add_constraint(next_constraint)

    @property
    def constraints(self):
        """ An iterable of constraints

        :rtype: iterable(AbstractConstraint)
        """
        try:
            return self._constraints
        except Exception:  # pylint: disable=broad-except
            return OrderedSet()
Example #27
class ApplicationVertex(AbstractVertex):
    """ A vertex that can be broken down into a number of smaller vertices\
        based on the resources that the vertex requires.
    """

    __slots__ = [
        # List of machine verts associated with this app vertex
        "_machine_vertices",

        # The splitter object associated with this app vertex
        "_splitter"
    ]

    SETTING_SPLITTER_ERROR_MSG = (
        "The splitter object on {} has already been set, it cannot be "
        "reset. Please fix and try again. ")

    def __init__(self,
                 label=None,
                 constraints=None,
                 max_atoms_per_core=sys.maxsize,
                 splitter=None):
        """
        :param str label: The optional name of the vertex.
        :param iterable(AbstractConstraint) constraints:
            The optional initial constraints of the vertex.
        :param int max_atoms_per_core: The max number of atoms that can be
            placed on a core, used in partitioning.
        :param splitter: The splitter object needed for this vertex.
            Leave as None to delegate the choice of splitter to the selector.
        :type splitter: None or
            ~pacman.model.partitioner_interfaces.AbstractSplitterPartitioner
        :raise PacmanInvalidParameterException:
            If one of the constraints is not valid
        """
        # Need to set to None temporarily as add_constraint checks splitter
        self._splitter = None
        super(ApplicationVertex, self).__init__(label, constraints)
        self._machine_vertices = OrderedSet()

        # Use setter as there is extra work to do
        self.splitter = splitter

        # add a constraint for max partitioning
        self.add_constraint(MaxVertexAtomsConstraint(max_atoms_per_core))

    def __str__(self):
        return self.label

    def __repr__(self):
        return "ApplicationVertex(label={}, constraints={}".format(
            self.label, self.constraints)

    @property
    def splitter(self):
        """
        :rtype:
            ~pacman.model.partitioner_interfaces.AbstractSplitterPartitioner
        """
        return self._splitter

    @splitter.setter
    def splitter(self, new_value):
        """ Sets the splitter object. Does not allow repeated settings.

        :param new_value: The new splitter object
        :type new_value:
            ~pacman.model.partitioner_interfaces.AbstractSplitterPartitioner
        :rtype: None
        """
        if self._splitter == new_value:
            return
        if self._splitter is not None:
            raise PacmanConfigurationException(
                self.SETTING_SPLITTER_ERROR_MSG.format(self._label))
        self._splitter = new_value
        self._splitter.set_governed_app_vertex(self)
        self._splitter.check_supported_constraints()

    @overrides(AbstractVertex.add_constraint)
    def add_constraint(self, constraint):
        AbstractVertex.add_constraint(self, constraint)
        if self._splitter is not None:
            self._splitter.check_supported_constraints()

    def remember_machine_vertex(self, machine_vertex):
        """
        Adds the machine vertex to the iterable returned by machine_vertices

        This method will be called by MachineVertex.app_vertex
        No other place should call it.

        :param MachineVertex machine_vertex: A pointer to a machine_vertex.
            This vertex may not be fully initialized but will have a slice
        :raises PacmanValueError: If the slice of the machine_vertex is too big
        """

        machine_vertex.index = len(self._machine_vertices)

        if machine_vertex in self._machine_vertices:
            raise PacmanAlreadyExistsException(str(machine_vertex),
                                               machine_vertex)
        self._machine_vertices.add(machine_vertex)

    @abstractproperty
    def n_atoms(self):
        """ The number of atoms in the vertex

        :rtype: int
        """

    def round_n_atoms(self, n_atoms, label="n_atoms"):
        """
        Utility function to allow super classes to make sure n_atoms is an int

        :param n_atoms: Value convertible to int to be used for n_atoms
        :type n_atoms: int or float or numpy.
        :return: the value as an int
        """
        if isinstance(n_atoms, int):
            return n_atoms
        # Allow a float which has a near int value
        temp = int(round(n_atoms))
        if abs(temp - n_atoms) < 0.001:
            if temp != n_atoms:
                logger.warning(
                    "Size of the {} rounded from {} to {}. "
                    "Please use int values for n_atoms", label, n_atoms, temp)
            return temp
        raise PacmanInvalidParameterException(
            label, n_atoms, "int value expected for {}".format(label))

    @property
    def machine_vertices(self):
        """ The machine vertices that this application vertex maps to.
            Will be the same length as :py:meth:`vertex_slices`.

        :rtype: iterable(MachineVertex)
        """
        return self._machine_vertices

    @property
    def vertex_slices(self):
        """ The slices of this vertex that each machine vertex manages.
            Will be the same length as :py:meth:`machine_vertices`.

        :rtype: iterable(Slice)
        """
        return list(map(lambda x: x.vertex_slice, self._machine_vertices))

    def get_max_atoms_per_core(self):
        """ Gets the maximum number of atoms per core, which is either the\
            number of atoms required across the whole application vertex,\
            or a lower value if a constraint lowers it.

        :rtype: int
        """
        for constraint in self.constraints:
            if isinstance(constraint, MaxVertexAtomsConstraint):
                return constraint.size

    def forget_machine_vertices(self):
        """ Arrange to forget all machine vertices that this application
            vertex maps to.
        """
        self._machine_vertices = OrderedSet()
        if self._splitter is not None:
            self._splitter.reset_called()
Example #28
class CoreSubset(object):
    """ Represents a subset of the cores on a SpiNNaker chip.
    """

    __slots__ = (
        "_x", "_y", "_processor_ids"
    )

    def __init__(self, x, y, processor_ids):
        """
        :param x: The x-coordinate of the chip
        :type x: int
        :param y: The y-coordinate of the chip
        :type y: int
        :param processor_ids: The processor IDs on the chip
        :type processor_ids: iterable(int)
        """
        self._x = x
        self._y = y
        self._processor_ids = OrderedSet()
        for processor_id in processor_ids:
            self.add_processor(processor_id)

    def add_processor(self, processor_id):
        """ Adds a processor ID to this subset

        :param processor_id: A processor ID
        :type processor_id: int
        :return: Nothing is returned
        :rtype: None
        """
        self._processor_ids.add(processor_id)

    def __contains__(self, processor_id):
        return processor_id in self._processor_ids

    @property
    def x(self):
        """ The x-coordinate of the chip

        :return: The x-coordinate
        :rtype: int
        """
        return self._x

    @property
    def y(self):
        """ The y-coordinate of the chip

        :return: The y-coordinate
        :rtype: int
        """
        return self._y

    @property
    def processor_ids(self):
        """ The subset of processor IDs on the chip

        :return: An iterable of processor IDs
        :rtype: iterable(int)
        """
        return iter(self._processor_ids)

    def __repr__(self):
        return "{}:{}:{}".format(self._x, self._y, self._processor_ids)

    def __eq__(self, other):
        if not isinstance(other, CoreSubset):
            return False
        return self.x == other.x and self._y == other.y and \
            self._processor_ids == other.processor_ids

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        processors = frozenset(self._processor_ids)
        return (self._x, self._y, processors).__hash__()

    def __len__(self):
        """ The number of processors in this core subset
        """
        return len(self._processor_ids)

    def intersect(self, other):
        """ Returns a new CoreSubset which is an intersect of this and the\
            other.

        :param other: A second CoreSubset with possibly overlapping cores
        :type other: :py:class:`spinn_machine.CoreSubset`
        :return: A new CoreSubset with any overlap
        :rtype: :py:class:`spinn_machine.CoreSubset`
        """
        result = CoreSubset(self._x, self._y, [])
        for processor_id in self._processor_ids:
            if processor_id in other._processor_ids:
                result.add_processor(processor_id)
        return result
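A quick usage illustration of the CoreSubset example above, relying only on the methods shown there:

subset_a = CoreSubset(0, 0, [1, 2, 3])
subset_b = CoreSubset(0, 0, [2, 3, 4])
overlap = subset_a.intersect(subset_b)

assert 2 in overlap and 3 in overlap and 1 not in overlap
assert len(overlap) == 2
assert list(overlap.processor_ids) == [2, 3]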
Example #29
def write_bitfield_init_data(spec, incoming_projections, vertex_slice,
                             routing_info, bit_field_builder_region,
                             master_pop_region_id, synaptic_matrix_region_id,
                             direct_matrix_region_id, bit_field_region_id,
                             bit_field_key_map_region_id,
                             structural_dynamics_region_id,
                             has_structural_dynamics_region):
    """ writes the init data needed for the bitfield generator

    :param ~data_specification.DataSpecificationGenerator spec:
        data spec writer
    :param list(~spynnaker.pyNN.models.Projection) incoming_projections:
        The projections to generate bitfields for
    :param ~pacman.model.graphs.common.slice vertex_slice:
        The slice of the target vertex
    :param ~pacman.model.routing_info.RoutingInfo routing_info: keys
    :param int bit_field_builder_region: the region id for the bitfield builder
    :param int master_pop_region_id: the region id for the master pop table
    :param int synaptic_matrix_region_id: the region id for the synaptic matrix
    :param int direct_matrix_region_id: the region id for the direct matrix
    :param int bit_field_region_id: the region id for the bit-fields
    :param int bit_field_key_map_region_id: the region id for the key map
    :param int structural_dynamics_region_id:
        the region id for the structural dynamics data
    :param bool has_structural_dynamics_region:
        whether the core has a structural_dynamics region
    """
    spec.switch_write_focus(bit_field_builder_region)

    spec.write_value(master_pop_region_id)
    spec.write_value(synaptic_matrix_region_id)
    spec.write_value(direct_matrix_region_id)
    spec.write_value(bit_field_region_id)
    spec.write_value(bit_field_key_map_region_id)

    # when there is no structural dynamics region, write a full mask as a
    # flag value (rather than a separate flag word) to save 4 bytes
    if not has_structural_dynamics_region:
        spec.write_value(FULL_MASK)
    else:
        spec.write_value(structural_dynamics_region_id)

    spec.switch_write_focus(bit_field_key_map_region_id)

    # Gather the source vertices that target this core
    sources = OrderedSet()
    seen_app_edges = set()
    for proj in incoming_projections:
        in_edge = proj._projection_edge
        if in_edge not in seen_app_edges:
            seen_app_edges.add(in_edge)
            for machine_edge in in_edge.machine_edges:
                if machine_edge.post_vertex.vertex_slice == vertex_slice:
                    sources.add(machine_edge.pre_vertex)

    # write n keys max atom map
    spec.write_value(len(sources))

    # load in key to max atoms map
    for source_vertex in sources:
        spec.write_value(
            routing_info.get_first_key_from_pre_vertex(source_vertex,
                                                       SPIKE_PARTITION_ID))
        spec.write_value(source_vertex.vertex_slice.n_atoms)

    # ensure if nothing else that n bitfields in bitfield region set to 0
    spec.switch_write_focus(bit_field_region_id)
    spec.write_value(0)
Example #30
class MachineGraph(Graph):
    """ A graph whose vertices can fit on the chips of a machine.
    """

    __slots__ = [
        # Flags to say the application level is used so all machine vertices
        # will have an application vertex
        "_application_level_used",
        # Ordered set of partitions
        "_edge_partitions",
        # A double dictionary of MULTICAST edges by their
        # application id and then their (partition name)
        "_multicast_partitions",
        # The sets of multicast edge partitions by pre-vertex
        "_multicast_edge_partitions_by_pre_vertex",
        # The sets of fixed-route edge partitions by pre-vertex
        "_fixed_route_edge_partitions_by_pre_vertex",
        # The SDRAM outgoing edge partitions by pre-vertex
        "_sdram_edge_partitions_by_pre_vertex",
        # The sets of multicast edge partitions by post-vertex
        "_multicast_edge_partitions_by_post_vertex",
        # The sets of fixed-route edge partitions by post-vertex
        "_fixed_route_edge_partitions_by_post_vertex",
        # The SDRAM outgoing edge partitions by post-vertex
        "_sdram_edge_partitions_by_post_vertex",
    ]

    MISSING_APP_VERTEX_ERROR_MESSAGE = (
        "The vertex does not have an app_vertex, "
        "which is required when other app_vertices exist.")

    UNEXPECTED_APP_VERTEX_ERROR_MESSAGE = (
        "The vertex has an app_vertex, "
        "which is not allowed when other vertices not have app_vertices.")

    def __init__(self, label, application_graph=None):
        """
        :param label: The label for the graph.
        :type label: str or None
        :param application_graph:
            The application graph that this machine graph is derived from, if
            it is derived from one at all.
        :type application_graph: ApplicationGraph or None
        """
        super(MachineGraph, self).__init__(MachineVertex, MachineEdge, label)
        if application_graph:
            application_graph.forget_machine_graph()
            # Check the first vertex added
            self._application_level_used = True
        else:
            # Must be False as there is no application graph
            self._application_level_used = False
        self._multicast_partitions = DefaultOrderedDict(
            lambda: DefaultOrderedDict(set))
        self._edge_partitions = OrderedSet()
        self._fixed_route_edge_partitions_by_pre_vertex = (
            DefaultOrderedDict(OrderedSet))
        self._multicast_edge_partitions_by_pre_vertex = (
            DefaultOrderedDict(OrderedSet))
        self._sdram_edge_partitions_by_pre_vertex = (
            DefaultOrderedDict(OrderedSet))
        self._fixed_route_edge_partitions_by_post_vertex = (
            DefaultOrderedDict(OrderedSet))
        self._multicast_edge_partitions_by_post_vertex = (
            DefaultOrderedDict(OrderedSet))
        self._sdram_edge_partitions_by_post_vertex = (
            DefaultOrderedDict(OrderedSet))

    @overrides(Graph.add_edge)
    def add_edge(self, edge, outgoing_edge_partition_name):
        edge_partition = super(MachineGraph,
                               self).add_edge(edge,
                                              outgoing_edge_partition_name)
        if (isinstance(edge_partition, MulticastEdgePartition)):
            if edge.pre_vertex.app_vertex:
                by_app = self._multicast_partitions[edge.pre_vertex.app_vertex]
            else:
                by_app = self._multicast_partitions[edge.pre_vertex]
            by_partition = by_app[outgoing_edge_partition_name]
            by_partition.add(edge.pre_vertex)
            self._multicast_edge_partitions_by_post_vertex[
                edge.post_vertex].add(edge_partition)
        elif isinstance(edge_partition, FixedRouteEdgePartition):
            self._fixed_route_edge_partitions_by_post_vertex[
                edge.post_vertex].add(edge_partition)
        elif isinstance(edge_partition, AbstractSDRAMPartition):
            self._sdram_edge_partitions_by_post_vertex[edge.post_vertex].add(
                edge_partition)
        else:
            raise NotImplementedError(
                "Unexpected edge_partition: {}".format(edge_partition))
        return edge_partition

    @property
    def multicast_partitions(self):
        """
        Returns a double dictionary of app id, then
        outgoing_edge_partition_name, to the set of machine vertices that act
        as pre-vertices for these multicast edges.

        The app_id is normally the (machine) edge.pre_vertex.app_vertex,
        which groups the edges that come from the same app_vertex.
        If the (machine) edge.pre_vertex has no app_vertex, the app_id is the
        machine vertex itself, which then forms its own group of 1.

        :rtype: dict(ApplicationVertex, dict(str, set(MachineVertex)))
        """
        return self._multicast_partitions

    @overrides(Graph.add_vertex)
    def add_vertex(self, vertex):
        super(MachineGraph, self).add_vertex(vertex)
        if self._application_level_used:
            try:
                vertex.app_vertex.remember_machine_vertex(vertex)
            except AttributeError:
                if self.n_vertices == 1:
                    self._application_level_used = False
                else:
                    raise PacmanInvalidParameterException(
                        "vertex", str(vertex),
                        self.MISSING_APP_VERTEX_ERROR_MESSAGE)
        elif vertex.app_vertex:
            raise PacmanInvalidParameterException(
                "vertex", vertex, self.UNEXPECTED_APP_VERTEX_ERROR_MESSAGE)

    @overrides(Graph.add_outgoing_edge_partition)
    def add_outgoing_edge_partition(self, edge_partition):
        # verify that this partition is suitable for this graph
        if not isinstance(edge_partition, AbstractMachineEdgePartition):
            raise PacmanInvalidParameterException(
                "outgoing_edge_partition", str(edge_partition.__class__),
                "Partitions of this graph must be an "
                "AbstractMachineEdgePartition")

        # check this partition doesn't already exist
        if edge_partition in self._edge_partitions:
            raise PacmanAlreadyExistsException(
                str(AbstractMachineEdgePartition), edge_partition)

        self._edge_partitions.add(edge_partition)
        edge_partition.register_graph_code(id(self))

        for pre_vertex in edge_partition.pre_vertices:
            key = (pre_vertex, edge_partition.identifier)
            self._outgoing_edge_partitions_by_name[key] = edge_partition
            if isinstance(edge_partition, MulticastEdgePartition):
                self._multicast_edge_partitions_by_pre_vertex[pre_vertex].add(
                    edge_partition)
            elif isinstance(edge_partition, FixedRouteEdgePartition):
                self._fixed_route_edge_partitions_by_pre_vertex[
                    pre_vertex].add(edge_partition)
            elif isinstance(edge_partition, AbstractSDRAMPartition):
                self._sdram_edge_partitions_by_pre_vertex[pre_vertex].add(
                    edge_partition)
            else:
                raise NotImplementedError(
                    "Unexpected edge_partition: {}".format(edge_partition))
        for edge in edge_partition.edges:
            self._register_edge(edge, edge_partition)

    @overrides(Graph.new_edge_partition)
    def new_edge_partition(self, name, edge):
        if edge.traffic_type == EdgeTrafficType.FIXED_ROUTE:
            return FixedRouteEdgePartition(identifier=name,
                                           pre_vertex=edge.pre_vertex)
        elif edge.traffic_type == EdgeTrafficType.MULTICAST:
            return MulticastEdgePartition(identifier=name,
                                          pre_vertex=edge.pre_vertex)
        else:
            raise PacmanInvalidParameterException(
                "edge", edge,
                "Unable to add an Edge with traffic type {} unless you first "
                "add a partition for it".format(edge.traffic_type))

    @property
    @overrides(Graph.outgoing_edge_partitions)
    def outgoing_edge_partitions(self):
        return self._edge_partitions

    @property
    @overrides(Graph.n_outgoing_edge_partitions)
    def n_outgoing_edge_partitions(self):
        return len(self._edge_partitions)

    def get_fixed_route_edge_partitions_starting_at_vertex(self, vertex):
        """ Get only the fixed_route edge partitions that start at the vertex.

        :param AbstractVertex vertex:\
            The vertex at which the edge partitions to find start
        :rtype: iterable(FixedRouteEdgePartition)
        """
        return self._fixed_route_edge_partitions_by_pre_vertex.get(vertex, [])

    def get_multicast_edge_partitions_starting_at_vertex(self, vertex):
        """ Get only the multicast edge partitions that start at the vertex.

        :param AbstractVertex vertex:\
            The vertex at which the edge partitions to find start
        :rtype: iterable(MulticastEdgePartition)
        """
        return self._multicast_edge_partitions_by_pre_vertex.get(vertex, [])

    def get_sdram_edge_partitions_starting_at_vertex(self, vertex):
        """ Get all the sdram edge partitions that start at the given vertex.

        :param AbstractVertex vertex:\
            The vertex at which the sdram edge partitions to find start
        :rtype: iterable(AbstractSDRAMPartition)
        """
        return self._sdram_edge_partitions_by_pre_vertex.get(vertex, [])

    @overrides(Graph.get_outgoing_edge_partitions_starting_at_vertex)
    def get_outgoing_edge_partitions_starting_at_vertex(self, vertex):
        for partition in self.\
                get_fixed_route_edge_partitions_starting_at_vertex(vertex):
            yield partition
        for partition in \
                self.get_multicast_edge_partitions_starting_at_vertex(vertex):
            yield partition
        for partition in \
                self.get_sdram_edge_partitions_starting_at_vertex(vertex):
            yield partition

    def get_fixed_route_edge_partitions_ending_at_vertex(self, vertex):
        """ Get only the fixed_route edge partitions that end at the vertex.

        :param AbstractVertex vertex:\
            The vertex at which the edge partitions to find end
        :rtype: iterable(FixedRouteEdgePartition)
        """
        return self._fixed_route_edge_partitions_by_post_vertex.get(vertex, [])

    def get_multicast_edge_partitions_ending_at_vertex(self, vertex):
        """ Get only the multicast edge partitions that end at the vertex.

        :param AbstractVertex vertex:\
            The vertex at which the edge partitions to find end
        :rtype: iterable(MulticastEdgePartition)
        """
        return self._multicast_edge_partitions_by_post_vertex.get(vertex, [])

    def get_sdram_edge_partitions_ending_at_vertex(self, vertex):
        """ Get all the sdram edge partitions that end at the given vertex.

        :param AbstractVertex vertex:\
            The vertex at which the sdram edge partitions to find end
        :rtype: iterable(AbstractSDRAMPartition)
        """
        return self._sdram_edge_partitions_by_post_vertex.get(vertex, [])

    def get_edge_partitions_ending_at_vertex(self, vertex):
        """ Get all the edge partitions that end at the given vertex.

        :param AbstractVertex vertex:\
            The vertex at which the edge partitions to find end
        :rtype: iterable(AbstractPartition)
        """
        for partition in \
                self.get_fixed_route_edge_partitions_ending_at_vertex(vertex):
            yield partition
        for partition in \
                self.get_multicast_edge_partitions_ending_at_vertex(vertex):
            yield partition
        for partition in \
                self.get_sdram_edge_partitions_ending_at_vertex(vertex):
            yield partition

    def clone(self, frozen=False):
        """
        Makes a copy of this graph that is as shallow as possible.

        Vertices and edges are carried over; partitions will be new objects.

        :param bool frozen: If True, the returned graph is frozen (read-only)
            once the copy is complete
        :return: A shallow copy of this graph
        :rtype: MachineGraph
        """
        if frozen:
            new_graph = _FrozenMachineGraph(self.label)
        else:
            new_graph = MachineGraph(self.label)
        for vertex in self.vertices:
            new_graph.add_vertex(vertex)
        for outgoing_partition in \
                self.outgoing_edge_partitions:
            new_outgoing_partition = outgoing_partition.clone_without_edges()
            new_graph.add_outgoing_edge_partition(new_outgoing_partition)
            for edge in outgoing_partition.edges:
                new_graph.add_edge(edge, outgoing_partition.identifier)
        if frozen:
            new_graph.freeze()
        return new_graph
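MachineGraph.__init__ above indexes edge partitions per pre- and post-vertex using DefaultOrderedDict(OrderedSet), so lookups such as get_multicast_edge_partitions_starting_at_vertex return a deduplicated, insertion-ordered collection. A rough sketch of that grouping pattern, with collections.defaultdict standing in for DefaultOrderedDict (an assumption; the vertex and partition names are made up):

from collections import defaultdict                  # stand-in for DefaultOrderedDict
from spinn_utilities.ordered_set import OrderedSet   # assumed import path

partitions_by_pre_vertex = defaultdict(OrderedSet)

# Made-up (pre_vertex, partition identifier) pairs, with a repeat.
for pre_vertex, identifier in [
        ("v0", "SPIKES"), ("v0", "SPIKES"), ("v0", "CONTROL"), ("v1", "SPIKES")]:
    partitions_by_pre_vertex[pre_vertex].add(identifier)

assert list(partitions_by_pre_vertex["v0"]) == ["SPIKES", "CONTROL"]
assert list(partitions_by_pre_vertex.get("v2", [])) == []  # same .get(..., []) idiom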
Example #31
0
def validate_routes(machine_graph, placements, routing_infos, routing_tables,
                    machine):
    """ Go though the placements given and check that the routing entries\
        within the routing tables support reach the correction destinations\
        as well as not producing any cycles.

    :param MachineGraph machine_graph: the graph
    :param Placements placements: the placements container
    :param RoutingInfo routing_infos: the routing info container
    :param MulticastRoutingTables routing_tables:
        the routing tables generated by the routing algorithm
    :param ~spinn_machine.Machine machine: the python machine object
    :raises PacmanRoutingException: when either no routing table entry is
        found by the search on a given router, or a cycle is detected
    """
    def traffic_multicast(edge):
        return edge.traffic_type == EdgeTrafficType.MULTICAST

    progress = ProgressBar(
        placements.placements,
        "Verifying the routes from each core travel to the correct locations")
    for placement in progress.over(placements.placements):

        # locate all placements to which this placement/vertex will
        # communicate with for a given key_and_mask and search its
        # determined destinations

        # gather keys and masks per partition
        partitions = machine_graph.\
            get_multicast_edge_partitions_starting_at_vertex(placement.vertex)

        n_atoms = placement.vertex.vertex_slice.n_atoms

        for partition in partitions:
            r_info = routing_infos.get_routing_info_from_partition(partition)
            is_continuous = _check_if_partition_has_continuous_keys(partition)
            if not is_continuous:
                logger.warning(
                    "Due to the none continuous nature of the keys in this "
                    "partition {}, we cannot check all atoms will be routed "
                    "correctly, but will check the base key instead",
                    partition)

            destination_placements = OrderedSet()

            # filter for just multicast edges, we don't check other types of
            # edges here.
            out_going_edges = filter(traffic_multicast, partition.edges)

            # for every outgoing edge, locate its destination and store it.
            for outgoing_edge in out_going_edges:
                dest_placement = placements.get_placement_of_vertex(
                    outgoing_edge.post_vertex)
                destination_placements.add(
                    PlacementTuple(x=dest_placement.x,
                                   y=dest_placement.y,
                                   p=dest_placement.p))

            # search for these destinations
            for key_and_mask in r_info.keys_and_masks:
                _search_route(placement, destination_placements, key_and_mask,
                              routing_tables, machine, n_atoms, is_continuous)
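validate_routes above collects the destination placements in an OrderedSet, so each target core is searched exactly once, in the order it was first reached. A small sketch of that deduplication, with a namedtuple standing in for PlacementTuple (an assumption) and made-up coordinates:

from collections import namedtuple
from spinn_utilities.ordered_set import OrderedSet   # assumed import path

Place = namedtuple("Place", ["x", "y", "p"])          # stand-in for PlacementTuple

destinations = OrderedSet()
for x, y, p in [(0, 0, 1), (0, 1, 2), (0, 0, 1)]:     # last core is a repeat
    destinations.add(Place(x=x, y=y, p=p))

# Each (x, y, p) is searched once, in the order it was first reached.
assert list(destinations) == [Place(0, 0, 1), Place(0, 1, 2)]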
Example #32
0
class ExecutableFinder(object):
    """ Manages a set of folders in which to search for binaries,\
        and allows for binaries to be discovered within these paths
    """
    __slots__ = [
        "_binary_search_paths",
        "_binary_log",
        "_paths_log"]

    def __init__(self, binary_search_paths):
        """
        :param iterable(str) binary_search_paths:
            The initial set of folders to search for binaries.
        """
        binary_logs_path = os.environ.get("BINARY_LOGS_DIR", None)
        if binary_logs_path:
            self._paths_log = os.path.join(
                binary_logs_path, "binary_paths_used.log")
            self._binary_log = os.path.join(
                binary_logs_path, "binary_files_used.log")
        else:
            self._paths_log = None
            self._binary_log = None

        self._binary_search_paths = OrderedSet()
        for path in binary_search_paths:
            self.add_path(path)

    def add_path(self, path):
        """ Adds a path to the set of folders to be searched.  The path is\
            added to the end of the list, so it is searched after all the\
            paths currently in the list.

        :param str path: The path to add
        """
        self._binary_search_paths.add(path)
        if self._paths_log:
            try:
                with open(self._paths_log, "a") as log_file:
                    log_file.write(path)
                    log_file.write("\n")
            except Exception:  # pylint: disable=broad-except
                pass

    @property
    def binary_paths(self):
        """ The set of folders to search for binaries, as a printable\
            colon-separated string.

        :rtype: str
        """
        return " : ".join(self._binary_search_paths)

    def get_executable_path(self, executable_name):
        """ Finds an executable within the set of folders. The set of folders\
            is searched sequentially and the first match is returned.

        :param str executable_name: The name of the executable to find
        :return: The full path of the discovered executable
        :rtype: str
        :raises KeyError: If no executable was found in the set of folders
        """
        # Loop through search paths
        for path in self._binary_search_paths:
            # Rebuild filename
            potential_filename = os.path.join(path, executable_name)

            # If this filename exists, return it
            if os.path.isfile(potential_filename):
                if self._binary_log:
                    try:
                        with open(self._binary_log, "a") as log_file:
                            log_file.write(potential_filename)
                            log_file.write("\n")
                    except Exception:  # pylint: disable=broad-except
                        pass
                return potential_filename

        # No executable found
        raise KeyError("Executable {} not found in path".format(
            executable_name))

    def get_executable_paths(self, executable_names):
        """ Finds each executables within the set of folders.

        The names are assumed to be comma separated
        The set of folders is searched sequentially
        and the first match for each name is returned.

        Names not found are ignored and not added to the list.

        :param str executable_names: The name of the executable to find.
            Assumed to be comma separated.
        :return:
            The full path of the discovered executable, or ``None`` if no
            executable was found in the set of folders
        :rtype: list(str)
        """
        results = list()
        for name in executable_names.split(","):
            try:
                results.append(self.get_executable_path(name))
            except KeyError:
                pass
        return results

    def check_logs(self):
        if not self._paths_log:
            print("environ BINARY_LOGS_DIR not set!")
            return

        folders = set()
        with open(self._paths_log, "r") as log_file:
            for line in log_file:
                folders.add(line.strip())

        in_folders = set()
        for folder in folders:
            try:
                for file_name in os.listdir(folder):
                    if file_name.endswith(".aplx"):
                        in_folders.add(os.path.join(folder, file_name))
            except Exception:  # pylint: disable=broad-except
                # Skip folders not found
                pass

        used_binaries = set()
        with open(self._binary_log, "r") as log_file:
            for line in log_file:
                used_binaries.add(line.strip())

        missing = in_folders - used_binaries
        print("{} binaries asked for. {} binaries never asked for.".format(
            len(used_binaries), len(missing)))
        if len(missing) > 0:
            print("Binaries asked for are:")
            for binary in (used_binaries):
                print(binary)
            print("Binaries never asked for are:")
            for binary in (missing):
                print(binary)

    def clear_logs(self):
        if not self._paths_log:
            print("environ BINARY_LOGS_DIR not set!")
            return
        if os.path.isfile(self._paths_log):
            os.remove(self._paths_log)
        if os.path.isfile(self._binary_log):
            os.remove(self._binary_log)
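ExecutableFinder keeps its search paths in an OrderedSet, so re-adding a folder neither duplicates it nor changes the search order, and get_executable_path still returns the first match. A toy sketch of that ordering guarantee (the folder names are illustrative and the OrderedSet import path is an assumption):

from spinn_utilities.ordered_set import OrderedSet   # assumed import path

search_paths = OrderedSet()
for path in ["/opt/binaries", "/usr/local/binaries", "/opt/binaries"]:
    search_paths.add(path)        # re-adding neither duplicates nor reorders

assert " : ".join(search_paths) == "/opt/binaries : /usr/local/binaries"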
Example #33
0
class OutgoingEdgePartition(ConstrainedObject, AbstractOutgoingEdgePartition):
    """ A collection of edges which start at a single vertex which have the
        same semantics and so can share a single key.
    """

    __slots__ = [
        # The partition identifier
        "_identifier",
        # The edges in the partition
        "_edges",
        # The vertex at the start of all the edges
        "_pre_vertex",
        # The traffic type of all the edges
        "_traffic_type",
        # The type of edges to accept
        "_allowed_edge_types",
        # The weight of traffic going down this partition
        "_traffic_weight",
        # The label of the graph
        "_label"
    ]

    def __init__(self,
                 identifier,
                 allowed_edge_types,
                 constraints=None,
                 label=None,
                 traffic_weight=1):
        """
        :param identifier: The identifier of the partition
        :param allowed_edge_types: The types of edges allowed
        :param constraints: Any initial constraints
        :param label: An optional label of the partition
        :param traffic_weight: The weight of traffic going down this partition
        """
        super(OutgoingEdgePartition, self).__init__(constraints)
        self._label = label
        self._identifier = identifier
        self._edges = OrderedSet()
        self._allowed_edge_types = allowed_edge_types
        self._pre_vertex = None
        self._traffic_type = None
        self._traffic_weight = traffic_weight

    @property
    @overrides(AbstractOutgoingEdgePartition.label)
    def label(self):
        return self._label

    @overrides(AbstractOutgoingEdgePartition.add_edge)
    def add_edge(self, edge):
        # Check for an incompatible edge
        if not isinstance(edge, self._allowed_edge_types):
            raise PacmanInvalidParameterException(
                "edge", edge.__class__,
                "Edges of this graph must be one of the following types:"
                " {}".format(self._allowed_edge_types))

        # Check for an incompatible pre vertex
        if self._pre_vertex is None:
            self._pre_vertex = edge.pre_vertex

        elif edge.pre_vertex != self._pre_vertex:
            raise PacmanConfigurationException(
                "A partition can only contain edges with the same"
                "pre_vertex")

        # Check for an incompatible traffic type
        if self._traffic_type is None:
            self._traffic_type = edge.traffic_type
        elif edge.traffic_type != self._traffic_type:
            raise PacmanConfigurationException(
                "A partition can only contain edges with the same"
                " traffic_type")

        self._edges.add(edge)

    @property
    @overrides(AbstractOutgoingEdgePartition.identifier)
    def identifier(self):
        return self._identifier

    @property
    @overrides(AbstractOutgoingEdgePartition.edges)
    def edges(self):
        return self._edges

    @property
    @overrides(AbstractOutgoingEdgePartition.n_edges)
    def n_edges(self):
        return len(self._edges)

    @property
    @overrides(AbstractOutgoingEdgePartition.pre_vertex)
    def pre_vertex(self):
        return self._pre_vertex

    @property
    @overrides(AbstractOutgoingEdgePartition.traffic_type)
    def traffic_type(self):
        return self._traffic_type

    @property
    @overrides(AbstractOutgoingEdgePartition.traffic_weight)
    def traffic_weight(self):
        return self._traffic_weight

    def __repr__(self):
        edges = ""
        for edge in self._edges:
            if edge.label is not None:
                edges += edge.label + ","
            else:
                edges += str(edge) + ","
        return _REPR_TEMPLATE.format(self._identifier, edges, self.constraints,
                                     self.label)

    def __str__(self):
        return self.__repr__()

    @overrides(AbstractOutgoingEdgePartition.__contains__)
    def __contains__(self, edge):
        """ Check if the edge is contained within this partition

        :param edge: the edge to search for.
        :return: True if the edge is in this partition, False otherwise
        """
        return edge in self._edges
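OutgoingEdgePartition stores its edges in an OrderedSet, which is what gives add_edge its idempotence, __contains__ its cheap membership test, and __repr__ its stable edge order. A toy sketch of those behaviours using plain edge labels rather than real edge objects (the OrderedSet import path is an assumption):

from spinn_utilities.ordered_set import OrderedSet   # assumed import path

edges = OrderedSet()
for label in ["e0", "e1", "e0"]:    # toy edge labels, not real edge objects
    edges.add(label)                # repeated add is a no-op

assert "e1" in edges                # the membership test __contains__ delegates to
assert list(edges) == ["e0", "e1"]  # insertion order drives the repr text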
Example #35
0
class Graph(ConstrainedObject, AbstractGraph):
    """ A graph implementation that specifies the allowed types of the\
        vertices and edges.
    """

    __slots__ = [
        # The classes of vertex that are allowed in this graph
        "_allowed_vertex_types",
        # The classes of edges that are allowed in this graph
        "_allowed_edge_types",
        # The classes of outgoing edge partition that are allowed in this
        # graph
        "_allowed_partition_types",
        # The vertices of the graph
        "_vertices",
        # The outgoing edge partitions of the graph by name
        "_outgoing_edge_partitions_by_name",
        # The outgoing edges by pre-vertex
        "_outgoing_edges",
        # The incoming edges by post-vertex
        "_incoming_edges",
        # map between incoming edges and their associated partitions
        "_incoming_edges_by_partition_name",
        # The outgoing edge partitions by pre-vertex
        "_outgoing_edge_partitions_by_pre_vertex",
        # The label of the graph
        "_label"]

    def __init__(self, allowed_vertex_types, allowed_edge_types,
                 allowed_partition_types, label):
        """
        :param allowed_vertex_types:\
            A single or tuple of types of vertex to be allowed in the graph
        :param allowed_edge_types:\
            A single or tuple of types of edges to be allowed in the graph
        :param allowed_partition_types:\
            A single or tuple of types of partitions to be allowed in the graph
        :param label: The label on the graph, or None
        """
        super(Graph, self).__init__(None)
        self._allowed_vertex_types = allowed_vertex_types
        self._allowed_edge_types = allowed_edge_types
        self._allowed_partition_types = allowed_partition_types

        self._vertices = OrderedSet()
        self._outgoing_edge_partitions_by_name = OrderedDict()
        self._outgoing_edges = defaultdict(OrderedSet)
        self._incoming_edges = defaultdict(OrderedSet)
        self._incoming_edges_by_partition_name = defaultdict(list)
        self._outgoing_edge_partitions_by_pre_vertex = defaultdict(OrderedSet)
        self._label = label

    @property
    @overrides(AbstractGraph.label)
    def label(self):
        return self._label

    @overrides(AbstractGraph.add_vertex)
    def add_vertex(self, vertex):
        if not isinstance(vertex, self._allowed_vertex_types):
            raise PacmanInvalidParameterException(
                "vertex", vertex.__class__,
                "Vertices of this graph must be one of the following types:"
                " {}".format(self._allowed_vertex_types))
        self._vertices.add(vertex)

    @overrides(AbstractGraph.add_edge)
    def add_edge(self, edge, outgoing_edge_partition_name):
        # verify that the edge is one suitable for this graph
        if not isinstance(edge, self._allowed_edge_types):
            raise PacmanInvalidParameterException(
                "edge", edge.__class__,
                "Edges of this graph must be one of the following types:"
                " {}".format(self._allowed_edge_types))

        if edge.pre_vertex not in self._vertices:
            raise PacmanInvalidParameterException(
                "edge", edge.pre_vertex, "pre-vertex must be known in graph")
        if edge.post_vertex not in self._vertices:
            raise PacmanInvalidParameterException(
                "edge", edge.post_vertex, "post-vertex must be known in graph")

        # Add the edge to the partition
        partition = None
        if ((edge.pre_vertex, outgoing_edge_partition_name) not in
                self._outgoing_edge_partitions_by_name):
            partition = OutgoingEdgePartition(
                outgoing_edge_partition_name, self._allowed_edge_types)
            self._outgoing_edge_partitions_by_pre_vertex[
                edge.pre_vertex].add(partition)
            self._outgoing_edge_partitions_by_name[
                edge.pre_vertex, outgoing_edge_partition_name] = partition
        else:
            partition = self._outgoing_edge_partitions_by_name[
                edge.pre_vertex, outgoing_edge_partition_name]
        partition.add_edge(edge)

        # Add the edge to the indices
        self._outgoing_edges[edge.pre_vertex].add(edge)
        self._incoming_edges_by_partition_name[
            (edge.post_vertex, outgoing_edge_partition_name)].append(edge)
        self._incoming_edges[edge.post_vertex].add(edge)

    @overrides(AbstractGraph.add_outgoing_edge_partition)
    def add_outgoing_edge_partition(self, outgoing_edge_partition):

        # verify that this partition is suitable for this graph
        if not isinstance(
                outgoing_edge_partition, self._allowed_partition_types):
            raise PacmanInvalidParameterException(
                "outgoing_edge_partition", outgoing_edge_partition.__class__,
                "Partitions of this graph must be one of the following types:"
                " {}".format(self._allowed_partition_types))

        # check this partition doesn't already exist
        if ((outgoing_edge_partition.pre_vertex,
                outgoing_edge_partition.identifier) in
                self._outgoing_edge_partitions_by_name):
            raise PacmanAlreadyExistsException(
                "{}".format(OutgoingEdgePartition.__class__),
                (outgoing_edge_partition.pre_vertex,
                 outgoing_edge_partition.identifier))

        self._outgoing_edge_partitions_by_pre_vertex[
            outgoing_edge_partition.pre_vertex].add(outgoing_edge_partition)
        self._outgoing_edge_partitions_by_name[
            outgoing_edge_partition.pre_vertex,
            outgoing_edge_partition.identifier] = outgoing_edge_partition

    @property
    @overrides(AbstractGraph.vertices)
    def vertices(self):
        return self._vertices

    @property
    @overrides(AbstractGraph.n_vertices)
    def n_vertices(self):
        return len(self._vertices)

    @property
    @overrides(AbstractGraph.edges)
    def edges(self):
        return [
            edge
            for partition in self._outgoing_edge_partitions_by_name.values()
            for edge in partition.edges]

    @property
    @overrides(AbstractGraph.outgoing_edge_partitions)
    def outgoing_edge_partitions(self):
        return self._outgoing_edge_partitions_by_name.values()

    @property
    @overrides(AbstractGraph.n_outgoing_edge_partitions)
    def n_outgoing_edge_partitions(self):
        return len(self._outgoing_edge_partitions_by_name)

    @overrides(AbstractGraph.get_edges_starting_at_vertex)
    def get_edges_starting_at_vertex(self, vertex):
        return self._outgoing_edges[vertex]

    @overrides(AbstractGraph.get_edges_ending_at_vertex)
    def get_edges_ending_at_vertex(self, vertex):
        if vertex not in self._incoming_edges:
            return []
        return self._incoming_edges[vertex]

    @overrides(AbstractGraph.get_edges_ending_at_vertex_with_partition_name)
    def get_edges_ending_at_vertex_with_partition_name(
            self, vertex, partition_name):
        key = (vertex, partition_name)
        if key not in self._incoming_edges_by_partition_name:
            return []
        return self._incoming_edges_by_partition_name[key]

    @overrides(AbstractGraph.get_outgoing_edge_partitions_starting_at_vertex)
    def get_outgoing_edge_partitions_starting_at_vertex(self, vertex):
        return self._outgoing_edge_partitions_by_pre_vertex[vertex]

    @overrides(AbstractGraph.get_outgoing_edge_partition_starting_at_vertex)
    def get_outgoing_edge_partition_starting_at_vertex(
            self, vertex, outgoing_edge_partition_name):
        return self._outgoing_edge_partitions_by_name.get(
            (vertex, outgoing_edge_partition_name), None)
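Graph.__init__ above builds its edge indices as defaultdict(OrderedSet), so get_edges_starting_at_vertex and get_edges_ending_at_vertex return deduplicated edges in the order they were added, and an unknown vertex simply yields nothing. A small sketch of that indexing with made-up vertex and edge names (the OrderedSet import path is an assumption):

from collections import defaultdict
from spinn_utilities.ordered_set import OrderedSet   # assumed import path

outgoing_edges = defaultdict(OrderedSet)   # edges by pre-vertex
incoming_edges = defaultdict(OrderedSet)   # edges by post-vertex

for pre, post, edge in [("a", "b", "a->b"), ("a", "c", "a->c"), ("a", "b", "a->b")]:
    outgoing_edges[pre].add(edge)
    incoming_edges[post].add(edge)

assert list(outgoing_edges["a"]) == ["a->b", "a->c"]  # duplicate edge collapsed
assert list(incoming_edges.get("d", [])) == []        # unknown vertex: nothing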