Example #1
    def __init__(self,
                 label=None,
                 constraints=None,
                 max_atoms_per_core=sys.maxsize,
                 splitter=None):
        """
        :param str label: The optional name of the vertex.
        :param iterable(AbstractConstraint) constraints:
            The optional initial constraints of the vertex.
        :param int max_atoms_per_core: The max number of atoms that can be
            placed on a core, used in partitioning.
        :param splitter: The splitter object needed for this vertex.
            Leave as None to delegate the choice of splitter to the selector.
        :type splitter: None or
            ~pacman.model.partitioner_interfaces.AbstractSplitterPartitioner
        :raise PacmanInvalidParameterException:
            If one of the constraints is not valid
        """
        # Need to set to None temporarily as add_constraint checks splitter
        self._splitter = None
        super(ApplicationVertex, self).__init__(label, constraints)
        self._machine_vertices = OrderedSet()

        # Use setter as there is extra work to do
        self.splitter = splitter

        # add a constraint for max partitioning
        self.add_constraint(MaxVertexAtomsConstraint(max_atoms_per_core))
Example #2
 def __init__(self, label, application_graph=None):
     """
     :param label: The label for the graph.
     :type label: str or None
     :param application_graph:
         The application graph that this machine graph is derived from, if
         it is derived from one at all.
     :type application_graph: ApplicationGraph or None
     """
     super(MachineGraph, self).__init__(MachineVertex, MachineEdge, label)
     if application_graph:
         application_graph.forget_machine_graph()
         # Check the first vertex added
         self._application_level_used = True
     else:
          # Must be False as there is no application graph
         self._application_level_used = False
     self._multicast_partitions = DefaultOrderedDict(
         lambda: DefaultOrderedDict(set))
     self._edge_partitions = OrderedSet()
     self._fixed_route_edge_partitions_by_pre_vertex = (
         DefaultOrderedDict(OrderedSet))
     self._multicast_edge_partitions_by_pre_vertex = (
         DefaultOrderedDict(OrderedSet))
     self._sdram_edge_partitions_by_pre_vertex = (
         DefaultOrderedDict(OrderedSet))
     self._fixed_route_edge_partitions_by_post_vertex = (
         DefaultOrderedDict(OrderedSet))
     self._multicast_edge_partitions_by_post_vertex = (
         DefaultOrderedDict(OrderedSet))
     self._sdram_edge_partitions_by_post_vertex = (
         DefaultOrderedDict(OrderedSet))
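
The per-vertex maps above all use the same defaulting pattern; a minimal sketch of it, assuming DefaultOrderedDict behaves like collections.defaultdict with insertion-ordered keys (the string keys below are illustrative stand-ins for vertices):

by_pre_vertex = DefaultOrderedDict(OrderedSet)
# a missing key is created on first access, so callers can add straight away
by_pre_vertex["vertex-a"].add("partition-1")
by_pre_vertex["vertex-a"].add("partition-2")
assert list(by_pre_vertex["vertex-a"]) == ["partition-1", "partition-2"]
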
Example #3
 def forget_machine_vertices(self):
     """ Arrange to forget all machine vertices that this application
         vertex maps to.
     """
     self._machine_vertices = OrderedSet()
     if self._splitter is not None:
         self._splitter.reset_called()
Example #4
 def __init__(self,
              pre_vertex,
              post_vertex,
              label=None,
              machine_edge_type=MachineEdge):
     """
     :param ApplicationVertex pre_vertex:
         The application vertex at the start of the edge.
     :param ApplicationVertex post_vertex:
         The application vertex at the end of the edge.
     :param label: The name of the edge.
     :type label: str or None
      :param machine_edge_type:
          The type of machine edges made from this app edge. Must be
          ``MachineEdge`` or a subclass of it; the default is ``MachineEdge``.
     :type machine_edge_type: type(MachineEdge)
     """
     self._label = label
     self._pre_vertex = pre_vertex
     self._post_vertex = post_vertex
     if not issubclass(machine_edge_type, MachineEdge):
         raise ValueError(
             "machine_edge_type must be a kind of machine edge")
     self._machine_edge_type = machine_edge_type
     self.__machine_edges = OrderedSet()
Example #5
 def __init__(self, partition_type, other_splitter):
     super().__init__(other_splitter, "")
     self._partition_type = partition_type
     self._pre_vertices = OrderedSet()
     self._post_vertex = None
     self._pre_slices = OrderedSet()
     self._post_slice = None
     self._app_edge = None
def create_vertices_groups(vertices, same_group_as_function):
    groups = list()
    for vertex in vertices:
        same_chip_as_vertices = same_group_as_function(vertex)
        if same_chip_as_vertices:
            same_chip_as_vertices = OrderedSet(same_chip_as_vertices)
            same_chip_as_vertices.add(vertex)
            # Singletons are not interesting; they are added later if needed
            if len(same_chip_as_vertices) > 1:
                add_set(groups, same_chip_as_vertices)
    return groups
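
A small usage sketch of create_vertices_groups; the neighbours mapping and lambda below are hypothetical stand-ins for the real same_group_as_function, and the helper add_set it calls is shown in a later example:

neighbours = {"a": ["b"], "b": ["a"], "c": []}
groups = create_vertices_groups(["a", "b", "c"], lambda v: neighbours[v])
# "a" and "b" request each other, so they form one group; "c" has no partners,
# so it stays a singleton and is not recorded in the returned list
assert len(groups) == 1
assert set(groups[0]) == {"a", "b"}
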
Example #7
    def __init__(self, constraints=None):
        """
        :param iterable(AbstractConstraint) constraints:
            Any initial constraints
        """

        # safety point for diamond inheritance
        if not hasattr(self, '_constraints') or self._constraints is None:
            self._constraints = OrderedSet()

        # add new constraints to the set
        self.add_constraints(constraints)
Example #8
def test_peek():
    o = OrderedSet()
    o.add(1)
    o.add(2)
    o.add(3)
    p1 = o.peek()
    p2 = o.pop()
    assert p1 == 3
    assert p1 == p2
    p3 = o.peek(last=False)
    assert p3 == 1
    p4 = o.pop(last=False)
    assert p4 == p3
    def allocate(self, p):
        if p is None:
            p = self._cores.pop()
        else:
            self._cores.remove(p)
        if self._cores_counter:
            self._cores_counter[self._n_cores] -= 1
        self._n_cores -= 1
        if self._cores_counter:
            self._cores_counter[self._n_cores] += 1

        if self._n_cores <= 0:
            self._cores = OrderedSet()
        return p
Example #10
    def __init__(self, allowed_vertex_types, allowed_edge_types,
                 allowed_partition_types, label):
        """
        :param allowed_vertex_types:\
            A single or tuple of types of vertex to be allowed in the graph
        :param allowed_edge_types:\
            A single or tuple of types of edges to be allowed in the graph
        :param allowed_partition_types:\
            A single or tuple of types of partitions to be allowed in the graph
        :param label: The label on the graph, or None
        """
        super(Graph, self).__init__(None)
        self._allowed_vertex_types = allowed_vertex_types
        self._allowed_edge_types = allowed_edge_types
        self._allowed_partition_types = allowed_partition_types

        self._vertices = OrderedSet()
        self._outgoing_edge_partitions_by_name = OrderedDict()
        self._outgoing_edges = DefaultOrderedDict(OrderedSet)
        self._incoming_edges = DefaultOrderedDict(OrderedSet)
        self._incoming_edges_by_partition_name = DefaultOrderedDict(list)
        self._outgoing_edge_partitions_by_pre_vertex = \
            DefaultOrderedDict(OrderedSet)
        self._outgoing_edge_partition_by_edge = OrderedDict()
        self._label = label
def add_set(all_sets, new_set):
    """
    Adds a new set into the list of sets, merging sets where required.

    If the new set does not overlap any existing sets, it is simply added.

    However, if the new set overlaps one or more existing sets, a superset is
    created combining all the overlapping sets and the new set.
    The existing overlapping sets are removed and only the new superset is added.

    :param list(set) all_sets: List of non-overlapping sets
    :param set new_set:
        A new set which may or may not overlap the previous sets.
    """

    union = OrderedSet()
    removes = []
    for a_set in all_sets:
        if not new_set.isdisjoint(a_set):
            removes.append(a_set)
            union |= a_set
    union |= new_set
    if removes:
        for a_set in removes:
            all_sets.remove(a_set)
    all_sets.append(union)
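
A brief sketch of the merge behaviour described above, using the OrderedSet type seen throughout these examples:

groups = []
add_set(groups, OrderedSet([1, 2]))
add_set(groups, OrderedSet([3, 4]))   # disjoint, so simply appended
add_set(groups, OrderedSet([2, 3]))   # overlaps both, so everything merges
assert len(groups) == 1
assert set(groups[0]) == {1, 2, 3, 4}
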
Example #12
def test_repr():
    o = OrderedSet()
    o.add(12)
    o.add(78)
    o.add(56)
    o.add(34)
    o.add(90)
    s = "{}".format(o)
    assert s == "OrderedSet([12, 78, 56, 34, 90])"
def __allocate_tags_for_placement(placement, resource_tracker, tag_collector,
                                  ports_collector, tag_port_tasks):
    """
    :param Placement placement:
    :param ResourceTracker resource_tracker:
    :param Tags tag_collector:
    :param dict(str,set(int)) ports_collector:
    :param list(_Task) tag_port_tasks:
    """
    vertex = placement.vertex
    resources = vertex.resources_required

    # Get the constraint details for the tags
    (board_address, ip_tags, reverse_ip_tags) = \
        ResourceTracker.get_ip_tag_info(resources, vertex.constraints)

    # Allocate the tags, first-come, first-served, using the fixed
    # placement of the vertex, and the required resources
    chips = [(placement.x, placement.y)]
    (_, _, _, returned_ip_tags, returned_reverse_ip_tags) = \
        resource_tracker.allocate_resources(
            resources, chips, placement.p, board_address, ip_tags,
            reverse_ip_tags)

    # Put the allocated IP tag information into the tag object
    if returned_ip_tags is not None:
        for (tag_constraint, (board_address, tag, dest_x, dest_y)) in \
                zip(ip_tags, returned_ip_tags):
            ip_tag = IPTag(
                board_address=board_address,
                destination_x=dest_x,
                destination_y=dest_y,
                tag=tag,
                ip_address=tag_constraint.ip_address,
                port=tag_constraint.port,
                strip_sdp=tag_constraint.strip_sdp,
                traffic_identifier=tag_constraint.traffic_identifier)
            tag_collector.add_ip_tag(ip_tag, vertex)

    if returned_reverse_ip_tags is None:
        return

    # Put the allocated reverse IP tag information into the tag object
    for tag_constraint, (board_address, tag) in zip(reverse_ip_tags,
                                                    returned_reverse_ip_tags):
        if board_address not in ports_collector:
            ports_collector[board_address] = OrderedSet(_BOARD_PORTS)
        if tag_constraint.port is not None:
            reverse_ip_tag = ReverseIPTag(board_address, tag,
                                          tag_constraint.port, placement.x,
                                          placement.y, placement.p,
                                          tag_constraint.sdp_port)
            tag_collector.add_reverse_ip_tag(reverse_ip_tag, vertex)

            ports_collector[board_address].discard(tag_constraint.port)
        else:
            tag_port_tasks.append(
                _Task(tag_constraint, board_address, tag, vertex, placement))
Example #14
    def __init__(self, binary_search_paths):
        """
        :param iterable(str) binary_search_paths:
            The initial set of folders to search for binaries.
        """
        binary_logs_path = os.environ.get("BINARY_LOGS_DIR", None)
        if binary_logs_path:
            self._paths_log = os.path.join(
                binary_logs_path, "binary_paths_used.log")
            self._binary_log = os.path.join(
                binary_logs_path, "binary_files_used.log")
        else:
            self._paths_log = None
            self._binary_log = None

        self._binary_search_paths = OrderedSet()
        for path in binary_search_paths:
            self.add_path(path)
Example #15
    def constraints(self):
        """ An iterable of constraints

        :rtype: iterable(AbstractConstraint)
        """
        try:
            return self._constraints
        except Exception:  # pylint: disable=broad-except
            return OrderedSet()
Example #16
 def _allocate_ports_for_reverse_ip_tags(self, tasks, ports, tags):
     for tag_constraint, board_address, tag, vertex, placement in tasks:
         if board_address not in ports:
             ports[board_address] = OrderedSet(_BOARD_PORTS)
         port = ports[board_address].pop(last=False)
         reverse_ip_tag = ReverseIPTag(board_address, tag, port,
                                       placement.x, placement.y,
                                       placement.p, tag_constraint.sdp_port)
         tags.add_reverse_ip_tag(reverse_ip_tag, vertex)
Example #17
def test_containment():
    o = OrderedSet()
    o.add(12)
    o.add(78)
    o.add(56)
    o.add(34)
    o.add(90)
    for item in [12, 78, 56, 34, 90]:
        assert item in o
    for item in [123, 456, 789]:
        assert item not in o
    def __unique_names(items, index):
        """ Produces an iterable of 1-tuples of the *unique* names in at \
            particular index into the provenance items' names.

        :param iterable(ProvenanceDataItem) items: The prov items
        :param int index: The index into the names
        :rtype: iterable(tuple(str))
        """
        return ((name,) for name in OrderedSet(
            item.names[index] for item in items))
Example #19
def test_obscure_stuff():
    o = OrderedSet()
    o.add(1)
    o.add(2)
    o.add(3)
    assert [x for x in reversed(o)] == [3, 2, 1]
    o2 = OrderedSet(o)
    assert [x for x in o2] == [1, 2, 3]
    assert o == o2
    o2 |= [4]
    assert o != o2
    assert repr(OrderedSet()) == "OrderedSet()"
    def _find_one_to_one_vertices(vertex, graph):
        """ Find vertices which have one to one connections with the given\
            vertex, and where their constraints don't force them onto\
            different chips.

        :param MachineVertex vertex:
            the vertex to use as a basis for one to one connections
        :param MachineGraph graph:
            the graph in which to look for other one to one vertices
        :return: set of one to one vertices
        :rtype: set(MachineVertex)
        """
        # Virtual vertices can't be forced on other chips
        if isinstance(vertex, AbstractVirtual):
            return []
        found_vertices = OrderedSet()
        vertices_seen = {vertex}

        # look for one to ones leaving this vertex
        outgoing = graph.get_edges_starting_at_vertex(vertex)
        vertices_to_try = deque(
            edge.post_vertex for edge in outgoing
            if edge.post_vertex not in vertices_seen)
        while vertices_to_try:
            next_vertex = vertices_to_try.pop()
            if next_vertex not in vertices_seen and \
                    not isinstance(next_vertex, AbstractVirtual):
                vertices_seen.add(next_vertex)
                if is_single(graph.get_edges_ending_at_vertex(next_vertex)):
                    found_vertices.add(next_vertex)
                    outgoing = graph.get_edges_starting_at_vertex(next_vertex)
                    vertices_to_try.extend(
                        edge.post_vertex for edge in outgoing
                        if edge.post_vertex not in vertices_seen)

        # look for one to ones entering this vertex
        incoming = graph.get_edges_ending_at_vertex(vertex)
        vertices_to_try = deque(
            edge.pre_vertex for edge in incoming
            if edge.pre_vertex not in vertices_seen)
        while vertices_to_try:
            next_vertex = vertices_to_try.pop()
            if next_vertex not in vertices_seen:
                vertices_seen.add(next_vertex)
                if is_single(graph.get_edges_starting_at_vertex(next_vertex)):
                    found_vertices.add(next_vertex)
                    incoming = graph.get_edges_ending_at_vertex(next_vertex)
                    vertices_to_try.extend(
                        edge.pre_vertex for edge in incoming
                        if edge.pre_vertex not in vertices_seen)

        found_vertices.update(get_vertices_on_same_chip(vertex, graph))
        return found_vertices
Example #21
 def __active_chips(machine, placements):
     """
     :param ~.Machine machine:
      :param ~.Placements placements:
     :rtype: set(~.Chip)
     """
     return OrderedSet(
         machine.get_chip_at(placement.x, placement.y)
         for placement in placements
         if isinstance(placement.vertex, ChipPowerMonitorMachineVertex))
Example #22
def test_ordered_ness():
    o = OrderedSet()
    o.add(12)
    o.add(78)
    o.add(56)
    o.add(34)
    o.add(90)
    assert len(o) == 5
    assert list(o) == [12, 78, 56, 34, 90]
    result = []
    for item in o:
        result.append(item)
    assert result == [12, 78, 56, 34, 90]
Example #23
    def add_constraint(self, constraint):
        """ Add a new constraint to the collection of constraints

        :param AbstractConstraint constraint: constraint to add
        :raise PacmanInvalidParameterException:
            If the constraint is not valid
        """
        if constraint is None:
            raise PacmanInvalidParameterException("constraint", constraint,
                                                  "must not be None")
        if not isinstance(constraint, AbstractConstraint):
            raise PacmanInvalidParameterException(
                "constraint", constraint,
                "Must be a " + _get_class_name(AbstractConstraint))

        try:
            self._constraints.add(constraint)
        except Exception:  # pylint: disable=broad-except
            self._constraints = OrderedSet()
            self._constraints.add(constraint)
Example #24
 def __init__(self, identifier, allowed_edge_types, constraints, label,
              traffic_weight, class_name):
     """
     :param str identifier: The identifier of the partition
     :param allowed_edge_types: The types of edges allowed
     :type allowed_edge_types: type or tuple(type, ...)
     :param iterable(AbstractConstraint) constraints:
         Any initial constraints
     :param str label: An optional label of the partition
     :param int traffic_weight:
         The weight of traffic going down this partition
     """
     super().__init__(constraints)
     self._label = label
     self._identifier = identifier
     self._edges = OrderedSet()
     self._allowed_edge_types = allowed_edge_types
     self._traffic_weight = traffic_weight
     self._class_name = class_name
     self._graph_code = None
Example #25
 def __init__(self,
              identifier,
              allowed_edge_types,
              constraints=None,
              label=None,
              traffic_weight=1):
     """
     :param identifier: The identifier of the partition
     :param allowed_edge_types: The types of edges allowed
     :param constraints: Any initial constraints
     :param label: An optional label of the partition
     :param traffic_weight: The weight of traffic going down this partition
     """
     super(OutgoingEdgePartition, self).__init__(constraints)
     self._label = label
     self._identifier = identifier
     self._edges = OrderedSet()
     self._allowed_edge_types = allowed_edge_types
     self._pre_vertex = None
     self._traffic_type = None
     self._traffic_weight = traffic_weight
Example #26
def test_reverse():
    o = OrderedSet()
    o.add(1)
    o.add(2)
    o.add(3)
    a = list(reversed(o))
    assert a == [3, 2, 1]
Example #27
 def _get_all_possible_recordable_variables(self):
     variables = OrderedSet()
     if isinstance(self._population._vertex, AbstractSpikeRecordable):
         variables.add(SPIKES)
     if isinstance(self._population._vertex, AbstractNeuronRecordable):
         variables.update(
             self._population._vertex.get_recordable_variables())
     return variables
Example #28
    def __init__(self, allowed_vertex_types, allowed_edge_types,
                 allowed_partition_types, label):
        """
        :param allowed_vertex_types:\
            A single or tuple of types of vertex to be allowed in the graph
        :param allowed_edge_types:\
            A single or tuple of types of edges to be allowed in the graph
        :param allowed_partition_types:\
            A single or tuple of types of partitions to be allowed in the graph
        :param label: The label on the graph, or None
        """
        super(Graph, self).__init__(None)
        self._allowed_vertex_types = allowed_vertex_types
        self._allowed_edge_types = allowed_edge_types
        self._allowed_partition_types = allowed_partition_types

        self._vertices = OrderedSet()
        self._outgoing_edge_partitions_by_name = OrderedDict()
        self._outgoing_edges = defaultdict(OrderedSet)
        self._incoming_edges = defaultdict(OrderedSet)
        self._incoming_edges_by_partition_name = defaultdict(list)
        self._outgoing_edge_partitions_by_pre_vertex = defaultdict(OrderedSet)
        self._label = label
 def __init__(self, x, y, processor_ids):
     """
     :param x: The x-coordinate of the chip
     :type x: int
     :param y: The y-coordinate of the chip
     :type y: int
     :param processor_ids: The processor IDs on the chip
     :type processor_ids: iterable(int)
     """
     self._x = x
     self._y = y
     self._processor_ids = OrderedSet()
     for processor_id in processor_ids:
         self.add_processor(processor_id)
 def __init__(self, chip, preallocated_resources, cores_counter):
     """
     :param ~spinn_machine.Chip chip:
         chip whose resources can be allocated
     :param preallocated_resources:
     :type preallocated_resources: PreAllocatedResourceContainer or None
     """
     self._cores = OrderedSet()
     for processor in chip.processors:
         if not processor.is_monitor:
             self._cores.add(processor.processor_id)
     self._n_cores = len(self._cores)
     if preallocated_resources:
         if chip.ip_address:
             self._n_cores -= preallocated_resources.cores_ethernet
         else:
             self._n_cores -= preallocated_resources.cores_all
     if chip.virtual:
         self._cores_counter = None
     else:
         self._cores_counter = cores_counter
     if self._cores_counter:
         self._cores_counter[self._n_cores] += 1
    def _get_data_for_vertices_locked(self, vertices, progress=None):
        receivers = OrderedSet()
        if self._uses_advanced_monitors:

            # locate receivers
            for vertex in vertices:
                placement = self._placements.get_placement_of_vertex(vertex)
                receivers.add(
                    funs.locate_extra_monitor_mc_receiver(
                        self._machine, placement.x, placement.y,
                        self._extra_monitor_cores_to_ethernet_connection_map))

            # set time out
            for receiver in receivers:
                receiver.set_cores_for_data_extraction(
                    transceiver=self._transceiver,
                    placements=self._placements,
                    extra_monitor_cores_for_router_timeout=(
                        self._extra_monitor_cores))

        # get data
        for vertex in vertices:
            placement = self._placements.get_placement_of_vertex(vertex)
            for recording_region_id in vertex.get_recorded_region_ids():
                self.get_data_for_vertex(placement, recording_region_id)
                if progress is not None:
                    progress.update()

        # revert time out
        if self._uses_advanced_monitors:
            for receiver in receivers:
                receiver.unset_cores_for_data_extraction(
                    transceiver=self._transceiver,
                    placements=self._placements,
                    extra_monitor_cores_for_router_timeout=(
                        self._extra_monitor_cores))
def __allocate_ports_for_reverse_ip_tags(tasks, ports, tags):
    """
    :param list(_Task) tasks:
    :param dict(str,set(int)) ports:
    :param Tags tags:
    """
    for task in tasks:
        if task.board not in ports:
            ports[task.board] = OrderedSet(_BOARD_PORTS)
        port = ports[task.board].pop(last=False)
        reverse_ip_tag = ReverseIPTag(task.board, task.tag, port,
                                      task.placement.x, task.placement.y,
                                      task.placement.p,
                                      task.constraint.sdp_port)
        tags.add_reverse_ip_tag(reverse_ip_tag, task.vertex)
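
The port handling above reduces to seeding an OrderedSet with a board's free ports and handing them out oldest-first; a tiny sketch of that pattern (the port numbers are made up for illustration, not the real _BOARD_PORTS):

free_ports = OrderedSet([17896, 17897, 17898])
first = free_ports.pop(last=False)    # oldest entry first, as in the code above
second = free_ports.pop(last=False)
assert (first, second) == (17896, 17897)
assert len(free_ports) == 1
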
    def __old_get_data_for_placements_with_monitors(self, placements,
                                                    progress):
        # locate receivers
        receivers = list(
            OrderedSet(
                locate_extra_monitor_mc_receiver(
                    self._machine, placement.x, placement.y,
                    self._packet_gather_cores_to_ethernet_connection_map)
                for placement in placements))

        # Ugly, to avoid an import loop...
        with receivers[0].streaming(receivers, self._transceiver,
                                    self._extra_monitor_cores,
                                    self._placements):
            # get data
            self.__old_get_data_for_placements(placements, progress)
 def __init__(
         self, identifier, allowed_edge_types, constraints=None,
         label=None, traffic_weight=1):
     """
     :param identifier: The identifier of the partition
     :param allowed_edge_types: The types of edges allowed
     :param constraints: Any initial constraints
     :param label: An optional label of the partition
     :param traffic_weight: The weight of traffic going down this partition
     """
     super(OutgoingEdgePartition, self).__init__(constraints)
     self._label = label
     self._identifier = identifier
     self._edges = OrderedSet()
     self._allowed_edge_types = allowed_edge_types
     self._pre_vertex = None
     self._traffic_type = None
     self._traffic_weight = traffic_weight
class CoreSubset(object):
    """ Represents a subset of the cores on a SpiNNaker chip.
    """

    __slots__ = (
        "_x", "_y", "_processor_ids"
    )

    def __init__(self, x, y, processor_ids):
        """
        :param x: The x-coordinate of the chip
        :type x: int
        :param y: The y-coordinate of the chip
        :type y: int
        :param processor_ids: The processor IDs on the chip
        :type processor_ids: iterable(int)
        """
        self._x = x
        self._y = y
        self._processor_ids = OrderedSet()
        for processor_id in processor_ids:
            self.add_processor(processor_id)

    def add_processor(self, processor_id):
        """ Adds a processor ID to this subset

        :param processor_id: A processor ID
        :type processor_id: int
        :return: Nothing is returned
        :rtype: None
        """
        self._processor_ids.add(processor_id)

    def __contains__(self, processor_id):
        return processor_id in self._processor_ids

    @property
    def x(self):
        """ The x-coordinate of the chip

        :return: The x-coordinate
        :rtype: int
        """
        return self._x

    @property
    def y(self):
        """ The y-coordinate of the chip

        :return: The y-coordinate
        :rtype: int
        """
        return self._y

    @property
    def processor_ids(self):
        """ The subset of processor IDs on the chip

        :return: An iterable of processor IDs
        :rtype: iterable(int)
        """
        return iter(self._processor_ids)

    def __repr__(self):
        return "{}:{}:{}".format(self._x, self._y, self._processor_ids)

    def __eq__(self, other):
        if not isinstance(other, CoreSubset):
            return False
        return self.x == other.x and self._y == other.y and \
            self._processor_ids == other.processor_ids

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        processors = frozenset(self._processor_ids)
        return (self._x, self._y, processors).__hash__()

    def __len__(self):
        """ The number of processors in this core subset
        """
        return len(self._processor_ids)

    def intersect(self, other):
        """ Returns a new CoreSubset which is an intersect of this and the\
            other.

        :param other: A second CoreSubset with possibly overlapping cores
        :type other: :py:class:`spinn_machine.CoreSubset`
        :return: A new CoreSubset with any overlap
        :rtype: :py:class:`spinn_machine.CoreSubset`
        """
        result = CoreSubset(self._x, self._y, [])
        for processor_id in self._processor_ids:
            if processor_id in other._processor_ids:
                result.add_processor(processor_id)
        return result
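
A short usage sketch of the CoreSubset class defined above:

subset_a = CoreSubset(0, 0, [1, 2, 3])
subset_b = CoreSubset(0, 0, [2, 3, 4])
assert 2 in subset_a and 4 not in subset_a
assert len(subset_a) == 3
overlap = subset_a.intersect(subset_b)
assert sorted(overlap.processor_ids) == [2, 3]
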
def get_same_size_vertex_groups(vertices):
    """ Get a dictionary of vertex to vertex that must be partitioned the same\
        size
    """

    # Dict of vertex to list of vertices with same size
    # (repeated lists expected)
    same_size_vertices = OrderedDict()

    for vertex in vertices:

        # Find all vertices that have a same size constraint associated with
        #  this vertex
        same_size_as_vertices = list()
        for constraint in vertex.constraints:
            if isinstance(constraint, SameAtomsAsVertexConstraint):
                if vertex.n_atoms != constraint.vertex.n_atoms:
                    raise PacmanPartitionException(
                        "Vertices {} ({} atoms) and {} ({} atoms) must be of"
                        " the same size to partition them together".format(
                            vertex.label, vertex.n_atoms,
                            constraint.vertex.label,
                            constraint.vertex.n_atoms))
                same_size_as_vertices.append(constraint.vertex)

        if not same_size_as_vertices:
            same_size_vertices[vertex] = {vertex}
            continue

        # Go through all the vertices that want to have the same size
        # as the top level vertex
        for same_size_vertex in same_size_as_vertices:

            # Neither vertex has been seen
            if (same_size_vertex not in same_size_vertices and
                    vertex not in same_size_vertices):

                # add both to a new group
                group = OrderedSet([vertex, same_size_vertex])
                same_size_vertices[vertex] = group
                same_size_vertices[same_size_vertex] = group

            # Both vertices have been seen elsewhere
            elif (same_size_vertex in same_size_vertices and
                    vertex in same_size_vertices):

                # merge their groups
                group_1 = same_size_vertices[vertex]
                group_2 = same_size_vertices[same_size_vertex]
                group_1.update(group_2)
                for vert in group_1:
                    same_size_vertices[vert] = group_1

            # The current vertex has been seen elsewhere
            elif vertex in same_size_vertices:

                # add the new vertex to the existing group
                group = same_size_vertices[vertex]
                group.add(same_size_vertex)
                same_size_vertices[same_size_vertex] = group

            # The other vertex has been seen elsewhere
            elif same_size_vertex in same_size_vertices:

                #  so add this vertex to the existing group
                group = same_size_vertices[same_size_vertex]
                group.add(vertex)
                same_size_vertices[vertex] = group

    return same_size_vertices
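
A hedged sketch of how the grouping above behaves; the stub vertex class is hypothetical and only carries the attributes the function reads (label, n_atoms, constraints), and SameAtomsAsVertexConstraint is assumed to be the constraint class this module already imports:

class _StubVertex(object):
    def __init__(self, label, n_atoms):
        self.label = label
        self.n_atoms = n_atoms
        self.constraints = []

v1, v2, v3 = _StubVertex("v1", 10), _StubVertex("v2", 10), _StubVertex("v3", 5)
v2.constraints.append(SameAtomsAsVertexConstraint(v1))
groups = get_same_size_vertex_groups([v1, v2, v3])
assert groups[v1] is groups[v2]   # v1 and v2 share one group
assert groups[v3] == {v3}         # v3 has no same-size partners
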
Example #37
class Graph(ConstrainedObject, AbstractGraph):
    """ A graph implementation that specifies the allowed types of the\
        vertices and edges.
    """

    __slots__ = [
        # The classes of vertex that are allowed in this graph
        "_allowed_vertex_types",
        # The classes of edges that are allowed in this graph
        "_allowed_edge_types",
        # The classes of outgoing edge partition that are allowed in this
        # graph
        "_allowed_partition_types",
        # The vertices of the graph
        "_vertices",
        # The outgoing edge partitions of the graph by name
        "_outgoing_edge_partitions_by_name",
        # The outgoing edges by pre-vertex
        "_outgoing_edges",
        # The incoming edges by post-vertex
        "_incoming_edges",
        # map between incoming edges and their associated partitions
        "_incoming_edges_by_partition_name",
        # The outgoing edge partitions by pre-vertex
        "_outgoing_edge_partitions_by_pre_vertex",
        # the outgoing partitions by edge
        "_outgoing_edge_partition_by_edge",
        # The label of the graph
        "_label"]

    def __init__(self, allowed_vertex_types, allowed_edge_types,
                 allowed_partition_types, label):
        """
        :param allowed_vertex_types:\
            A single or tuple of types of vertex to be allowed in the graph
        :param allowed_edge_types:\
            A single or tuple of types of edges to be allowed in the graph
        :param allowed_partition_types:\
            A single or tuple of types of partitions to be allowed in the graph
        :param label: The label on the graph, or None
        """
        super(Graph, self).__init__(None)
        self._allowed_vertex_types = allowed_vertex_types
        self._allowed_edge_types = allowed_edge_types
        self._allowed_partition_types = allowed_partition_types

        self._vertices = OrderedSet()
        self._outgoing_edge_partitions_by_name = OrderedDict()
        self._outgoing_edges = DefaultOrderedDict(OrderedSet)
        self._incoming_edges = DefaultOrderedDict(OrderedSet)
        self._incoming_edges_by_partition_name = DefaultOrderedDict(list)
        self._outgoing_edge_partitions_by_pre_vertex = \
            DefaultOrderedDict(OrderedSet)
        self._outgoing_edge_partition_by_edge = OrderedDict()
        self._label = label

    @property
    @overrides(AbstractGraph.label)
    def label(self):
        return self._label

    @overrides(AbstractGraph.add_vertex)
    def add_vertex(self, vertex):
        if not isinstance(vertex, self._allowed_vertex_types):
            raise PacmanInvalidParameterException(
                "vertex", vertex.__class__,
                "Vertices of this graph must be one of the following types:"
                " {}".format(self._allowed_vertex_types))
        self._vertices.add(vertex)

    @overrides(AbstractGraph.add_edge)
    def add_edge(self, edge, outgoing_edge_partition_name):
        # verify that the edge is one suitable for this graph
        if not isinstance(edge, self._allowed_edge_types):
            raise PacmanInvalidParameterException(
                "edge", edge.__class__,
                "Edges of this graph must be one of the following types:"
                " {}".format(self._allowed_edge_types))

        if edge.pre_vertex not in self._vertices:
            raise PacmanInvalidParameterException(
                "edge", edge.pre_vertex, "pre-vertex must be known in graph")
        if edge.post_vertex not in self._vertices:
            raise PacmanInvalidParameterException(
                "edge", edge.post_vertex, "post-vertex must be known in graph")

        # Add the edge to the partition
        partition = None
        if ((edge.pre_vertex, outgoing_edge_partition_name) not in
                self._outgoing_edge_partitions_by_name):
            partition = OutgoingEdgePartition(
                outgoing_edge_partition_name, self._allowed_edge_types)
            self._outgoing_edge_partitions_by_pre_vertex[
                edge.pre_vertex].add(partition)
            self._outgoing_edge_partitions_by_name[
                edge.pre_vertex, outgoing_edge_partition_name] = partition
        else:
            partition = self._outgoing_edge_partitions_by_name[
                edge.pre_vertex, outgoing_edge_partition_name]
        partition.add_edge(edge)

        # Add the edge to the indices
        self._outgoing_edges[edge.pre_vertex].add(edge)
        self._incoming_edges_by_partition_name[
            (edge.post_vertex, outgoing_edge_partition_name)].append(edge)
        self._incoming_edges[edge.post_vertex].add(edge)
        self._outgoing_edge_partition_by_edge[edge] = partition

    @overrides(AbstractGraph.add_outgoing_edge_partition)
    def add_outgoing_edge_partition(self, outgoing_edge_partition):

        # verify that this partition is suitable for this graph
        if not isinstance(
                outgoing_edge_partition, self._allowed_partition_types):
            raise PacmanInvalidParameterException(
                "outgoing_edge_partition", outgoing_edge_partition.__class__,
                "Partitions of this graph must be one of the following types:"
                " {}".format(self._allowed_partition_types))

        # check this partition doesn't already exist
        if ((outgoing_edge_partition.pre_vertex,
                outgoing_edge_partition.identifier) in
                self._outgoing_edge_partitions_by_name):
            raise PacmanAlreadyExistsException(
                "{}".format(OutgoingEdgePartition.__class__),
                (outgoing_edge_partition.pre_vertex,
                 outgoing_edge_partition.identifier))

        self._outgoing_edge_partitions_by_pre_vertex[
            outgoing_edge_partition.pre_vertex].add(outgoing_edge_partition)
        self._outgoing_edge_partitions_by_name[
            outgoing_edge_partition.pre_vertex,
            outgoing_edge_partition.identifier] = outgoing_edge_partition

    @property
    @overrides(AbstractGraph.vertices)
    def vertices(self):
        return self._vertices

    @property
    @overrides(AbstractGraph.n_vertices)
    def n_vertices(self):
        return len(self._vertices)

    @property
    @overrides(AbstractGraph.edges)
    def edges(self):
        return [
            edge
            for partition in self._outgoing_edge_partitions_by_name.values()
            for edge in partition.edges]

    @property
    @overrides(AbstractGraph.outgoing_edge_partitions)
    def outgoing_edge_partitions(self):
        return self._outgoing_edge_partitions_by_name.values()

    @property
    @overrides(AbstractGraph.n_outgoing_edge_partitions)
    def n_outgoing_edge_partitions(self):
        return len(self._outgoing_edge_partitions_by_name)

    @overrides(AbstractGraph.get_outgoing_partition_for_edge)
    def get_outgoing_partition_for_edge(self, edge):
        return self._outgoing_edge_partition_by_edge[edge]

    @overrides(AbstractGraph.get_edges_starting_at_vertex)
    def get_edges_starting_at_vertex(self, vertex):
        return self._outgoing_edges[vertex]

    @overrides(AbstractGraph.get_edges_ending_at_vertex)
    def get_edges_ending_at_vertex(self, vertex):
        if vertex not in self._incoming_edges:
            return []
        return self._incoming_edges[vertex]

    @overrides(AbstractGraph.get_edges_ending_at_vertex_with_partition_name)
    def get_edges_ending_at_vertex_with_partition_name(
            self, vertex, partition_name):
        key = (vertex, partition_name)
        if key not in self._incoming_edges_by_partition_name:
            return []
        return self._incoming_edges_by_partition_name[key]

    @overrides(AbstractGraph.get_outgoing_edge_partitions_starting_at_vertex)
    def get_outgoing_edge_partitions_starting_at_vertex(self, vertex):
        return self._outgoing_edge_partitions_by_pre_vertex[vertex]

    @overrides(AbstractGraph.get_outgoing_edge_partition_starting_at_vertex)
    def get_outgoing_edge_partition_starting_at_vertex(
            self, vertex, outgoing_edge_partition_name):
        return self._outgoing_edge_partitions_by_name.get(
            (vertex, outgoing_edge_partition_name), None)
class OutgoingEdgePartition(ConstrainedObject, AbstractOutgoingEdgePartition):
    """ A collection of edges which start at a single vertex which have the
        same semantics and so can share a single key.
    """

    __slots__ = [
        # The partition identifier
        "_identifier",
        # The edges in the partition
        "_edges",
        # The vertex at the start of all the edges
        "_pre_vertex",
        # The traffic type of all the edges
        "_traffic_type",
        # The type of edges to accept
        "_allowed_edge_types",
        # The weight of traffic going down this partition
        "_traffic_weight",
        # The label of the graph
        "_label"
    ]

    def __init__(
            self, identifier, allowed_edge_types, constraints=None,
            label=None, traffic_weight=1):
        """
        :param identifier: The identifier of the partition
        :param allowed_edge_types: The types of edges allowed
        :param constraints: Any initial constraints
        :param label: An optional label of the partition
        :param traffic_weight: The weight of traffic going down this partition
        """
        super(OutgoingEdgePartition, self).__init__(constraints)
        self._label = label
        self._identifier = identifier
        self._edges = OrderedSet()
        self._allowed_edge_types = allowed_edge_types
        self._pre_vertex = None
        self._traffic_type = None
        self._traffic_weight = traffic_weight

    @property
    @overrides(AbstractOutgoingEdgePartition.label)
    def label(self):
        return self._label

    @overrides(AbstractOutgoingEdgePartition.add_edge)
    def add_edge(self, edge):
        # Check for an incompatible edge
        if not isinstance(edge, self._allowed_edge_types):
            raise PacmanInvalidParameterException(
                "edge", edge.__class__,
                "Edges of this graph must be one of the following types:"
                " {}".format(self._allowed_edge_types))

        # Check for an incompatible pre vertex
        if self._pre_vertex is None:
            self._pre_vertex = edge.pre_vertex

        elif edge.pre_vertex != self._pre_vertex:
            raise PacmanConfigurationException(
                "A partition can only contain edges with the same"
                "pre_vertex")

        # Check for an incompatible traffic type
        if self._traffic_type is None:
            self._traffic_type = edge.traffic_type
        elif edge.traffic_type != self._traffic_type:
            raise PacmanConfigurationException(
                "A partition can only contain edges with the same"
                " traffic_type")

        self._edges.add(edge)

    @property
    @overrides(AbstractOutgoingEdgePartition.identifier)
    def identifier(self):
        return self._identifier

    @property
    @overrides(AbstractOutgoingEdgePartition.edges)
    def edges(self):
        return self._edges

    @property
    @overrides(AbstractOutgoingEdgePartition.n_edges)
    def n_edges(self):
        return len(self._edges)

    @property
    @overrides(AbstractOutgoingEdgePartition.pre_vertex)
    def pre_vertex(self):
        return self._pre_vertex

    @property
    @overrides(AbstractOutgoingEdgePartition.traffic_type)
    def traffic_type(self):
        return self._traffic_type

    @property
    @overrides(AbstractOutgoingEdgePartition.traffic_weight)
    def traffic_weight(self):
        return self._traffic_weight

    def __repr__(self):
        edges = ""
        for edge in self._edges:
            if edge.label is not None:
                edges += edge.label + ","
            else:
                edges += str(edge) + ","
        return _REPR_TEMPLATE.format(
            self._identifier, edges, self.constraints, self.label)

    def __str__(self):
        return self.__repr__()

    @overrides(AbstractOutgoingEdgePartition.__contains__)
    def __contains__(self, edge):
        """ Check if the edge is contained within this partition

        :param edge: the edge to search for.
        :return: True if the edge is in the partition, False otherwise
        """
        return edge in self._edges
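
A hedged usage sketch of the partition class above; the stub vertex and edge classes are hypothetical and carry just the attributes add_edge inspects, and PacmanConfigurationException is assumed to be importable from pacman.exceptions:

class _Vertex(object):
    pass


class _Edge(object):
    def __init__(self, pre, post):
        self.pre_vertex = pre
        self.post_vertex = post
        self.traffic_type = 1     # any consistent value satisfies the check
        self.label = None


a, b, c = _Vertex(), _Vertex(), _Vertex()
partition = OutgoingEdgePartition("PART", _Edge)
partition.add_edge(_Edge(a, b))
partition.add_edge(_Edge(a, c))
assert partition.n_edges == 2 and partition.pre_vertex is a
try:
    partition.add_edge(_Edge(b, c))   # a different pre-vertex is rejected
except PacmanConfigurationException:
    pass
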
def validate_routes(machine_graph, placements, routing_infos,
                    routing_tables, machine, graph_mapper=None):
    """ Go though the placements given and check that the routing entries\
        within the routing tables support reach the correction destinations\
        as well as not producing any cycles.

    :param machine_graph: the graph
    :param placements: the placements container
    :param routing_infos: the routing info container
    :param routing_tables: \
        the routing tables generated by the routing algorithm
    :param graph_mapper: \
        the mapping between graphs or none if only using a machine graph
    :param machine: the python machine object
    :type machine: spinn_machine.Machine object
    :rtype: None
    :raises PacmanRoutingException: when either no routing table entry is\
        found by the search on a given router, or a cycle is detected
    """
    traffic_multicast = (
        lambda edge: edge.traffic_type == EdgeTrafficType.MULTICAST)
    progress = ProgressBar(
        placements.placements,
        "Verifying the routes from each core travel to the correct locations")
    for placement in progress.over(placements.placements):

        # locate all placements to which this placement/vertex will
        # communicate with for a given key_and_mask and search its
        # determined destinations

        # gather keys and masks per partition
        partitions = machine_graph.\
            get_outgoing_edge_partitions_starting_at_vertex(placement.vertex)

        if graph_mapper is not None:
            n_atoms = graph_mapper.get_slice(placement.vertex).n_atoms
        else:
            n_atoms = 0

        for partition in partitions:
            r_info = routing_infos.get_routing_info_from_partition(
                partition)
            is_continuous = _check_if_partition_has_continuous_keys(partition)
            if not is_continuous:
                logger.warning(
                    "Due to the none continuous nature of the keys in this "
                    "partition {}, we cannot check all atoms will be routed "
                    "correctly, but will check the base key instead",
                    partition)

            destination_placements = OrderedSet()

            # filter for just multicast edges, we don't check other types of
            # edges here.
            out_going_edges = filter(traffic_multicast, partition.edges)

            # for every outgoing edge, locate its destination and store it.
            for outgoing_edge in out_going_edges:
                dest_placement = placements.get_placement_of_vertex(
                    outgoing_edge.post_vertex)
                destination_placements.add(
                    PlacementTuple(x=dest_placement.x,
                                   y=dest_placement.y,
                                   p=dest_placement.p))

            # search for these destinations
            for key_and_mask in r_info.keys_and_masks:
                _search_route(
                    placement, destination_placements, key_and_mask,
                    routing_tables, machine, n_atoms, is_continuous)