Exemplo n.º 1
0
 def _merge_constraints(subvertex_list):
     """ Merge the placement and tag constraints of a group of subvertices.

     :param subvertex_list: the subvertices whose constraints are to be\
         merged
     :return: tuple of (the single merged placement constraint or None,\
         list of IP tag constraints, list of reverse IP tag constraints)
     :raise PacmanConfigurationException: if two subvertices carry\
         placement constraints that disagree on x, y or p
     """
     merged_placement = None
     ip_tag = list()
     reverse_ip_tags = list()
     for subvertex in subvertex_list:
         place_constraints = utility_calls.locate_constraints_of_type(
             subvertex.constraints, PlacerChipAndCoreConstraint)
         ip_tag_constraints = utility_calls.locate_constraints_of_type(
             subvertex.constraints, TagAllocatorRequireIptagConstraint)
         ip_tag.extend(ip_tag_constraints)
         reverse_ip_tag = utility_calls.locate_constraints_of_type(
             subvertex.constraints,
             TagAllocatorRequireReverseIptagConstraint)
         reverse_ip_tags.extend(reverse_ip_tag)
         for place_constraint in place_constraints:
             if merged_placement is None:
                 merged_placement = place_constraint
             else:
                 # all placement constraints must agree on chip (x, y) and
                 # core (p); the original compared p with "!=", which made
                 # two *identical* constraints raise as a conflict
                 x_level = merged_placement.x == place_constraint.x
                 y_level = merged_placement.y == place_constraint.y
                 p_level = merged_placement.p == place_constraint.p
                 if not x_level or not y_level or not p_level:
                     raise exceptions.PacmanConfigurationException(
                         "can't handle conflicting constraints")
     return merged_placement, ip_tag, reverse_ip_tags
    def _locate_connected_chip_data(vertex, machine):
        """ Finds the connected virtual chip

        :param vertex: the virtual vertex whose placement constraint names\
            the virtual chip
        :param machine: the machine (including virtual chips) to search
        :return: tuple of "[x, y]" of the real chip the virtual chip is\
            connected to, and the link's multicast_default_from value
        :raise PacmanConfigurationException: if the router of the virtual\
            chip has no live link to a real chip
        """
        # locate the chip from the placement constraint
        # (locate_constraints_of_type returns a list; the original read
        # .x/.y off the list itself instead of the first constraint)
        placement_constraint = utility_calls.locate_constraints_of_type(
            vertex.constraints, PlacerChipAndCoreConstraint)[0]
        router = machine.get_chip_at(placement_constraint.x,
                                     placement_constraint.y).router
        found_link = False
        link_id = 0
        # search link ids 0 to 5 (assumes a 6-link router - TODO confirm);
        # the original used "or", which looped forever once a link was found
        while not found_link and link_id < 6:
            if router.is_link(link_id):
                found_link = True
            else:
                link_id += 1
        if not found_link:
            raise exceptions.PacmanConfigurationException(
                "Can't find the real chip this virtual chip is connected to. "
                "Please fix and try again.")
        else:
            # fetch the link once rather than three times
            link = router.get_link(link_id)
            return ("[{}, {}]".format(link.destination_x, link.destination_y),
                    link.multicast_default_from)
Exemplo n.º 3
0
    def __init__(self, keys_and_masks, key_list_function=None):
        """

        :param keys_and_masks: The key and mask combinations to fix
        :type keys_and_masks: iterable of\
                    :py:class:`pacman.model.routing_info.key_and_mask.BaseKeyAndMask`
        :param key_list_function: Optional function which will be called to\
                    translate the keys_and_masks list into individual keys.\
                    If missing, the keys will be generated by iterating\
                    through the keys_and_masks list directly.  The function\
                    parameters are:
                    * An iterable of keys and masks
                    * A partitioned edge
                    * Number of keys to generate (may be None)
        :type key_list_function: (iterable of\
                    :py:class:`pacman.model.routing_info.key_and_mask.BaseKeyAndMask`,\
                    :py:class:`pacman.model.partitioned_graph.partitioned_edge.PartitionedEdge`,
                    int)\
                    -> iterable of int
        :raise PacmanConfigurationException: if any element of\
                    keys_and_masks is not a BaseKeyAndMask
        """
        AbstractKeyAllocatorConstraint.__init__(
            self, "key allocator constraint to fix the keys and masks to"
            " {}".format(keys_and_masks))

        # validate before storing any state
        for keys_and_mask in keys_and_masks:
            if not isinstance(keys_and_mask, BaseKeyAndMask):
                # (original message was missing the space before "a")
                raise exceptions.PacmanConfigurationException(
                    "the keys and masks object contains a object that is not "
                    "a key_and_mask object")

        self._keys_and_masks = keys_and_masks
        self._key_list_function = key_list_function
Exemplo n.º 4
0
 def add_edge(self, edge):
     """ Add an edge into this outgoing edge partition.

     :param edge: the instance of abstract edge to add to the list
     :return: None
     :raise PacmanConfigurationException: if the edge's type differs from\
         the type of the edges already in this partition
     """
     self._edges.append(edge)
     # deduce once; used for both the first-edge case and the type check
     edge_type = self._deduce_type(edge)
     if self._type is None:
         self._type = edge_type
     elif self._type != edge_type:
         # the original message had four {} placeholders but never called
         # .format(), so the braces appeared literally in the error text
         raise exceptions.PacmanConfigurationException(
             "The edge {} was trying to be added to a partition {} which "
             "contains edges of type {}, yet the edge was of type {}. This"
             " is deemed an error. Please rectify this and try again."
             .format(edge, self, self._type, edge_type))
    def _create_python_object(algorithm):
        """ Create a python object for an algorithm from a specification

        :param algorithm: the algorithm specification
        :return: an instantiated object for the algorithm
        """
        if (algorithm.python_class is not None
                and algorithm.python_function is None):

            # if class, instantiate it
            python_algorithm = getattr(
                importlib.import_module(algorithm.python_module_import),
                algorithm.python_class)
            try:
                python_algorithm = python_algorithm()
            except TypeError as type_error:
                raise exceptions.PacmanConfigurationException(
                    "Failed to create instance of algorithm {}: {}".format(
                        algorithm.algorithm_id, type_error.message))
            except AttributeError as attribute_error:
                raise exceptions.PacmanConfigurationException(
                    "Failed to create instance of algorithm {}: {}".format(
                        algorithm.algorithm_id, attribute_error.message))

        elif (algorithm.python_function is not None
              and algorithm.python_class is None):

            # just a function, so no instantiation required
            python_algorithm = getattr(
                importlib.import_module(algorithm.python_module_import),
                algorithm.python_function)

        else:

            # neither, but is a python object.... error
            raise exceptions.PacmanConfigurationException(
                "Internal algorithm {} must be either a function or a class"
                "but not both".format(algorithm.algorithm_id))
        return python_algorithm
    def _get_algorithm_data(self, algorithm_names, algorithm_data_objects,
                            converter_algorithm_data_objects):
        algorithms = list()
        for algorithm_name in algorithm_names:
            if algorithm_name in algorithm_data_objects:
                algorithms.append(algorithm_data_objects[algorithm_name])

            elif algorithm_name in converter_algorithm_data_objects:
                algorithms.append(
                    converter_algorithm_data_objects[algorithm_name])
            else:
                raise exceptions.PacmanConfigurationException(
                    "Cannot find algorithm {}".format(algorithm_name))
        return algorithms
    def _generate_algorithm_data(self, element):
        """ Translates XML elements into tuples for the AlgorithmData object

        :param element: the xml element to translate
        :return: a AlgorithmData
        :raise PacmanConfigurationException: if neither or both of a python\
            module and command line args are specified
        """
        def text_of(tag):
            # text of the named child element, or None when absent
            node = element.find(tag)
            return node.text if node is not None else None

        command_line_args = element.find("command_line_args")
        if command_line_args is not None:
            command_line_args = self._translate_args(command_line_args)
        python_module = text_of("python_module")
        python_class = text_of("python_class")
        python_function = text_of("python_function")

        # an algorithm is external when it is command-line driven, and
        # internal when it is an importable python module - exactly one of
        # the two must be present
        if python_module is None and command_line_args is not None:
            external = True
        elif python_module is not None and command_line_args is None:
            external = False
        else:
            raise exceptions.PacmanConfigurationException(
                "Cannot deduce what to do when either both command line and "
                "python module are none or are filled in. Please rectify and "
                "try again")

        # get other params
        required_inputs = self._translate_parameters(
            element.find("required_inputs"))
        required_optional_inputs = self._translate_parameters(
            element.find("required_optional_inputs"))
        optional_inputs = self._translate_parameters(
            element.find("optional_inputs"))
        outputs = self._translate_parameters(
            element.find("produces_outputs"))
        return AlgorithmData(
            algorithm_id=element.get('name'),
            command_line_args=command_line_args, inputs=required_inputs,
            optional_inputs=optional_inputs,
            required_optional_inputs=required_optional_inputs, outputs=outputs,
            external=external, python_import=python_module,
            python_class=python_class, python_function=python_function)
Exemplo n.º 8
0
def handle_flexi_field(constraint, seen_fields, known_fields):
    """ Record the flexi-fields of a constraint into the nested seen_fields\
        structure, checking consistency with previously seen fields.

    :param constraint: the constraint whose ``fields`` are to be recorded
    :param seen_fields: nested dict built up over calls: field name ->\
        {field instance -> next level dict}
    :param known_fields: flat list of all field *names* seen so far (in any\
        position)
    :rtype: None:
    :raise PacmanConfigurationException: if a field name was seen before\
        but not at the expected level of the hierarchy
    """
    # set the level of search
    current_level = seen_fields

    for constraint_field in constraint.fields:
        found_field = None

        # try to locate field in level
        for seen_field in current_level:
            if constraint_field.name == seen_field:
                found_field = seen_field

        # seen the field before but not at this level. error
        # (known_fields stores *names*, so compare by name; the original
        # compared the field object itself, which never matched)
        if found_field is None and constraint_field.name in known_fields:
            raise exceptions.PacmanConfigurationException(
                "Can't find the field {} in the expected position".format(
                    constraint_field))

        # if not seen the field before
        if found_field is None and constraint_field.name not in known_fields:
            next_level = dict()
            instance_level = dict()
            current_level[constraint_field.name] = instance_level
            instance_level[constraint_field] = next_level
            known_fields.append(constraint_field.name)
            current_level = next_level

        # if found a field, check if its instance has indeed been put in
        # before
        if found_field is not None:
            instances = current_level[constraint_field.name]
            if constraint_field in instances:
                current_level = instances[constraint_field]
            elif constraint_field.value not in instances:
                # register the new instance at this level and descend
                # (the original also built an unused instance_level dict and
                # immediately overwrote it - removed)
                next_level = dict()
                instances[constraint_field] = next_level
                current_level = next_level
    def __init__(self, base_key, mask):
        """
        :param base_key: The routing key
        :type base_key: int
        :param mask: The routing mask
        :type mask: int
        :raise PacmanConfigurationException: If key & mask != key i.e. the key\
                    is not valid for the given mask
        """
        # validate before storing any state
        # (original message was missing the space before "try again")
        if base_key & mask != base_key:
            raise exceptions.PacmanConfigurationException(
                "This routing info is invalid as the mask and key together "
                "alters the key. This is deemed to be a error from "
                "spynnaker's point of view and therefore please rectify and "
                "try again")

        self._base_key = base_key
        self._mask = mask
Exemplo n.º 10
0
    def _deduce_type(edge):
        """ Deduce the enum from the edge type

        :param edge: the edge to deduce the type of
        :return: a enum type of edge_types
        :raise PacmanConfigurationException: if the edge matches no known\
            edge class
        """
        # ordered (class, enum) pairs; first isinstance match wins, same as
        # the original if/elif chain
        type_table = (
            (MultiCastPartitionedEdge, EDGE_TYPES.MULTI_CAST),
            (FixedRoutePartitionedEdge, EDGE_TYPES.FIXED_ROUTE),
            (MultiCastPartitionableEdge, EDGE_TYPES.MULTI_CAST),
            (FixedRoutePartitionableEdge, EDGE_TYPES.FIXED_ROUTE))
        for edge_class, edge_type in type_table:
            if isinstance(edge, edge_class):
                return edge_type
        raise exceptions.PacmanConfigurationException(
            "I don't recognise this type of edge, please rectify this and "
            "try again.")
    def extend(self, other):
        """ Fold another pre-allocated resource container into this one.

        :param other: the container whose resources are appended to ours
        :raise PacmanConfigurationException: if other is not a\
            PreAllocatedResourceContainer
        """
        if not isinstance(other, PreAllocatedResourceContainer):
            raise exceptions.PacmanConfigurationException(
                "Only another preallocated resource container can extend a "
                "preallocated resource container")

        # append each of the other container's resource lists onto the
        # matching list held here
        resource_pairs = (
            (self._specific_sdram_usage, other.specific_sdram_usage),
            (self._specific_core_resources, other.specific_core_resources),
            (self._core_resources, other.core_resources),
            (self._specific_iptag_resources, other.specific_iptag_resources),
            (self._specific_reverse_iptag_resources,
             other.specific_reverse_iptag_resources))
        for ours, theirs in resource_pairs:
            ours.extend(theirs)
    def decode_algorithm_data_objects(self):
        """ Parse every XML path known to this decoder into algorithm data.

        :return: the algorithm data objects which represent all the\
                    algorithm's inputs and outputs
        :raise PacmanConfigurationException: if the same algorithm name\
                    appears more than once across the xml files
        """
        # parse xmls, tracking which files have been read for error reports
        algorithm_data_objects = dict()
        files_read_so_far = list()
        for xml_path in self._xml_paths:
            files_read_so_far.append(xml_path)
            for element in etree.parse(xml_path).findall(".//algorithm"):
                name = element.get('name')
                if name in algorithm_data_objects:
                    raise exceptions.PacmanConfigurationException(
                        "There are two algorithms with the same name {}"
                        " in these xml files {}. Please rectify and try again."
                        .format(element.get("name"), files_read_so_far))
                algorithm_data_objects[name] = \
                    self._generate_algorithm_data(element)
        return algorithm_data_objects
 def _handle_vertex_constraint(constraint, json_constraints_dictory_rep,
                               vertex):
     """ Convert one constraint of a vertex into its json dict form(s) and
         append them to json_constraints_dictory_rep.

     :param constraint: the constraint to convert
     :param json_constraints_dictory_rep: the list of json constraint dicts
         being built up
     :param vertex: the vertex the constraint is attached to
     :raise PacmanConfigurationException: if a tag allocator constraint of
         an unrecognised type is given
     """
     if not isinstance(vertex, AbstractVirtualVertex):

         # a placer constraint (that is not a tag constraint) becomes a
         # "location" entry
         if (isinstance(constraint, AbstractPlacerConstraint)
                 and not isinstance(constraint,
                                    AbstractTagAllocatorConstraint)):
             chip_loc_constraint = dict()
             chip_loc_constraint['type'] = "location"
             chip_loc_constraint['vertex'] = str(id(vertex))
             chip_loc_constraint['location'] = [constraint.x, constraint.y]
             json_constraints_dictory_rep.append(chip_loc_constraint)

         # a fixed core additionally becomes a "cores" resource entry
         if (isinstance(constraint, PlacerChipAndCoreConstraint)
                 and constraint.p is not None):
             chip_loc_constraint = dict()
             chip_loc_constraint['type'] = "resource"
             chip_loc_constraint['vertex'] = str(id(vertex))
             chip_loc_constraint['resource'] = "cores"
             chip_loc_constraint['range'] = \
                 "[{}, {}]".format(constraint.p, constraint.p + 1)
             json_constraints_dictory_rep.append(chip_loc_constraint)
     if isinstance(constraint, AbstractTagAllocatorConstraint):
         tag_constraint = dict()
         tag_constraint['type'] = "resource"
         tag_constraint['vertex'] = str(id(vertex))
         if isinstance(constraint, TagAllocatorRequireIptagConstraint):
             tag_constraint['resource'] = "iptag"
             tag_constraint['range'] = [0, 1]
         elif isinstance(constraint,
                         TagAllocatorRequireReverseIptagConstraint):
             tag_constraint['resource'] = "reverse_iptag"
             tag_constraint['range'] = [0, 1]
         else:
             # (original message was missing the space before "Please")
             raise exceptions.PacmanConfigurationException(
                 "Converter does not recognise this tag constraint. "
                 "Please update this algorithm and try again.")
         json_constraints_dictory_rep.append(tag_constraint)
    def __call__(self, placements, allocations, partitioned_graph,
                 extended_machine, constraints):
        """ Convert file-based placements and core allocations back into a\
            pacman Placements object.

        :param placements: the json placements file (as consumed by\
            _load_json_files)
        :param allocations: the json core allocations file
        :param partitioned_graph: the partitioned graph the placements\
            refer to
        :param extended_machine: the machine, including virtual chips
        :param constraints: the json constraints file
        :return: dict with a "placements" entry holding the Placements
        :raise PacmanConfigurationException: if a vertex cannot be matched\
            to the graph or its constraints are not understood
        """

        # load the json files
        file_placements, core_allocations, constraints = \
            self._load_json_files(placements, allocations, constraints)

        # validate the json files against the schemas
        self._validate_file_read_data(file_placements, core_allocations,
                                      constraints)

        # drop the type and allocations bit of core allocations
        # (makes lower code simpler)
        core_allocations = core_allocations['allocations']

        memory_placements = Placements()

        # process placements
        for vertex_id in file_placements:
            subvertex = partitioned_graph.get_subvertex_by_id(vertex_id)
            if vertex_id not in core_allocations:
                if subvertex is not None:

                    # virtual chip or tag chip
                    constraints_for_vertex = self._locate_constraints(
                        vertex_id, constraints)
                    external_device_constraints = \
                        self._valid_constraints_for_external_device(
                            constraints_for_vertex)
                    if len(external_device_constraints) != 0:

                        # get data for virtual chip
                        route_constraint = \
                            external_device_constraints['end_point']
                        route_direction = constants.EDGES(
                            route_constraint['direction'].upper())
                        placement_constraint = \
                            external_device_constraints['placement']
                        coords = placement_constraint['location']

                        # locate virtual chip
                        link = extended_machine.get_chip_at(
                            coords[0],
                            coords[1]).router.get_link(route_direction.value)
                        destination_chip = extended_machine.get_chip_at(
                            link.destination_x, link.destination_y)

                        # create placement; this must go into the Placements
                        # being built - the original mistakenly called
                        # add_placement on the json "placements" parameter
                        memory_placements.add_placement(
                            Placement(subvertex, destination_chip.x,
                                      destination_chip.y, None))
                    else:
                        raise exceptions.PacmanConfigurationException(
                            "I don't recognise this pattern of constraints for"
                            " a vertex which does not have a placement")
            else:
                if subvertex is None:
                    raise exceptions.PacmanConfigurationException(
                        "Failed to locate the partitioned vertex in the "
                        "partitioned graph with label {}".format(vertex_id))
                else:
                    memory_placements.add_placement(
                        Placement(x=file_placements[vertex_id][0],
                                  y=file_placements[vertex_id][1],
                                  p=core_allocations[vertex_id][0],
                                  subvertex=subvertex))

        # return the file format
        return {"placements": memory_placements}
    def _sort_out_order_of_algorithms(self, inputs, required_outputs,
                                      algorithm_data, optional_algorithms):
        """ Takes the algorithms and determines which order they need to be\
            executed to generate the correct data objects

        :param inputs: map of input type name to value (only the type\
                names are used here)
        :type inputs: dict of str to object
        :param required_outputs: the set of outputs that this workflow is\
                meant to generate
        :param algorithm_data: the algorithms which must all be scheduled
        :param optional_algorithms: the set of optional algorithms which\
                include the converters for the file formats which can be\
                inserted automatically if required
        :return: None
        :raise PacmanConfigurationException: if no runnable ordering\
                exists, or some required output cannot be generated
        """

        input_types = set(inputs.iterkeys())

        allocated_algorithms = list()
        generated_outputs = set()
        # seed the generated outputs with the supplied inputs; set.union
        # returns a *new* set and the original discarded its result, so
        # the inputs were never recorded - update() mutates in place
        generated_outputs.update(input_types)
        allocated_a_algorithm = True
        algorithms_to_find = list(algorithm_data)
        outputs_to_find = self._remove_outputs_which_are_inputs(
            required_outputs, inputs)

        # keep scheduling until everything is placed or no progress is made
        while ((len(algorithms_to_find) > 0 or len(outputs_to_find) > 0)
               and allocated_a_algorithm):
            allocated_a_algorithm = False

            # check each algorithm to see if its usable with current inputs
            # and without its optional required inputs
            suitable_algorithm = self._locate_suitable_algorithm(
                algorithms_to_find, input_types, generated_outputs, False,
                True)

            # add the suitable algorithms to the list and take there outputs
            #  as new inputs
            if suitable_algorithm is not None:
                allocated_algorithms.append(suitable_algorithm)
                allocated_a_algorithm = True
                self._remove_algorithm_and_update_outputs(
                    algorithms_to_find, suitable_algorithm, input_types,
                    generated_outputs, outputs_to_find)
            else:
                # fall back to the optional (converter) algorithms
                suitable_algorithm = self._locate_suitable_algorithm(
                    optional_algorithms, input_types, generated_outputs, True,
                    True)
                if suitable_algorithm is not None:
                    allocated_algorithms.append(suitable_algorithm)
                    allocated_a_algorithm = True
                    self._remove_algorithm_and_update_outputs(
                        optional_algorithms, suitable_algorithm, input_types,
                        generated_outputs, outputs_to_find)
                else:
                    # no algorithm can run: build a diagnostic message
                    algorithms_left_names = list()
                    for algorithm in algorithms_to_find:
                        algorithms_left_names.append(algorithm.algorithm_id)
                    for algorithm in optional_algorithms:
                        algorithms_left_names.append(algorithm.algorithm_id)
                    algorithms_used = list()
                    for algorithm in allocated_algorithms:
                        algorithms_used.append(algorithm.algorithm_id)
                    algorithm_input_requirement_breakdown = ""
                    for algorithm in algorithms_to_find:
                        if algorithm.algorithm_id in algorithms_left_names:
                            algorithm_input_requirement_breakdown += \
                                self._deduce_inputs_required_to_run(
                                    algorithm, input_types)
                    for algorithm in optional_algorithms:
                        if algorithm.algorithm_id in algorithms_left_names:
                            algorithm_input_requirement_breakdown += \
                                self._deduce_inputs_required_to_run(
                                    algorithm, input_types)

                    raise exceptions.PacmanConfigurationException(
                        "Unable to deduce a future algorithm to use.\n"
                        "    Inputs: {}\n"
                        "    Outputs: {}\n"
                        "    Functions available: {}\n"
                        "    Functions used: {}\n"
                        "    Inputs required per function: \n{}\n".format(
                            input_types, outputs_to_find,
                            algorithms_left_names, algorithms_used,
                            algorithm_input_requirement_breakdown))

        # verify everything required was actually produced
        all_required_outputs_generated = True
        failed_to_generate_output_string = ""
        for output in outputs_to_find:
            if output not in generated_outputs:
                all_required_outputs_generated = False
                failed_to_generate_output_string += ":{}".format(output)

        if not all_required_outputs_generated:
            raise exceptions.PacmanConfigurationException(
                "Unable to generate outputs {}".format(
                    failed_to_generate_output_string))

        self._algorithms = allocated_algorithms
Exemplo n.º 16
0
    def __call__(self, subgraph, placements, n_keys_map, routing_paths):
        """
        Allocates routing information to the partitioned edges in a\
        partitioned graph

        :param subgraph: The partitioned graph to allocate the routing info for
        :type subgraph:\
                    :py:class:`pacman.model.partitioned_graph.partitioned_graph.PartitionedGraph`
        :param placements: The placements of the subvertices
        :type placements:\
                    :py:class:`pacman.model.placements.placements.Placements`
        :param n_keys_map: A map between the partitioned edges and the number\
                    of keys required by the edges
        :type n_keys_map:\
                    :py:class:`pacman.model.routing_info.abstract_partitioned_edge_n_keys_map.AbstractPartitionedEdgeNKeysMap`
        :param routing_paths: the paths each partitioned edge takes to get\
                from source to destination.
        :type routing_paths:
            :py:class:`pacman.model.routing_paths.multicast_routing_paths.MulticastRoutingPaths
        :return: The routing information
        :rtype: :py:class:`pacman.model.routing_info.routing_info.RoutingInfo`,
                :py:class:`pacman.model.routing_tables.multicast_routing_table.MulticastRoutingTable
        :raise pacman.exceptions.PacmanRouteInfoAllocationException: If\
                   something goes wrong with the allocation
        """

        # check that this algorithm supports the constraints put onto the
        # partitioned_edges
        # (empty list: this allocator accepts no key allocator constraints,
        # so any such constraint on a subedge fails the check below)
        supported_constraints = []
        utility_calls.check_algorithm_can_support_constraints(
            constrained_vertices=subgraph.subedges,
            supported_constraints=supported_constraints,
            abstract_constraint_type=AbstractKeyAllocatorConstraint)

        # take each subedge and create keys from its placement
        progress_bar = ProgressBar(len(subgraph.subedges),
                                   "Allocating routing keys")
        routing_infos = RoutingInfo()
        routing_tables = MulticastRoutingTables()

        for subedge in subgraph.subedges:
            # the key is derived from where the edge's destination subvertex
            # has been placed
            destination = subedge.post_subvertex
            placement = placements.get_placement_of_subvertex(destination)
            key = self._get_key_from_placement(placement)
            keys_and_masks = list(
                [BaseKeyAndMask(base_key=key, mask=self.MASK)])
            n_keys = n_keys_map.n_keys_for_partitioned_edge(subedge)
            if n_keys > self.MAX_KEYS_SUPPORTED:
                raise exceptions.PacmanConfigurationException(
                    "Only edges which require less than {} keys are supported".
                    format(self.MAX_KEYS_SUPPORTED))

            partition_info = PartitionRoutingInfo(keys_and_masks, subedge)
            routing_infos.add_partition_info(partition_info)

            progress_bar.update()
        progress_bar.end()

        # NOTE(review): routing_tables is returned without ever being
        # populated here, and routing_paths is unused - confirm callers
        # expect an empty table set from this allocator
        return {
            'routing_infos': routing_infos,
            'routing_tables': routing_tables
        }
Exemplo n.º 17
0
    def __call__(self, subgraph, n_keys_map, graph_mapper=None):
        """ Allocate routing keys and masks to the partitions of a\
            partitioned graph, honouring fixed key/mask, fixed mask, fixed\
            field and contiguous range constraints.

        :param subgraph: the partitioned graph whose partitions need keys
        :param n_keys_map: map from partition to the number of keys required
        :param graph_mapper: optional mapper back to the partitionable\
            graph; when given, continuous groups are grouped by source\
            vertex and sorted by subvertex slice before allocation
        :return: dict with a 'routing_infos' entry
        :raise PacmanConfigurationException: if any FlexiField constraints\
            are present (not supported by this allocator)
        """

        # check that this algorithm supports the constraints
        utility_calls.check_algorithm_can_support_constraints(
            constrained_vertices=subgraph.partitions,
            supported_constraints=[
                KeyAllocatorFixedMaskConstraint,
                KeyAllocatorFixedKeyAndMaskConstraint,
                KeyAllocatorContiguousRangeContraint
            ],
            abstract_constraint_type=AbstractKeyAllocatorConstraint)

        # verify that no edge has more than 1 of a constraint ,and that
        # constraints are compatible
        routing_info_allocator_utilities.\
            check_types_of_edge_constraint(subgraph)

        routing_infos = RoutingInfo()

        # Get the partitioned edges grouped by those that require the same key
        (fixed_key_groups, fixed_mask_groups, fixed_field_groups,
         flexi_field_groups, continuous_groups, none_continuous_groups) = \
            routing_info_allocator_utilities.get_edge_groups(subgraph)

        # Even non-continuous keys will be continuous
        for group in none_continuous_groups:
            continuous_groups.add(group)

        # Go through the groups and allocate keys
        progress_bar = ProgressBar(len(subgraph.partitions),
                                   "Allocating routing keys")

        # allocate the groups that have fixed keys
        for group in fixed_key_groups:  # fixed keys groups

            # Get any fixed keys and masks from the group and attempt to
            # allocate them
            fixed_mask = None
            fixed_key_and_mask_constraint = \
                utility_calls.locate_constraints_of_type(
                    group.constraints,
                    KeyAllocatorFixedKeyAndMaskConstraint)[0]

            # attempt to allocate them
            self._allocate_fixed_keys_and_masks(
                fixed_key_and_mask_constraint.keys_and_masks, fixed_mask)

            # update the pacman data objects
            self._update_routing_objects(
                fixed_key_and_mask_constraint.keys_and_masks, routing_infos,
                group)

            # NOTE(review): assumes every fixed-key group is also present in
            # continuous_groups - set.remove raises KeyError otherwise;
            # confirm get_edge_groups guarantees this
            continuous_groups.remove(group)

            progress_bar.update()

        for group in fixed_mask_groups:  # fixed mask groups

            # get mask and fields if need be
            fixed_mask = utility_calls.locate_constraints_of_type(
                group.constraints, KeyAllocatorFixedMaskConstraint)[0].mask

            fields = None
            if group in fixed_field_groups:
                fields = utility_calls.locate_constraints_of_type(
                    group.constraints,
                    KeyAllocatorFixedFieldConstraint)[0].fields
                fixed_field_groups.remove(group)

            # try to allocate
            keys_and_masks = self._allocate_keys_and_masks(
                fixed_mask, fields, n_keys_map.n_keys_for_partition(group))

            # update the pacman data objects
            self._update_routing_objects(keys_and_masks, routing_infos, group)

            continuous_groups.remove(group)

            progress_bar.update()

        for group in fixed_field_groups:
            fields = utility_calls.locate_constraints_of_type(
                group.constraints, KeyAllocatorFixedFieldConstraint)[0].fields

            # try to allocate
            keys_and_masks = self._allocate_keys_and_masks(
                None, fields, n_keys_map.n_keys_for_partition(group))

            # update the pacman data objects
            self._update_routing_objects(keys_and_masks, routing_infos, group)

            continuous_groups.remove(group)

            progress_bar.update()

        if len(flexi_field_groups) != 0:
            raise exceptions.PacmanConfigurationException(
                "MallocBasedRoutingInfoAllocator does not support FlexiField")

        # If there is a graph, group by source vertex and sort by vertex slice
        # (lo_atom)
        if graph_mapper is not None:
            vertex_groups = defaultdict(list)
            for partition in continuous_groups:
                vertex = graph_mapper.get_vertex_from_subvertex(
                    partition.edges[0].pre_subvertex)
                vertex_groups[vertex].append(partition)
            vertex_partitions = list()
            # itervalues(): this module targets Python 2
            for vertex_group in vertex_groups.itervalues():
                sorted_partitions = sorted(
                    vertex_group,
                    key=lambda part: graph_mapper.get_subvertex_slice(
                        part.edges[0].pre_subvertex))
                vertex_partitions.extend(sorted_partitions)
            continuous_groups = vertex_partitions

        for group in continuous_groups:
            keys_and_masks = self._allocate_keys_and_masks(
                None, None, n_keys_map.n_keys_for_partition(group))

            # update the pacman data objects
            self._update_routing_objects(keys_and_masks, routing_infos, group)

        progress_bar.end()
        return {'routing_infos': routing_infos}
    def __call__(self, subgraph, n_keys_map, routing_tables):
        """ Allocate routing keys and masks to every partition in the\
            subgraph, honouring fixed key/mask/field constraints first and\
            then allocating the remaining (continuous) partitions grouped by\
            shared routes in the routing tables.

        :param subgraph: the partitioned graph whose partitions need keys
        :param n_keys_map: map from partition to the number of keys it needs
        :param routing_tables: multicast routing tables, used here only to
            group partitions that share a route so they can be allocated
            adjacent key ranges
        :return: dict with a single entry, 'routing_infos', holding the
            RoutingInfo built by this allocator
        :raises PacmanConfigurationException: if an unsupported (FlexiField)
            constraint is present
        """

        # check that this algorithm supports the constraints
        utility_calls.check_algorithm_can_support_constraints(
            constrained_vertices=subgraph.partitions,
            supported_constraints=[
                KeyAllocatorFixedMaskConstraint,
                KeyAllocatorFixedKeyAndMaskConstraint,
                KeyAllocatorContiguousRangeContraint
            ],
            abstract_constraint_type=AbstractKeyAllocatorConstraint)

        # verify that no edge has more than 1 of a constraint ,and that
        # constraints are compatible
        routing_info_allocator_utilities.\
            check_types_of_edge_constraint(subgraph)

        routing_infos = RoutingInfo()

        # Get the partitioned edges grouped by those that require the same key
        (fixed_key_groups, fixed_mask_groups, fixed_field_groups,
         flexi_field_groups, continuous_groups, none_continuous_groups) = \
            routing_info_allocator_utilities.get_edge_groups(subgraph)

        # Even non-continuous keys will be continuous
        for group in none_continuous_groups:
            continuous_groups.add(group)

        # Go through the groups and allocate keys
        progress_bar = ProgressBar(len(subgraph.partitions),
                                   "Allocating routing keys")

        # allocate the groups that have fixed keys
        for group in fixed_key_groups:  # fixed keys groups

            # Get any fixed keys and masks from the group and attempt to
            # allocate them
            fixed_mask = None
            fixed_key_and_mask_constraint = \
                utility_calls.locate_constraints_of_type(
                    group.constraints,
                    KeyAllocatorFixedKeyAndMaskConstraint)[0]

            # attempt to allocate them
            self._allocate_fixed_keys_and_masks(
                fixed_key_and_mask_constraint.keys_and_masks, fixed_mask)

            # update the pacman data objects
            self._update_routing_objects(
                fixed_key_and_mask_constraint.keys_and_masks, routing_infos,
                group)

            # NOTE(review): assumes every fixed-key group is also in
            # continuous_groups - confirm get_edge_groups guarantees this
            continuous_groups.remove(group)

            progress_bar.update()

        for group in fixed_mask_groups:  # fixed mask groups

            # get mask and fields if need be
            fixed_mask = utility_calls.locate_constraints_of_type(
                group.constraints, KeyAllocatorFixedMaskConstraint)[0].mask

            # a group may carry both a fixed mask and fixed fields; consume
            # the field constraint here so the fixed-field loop below skips it
            fields = None
            if group in fixed_field_groups:
                fields = utility_calls.locate_constraints_of_type(
                    group.constraints,
                    KeyAllocatorFixedFieldConstraint)[0].fields
                fixed_field_groups.remove(group)

            # try to allocate
            keys_and_masks = self._allocate_keys_and_masks(
                fixed_mask, fields, n_keys_map.n_keys_for_partition(group))

            # update the pacman data objects
            self._update_routing_objects(keys_and_masks, routing_infos, group)

            continuous_groups.remove(group)

            progress_bar.update()

        for group in fixed_field_groups:
            fields = utility_calls.locate_constraints_of_type(
                group.constraints, KeyAllocatorFixedFieldConstraint)[0].fields

            # try to allocate
            keys_and_masks = self._allocate_keys_and_masks(
                None, fields, n_keys_map.n_keys_for_partition(group))

            # update the pacman data objects
            self._update_routing_objects(keys_and_masks, routing_infos, group)

            continuous_groups.remove(group)

            progress_bar.update()

        if len(flexi_field_groups) != 0:
            raise exceptions.PacmanConfigurationException(
                "MallocBasedRoutingInfoAllocator does not support FlexiField")

        # Sort the rest of the groups, using the routing tables for guidance
        # Group partitions by those which share routes in any table
        partition_groups = OrderedDict()
        # visit routers with the largest routing tables first
        routers = reversed(
            sorted(
                routing_tables.get_routers(),
                key=lambda item: len(
                    routing_tables.get_entries_for_router(item[0], item[1]))))
        for x, y in routers:

            # Find all partitions that share a route in this table
            partitions_by_route = defaultdict(OrderedSet)
            routing_table = routing_tables.get_entries_for_router(x, y)
            for partition, entry in routing_table.iteritems():
                if partition in continuous_groups:
                    # encode the outgoing route as a bitmap: bits 0-5 are the
                    # links, bits 6+ are the processors, so equal hashes mean
                    # identical routes out of this router
                    entry_hash = sum([1 << i for i in entry.out_going_links])
                    entry_hash += sum(
                        [1 << (i + 6) for i in entry.out_going_processors])
                    partitions_by_route[entry_hash].add(partition)

            # union-find style merging: partitions sharing a route anywhere
            # end up in the same group
            for entry_hash, partitions in partitions_by_route.iteritems():

                found_groups = list()
                for partition in partitions:
                    if partition in partition_groups:
                        found_groups.append(partition_groups[partition])

                if len(found_groups) == 0:

                    # If no group was found, create a new one
                    for partition in partitions:
                        partition_groups[partition] = partitions

                elif len(found_groups) == 1:

                    # If a single other group was found, merge it
                    for partition in partitions:
                        found_groups[0].add(partition)
                        partition_groups[partition] = found_groups[0]

                else:

                    # Merge the groups
                    new_group = partitions
                    for group in found_groups:
                        for partition in group:
                            new_group.add(partition)
                    for partition in new_group:
                        partition_groups[partition] = new_group

        # Sort partitions by largest group
        # (tuple() freezes each group; OrderedSet de-duplicates the shared
        # group objects referenced by several partitions)
        continuous_groups = OrderedSet(
            tuple(group) for group in partition_groups.itervalues())
        continuous_groups = reversed(
            sorted([group for group in continuous_groups],
                   key=lambda group: len(group)))

        # allocate the remaining partitions group by group so partitions that
        # share routes receive adjacent key ranges
        for group in continuous_groups:
            for partition in group:
                keys_and_masks = self._allocate_keys_and_masks(
                    None, None, n_keys_map.n_keys_for_partition(partition))

                # update the pacman data objects
                self._update_routing_objects(keys_and_masks, routing_infos,
                                             partition)
                progress_bar.update()

        progress_bar.end()
        return {'routing_infos': routing_infos}
# Exemplo n.º 19
# 0
def check_types_of_edge_constraint(sub_graph):
    """ Go through the subgraph for operations and checks that the constraints\
        are compatible.

    :param sub_graph: the subgraph to search through
    :return:
    """
    locate = utility_calls.locate_constraints_of_type
    for partition in sub_graph.partitions:
        # Gather every key-allocator constraint of each kind on the partition
        key_constraints = locate(
            partition.constraints, KeyAllocatorFixedKeyAndMaskConstraint)
        mask_constraints = locate(
            partition.constraints, KeyAllocatorFixedMaskConstraint)
        field_constraints = locate(
            partition.constraints, KeyAllocatorFixedFieldConstraint)
        flexi_constraints = locate(
            partition.constraints, KeyAllocatorFlexiFieldConstraint)

        # No more than one constraint of any one kind is allowed
        if any(len(found) > 1 for found in (
                key_constraints, field_constraints, mask_constraints,
                flexi_constraints)):
            raise exceptions.PacmanConfigurationException(
                "There are more than one of the same constraint type on "
                "the partition {} for edges {}. Please fix and try again.".
                format(partition.identifer, partition.edges))

        has_fixed_key = bool(key_constraints)
        has_fixed_mask = bool(mask_constraints)
        has_fixed_field = bool(field_constraints)
        has_flexi_field = bool(flexi_constraints)

        # a fixed key plus a fixed mask should have been merged before now
        if has_fixed_key and has_fixed_mask:
            raise exceptions.PacmanConfigurationException(
                "The partition {} with edges {} has a fixed key and fixed "
                "mask constraint. These can be merged together, but is "
                "deemed an error here".format(partition.identifer,
                                              partition.edges))

        # a fixed key plus a fixed field is incompatible
        if has_fixed_key and has_fixed_field:
            raise exceptions.PacmanConfigurationException(
                "The partition {} for edges {} has a fixed key and fixed "
                "field constraint. These may be merge-able together, but "
                "is deemed an error here".format(partition.identifer,
                                                 partition.edges))

        # a fixed mask plus a fixed field must agree on their masks
        if has_fixed_mask and has_fixed_field:
            _check_masks_are_correct(partition)

        # a flexible field combined with any fixed constraint is an error
        if has_flexi_field and (
                has_fixed_mask or has_fixed_key or has_fixed_field):
            raise exceptions.PacmanConfigurationException(
                "The partition {} for edges {} has a flexible field and "
                "another fixed constraint. These maybe be merge-able, but "
                "is deemed an error here".format(partition.identifer,
                                                 partition.edges))