def test_special_methods():
    o = DefaultOrderedDict(list)
    o["gamma"].append("bacon")
    # test __reduce__ (pickle support)
    pickle.dumps(o)
    # test copy
    o2 = o.copy()
    assert o2["gamma"] == ["bacon"]
    o3 = copy.deepcopy(o)
    assert o3["gamma"] == ["bacon"]
    a = repr(o)
    b = repr(o3)
    assert a == b
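The tests in this collection exercise pickling, copying, deep-copying, repr, key order and the callable check of DefaultOrderedDict. For context, here is a minimal sketch of how such a class is commonly built (an OrderedDict with a defaultdict-style __missing__); this is illustrative only and not necessarily the library's exact implementation:

import copy
from collections import OrderedDict


class DefaultOrderedDict(OrderedDict):
    """OrderedDict that builds missing values with a factory, like defaultdict."""

    def __init__(self, default_factory=None, *args, **kwargs):
        if default_factory is not None and not callable(default_factory):
            raise TypeError("first argument must be callable or None")
        super(DefaultOrderedDict, self).__init__(*args, **kwargs)
        self.default_factory = default_factory

    def __missing__(self, key):
        # Called by OrderedDict.__getitem__ when the key is absent
        if self.default_factory is None:
            raise KeyError(key)
        self[key] = value = self.default_factory()
        return value

    def __reduce__(self):
        # Supports pickle.dumps()
        args = (self.default_factory,) if self.default_factory else ()
        return type(self), args, None, None, iter(self.items())

    def copy(self):
        return type(self)(self.default_factory, self)

    def __deepcopy__(self, memo):
        return type(self)(
            self.default_factory, copy.deepcopy(list(self.items()), memo))

    def __repr__(self):
        return "%s(%s, %s)" % (
            type(self).__name__, self.default_factory,
            OrderedDict.__repr__(self))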
Example #2
 def __init__(self, label, application_graph=None):
     """
     :param label: The label for the graph.
     :type label: str or None
     :param application_graph:
         The application graph that this machine graph is derived from, if
         it is derived from one at all.
     :type application_graph: ApplicationGraph or None
     """
     super(MachineGraph, self).__init__(MachineVertex, MachineEdge, label)
     if application_graph:
         application_graph.forget_machine_graph()
         # Check the first vertex added
         self._application_level_used = True
     else:
         # Must be False as there is no application graph
         self._application_level_used = False
     self._multicast_partitions = DefaultOrderedDict(
         lambda: DefaultOrderedDict(set))
     self._edge_partitions = OrderedSet()
     self._fixed_route_edge_partitions_by_pre_vertex = (
         DefaultOrderedDict(OrderedSet))
     self._multicast_edge_partitions_by_pre_vertex = (
         DefaultOrderedDict(OrderedSet))
     self._sdram_edge_partitions_by_pre_vertex = (
         DefaultOrderedDict(OrderedSet))
     self._fixed_route_edge_partitions_by_post_vertex = (
         DefaultOrderedDict(OrderedSet))
     self._multicast_edge_partitions_by_post_vertex = (
         DefaultOrderedDict(OrderedSet))
     self._sdram_edge_partitions_by_post_vertex = (
         DefaultOrderedDict(OrderedSet))
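The _multicast_partitions attribute above is a two-level map built from nested default factories: the outer DefaultOrderedDict creates an inner DefaultOrderedDict(set) on first access, so entries can be filled in without explicit initialisation. A small illustration of the pattern (the keys here are made up, not MachineGraph's real data):

multicast_partitions = DefaultOrderedDict(lambda: DefaultOrderedDict(set))

# First access of the ("app_vertex_0", "partition_A") pair creates both levels
multicast_partitions["app_vertex_0"]["partition_A"].add("machine_vertex_0")
multicast_partitions["app_vertex_0"]["partition_A"].add("machine_vertex_1")

assert multicast_partitions["app_vertex_0"]["partition_A"] == {
    "machine_vertex_0", "machine_vertex_1"}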
def test_keys_in_order():
    o = DefaultOrderedDict(lambda: bytes(b"abc"))
    a = o["a"]
    b = o["b"]
    c = o["c"]
    assert a == b == c
    assert tuple(o) == ("a", "b", "c")
def test_ordered_set_default():
    o = DefaultOrderedDict(OrderedSet)
    assert o is not None
    o["foo"].add(2)
    o["foo"].add(1)
    assert 2 in o["foo"]
    assert 1 not in o["bar"]
def test_list_default():
    o = DefaultOrderedDict(list)
    assert o is not None
    o["bar"] = 2
    assert isinstance(o["FOO"], list)
    o["gamma"].append("beacon")
    assert o["bar"] == 2
Example #6
 def _create_routing_table(self, chip, partitions_in_table, routing_infos,
                           info_by_app_vertex):
     """
     :param ~spinn_machine.Chip chip:
     :param partitions_in_table:
     :type partitions_in_table:
         dict(AbstractSingleSourcePartition,
         MulticastRoutingTableByPartitionEntry)
     :param RoutingInfo routing_infos:
     :param dict(ApplicationVertex,BaseKeyAndMask) info_by_app_vertex:
     :rtype: MulticastRoutingTable
     """
     table = UnCompressedMulticastRoutingTable(chip.x, chip.y)
     partitions_by_app_vertex = DefaultOrderedDict(set)
     for partition in partitions_in_table:
         partitions_by_app_vertex[partition.pre_vertex.app_vertex].add(
             partition)
     for app_vertex in partitions_by_app_vertex:
         if app_vertex in info_by_app_vertex:
             shared_entry = self._find_shared_entry(
                 partitions_by_app_vertex[app_vertex], partitions_in_table)
         else:
             shared_entry = None
         if shared_entry is None:
             self._add_partition_based(partitions_by_app_vertex[app_vertex],
                                       routing_infos, partitions_in_table,
                                       table)
         else:
             self.__add_key_and_mask(info_by_app_vertex[app_vertex],
                                     shared_entry, table)
     return table
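The partitions_by_app_vertex step above is the usual "group items by key" idiom with a set-valued default factory; a standalone sketch with hypothetical data in place of real vertices and partitions:

# (app_vertex, partition) pairs; the names are placeholders
pairs = [("A", "p0"), ("A", "p1"), ("B", "p2"), ("A", "p0")]

groups = DefaultOrderedDict(set)
for app_vertex, partition in pairs:
    groups[app_vertex].add(partition)  # duplicates collapse, key order is kept

assert groups["A"] == {"p0", "p1"}
assert list(groups) == ["A", "B"]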
def test_standard_default():
    o = DefaultOrderedDict(None)
    assert o is not None
    o["bar"] = 2
    # with no default factory, a missing key raises KeyError as usual
    with pytest.raises(KeyError):  # @UndefinedVariable
        o["FOO"]
    assert o["bar"] == 2
Example #8
 def __init__(self, label):
     """
     :param label: The label on the graph, or None
     :type label: str or None
     """
     super(ApplicationGraph, self).__init__(ApplicationVertex,
                                            ApplicationEdge, label)
     self._outgoing_edge_partitions_by_pre_vertex = \
         DefaultOrderedDict(OrderedSet)
Example #9
 def __init__(self, allowed_vertex_types, allowed_edge_types, label):
     """
     :param allowed_vertex_types:
         A single vertex type, or a tuple of vertex types, allowed in the graph
     :type allowed_vertex_types: type or tuple(type, ...)
     :param allowed_edge_types:
         A single edge type, or a tuple of edge types, allowed in the graph
     :type allowed_edge_types: type or tuple(type, ...)
     :param label: The label on the graph, or None
     :type label: str or None
     """
     super(Graph, self).__init__(None)
     self._allowed_vertex_types = allowed_vertex_types
     self._allowed_edge_types = allowed_edge_types
     self._vertices = []
     self._vertex_by_label = dict()
     self._unlabelled_vertex_count = 0
     self._outgoing_edge_partitions_by_name = OrderedDict()
     self._outgoing_edges = DefaultOrderedDict(OrderedSet)
     self._incoming_edges = DefaultOrderedDict(OrderedSet)
     self._incoming_edges_by_partition_name = DefaultOrderedDict(list)
     self._outgoing_edge_partition_by_edge = OrderedDict()
     self._label = label
Example #10
    def __init__(self, constraint_order):
        """
        :param list(ConstraintOrder) constraint_order:
            The order in which the constraints are to be sorted
        """
        # Group constraints based on the class
        self._constraints = DefaultOrderedDict(list)
        for c in constraint_order:
            self._constraints[c.constraint_class].append(
                (c.relative_order, c.required_optional_properties))

        # Sort each list of constraints by the number of required optional
        # properties, largest first
        for constraints in itervalues(self._constraints):
            constraints.sort(
                key=lambda entry: len(entry[1] or ()), reverse=True)
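The sort above puts the most specific constraints (those with the most required optional properties) first within each class; a small illustration with made-up entries of the form (relative_order, required_optional_properties):

entries = [
    (1, ["prop_a"]),
    (2, ["prop_a", "prop_b"]),
    (3, []),
]
entries.sort(key=lambda entry: len(entry[1] or ()), reverse=True)
assert entries == [(2, ["prop_a", "prop_b"]), (1, ["prop_a"]), (3, [])]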
Example #11
    def __init__(self, pre_vertices, identifier, allowed_edge_types,
                 constraints, label, traffic_weight, class_name):
        AbstractEdgePartition.__init__(self,
                                       identifier=identifier,
                                       allowed_edge_types=allowed_edge_types,
                                       constraints=constraints,
                                       label=label,
                                       traffic_weight=traffic_weight,
                                       class_name=class_name)
        self._pre_vertices = OrderedDict()
        self._destinations = DefaultOrderedDict(OrderedSet)

        # Pre-populate the dict of OrderedSets so that only these
        # pre-vertices are acceptable.
        for pre_vertex in pre_vertices:
            self._pre_vertices[pre_vertex] = OrderedSet()

        # Detect duplicate pre-vertices
        if len(self._pre_vertices.keys()) != len(pre_vertices):
            raise PacmanConfigurationException(
                "There were clones in your list of acceptable pre vertices")
Example #12
    def _binary_search_check(self, mid_point, sorted_bit_fields, routing_table,
                             target_length, time_to_try_for_each_iteration,
                             use_timer_cut_off, key_to_n_atoms_map):
        """ check function for fix max success

        :param int mid_point: the index in the sorted list to stop at
        :param list(_BitFieldData) sorted_bit_fields: lists of bitfields
        :param ~.UnCompressedMulticastRoutingTable routing_table:
            the basic routing table
        :param int target_length: the target length to reach
        :param int time_to_try_for_each_iteration:
            the time in seconds to run for
        :param bool use_timer_cut_off:
            whether the timer cutoff should be used by the compressor.
        :param dict(int,int) key_to_n_atoms_map:
        :return: True if the compression succeeds
        :rtype: bool
        """

        # find new set of bitfields to try from midpoint
        new_bit_field_by_processor = DefaultOrderedDict(list)

        for element in range(0, mid_point):
            bf_data = sorted_bit_fields[element]
            new_bit_field_by_processor[bf_data.master_pop_key].append(bf_data)

        # convert bitfields into router tables
        bit_field_router_tables = self._convert_bitfields_into_router_tables(
            routing_table, new_bit_field_by_processor, key_to_n_atoms_map)

        # try to compress
        try:
            self._best_routing_table = self._run_algorithm(
                bit_field_router_tables, target_length,
                time_to_try_for_each_iteration, use_timer_cut_off)
            self._best_bit_fields_by_processor = new_bit_field_by_processor
            return True
        except MinimisationFailedError:
            return False
        except PacmanElementAllocationException:
            return False
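_binary_search_check is written as a predicate over mid_point so that the surrounding algorithm can binary-search for the largest number of bitfields that still compress. A hedged sketch of such a driver loop, where check(mid) stands in for the method above with its other arguments bound (this is illustrative, not the library's actual search code):

def find_max_compressible(n_bit_fields, check):
    """Largest mid_point in [0, n_bit_fields] for which check(mid_point) is True."""
    low, high, best = 0, n_bit_fields, 0
    while low <= high:
        mid = (low + high) // 2
        if check(mid):
            best = mid       # still compresses: try to keep more bitfields
            low = mid + 1
        else:
            high = mid - 1   # compression failed: back off
    return best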
def test_callable():
    # the default factory must be callable (or None)
    with pytest.raises(TypeError):
        DefaultOrderedDict("Not callable")