# Example 1
def test_create_constraints_to_file(tmpdir):
    """Check that CreateConstraintsToFile writes the expected JSON."""
    # Build a small virtual machine and a two-vertex machine graph.
    machine = VirtualMachine(version=3, with_wrap_arounds=None)
    # TODO: define some extra monitor cores (how?)
    graph = MachineGraph("foo")
    iptag = IPtagResource("1.2.3.4", 5, False, tag="footag")
    rtag = ReverseIPtagResource(tag="bartag")
    tagged = SimpleMachineVertex(
        ResourceContainer(iptags=[iptag], reverse_iptags=[rtag]),
        constraints=[ChipAndCoreConstraint(1, 1, 3)])
    graph.add_vertex(tagged)
    v0_id = ident(tagged)
    link = MachineSpiNNakerLinkVertex(
        2, constraints=[ChipAndCoreConstraint(1, 1)])
    link.set_virtual_chip_coordinates(0, 2)
    graph.add_vertex(link)
    v1_id = ident(link)

    algo = CreateConstraintsToFile()
    target = tmpdir.join("foo.json")
    filename, mapping = algo(graph, machine, str(target))
    assert filename == str(target)
    for vid in mapping:
        assert vid in (v0_id, v1_id)
        assert vid == ident(mapping[vid])

    # Compare the written file against the expected constraint records.
    expected = [
        {"type": "reserve_resource", "location": None,
         "reservation": [0, 1], "resource": "cores"},
        {"type": "location", "location": [1, 1], "vertex": v0_id},
        {"type": "resource", "resource": "cores", "range": [3, 4],
         "vertex": v0_id},
        {"type": "resource", "resource": "iptag", "range": [0, 1],
         "vertex": v0_id},
        {"type": "resource", "resource": "reverse_iptag", "range": [0, 1],
         "vertex": v0_id},
        {"type": "route_endpoint", "direction": "south", "vertex": v1_id},
        {"type": "location", "location": [1, 0], "vertex": v1_id}]
    assert json.loads(target.read()) == expected
def test_create_constraints_to_file(tmpdir):
    """Check CreateConstraintsToFile output after chip-id allocation."""
    # Build a small virtual machine and a two-vertex machine graph.
    machine = VirtualMachine(version=3, with_wrap_arounds=None)
    # TODO: define some extra monitor cores (how?)
    graph = MachineGraph("foo")
    iptag = IPtagResource("1.2.3.4", 5, False, tag="footag")
    rtag = ReverseIPtagResource(tag="bartag")
    tagged = SimpleMachineVertex(
        ResourceContainer(iptags=[iptag], reverse_iptags=[rtag]),
        constraints=[ChipAndCoreConstraint(1, 1, 3)])
    graph.add_vertex(tagged)
    v0_id = ident(tagged)
    link = MachineSpiNNakerLinkVertex(0)
    link.set_virtual_chip_coordinates(0, 2)
    graph.add_vertex(link)
    v1_id = ident(link)

    # Allocate chip ids for the virtual vertex before converting.
    machine = MallocBasedChipIdAllocator()(machine, graph)
    algo = CreateConstraintsToFile()
    target = tmpdir.join("foo.json")
    filename, mapping = algo(graph, machine, str(target))
    assert filename == str(target)
    for vid in mapping:
        assert vid in (v0_id, v1_id)
        assert vid == ident(mapping[vid])

    # Compare the written file against the expected constraint records.
    expected = [
        {"type": "reserve_resource", "location": None,
         "reservation": [0, 1], "resource": "cores"},
        {"type": "location", "location": [1, 1], "vertex": v0_id},
        {"type": "resource", "resource": "cores", "range": [3, 4],
         "vertex": v0_id},
        {"type": "resource", "resource": "iptag", "range": [0, 1],
         "vertex": v0_id},
        {"type": "resource", "resource": "reverse_iptag", "range": [0, 1],
         "vertex": v0_id},
        {"type": "route_endpoint", "direction": "west", "vertex": v1_id},
        {"type": "location", "location": [0, 0], "vertex": v1_id}]
    assert json.loads(target.read()) == expected
# Example 3
    def __call__(self, placements, file_path):
        """Write the placements out as a JSON dictionary.

        :param placements: the memory placements object
        :param file_path: the file path for the placements.json
        :return: file path for the placements.json
        """
        progress = ProgressBar(placements.n_placements + 1,
                               "converting to JSON placements")

        # Map every placed vertex id to its (x, y) chip coordinates.
        json_obj = dict()
        vertex_by_id = dict()
        for placement in progress.over(placements, False):
            key = ident(placement.vertex)
            vertex_by_id[key] = placement.vertex
            json_obj[key] = [placement.x, placement.y]

        # Write the JSON out to disk...
        with open(file_path, "w") as out:
            json.dump(json_obj, out)
        progress.update()

        # ...and check it conforms to the schema.
        file_format_schemas.validate(json_obj, "placements.json")
        progress.end()

        return file_path, vertex_by_id
    def __call__(self, placements, file_path):
        """Convert placements to a JSON file on disk.

        :param placements: the memory placements object
        :param file_path: the file path for the placements.json
        :return: file path for the placements.json
        """
        progress = ProgressBar(placements.n_placements + 1,
                               "converting to JSON placements")

        vertex_by_id = {}
        json_obj = {}
        # One entry per placement: vertex id -> [x, y] chip coordinates.
        for p in progress.over(placements, False):
            vid = ident(p.vertex)
            vertex_by_id[vid] = p.vertex
            json_obj[vid] = [p.x, p.y]

        # Dump the dictionary into the JSON file.
        with open(file_path, "w") as file_to_write:
            json.dump(json_obj, file_to_write)
        progress.update()

        # Validate what we wrote against the schema.
        file_format_schemas.validate(json_obj, "placements.json")
        progress.end()

        return file_path, vertex_by_id
    def _convert_vertex(self, vertex, vertex_by_id, vertices, edges,
                        machine_graph, plan_n_timesteps, partition_by_id):
        """Add one vertex, plus its outgoing partitions, to the JSON dicts."""
        key = id(vertex)
        vertex_by_id[ident(vertex)] = vertex

        if isinstance(vertex, AbstractVirtualVertex):
            # External devices take no cores on the machine itself.
            vertices[key] = {"cores": 0}
        elif vertex.resources_required.iptags or \
                vertex.resources_required.reverse_iptags:
            # Tag-able vertex: add a fake zero-cost vertex standing in for
            # the tag, plus an edge between the real vertex and the fake one.
            tag_key = md5(ident(vertex) + "_tag")
            edges[tag_key] = {
                "source": ident(vertex), "sinks": [tag_key],
                "weight": 1.0, "type": "FAKE_TAG_EDGE"}
            vertices[key] = {
                "cores": DEFAULT_NUMBER_OF_CORES_USED_PER_VERTEX,
                "sdram": int(vertex.resources_required.sdram.get_total_sdram(
                    plan_n_timesteps))}
            vertices[tag_key] = {"cores": 0, "sdram": 0}
        else:
            # Ordinary vertex.
            vertices[key] = {
                "cores": DEFAULT_NUMBER_OF_CORES_USED_PER_VERTEX,
                "sdram": int(vertex.resources_required.sdram.get_total_sdram(
                    plan_n_timesteps))}

        # One JSON edge per outgoing partition of this vertex.
        partitions = machine_graph\
            .get_outgoing_edge_partitions_starting_at_vertex(vertex)
        for partition in partitions:
            pid = str(id(partition))
            partition_by_id[pid] = partition
            edges[pid] = {
                "source": ident(vertex),
                "sinks": [ident(e.post_vertex) for e in partition.edges],
                "weight": sum(e.traffic_weight for e in partition.edges),
                "type": partition.traffic_type.name.lower()}
    def _convert_vertex(self, vertex, vertex_by_id, vertices, edges,
                        machine_graph, partition_by_id):
        """Add one vertex, plus its outgoing partitions, to the JSON dicts."""
        key = id(vertex)
        vertex_by_id[ident(vertex)] = vertex

        if isinstance(vertex, AbstractVirtualVertex):
            # External devices take no cores on the machine itself.
            vertices[key] = {"cores": 0}
        elif vertex.resources_required.iptags or \
                vertex.resources_required.reverse_iptags:
            # Tag-able vertex: add a fake zero-cost vertex standing in for
            # the tag, plus an edge between the real vertex and the fake one.
            tag_key = md5(ident(vertex) + "_tag")
            edges[tag_key] = {
                "source": ident(vertex), "sinks": [tag_key],
                "weight": 1.0, "type": "FAKE_TAG_EDGE"}
            vertices[key] = {
                "cores": DEFAULT_NUMBER_OF_CORES_USED_PER_VERTEX,
                "sdram": int(vertex.resources_required.sdram.get_value())}
            vertices[tag_key] = {"cores": 0, "sdram": 0}
        else:
            # Ordinary vertex.
            vertices[key] = {
                "cores": DEFAULT_NUMBER_OF_CORES_USED_PER_VERTEX,
                "sdram": int(vertex.resources_required.sdram.get_value())}

        # One JSON edge per outgoing partition of this vertex.
        partitions = machine_graph\
            .get_outgoing_edge_partitions_starting_at_vertex(vertex)
        for partition in partitions:
            pid = str(id(partition))
            partition_by_id[pid] = partition
            edges[pid] = {
                "source": ident(vertex),
                "sinks": [ident(e.post_vertex) for e in partition.edges],
                "weight": sum(e.traffic_weight for e in partition.edges),
                "type": partition.traffic_type.name.lower()}
def test_convert_to_file_core_allocations(tmpdir):
    """Round-trip an empty and a one-placement core allocation."""
    algo = ConvertToFileCoreAllocations()
    target = tmpdir.join("foo.json")

    # No placements: just the type header.
    algo([], str(target))
    assert target.read() == '{"type": "cores"}'

    # A single placement maps to a half-open core range [p, p+1).
    vertex = SimpleMachineVertex(ResourceContainer())
    placement = Placement(vertex, 1, 2, 3)
    filename, _ = algo([placement], str(target))
    assert filename == str(target)
    assert target.read() == '{"type": "cores", "%s": [3, 4]}' % ident(vertex)
# Example 8
def test_convert_to_file_core_allocations(tmpdir):
    """Round-trip an empty and a one-placement core allocation."""
    converter = ConvertToFileCoreAllocations()
    out_file = tmpdir.join("foo.json")

    # With no placements only the type header is emitted.
    converter([], str(out_file))
    assert out_file.read() == '{"type": "cores"}'

    # One placement yields a half-open core range [p, p+1).
    machine_vertex = SimpleMachineVertex(ResourceContainer())
    placement = Placement(machine_vertex, 1, 2, 3)
    filename, _ = converter([placement], str(out_file))
    assert filename == str(out_file)
    assert out_file.read() == \
        '{"type": "cores", "%s": [3, 4]}' % ident(machine_vertex)
# Example 9
def test_convert_to_file_placement(tmpdir):
    """A single placement converts to one {id: [x, y]} JSON entry."""
    vertex = SimpleMachineVertex(ResourceContainer())
    placements = Placements([Placement(vertex, 1, 2, 3)])
    algo = ConvertToFilePlacement()
    target = tmpdir.join("foo.json")
    filename, _vertex_by_id = algo(placements, str(target))
    assert filename == str(target)
    assert json.loads(target.read()) == {ident(vertex): [1, 2]}
def test_convert_to_file_placement(tmpdir):
    """A single placement converts to one {id: [x, y]} JSON entry."""
    machine_vertex = SimpleMachineVertex(ResourceContainer())
    placement = Placement(machine_vertex, 1, 2, 3)
    converter = ConvertToFilePlacement()
    out_file = tmpdir.join("foo.json")
    filename, _vertex_by_id = converter(Placements([placement]), str(out_file))
    assert filename == str(out_file)
    expected = {ident(machine_vertex): [1, 2]}
    assert json.loads(out_file.read()) == expected
 def _search_graph_for_placement_constraints(
         self, json_obj, machine_graph, machine, progress):
     """Collect placement constraints from every vertex in the graph.

     :return: map from vertex id string to vertex
     """
     vertex_by_id = dict()
     for vertex in progress.over(machine_graph.vertices, False):
         vid = ident(vertex)
         vertex_by_id[vid] = vertex
         # Record each explicit constraint on the vertex...
         for constraint in vertex.constraints:
             self._handle_vertex_constraint(
                 constraint, json_obj, vertex, vid)
         # ...then anything implied by its resource requirements.
         self._handle_vertex_resources(
             vertex.resources_required, json_obj, vid)
         if isinstance(vertex, AbstractVirtualVertex):
             self._handle_virtual_vertex(
                 vertex, vid, json_obj, machine)
     return vertex_by_id
 def _search_graph_for_placement_constraints(
         self, json_obj, machine_graph, machine, progress):
     """Walk the machine graph collecting per-vertex placement constraints.

     :return: map from vertex id string to vertex
     """
     id_to_vertex = dict()
     for vertex in progress.over(machine_graph.vertices, False):
         vertex_id = ident(vertex)
         id_to_vertex[vertex_id] = vertex
         # Explicit constraints attached to the vertex.
         for constraint in vertex.constraints:
             self._handle_vertex_constraint(
                 constraint, json_obj, vertex, vertex_id)
         # Constraints implied by the vertex's resource requirements.
         self._handle_vertex_resources(
             vertex.resources_required, json_obj, vertex_id)
         # Virtual vertices get special handling against the machine.
         if isinstance(vertex, AbstractVirtualVertex):
             self._handle_virtual_vertex(
                 vertex, vertex_id, json_obj, machine)
     return id_to_vertex
def test_convert_to_file_machine_graph(tmpdir):
    """Convert a small tagged machine graph to JSON and verify it."""
    # Sample graph: plain vertex, ip-tagged vertex, reverse-ip-tagged vertex.
    graph = MachineGraph("foo")
    plain = SimpleMachineVertex(ResourceContainer())
    graph.add_vertex(plain)
    iptag = IPtagResource("1.2.3.4", 5, False, tag="footag")
    tagged = SimpleMachineVertex(ResourceContainer(iptags=[iptag]))
    graph.add_vertex(tagged)
    t1id = md5("%s_tag" % ident(tagged))
    rtag = ReverseIPtagResource(tag="bartag")
    rtagged = SimpleMachineVertex(ResourceContainer(reverse_iptags=[rtag]))
    graph.add_vertex(rtagged)
    t2id = md5("%s_tag" % ident(rtagged))
    graph.add_edge(MachineEdge(tagged, plain), "part1")
    p1 = graph.get_outgoing_edge_partition_starting_at_vertex(tagged, "part1")
    graph.add_edge(MachineEdge(plain, rtagged, label="foobar"), "part2")
    p2 = graph.get_outgoing_edge_partition_starting_at_vertex(plain, "part2")

    # Convert it to JSON
    algo = ConvertToFileMachineGraph()
    target = tmpdir.join("foo.json")
    filename, _vertex_by_id, _partition_by_id = algo(
        graph, plan_n_timesteps=None, file_path=str(target))
    assert filename == str(target)

    # Rebuild and compare; simplest way of checking given that order is not
    # preserved in the underlying string and altering that is hard
    expected = {
        "vertices_resources": {
            ident(plain): {"cores": 1, "sdram": 0},
            ident(tagged): {"cores": 1, "sdram": 0},
            t1id: {"cores": 0, "sdram": 0},
            ident(rtagged): {"cores": 1, "sdram": 0},
            t2id: {"cores": 0, "sdram": 0}},
        "edges": {
            ident(p1): {"source": ident(tagged), "sinks": [ident(plain)],
                        "type": "multicast", "weight": 1},
            ident(p2): {"source": ident(plain), "sinks": [ident(rtagged)],
                        "type": "multicast", "weight": 1},
            t1id: {"source": ident(tagged), "sinks": [t1id],
                   "weight": 1.0, "type": "FAKE_TAG_EDGE"},
            t2id: {"source": ident(rtagged), "sinks": [t2id],
                   "weight": 1.0, "type": "FAKE_TAG_EDGE"}}}
    assert json.loads(target.read()) == expected
# Example 14
def test_convert_to_file_machine_graph_pure_multicast(tmpdir):
    """Convert a tagged machine graph with the pure-multicast converter."""
    # Sample graph: plain vertex, ip-tagged vertex, reverse-ip-tagged vertex.
    graph = MachineGraph("foo")
    plain = SimpleMachineVertex(ResourceContainer())
    graph.add_vertex(plain)
    iptag = IPtagResource("1.2.3.4", 5, False, tag="footag")
    tagged = SimpleMachineVertex(ResourceContainer(iptags=[iptag]))
    graph.add_vertex(tagged)
    t1id = md5("%s_tag" % ident(tagged))
    rtag = ReverseIPtagResource(tag="bartag")
    rtagged = SimpleMachineVertex(ResourceContainer(reverse_iptags=[rtag]))
    graph.add_vertex(rtagged)
    t2id = md5("%s_tag" % ident(rtagged))
    graph.add_edge(MachineEdge(tagged, plain), "part1")
    p1 = graph.get_outgoing_edge_partition_starting_at_vertex(tagged, "part1")
    graph.add_edge(MachineEdge(plain, rtagged, label="foobar"), "part2")
    p2 = graph.get_outgoing_edge_partition_starting_at_vertex(plain, "part2")

    # Convert it to JSON
    algo = ConvertToFileMachineGraphPureMulticast()
    target = tmpdir.join("foo.json")
    filename, _vertex_by_id, _partition_by_id = algo(graph, str(target))
    assert filename == str(target)

    # Rebuild and compare; simplest way of checking given that order is not
    # preserved in the underlying string and altering that is hard
    expected = {
        "vertices_resources": {
            ident(plain): {"cores": 1, "sdram": 0},
            ident(tagged): {"cores": 1, "sdram": 0},
            t1id: {"cores": 0, "sdram": 0},
            ident(rtagged): {"cores": 1, "sdram": 0},
            t2id: {"cores": 0, "sdram": 0}},
        "edges": {
            ident(p1): {"source": ident(tagged), "sinks": [ident(plain)],
                        "type": "multicast", "weight": 1},
            ident(p2): {"source": ident(plain), "sinks": [ident(rtagged)],
                        "type": "multicast", "weight": 1},
            t1id: {"source": ident(tagged), "sinks": [t1id],
                   "weight": 1.0, "type": "FAKE_TAG_EDGE"},
            t2id: {"source": ident(rtagged), "sinks": [t2id],
                   "weight": 1.0, "type": "FAKE_TAG_EDGE"}}}
    assert json.loads(target.read()) == expected
 def _convert_placement(self, placement, vertex_map, allocations_dict):
     """Record one placement as a half-open core allocation range."""
     key = ident(placement.vertex)
     vertex_map[key] = placement.vertex
     # [p, p + 1): just the single processor the vertex is placed on.
     allocations_dict[key] = [placement.p, placement.p + 1]