Example #1
def test_check_applying_patterns_level(
        patterns_level: _ir.patterns.PatternsLevel) -> None:
    """Test you can set the patterns object via the PatternsLevelEnum

    Args:
        patterns_level (_ir.patterns.PatternsLevel): The patterns level enum.
    """
    ir = _ir.Ir()
    g1Id = _ir.GraphId("g1")
    _ = ir.createGraph(g1Id)
    g2Id = _ir.GraphId("g2")
    _ = ir.createGraph(g2Id)

    for g in ir.getAllGraphs():
        in0 = add_actgrad_tensor("in0", [1, 2, 3], g)
        in1 = add_random_tensor("in1", _ir.TensorType.Variable, [1, 2, 3], g)
        out0 = add_actgrad_tensor("out0", [1, 2, 3], g)
        ins = {0: in0, 1: in1}
        outs = {0: out0}
        _ = create_new_op(ins, outs, "AddOp", g)

    p = _ir.patterns.Patterns(patterns_level)

    if patterns_level == _ir.patterns.PatternsLevel.NoPatterns:
        p = p.enableRuntimeAsserts(False)

    ir.setPatterns(p)

    for g in ir.getAllGraphs():
        ir.applyPreAliasPatterns(g)
        ir.applyInplacePattern(g)
Example #2
def test_ir_graph_management4():
    """ Test we can get the main graph. """
    ir = _ir.Ir()
    a = _ir.GraphId("A")
    b = _ir.GraphId("B")

    mainGraph = ir.getMainGraph()
    assert ir.hasGraph(mainGraph.id)
Example #3
def test_ir_graph_management3():
    """ Test we can get all graphs. """
    ir = _ir.Ir()
    a = _ir.GraphId("A")
    b = _ir.GraphId("B")

    check_existing_graphs(ir, [''])
    _ = ir.createGraph(a)
    check_existing_graphs(ir, ['', 'A'])
    _ = ir.createGraph(b)
    check_existing_graphs(ir, ['', 'A', 'B'])
Example #4
def test_graphid_operator_eq_and_neq():
    """ Test the == and != operators. """

    for xstr, ystr in itertools.product(["g1", "g2", "y7", "z123"], repeat=2):
        x = _ir.GraphId(xstr)
        y = _ir.GraphId(ystr)

        if xstr == ystr:
            assert x == y
            assert not (x != y)
        else:
            assert not (x == y)
            assert x != y
Example #5
def test_ir_graph_management0():
    """ Test that we can create / test for presence of graphs. """
    ir = _ir.Ir()
    a = _ir.GraphId("A")
    b = _ir.GraphId("B")

    check_does_not_have_graph(ir, a)
    check_does_not_have_graph(ir, b)
    _ = ir.createGraph(a)
    check_has_graph(ir, a)
    check_does_not_have_graph(ir, b)
    _ = ir.createGraph(b)
    check_has_graph(ir, a)
    check_has_graph(ir, b)
Example #6
def test_ir_graph_management2():
    """ Test that we can / can't get graphs. """
    ir = _ir.Ir()
    a = _ir.GraphId("A")
    b = _ir.GraphId("B")

    check_cant_get_graph(ir, a)
    check_cant_get_graph(ir, b)
    _ = ir.createGraph(a)
    check_can_get_graph(ir, a)
    check_cant_get_graph(ir, b)
    _ = ir.createGraph(b)
    check_can_get_graph(ir, a)
    check_can_get_graph(ir, b)
Example #7
def test_graph_id_member():
    """ Test .id member binding. """
    ir = _ir.Ir()
    g1Id = _ir.GraphId("g1")
    g1 = ir.createGraph(g1Id)

    assert g1.id == g1Id
Example #8
def autodiff(graph: Graph,
             grads_provided: Optional[Iterable[Tensor]] = None,
             grads_required: Optional[Iterable[Tensor]] = None,
             called_graphs_grad_info: Optional[Mapping[Graph,
                                                       GradGraphInfo]] = None,
             return_all_grad_graphs: bool = False):
    """Differentiate a Graph.

        The graph will be differentiated using the chain rule starting from `grads_provided`.
        The outputs of the returned graph will be the gradient of the Tensors in `grads_required`.
        By default `gradProvided` will be all of the outputs of the forward graph and `grads_required` will
        be all of the inputs to the forward graph.

        Any Tensors in the forward graph that are needed to compute the gradients will be added as outputs
        to the forward graph (if not already an input/output).

        The returned `GradGraphInfo` contains the gradient graph and information regarding all required inputs
        to the gradient graph. This can include tensors which are outputs of the forward graph `ExpectedConnectionType.Fwd`,
        or a gradient of an output of the forwards graph `ExpectedConnectionType.FwdGrad`.

        Any graphs called in the forward graph will recursively have `autodiff` called on it. Arg `called_graphs_grad_info` can be
        used to specify the result of `autodiff` on a called graph that has already been differentiated.
        By default GradGraphInfo will only be returned for the provided forward graph. Arg `return_all_grad_graphs` can be set to `True` to return
        info on all graphs that `autodiff` as executed on as a result of this transformation.

    Args:
        graph (pir.Graph
        grads_provided (Optional[Iterable[pir.Tensor]], optional) Defaults to all outputs of the provided graph.
        grads_required (Optional[Iterable[pir.Tensor]], optional). Defaults to all inputs of the provided graph.
        called_graphs_grad_info (Optional[Mapping[pir.Graph, GradGraphInfo]], optional). Defaults to None.
        return_all_grad_graphs (bool, optional). Defaults to False.

    Returns:
        grad_info: GradGraphInfo
    """

    if grads_provided is None:
        grads_provided = graph.get_output_tensors()
    if grads_required is None:
        grads_required = graph.get_input_tensors()
    if called_graphs_grad_info is None:
        called_graphs_grad_info = {}

    _pb_ir = graph.ir()._pb_ir
    transform = _ir.transforms.Autodiff()

    _pb_result = transform.apply(
        _pb_ir, _ir.GraphId(graph.name), [t.id for t in grads_provided],
        _ir.OptionalTensors([t.id for t in grads_required]),
        {k: v._pb_bwd_info
         for k, v in called_graphs_grad_info.items()})

    result: Dict[Graph, GradGraphInfo] = {}
    for k, v in _pb_result.items():
        _graph = Graph._from_pb(_pb_ir.getGraph(k))
        result[_graph] = GradGraphInfo._from_pb(_pb_ir, _graph._pb_graph, v)

    if return_all_grad_graphs:
        return result

    return result[graph]
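
A minimal usage sketch (`fwd_graph` here is a hypothetical, already-constructed pir.Graph; nothing beyond the signature above is assumed):

    # By default, gradients are provided for all outputs of `fwd_graph`
    # and required for all of its inputs.
    grad_info = autodiff(fwd_graph)

    # Set `return_all_grad_graphs` to also get info for every called graph
    # that autodiff recursed into.
    all_info = autodiff(fwd_graph, return_all_grad_graphs=True)
    grad_info = all_info[fwd_graph]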
Example #9
def test_graph_get_graph_string():
    """ Test getGraphString binding. """
    ir = _ir.Ir()
    g1Id = _ir.GraphId("g1")
    g1 = ir.createGraph(g1Id)

    assert ir.getMainGraph().getGraphString() == "the main graph"
    assert g1.getGraphString() == "subgraph 'g1'"
Example #10
def test_graphid_operator_lt():
    """ Test the < operator. """
    for xstr, ystr in itertools.product(["g1", "g2", "y7", "z123"], repeat=2):
        x = _ir.GraphId(xstr)
        y = _ir.GraphId(ystr)

        x_le_y = x < y
        y_le_x = y < x

        # We can't violate assymetry
        assert not (x_le_y and y_le_x)

        if xstr == ystr:
            # Expect irreflexivity: neither x < y or y < x
            assert (not x_le_y) and (not y_le_x)
        else:
            # Expect totality: one of x < y or y < x
            assert x_le_y or y_le_x
Example #11
def test_ir_graph_management1():
    """ Test that we can remove graphs. """
    ir = _ir.Ir()
    a = _ir.GraphId("A")

    check_does_not_have_graph(ir, a)
    _ = ir.createGraph(a)
    check_has_graph(ir, a)
    _ = ir.removeGraph(a)
    check_does_not_have_graph(ir, a)
Example #12
def _setup_call_and_repeat(
    pb_ir: _ir.Ir, pb_top_graph: _ir.Graph, pb_bottom_graph: _ir.Graph
) -> Tuple[_ir.Graph, _ir.op.CallOp, _ir.op.LoopOp]:
    """Setup the call and repeat ops, as well as the middle graph that the loop op will loop.

    Args:
        pb_ir (_ir.Ir): The _ir level Ir
        pb_top_graph (_ir.Graph): The _ir top level graph that will contain the loop op.
        pb_bottom_graph (_ir.Graph): The _ir user defined subgraph that will be called.

    Returns:
        Tuple[_ir.Graph, _ir.op.CallOp, _ir.op.LoopOp]: The created _ir-level middle graph, call op
            and loop op.
    """
    # This is the graph we will repeat.
    pb_middle_graph = pb_ir.createGraph(
        _ir.GraphId(
            pb_ir.createUniqueSubgraphId(
                f"{pb_bottom_graph.id.str()}__loop_wrapper")))

    opid = _ir.OperatorIdentifier("ai.graphcore", "Call", 1, _ir.NumInputs(),
                                  0)
    op_name = pb_middle_graph.id.str() + '__call__' + pb_bottom_graph.id.str()

    ctx = get_current_context()
    # Call the bottom_graph
    pb_callop = pb_middle_graph.createOp_CallOp(opid, pb_bottom_graph,
                                                ctx._get_op_settings(op_name))

    opid = _ir.OperatorIdentifier("ai.onnx", "Loop", 11, _ir.NumInputs(), 0)
    op_name = pb_top_graph.id.str() + '__loop__' + pb_middle_graph.id.str()

    # Loop the middle_graph
    pb_loop_op = pb_top_graph.createOp_LoopOp(opid,
                                              ctx._get_op_settings(op_name),
                                              pb_middle_graph)

    # Add mandatory loop iterator tensor to subgraph (is not an output)
    repeatIterId = _ir.addScope(pb_middle_graph, "Iterator___")
    pb_middle_graph.addInput(repeatIterId,
                             _ir.TensorInfo(_ir.DataType.INT32, ()))

    # Add mandatory loop condition tensor to subgraph (is also an output)
    repeatCondId = _ir.addScope(pb_middle_graph, "LoopCond___")
    pb_middle_graph.addInput(repeatCondId,
                             _ir.TensorInfo(_ir.DataType.BOOL, ()))
    pb_middle_graph.markAsOutput(repeatCondId)

    return pb_middle_graph, pb_callop, pb_loop_op
Example #13
def create_ir(graph_ids: Optional[List[str]] = None):
    """Small helper function to create an Ir with some graphs.

    Args:
        graph_ids (Optional[List[str]]): Ids of the graphs to create, in
            addition to the main graph. Defaults to None (no extra graphs).

    Returns:
        Tuple[Ir, List[Graph]]: The Ir, and the main graph followed by the
            created graphs.
    """
    graph_ids = graph_ids if graph_ids is not None else []
    ir = _ir.Ir()
    graphs = [ir.getMainGraph()]
    for name in graph_ids:
        graph_id = _ir.GraphId(name)
        g = _ir.Graph(ir, graph_id)
        graphs.append(g)

    return ir, graphs
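
For example, assuming the helper above is in scope:

    ir, graphs = create_ir(["A", "B"])
    assert len(graphs) == 3  # the main graph, plus graphs 'A' and 'B'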
Example #14
def test_graph_scope_functions():
    """ Test we can scope functions. """
    ir = _ir.Ir()
    g1Id = _ir.GraphId("g1")
    g1 = ir.createGraph(g1Id)

    # Test addScope
    assert _ir.addScope(g1, "tensor1") == "g1/tensor1"
    assert _ir.addScope(g1, "foobar") == "g1/foobar"

    # Test removeScope
    assert _ir.removeScope(g1, "g1/tensor1") == "tensor1"
    assert _ir.removeScope(g1, "g1/foobar") == "foobar"

    with pytest.raises(popart.popart_exception) as excinfo:
        _ir.removeScope(g1, "h1/tensor1")

    # Test getScope
    assert g1.getScope().str() == "g1"
Example #15
def test_graph_graph_outputs():
    """ Test we can add/remove graph outputs. """
    ir = _ir.Ir()
    g1Id = _ir.GraphId("g1")
    g1 = ir.createGraph(g1Id)

    # We add inputs as a way of adding tensors to the graph that we can mark as
    # outputs.
    g1.addInput("t0", _ir.TensorInfo(_ir.DataType.FLOAT16, [5, 5]))
    g1.addInput("t1", _ir.TensorInfo(_ir.DataType.FLOAT16, [5, 5]))
    g1.addInput("t2", _ir.TensorInfo(_ir.DataType.FLOAT16, [5, 5]))

    # Check markAsOutput.
    check_graph_outputs(g1, [])
    g1.markAsOutput("t0")
    check_graph_outputs(g1, ["t0"])
    g1.markAsOutput(0, "t1", False)
    check_graph_outputs(g1, ["t1", "t0"])
    g1.markAsOutput(0, "t2", True)
    check_graph_outputs(g1, ["t2", "t0"])

    # Check getOutputId.
    assert g1.getOutputId(0) == "t2"
    assert g1.getOutputId(1) == "t0"

    # Check getOutputIndex
    assert g1.getOutputIndex("t2") == 0
    assert g1.getOutputIndex("t0") == 1
    with pytest.raises(popart.popart_exception) as excinfo:
        g1.getOutputIndex("nonExistingTensor")

    # Check hasOutputId.
    assert g1.hasOutputId("t0")
    assert g1.hasOutputId("t2")
    assert not g1.hasOutputId("t1")

    # Check removeOutput.
    g1.removeOutput(1)
    check_graph_outputs(g1, ["t2"])
    g1.removeOutput("t2")
    check_graph_outputs(g1, [])
Example #16
def test_graph_graph_inputs():
    """ Test we can add/remove graph inputs. """
    ir = _ir.Ir()
    g1Id = _ir.GraphId("g1")
    g1 = ir.createGraph(g1Id)

    # Check initially graph inputs are empty.
    check_graph_inputs(g1, [])

    # Check addInput.
    g1.addInput("inputA", _ir.TensorInfo(_ir.DataType.FLOAT16, [5, 5]))
    check_graph_inputs(g1, ["inputA"])
    g1.addInput("inputB", _ir.TensorInfo(_ir.DataType.FLOAT, [65, 5]))
    check_graph_inputs(g1, ["inputA", "inputB"])
    g1.addInput(1, "input1", _ir.TensorInfo(_ir.DataType.FLOAT, [65, 5]),
                False)
    check_graph_inputs(g1, ["inputA", "input1", "inputB"])
    g1.addInput(1, "input2", _ir.TensorInfo(_ir.DataType.FLOAT, [65, 5]), True)
    check_graph_inputs(g1, ["inputA", "input2", "inputB"])

    # Check getInputId.
    assert g1.getInputId(0) == "inputA"
    assert g1.getInputId(1) == "input2"
    assert g1.getInputId(2) == "inputB"

    # Check getInputIndex
    assert g1.getInputIndex("inputA") == 0
    assert g1.getInputIndex("input2") == 1
    assert g1.getInputIndex("inputB") == 2
    with pytest.raises(popart.popart_exception) as excinfo:
        g1.getInputIndex("nonExistingTensor")

    # Check hasInputId.
    assert g1.hasInputId("inputA")
    assert not g1.hasInputId("input1")

    # Check removeInput.
    g1.removeInput(1)
    check_graph_inputs(g1, ["inputA", "inputB"])
    g1.removeInput("inputA")
    check_graph_inputs(g1, ["inputB"])
Example #17
def make_sub_graph(ir: _ir.Ir, ins: Dict[int, _ir.TensorInfo]) -> _ir.Graph:
    """
    Makes the following subgraph, with len(ins) inputs. 

    input0  input1  input2  ...  input n
    │       │       │            │
    │       │       │            │
    │       │       │            │
    └─►add ◄┘       │            │
        │           │            │
        └──────►add◄┘            │
                │                │
                │                │
                │                │
                └────►add ...    ▼


                               add
                                │
                                ▼
                             softmax
                                │
                                ▼
                               out

    Args:
        ir (_ir.Ir): The ir to add the subgraph to
        ins (Dict[int, _ir.TensorInfo]): The map of in indices to tensorinfos.

    Returns:
        _ir.Graph: The subgraph in question.
    """
    g = ir.createGraph(_ir.GraphId("fwd"))

    for i, tinfo in ins.items():
        g.addInput(_ir.addScope(g, f"in{i}"), tinfo)

    inputs = g.getInputIds()

    t = g.getTensor(inputs[0])
    for i in range(1, len(ins)):
        settings = _ir.Settings(g, f"add{i}")
        opid = _ir.OperatorIdentifier("ai.onnx", f"Add{i}", 1,
                                      _ir.NumInputs(2, 2), 1)
        add = g.createConnectedOp_AddOp({
            0: t.id,
            1: inputs[i]
        }, {0: _ir.addScope(g, f"add{i}")}, opid, settings)
        t = add.outTensor(0)

    settings = _ir.Settings(g, "softmax0")
    opid = _ir.OperatorIdentifier("ai.onnx", "SoftMax", 1, _ir.NumInputs(1, 1),
                                  1)
    sm = g.createConnectedOp_SoftmaxOp({0: t.id}, {0: _ir.addScope(g, "sm0")},
                                       opid=opid,
                                       axis_=0,
                                       settings=settings)

    g.markAsOutput(sm.outTensor(0).id)

    return g
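
A short usage sketch for the helper above (shapes and dtype are illustrative; the expected input ids follow from the addScope behaviour shown in Example #14):

    ir = _ir.Ir()
    ins = {
        0: _ir.TensorInfo(_ir.DataType.FLOAT, [4]),
        1: _ir.TensorInfo(_ir.DataType.FLOAT, [4]),
    }
    g = make_sub_graph(ir, ins)  # a single Add (in0 + in1) feeding a SoftMax
    assert g.getInputIds() == ["fwd/in0", "fwd/in1"]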
Example #18
def test_graphid_str():
    """ Test GraphId.str() returns the ID as a string. """
    id1 = _ir.GraphId("g1")
    assert id1.str() == "g1"
    id2 = _ir.GraphId("foobar")
    assert id2.str() == "foobar"
Example #19
def test_graphid_construction():
    """ Test that we can construct a popart._internal.ir.GraphId object. """
    _ = _ir.GraphId("g")
Example #20
def test_graph_construction():
    """ Test that we can construct a popart._internal.ir.Graph object. """
    ir = _ir.Ir()
    g1Id = _ir.GraphId("g1")
    g1 = _ir.Graph(ir, g1Id)