Example #1
def _setup_call_and_repeat(
    pb_ir: _ir.Ir, pb_top_graph: _ir.Graph, pb_bottom_graph: _ir.Graph
) -> Tuple[_ir.Graph, _ir.op.CallOp, _ir.op.LoopOp]:
    """Setup the call and repeat ops, as well as the middle graph that the loop op will loop.

    Args:
        pb_ir (_ir.Ir): The _ir level Ir
        pb_top_graph (_ir.Graph): The _ir top level graph that will contain the loop op.
        pb_bottom_graph (_ir.Graph): The _ir user defined subgraph that will be called.

    Returns:
        Tuple[_ir.Graph, _ir.op.CallOp, _ir.op.LoopOp]: The created _ir-level middle graph, call op
            and loop op.
    """
    # This is the graph we will repeat.
    pb_middle_graph = pb_ir.createGraph(
        _ir.GraphId(
            pb_ir.createUniqueSubgraphId(
                f"{pb_bottom_graph.id.str()}__loop_wrapper")))

    opid = _ir.OperatorIdentifier("ai.graphcore", "Call", 1, _ir.NumInputs(),
                                  0)
    op_name = pb_middle_graph.id.str() + '__call__' + pb_bottom_graph.id.str()

    ctx = get_current_context()
    # Call the bottom_graph
    pb_callop = pb_middle_graph.createOp_CallOp(opid, pb_bottom_graph,
                                                ctx._get_op_settings(op_name))

    opid = _ir.OperatorIdentifier("ai.onnx", "Loop", 11, _ir.NumInputs(), 0)
    op_name = pb_top_graph.id.str() + '__loop__' + pb_middle_graph.id.str()

    # Loop the middle_graph
    pb_loop_op = pb_top_graph.createOp_LoopOp(opid,
                                              ctx._get_op_settings(op_name),
                                              pb_middle_graph)

    # Add mandatory loop iterator tensor to subgraph (is not an output)
    repeatIterId = _ir.addScope(pb_middle_graph, "Iterator___")
    pb_middle_graph.addInput(repeatIterId,
                             _ir.TensorInfo(_ir.DataType.INT32, ()))

    # Add mandatory loop condition tensor to subgraph (is also an output)
    repeatCondId = _ir.addScope(pb_middle_graph, "LoopCond___")
    pb_middle_graph.addInput(repeatCondId,
                             _ir.TensorInfo(_ir.DataType.BOOL, ()))
    pb_middle_graph.markAsOutput(repeatCondId)

    return pb_middle_graph, pb_callop, pb_loop_op
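The sketch below shows one hedged way this helper might be driven. It assumes that popart._internal.ir is importable as _ir, that get_current_context is available in the surrounding module (the helper calls it internally), and that top and bottom graphs created with createGraph under illustrative names are acceptable stand-ins; none of these specifics come from the original source.

# Hedged usage sketch; assumed import path and graph names.
import popart._internal.ir as _ir

pb_ir = _ir.Ir()
# Graph that will contain the loop op (illustrative; a real caller may reuse an existing graph).
pb_top_graph = pb_ir.createGraph(_ir.GraphId("top"))
# User-defined subgraph that the call op will call.
pb_bottom_graph = pb_ir.createGraph(_ir.GraphId("bottom"))

pb_middle_graph, pb_callop, pb_loop_op = _setup_call_and_repeat(
    pb_ir, pb_top_graph, pb_bottom_graph)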
Example #2
def test_get_of_type():
    ir = Ir()
    gId = GraphId("g")
    graph = Graph(ir, gId)
    ts = Tensors(graph)

    for tid in 'abcd':
        ts.addActGrad(tid)

    data = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]).astype(np.float32)
    tinfo = TensorInfo(DataType.FLOAT, data.shape)
    for tid in 'efgh':
        ts.addVarInit(tid, tinfo, data)

    for tid in 'ijkl':
        ts.addStream(tid, tinfo)

    actGrads = ts.getOfType(TensorType.ActGrad)
    assert len(actGrads) == 4
    assert set([i.id for i in actGrads]) == set([i for i in 'abcd'])

    variables = ts.getOfType(TensorType.Variable)
    assert len(variables) == 4
    assert set([i.id for i in variables]) == set([i for i in 'efgh'])

    streams = ts.getOfType(TensorType.Stream)
    assert len(streams) == 4
    assert set([i.id for i in streams]) == set([i for i in 'ijkl'])

    actGradsAndVars = ts.getOfType([TensorType.ActGrad, TensorType.Variable])
    assert len(actGradsAndVars) == 8
    assert set([i.id for i in actGradsAndVars]) == set([i for i in 'abcdefgh'])
Example #3
def test_add_stream():
    ir = Ir()
    gId = GraphId("g")
    graph = Graph(ir, gId)
    ts = Tensors(graph)

    tinfo = TensorInfo(DataType.FLOAT, [10])
    ts.addStream("data", tinfo)
Example #4
def test_add_const_init():
    ir = Ir()
    gId = GraphId("g")
    graph = Graph(ir, gId)
    ts = Tensors(graph)

    data = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]).astype(np.float32)
    tinfo = TensorInfo(DataType.FLOAT, data.shape)
    ts.addConstInit("data", tinfo, data)
Example #5
def test_adding_actGrads():
    ir = Ir()
    gId = GraphId("g")
    graph = Graph(ir, gId)
    ts = Tensors(graph)

    # Add some tensors.
    ids = [i for i in "abcdefghi"]
    for tid in ids:
        ts.addActGrad(tid)

    # Check the number of tensors is correct.
    assert ts.n() == len(ids)
Example #6
def test_getAllTensorIds():
    ir = Ir()
    gId = GraphId("g")
    graph = Graph(ir, gId)
    ts = Tensors(graph)

    # Add some tensors.
    ids = [i for i in "abcdefghi"]
    for tid in ids:
        ts.addActGrad(tid)

    # Check ids returned from getAllTensorIds.
    assert set(ts.getAllTensorIds()) == set(ids)
Example #7
def test_contains_with_scope():
    ir = Ir()
    gId = GraphId("g")
    graph = Graph(ir, gId)
    ts = Tensors(graph)

    ts.addActGrad('a/b/c/foo')
    ts.addActGrad('a/b/bar')

    scope = Scope() / 'a' / 'b' / 'c'

    assert ts.contains('foo', scope)
    assert ts.contains('bar', scope)
    assert not ts.contains('fizz', scope)
Example #8
def test_remove_all_isolated():
    ir = Ir()
    gId = GraphId("g")
    graph = Graph(ir, gId)
    ts = Tensors(graph)

    # Add some tensors.
    ids = [i for i in "abcdefghi"]
    for tid in ids:
        ts.addActGrad(tid)

    # All these tensors should be isolated
    ts.removeIsolated(False)
    assert ts.n() == 0
Example #9
def test_get():
    ir = Ir()
    gId = GraphId("g")
    graph = Graph(ir, gId)
    ts = Tensors(graph)

    # Add some tensors.
    ids = [i for i in "abcdefghi"]
    for tid in ids:
        ts.addActGrad(tid)

    # Get the tensors one by one and confirm we have been returned the correct tensor.
    for tid in ids:
        t = ts.get(tid)
        assert t.id == tid
Example #10
def test_make_const_init():
    data = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]).astype(np.float32)

    ir = Ir()
    gId = GraphId("g")
    graph = Graph(ir, gId)
    ts = Tensors(graph)

    # Add a tensor and check the value returned by `tensorType()`.
    ts.addActGrad('foo')
    t = ts.get('foo')
    assert t.tensorType() == TensorType.ActGrad

    # Make the tensor const init and check the value returned by `tensorType()` has changed.
    t.info = TensorInfo(DataType.FLOAT, data.shape)
    ts.makeConstInit('foo', data)
    assert t.tensorType() == TensorType.Const
Example #11
def test_contains():
    ir = Ir()
    gId = GraphId("g")
    graph = Graph(ir, gId)
    ts = Tensors(graph)

    # Add some tensors.
    ids = [i for i in "abcdefghi"]
    for tid in ids:
        ts.addActGrad(tid)

    # Check all expected tensors are in ts.
    for tid in ids:
        assert ts.contains(tid)

    # Check `ts.contains` is not just returning true.
    for tid in 'xyz':
        assert not ts.contains(tid)
Example #12
def test_find():
    ir = Ir()
    gId = GraphId("g")
    graph = Graph(ir, gId)
    ts = Tensors(graph)

    # Add three tensors called foo with different scopes.
    ts.addActGrad('foo')
    ts.addActGrad('a/foo')
    ts.addActGrad('a/b/c/foo')

    # Make sure we can find all three tensors.
    foo = ts.find('foo', Scope())
    assert foo == 'foo'
    foo = ts.find('foo', Scope() / 'a')
    assert foo == 'a/foo'
    foo = ts.find('foo', Scope() / 'a' / 'b' / 'c')
    assert foo == 'a/b/c/foo'
Example #13
def test_remove():
    ir = Ir()
    gId = GraphId("g")
    graph = Graph(ir, gId)
    ts = Tensors(graph)

    # Add some tensors.
    ids = [i for i in "abcdefghi"]
    for tid in ids:
        ts.addActGrad(tid)

    # Test removing tensors
    while ids:
        x = ids[0]
        del ids[0]
        ts.remove(x)
        assert not ts.contains(x)
        assert ts.n() == len(ids)
Example #14
def test_get_ids():
    ir = Ir()
    gId = GraphId("g")
    graph = Graph(ir, gId)
    ts = Tensors(graph)

    for tid in 'abcd':
        ts.addActGrad(tid)

    data = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]).astype(np.float32)
    tinfo = TensorInfo(DataType.FLOAT, data.shape)
    for tid in 'efgh':
        ts.addVarInit(tid, tinfo, data)

    actGrads = ts.getIds(TensorType.ActGrad)
    assert len(actGrads) == 4
    assert set(actGrads) == set([i for i in 'abcd'])

    variables = ts.getIds(TensorType.Variable)
    assert len(variables) == 4
    assert set(variables) == set([i for i in 'efgh'])
Example #15
def make_sub_graph(ir: _ir.Ir, ins: Dict[int, _ir.TensorInfo]) -> _ir.Graph:
    """
    Makes the following subgraph, with len(ins) inputs. 

    input0  input1  input2  ...  input n
    │       │       │            │
    │       │       │            │
    │       │       │            │
    └─►add ◄┘       │            │
        │           │            │
        └──────►add◄┘            │
                │                │
                │                │
                │                │
                └────►add ...    ▼


                               add
                                │
                                ▼
                             softmax
                                │
                                ▼
                               out

    Args:
        ir (_ir.Ir): The Ir to add the subgraph to.
        ins (Dict[int, _ir.TensorInfo]): The map of input indices to TensorInfos.

    Returns:
        _ir.Graph: The subgraph in question.
    """
    g = ir.createGraph(_ir.GraphId("fwd"))

    for i, tinfo in ins.items():
        g.addInput(_ir.addScope(g, f"in{i}"), tinfo)

    inputs = g.getInputIds()

    t = g.getTensor(inputs[0])
    for i in range(1, len(ins)):
        settings = _ir.Settings(g, f"add{i}")
        opid = _ir.OperatorIdentifier("ai.onnx", f"Add{i}", 1,
                                      _ir.NumInputs(2, 2), 1)
        add = g.createConnectedOp_AddOp({
            0: t.id,
            1: inputs[i]
        }, {0: _ir.addScope(g, f"add{i}")}, opid, settings)
        t = add.outTensor(0)

    settings = _ir.Settings(g, "softmax0")
    opid = _ir.OperatorIdentifier("ai.onnx", "SoftMax", 1, _ir.NumInputs(1, 1),
                                  1)
    sm = g.createConnectedOp_SoftmaxOp({0: t.id}, {0: _ir.addScope(g, "sm0")},
                                       opid=opid,
                                       axis_=0,
                                       settings=settings)

    g.markAsOutput(sm.outTensor(0).id)

    return g
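A minimal usage sketch follows, assuming popart._internal.ir is importable as _ir and using an illustrative two-input shape; these details are assumptions, not part of the original source.

# Hedged usage sketch; assumed import path, data type and shape.
import popart._internal.ir as _ir

ir = _ir.Ir()
tinfo = _ir.TensorInfo(_ir.DataType.FLOAT, [4])
# Two inputs: make_sub_graph chains one Add per extra input, then applies a Softmax.
fwd = make_sub_graph(ir, {0: tinfo, 1: tinfo})
print(fwd.getInputIds())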
Example #16
def test_tensors_construction():
    """ Test that we can construct a popart._internal.ir.Graph object. """
    ir = Ir()
    gId = GraphId("g")
    graph = Graph(ir, gId)
    ts = Tensors(graph)