Example no. 1
0
def _setup_outputs(pb_top_graph: _ir.Graph, pb_bottom_graph: _ir.Graph,
                   pb_middle_graph: _ir.Graph, pb_callop: _ir.op.CallOp,
                   pb_loop_op: _ir.op.LoopOp) -> List[str]:
    """3. Connect outputs.

    Introspect the bottom subgraph for its outputs and, for each one, create a
    corresponding output tensor on the call op (middle graph) and on the loop
    op (top graph).

    Args:
        pb_top_graph (_ir.Graph): Top _ir graph
        pb_bottom_graph (_ir.Graph): Bottom _ir Graph
        pb_middle_graph (_ir.Graph): Middle _ir Graph
        pb_callop (_ir.op.CallOp): Previously created Call op
        pb_loop_op (_ir.op.LoopOp): Previously created Loop op

    Returns:
        List[str]: The output tensor ids.
    """
    output_ids: List[str] = []

    for subgraph_out_id in pb_bottom_graph.getOutputIds():
        # Strip the bottom graph's scope once; reuse it for every scoping below.
        unscoped_id = _ir.removeScope(pb_bottom_graph, subgraph_out_id)
        top_tensor_id = _ir.addScope(pb_top_graph, unscoped_id)
        # Middle-graph id already carries its scope.
        middle_tensor_id = unscoped_id
        bottom_tensor_id = _ir.addScope(pb_bottom_graph, unscoped_id)

        # Map subgraph output index -> call op output index.
        call_out_idx = pb_callop.subgraphOutToOpOutIndex(
            pb_bottom_graph.getOutputIndex(bottom_tensor_id))

        # Avoid tensor name collisions.
        middle_tensor_id = pb_middle_graph.getIr().createIntermediateTensorId(
            middle_tensor_id)
        pb_callop.createAndConnectOutTensor(call_out_idx, middle_tensor_id)

        pb_middle_graph.markAsOutput(middle_tensor_id)
        loop_out_idx = pb_loop_op.subgraphOutToOpOutIndex(
            pb_middle_graph.getOutputIndex(middle_tensor_id))

        # Avoid tensor name collisions.
        top_tensor_id = pb_middle_graph.getIr().createIntermediateTensorId(
            top_tensor_id)
        # Overwrite (last arg True): middle_tensor_id was already added as a
        # graph output above, but the loop op itself must be set up correctly.
        pb_loop_op.addLoopOutput(loop_out_idx, top_tensor_id, middle_tensor_id,
                                 True)

        output_ids.append(top_tensor_id)
    return output_ids
Example no. 2
0
def _setup_call_and_repeat(
    pb_ir: _ir.Ir, pb_top_graph: _ir.Graph, pb_bottom_graph: _ir.Graph
) -> Tuple[_ir.Graph, _ir.op.CallOp, _ir.op.LoopOp]:
    """Set up the call and repeat ops, plus the middle graph the loop op loops.

    Args:
        pb_ir (_ir.Ir): The _ir level Ir
        pb_top_graph (_ir.Graph): The _ir top level graph that will contain the loop op.
        pb_bottom_graph (_ir.Graph): The _ir user defined subgraph that will be called.

    Returns:
        Tuple[_ir.Graph, _ir.op.CallOp, _ir.op.LoopOp]: The created _ir-level middle graph, call op
            and loop op.
    """
    ctx = get_current_context()

    # The graph that will be repeated by the loop op.
    middle_graph_id = _ir.GraphId(
        pb_ir.createUniqueSubgraphId(
            f"{pb_bottom_graph.id.str()}__loop_wrapper"))
    pb_middle_graph = pb_ir.createGraph(middle_graph_id)

    # Call the bottom_graph from inside the middle graph.
    call_opid = _ir.OperatorIdentifier("ai.graphcore", "Call", 1,
                                       _ir.NumInputs(), 0)
    call_name = pb_middle_graph.id.str() + '__call__' + pb_bottom_graph.id.str()
    pb_callop = pb_middle_graph.createOp_CallOp(
        call_opid, pb_bottom_graph, ctx._get_op_settings(call_name))

    # Loop the middle_graph from the top graph.
    loop_opid = _ir.OperatorIdentifier("ai.onnx", "Loop", 11, _ir.NumInputs(),
                                       0)
    loop_name = pb_top_graph.id.str() + '__loop__' + pb_middle_graph.id.str()
    pb_loop_op = pb_top_graph.createOp_LoopOp(loop_opid,
                                              ctx._get_op_settings(loop_name),
                                              pb_middle_graph)

    # Mandatory loop iterator tensor on the subgraph (is not an output).
    iter_id = _ir.addScope(pb_middle_graph, "Iterator___")
    pb_middle_graph.addInput(iter_id, _ir.TensorInfo(_ir.DataType.INT32, ()))

    # Mandatory loop condition tensor on the subgraph (is also an output).
    cond_id = _ir.addScope(pb_middle_graph, "LoopCond___")
    pb_middle_graph.addInput(cond_id, _ir.TensorInfo(_ir.DataType.BOOL, ()))
    pb_middle_graph.markAsOutput(cond_id)

    return pb_middle_graph, pb_callop, pb_loop_op
Example no. 3
0
def test_graph_scope_functions():
    """Test that graph scope helper functions behave as expected."""
    ir = _ir.Ir()
    graph = ir.createGraph(_ir.GraphId("g1"))

    # addScope prepends the graph id; removeScope strips it again.
    for name in ("tensor1", "foobar"):
        assert _ir.addScope(graph, name) == f"g1/{name}"
        assert _ir.removeScope(graph, f"g1/{name}") == name

    # Stripping a scope that does not match the graph raises.
    with pytest.raises(popart.popart_exception) as excinfo:
        _ir.removeScope(graph, "h1/tensor1")

    # Test getScope
    assert graph.getScope().str() == "g1"
Example no. 4
0
    def _create_tensor_id(self, name: Optional[str] = None) -> str:
        """Generate a unique tensor id.

        The current context's name scopes and the graph scope are prepended to
        `name`; if the resulting id already exists in the graph it is made
        unique via an intermediate tensor id.

        Args:
            name (Optional[str]):
                A name which will be appended with an id to make unique.
                Defaults to `t`

        Returns:
            str:
                The unique id of the tensor.
        """
        scoped_name = "/".join((*get_current_context().name_scopes, name
                                or 't'))
        candidate = _ir.addScope(self._pb_graph, scoped_name)
        if candidate in self._pb_graph:
            # Name collision: ask the Ir for a fresh intermediate id.
            candidate = self._pb_graph.getIr().createIntermediateTensorId(
                candidate)
        return candidate
Example no. 5
0
def make_sub_graph(ir: _ir.Ir, ins: Dict[int, _ir.TensorInfo]) -> _ir.Graph:
    """Build the subgraph "fwd" with len(ins) inputs.

    The inputs are reduced with a chain of Add ops:

        ((in0 + in1) + in2) + ... + inN

    and the running sum is fed through a softmax whose output is marked as the
    graph output.

    Args:
        ir (_ir.Ir): The ir to add the subgraph to
        ins (Dict[int, _ir.TensorInfo]): The map of in indices to tensorinfos.

    Returns:
        _ir.Graph: The subgraph in question.
    """
    graph = ir.createGraph(_ir.GraphId("fwd"))

    for idx, tinfo in ins.items():
        graph.addInput(_ir.addScope(graph, f"in{idx}"), tinfo)

    input_ids = graph.getInputIds()

    # Accumulate the inputs pairwise with a chain of Add ops.
    acc = graph.getTensor(input_ids[0])
    for idx in range(1, len(ins)):
        add_op = graph.createConnectedOp_AddOp(
            {
                0: acc.id,
                1: input_ids[idx]
            }, {0: _ir.addScope(graph, f"add{idx}")},
            _ir.OperatorIdentifier("ai.onnx", f"Add{idx}", 1,
                                   _ir.NumInputs(2, 2), 1),
            _ir.Settings(graph, f"add{idx}"))
        acc = add_op.outTensor(0)

    softmax_op = graph.createConnectedOp_SoftmaxOp(
        {0: acc.id}, {0: _ir.addScope(graph, "sm0")},
        opid=_ir.OperatorIdentifier("ai.onnx", "SoftMax", 1,
                                    _ir.NumInputs(1, 1), 1),
        axis_=0,
        settings=_ir.Settings(graph, "softmax0"))

    graph.markAsOutput(softmax_op.outTensor(0).id)

    return graph
Example no. 6
0
def _setup_inputs(subgraph_fn_param_inputs: Iterable[Tensor],
                  subgraph_in_to_parent_in: Mapping[Tensor, Tensor],
                  pb_top_graph: _ir.Graph, pb_bottom_graph: _ir.Graph,
                  pb_middle_graph: _ir.Graph, pb_callop: _ir.op.CallOp,
                  pb_loop_op: _ir.op.LoopOp) -> None:
    """Do the following:

    1. Connect explicitly passed inputs. These would have been created first
       by ir.get_graph, so we do them first. ir.get_graph will have created
       the input tensors t_0,...,t_N at input indices 0,..,N, respectively. We
       require that the user has passed the parent tensors that correspond to
       these inputs in the exact same order, so we can trivially reconstruct
       the input indices here.

    2. Connect internally created inputs.

    Args:
        subgraph_fn_param_inputs (Iterable[Tensor]): User defined explicit inputs.
        subgraph_in_to_parent_in (Mapping[Tensor, Tensor]):
            Mapping of `subgraph tensor -> parent tensor` that corresponds to
            the inputs that the callable defined internally, e.g. by using
            popart.ir.subgraph_input. Defaults to an empty dictionary.
            Works effectively the same as the call op's `subgraph_in_to_parent_in` argument.
        pb_top_graph (_ir.Graph): Top _ir graph
        pb_bottom_graph (_ir.Graph): Bottom _ir Graph
        pb_middle_graph (_ir.Graph): Middle _ir Graph
        pb_callop (_ir.op.CallOp): Previously created Call op
        pb_loop_op (_ir.op.LoopOp): Previously created Loop op
    """

    # Note: Only bottom_graph (which is called) has gone through the ir.get_graph process.
    # middle_graph (intentionally) has not, so we need to add loop inputs/outputs.
    # User defined indices start at 2 for loop ops, hence the `+ 2` below.

    # 1. Connect explicitly passed inputs.
    for sg_in_idx, t in enumerate(subgraph_fn_param_inputs):
        call_in_idx = pb_callop.subgraphInToOpInIndex(sg_in_idx)
        pb_loop_op.addLoopInput(sg_in_idx + 2,
                                _ir.addScope(pb_top_graph, t.name),
                                _ir.addScope(pb_middle_graph, t.name), False)
        pb_callop.connectInTensor(call_in_idx,
                                  _ir.addScope(pb_middle_graph, t.name))

    # 2. Connect internally created inputs.
    for sg_tensor, parent_tensor in subgraph_in_to_parent_in.items():
        sg_in_idx = pb_bottom_graph.getInputIndex(sg_tensor.id)
        call_in_idx = pb_callop.subgraphInToOpInIndex(sg_in_idx)

        # The middle-graph id is the bottom-graph name re-scoped to the middle graph.
        middle_tensor_id = _ir.addScope(
            pb_middle_graph, _ir.removeScope(pb_bottom_graph, sg_tensor.id))
        top_tensor_id = _ir.addScope(pb_top_graph, parent_tensor.id)

        pb_loop_op.addLoopInput(sg_in_idx + 2, top_tensor_id, middle_tensor_id,
                                False)
        set_input_modified(pb_loop_op, pb_loop_op.inTensor(sg_in_idx + 2))
        pb_callop.connectInTensor(call_in_idx, middle_tensor_id)
        set_input_modified(pb_callop, pb_callop.inTensor(call_in_idx))