Example #1
    def MakeOnnxGraphIO(self, ids):
        """Create tensor_value_info for passed input/output ids."""
        tensor_value_infos = []
        for name in ids:
            dtype = self.get_dtype(name)
            shape = self.get_shape(name)

            util.MakeSure(dtype is not None, "missing dtype for " + name)
            util.MakeSure(shape is not None, "missing shape for " + name)

            v = util.MakeOnnxInputsOutputs(name, dtype, shape)
            tensor_value_infos.append(v)
        return tensor_value_infos
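A hypothetical call site, assuming this method lives on a graph wrapper whose inputs/outputs attributes list tensor names, that the surrounding nodes have already been converted, and that onnx.helper is imported (all names here are illustrative, not taken from the source project):

# Sketch only: `g`, `g.inputs`, `g.outputs`, and `onnx_nodes` are assumed names.
input_infos = g.MakeOnnxGraphIO(g.inputs)    # tensor_value_info for graph inputs
output_infos = g.MakeOnnxGraphIO(g.outputs)  # tensor_value_info for graph outputs
graph_proto = helper.make_graph(onnx_nodes, "main-graph", input_infos, output_infos)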
Example #2
import copy
import logging

from onnx import OperatorSetIdProto, TensorProto, helper, shape_inference

# `util` (MakeSure, MakeOnnxInputsOutputs, ONNX_UNKNOWN_DIMENSION) is the
# converter's own helper module and is assumed to be imported alongside this code.
logger = logging.getLogger(__name__)


def InferOnnxShapeDtype(
    node, opset_version, input_shapes, input_dtypes, initializers=None
):
    """
    Infer shapes and dtypes for outputs of the node.
    Sometimes, shape inference needs the values of node's inputs, so initializers are used.
    """

    def BuildOnnxOp(node):
        """Build onnx op"""
        onnx_node = helper.make_node(node.type, node.input, node.output, name=node.name)
        # deal with attributes
        attr = []
        attr_graphs = node.get_body_graphs()
        if attr_graphs:
            for attr_name, sub_graph in attr_graphs.items():
                copied_sub_graph = copy.deepcopy(sub_graph)
                graph_proto = copied_sub_graph.MakeGraph(
                    "graph for " + node.name + " " + attr_name
                )
                attr.append(helper.make_attribute(attr_name, graph_proto))
        attr.extend(node.attr_onnx.values())
        if attr:
            onnx_node.attribute.extend(attr)
        return onnx_node

    inputs = []
    outputs = []
    for inp, shape, dtype in zip(node.input, input_shapes, input_dtypes):
        inputs.append(util.MakeOnnxInputsOutputs(inp, dtype, shape))
    for output in node.output:
        outputs.append(util.MakeOnnxInputsOutputs(output, TensorProto.UNDEFINED, None))
    graph_proto = helper.make_graph(
        [BuildOnnxOp(node)], "infer-graph", inputs, outputs, initializer=initializers
    )
    imp = OperatorSetIdProto()
    imp.version = opset_version
    model_proto = helper.make_model(graph_proto, opset_imports=[imp])

    inferred_model = None
    try:
        inferred_model = shape_inference.infer_shapes(model_proto)
    except Exception:  # pylint: disable=broad-except
        logger.warning(
            "ONNX Failed to infer shapes and dtypes for [%s, type: %s]",
            node.name,
            node.type,
            exc_info=1,
        )
        return None, None

    shapes = {}
    dtypes = {}
    for output in inferred_model.graph.output:
        tensor_type = output.type.tensor_type
        if tensor_type.HasField("elem_type"):
            dtypes[output.name] = tensor_type.elem_type
        else:
            dtypes[output.name] = TensorProto.UNDEFINED
        # a dim_value of 0 in ONNX means an unknown dimension, which the converter represents as -1
        if tensor_type.HasField("shape"):
            shapes[output.name] = [
                dim.dim_value if dim.dim_value != 0 else util.ONNX_UNKNOWN_DIMENSION
                for dim in tensor_type.shape.dim
            ]
        else:
            shapes[output.name] = None
    output_shapes = []
    output_dtypes = []
    for output in node.output:
        if output in shapes:
            output_shapes.append(shapes[output])
        else:
            output_shapes.append(None)
        if output in dtypes:
            output_dtypes.append(dtypes[output])
        else:
            output_dtypes.append(TensorProto.UNDEFINED)
    return output_shapes, output_dtypes
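A minimal usage sketch, assuming `node` is the converter's node wrapper (providing .type, .name, .input, .output, .attr_onnx, and .get_body_graphs()) and that the input shapes/dtypes for it are already known; the opset number and the concrete shape/dtype values are illustrative only:

# Sketch only: `node`, the opset, and the input shape/dtype lists are assumptions.
shapes, dtypes = InferOnnxShapeDtype(
    node,
    opset_version=13,
    input_shapes=[[1, 3, 224, 224]],
    input_dtypes=[TensorProto.FLOAT],
)
if shapes is None:
    # ONNX shape inference failed for this op; fall back to other handling
    logger.warning("no inferred shapes for %s", node.name)
else:
    for out_name, shape, dtype in zip(node.output, shapes, dtypes):
        print(out_name, shape, dtype)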