Example #1
    def add_output(self, output_ids):
        if not isinstance(output_ids, (tuple, list, set)):
            output_ids = [
                output_ids,
            ]

        inferred_model = shape_inference.infer_shapes(self.model)
        all_blobs_info = {
            value_info.name: value_info
            for value_info in inferred_model.graph.value_info
        }

        extra_outputs = []
        for output_id in output_ids:
            value_info = all_blobs_info.get(output_id, None)
            if value_info is None:
                print('WARNING! No blob with name {}'.format(output_id))
                extra_outputs.append(
                    helper.make_empty_tensor_value_info(output_id))
            else:
                extra_outputs.append(value_info)

        self.model.graph.output.extend(extra_outputs)
        self.output_names.extend(output_ids)
        self.session = onnxruntime.InferenceSession(
            self.model.SerializeToString(), self.sess_options)
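A short usage sketch of the method above (illustrative only; `debugger`, the blob names, and the input feed name are assumptions, not part of the original example):

import numpy as np

# `debugger` is assumed to be an instance of the class that defines
# add_output above, already holding self.model and self.sess_options.
debugger.add_output(['conv1_out', 'relu1_out'])
result = debugger.session.run(
    debugger.output_names,
    {'data': np.zeros((1, 3, 224, 224), dtype=np.float32)})  # 'data' is an assumed input name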
Example #2
 def _make_graph(self, seed_values, nodes, value_info):
     input_value_infos = []
     # introduce the starting values as the output of reshape,
     # so that the sizes are guaranteed to be unknown
     for seed_value in seed_values:
         if isinstance(seed_value, tuple):
             name = seed_value[0]
             value_info.append(make_tensor_value_info(*seed_value))
         else:
             name = seed_value
             value_info.append(make_empty_tensor_value_info(seed_value))
         input_value_infos.append(
             make_tensor_value_info('SEED_' + name, TensorProto.UNDEFINED,
                                    ()))
         input_value_infos.append(
             make_tensor_value_info('UNKNOWN_SHAPE_' + name,
                                    TensorProto.UNDEFINED, ()))
         nodes[:0] = [
             make_node("Reshape", ['SEED_' + name, 'UNKNOWN_SHAPE_' + name],
                       [name])
         ]
     return helper.make_graph(nodes,
                              "test",
                              input_value_infos, [],
                              value_info=value_info)
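For context, make_empty_tensor_value_info only sets the name on a ValueInfoProto, which is why the Reshape trick above leaves the seed shapes completely unknown; a quick illustrative check (not from the original code):

from onnx import helper

vi = helper.make_empty_tensor_value_info('blob')
print(vi.name)                           # 'blob'
print(vi.type.HasField('tensor_type'))   # False: no element type or shape is attached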
Example #3
    def _make_graph(self,
                    seed_values,  # type: Sequence[Union[Text, Tuple[Text, TensorProto.DataType, Any]]]
                    nodes,  # type: List[NodeProto]
                    value_info,  # type: List[ValueInfoProto]
                    initializer=None  # type: Optional[Sequence[TensorProto]]
                    ):  # type: (...) -> GraphProto
        if initializer is None:
            initializer = []
        names_in_initializer = set(x.name for x in initializer)
        input_value_infos = []
        # If the starting values are not also initializers,
        # introduce the starting values as the output of reshape,
        # so that the sizes are guaranteed to be unknown
        for seed_value in seed_values:
            if isinstance(seed_value, tuple):
                seed_name = seed_value[0]
                seed_value_info = make_tensor_value_info(*seed_value)
            else:
                seed_name = seed_value
                seed_value_info = make_empty_tensor_value_info(seed_value)

            if seed_name in names_in_initializer:
                input_value_infos.append(seed_value_info)
            else:
                value_info.append(seed_value_info)
                input_value_infos.append(make_tensor_value_info('SEED_' + seed_name, TensorProto.UNDEFINED, ()))
                input_value_infos.append(make_tensor_value_info('UNKNOWN_SHAPE_' + seed_name, TensorProto.UNDEFINED, ()))
                nodes[:0] = [make_node("Reshape", ['SEED_' + seed_name, 'UNKNOWN_SHAPE_' + seed_name], [seed_name])]
        return helper.make_graph(nodes, "test", input_value_infos, [], initializer=initializer, value_info=value_info)
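A hedged usage sketch of the variant above, written as it might appear inside a test method of the same class (the op, shape, and values are assumptions):

from onnx import TensorProto
from onnx.helper import make_node, make_tensor

# 'x' appears both as a seed value and as an initializer, so it is kept as a
# plain graph input instead of being routed through the Reshape trick.
graph = self._make_graph(
    [('x', TensorProto.FLOAT, (2, 3))],
    [make_node('Relu', ['x'], ['y'])],
    [],
    initializer=[make_tensor('x', TensorProto.FLOAT, (2, 3), [0.0] * 6)])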
Example #4
def extract_nodes_shapes_ort(model: ModelProto) -> Dict[str, List[List[int]]]:
    """
    Creates a modified model to expose intermediate outputs and runs an onnxruntime
    InferenceSession to obtain the output shape of each node.

    :param model: an onnx model
    :return: a dictionary mapping each output and input name to its shape, or None when no shape is available
    """
    model_copy = make_model(model.graph)

    for node in model_copy.graph.node:
        intermediate_layer_value_info = make_empty_tensor_value_info(
            extract_node_id(node))
        model_copy.graph.output.append(intermediate_layer_value_info)

    sess_options = onnxruntime.SessionOptions()
    sess_options.log_severity_level = 3
    sess = onnxruntime.InferenceSession(model_copy.SerializeToString(),
                                        sess_options)

    output_shapes = {}
    for node in sess.get_outputs() + sess.get_inputs():
        output_shapes[node.name] = (node.shape if node.shape is not None
                                    and len(node.shape) > 0 else None)
    return output_shapes
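A brief sketch of how extract_nodes_shapes_ort might be invoked (the model path is a placeholder):

import onnx

model = onnx.load("model.onnx")  # placeholder path, not from the original example
shapes = extract_nodes_shapes_ort(model)
for name, shape in shapes.items():
    print(name, shape)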
Example #5
 def _make_graph(
     self,
     seed_values,  # type: Sequence[Union[Text, Tuple[Text, TensorProto.DataType, Any]]]
     nodes,  # type: List[NodeProto]
     value_info  # type: List[ValueInfoProto]
 ):  # type: (...) -> GraphProto
     input_value_infos = []
     # introduce the starting values as the output of reshape,
     # so that the sizes are guaranteed to be unknown
     for seed_value in seed_values:
         if isinstance(seed_value, tuple):
             name = seed_value[0]
             value_info.append(make_tensor_value_info(*seed_value))
         else:
             name = seed_value
             value_info.append(make_empty_tensor_value_info(seed_value))
         input_value_infos.append(
             make_tensor_value_info('SEED_' + name, TensorProto.UNDEFINED,
                                    ()))
         input_value_infos.append(
             make_tensor_value_info('UNKNOWN_SHAPE_' + name,
                                    TensorProto.UNDEFINED, ()))
         nodes[:0] = [
             make_node("Reshape", ['SEED_' + name, 'UNKNOWN_SHAPE_' + name],
                       [name])
         ]
     return helper.make_graph(nodes,
                              "test",
                              input_value_infos, [],
                              value_info=value_info)
Example #6
    def add_extra_output(self, target_node, output_name):
        extra_output = helper.make_empty_tensor_value_info(output_name)
        '''
        # NOTE
        # If we know the value type and shape, we can also use:
        def make_tensor_value_info(
                name,  # type: Text
                elem_type,  # type: int
                shape,  # type: Optional[Sequence[Union[Text, int]]]
                doc_string="",  # type: Text
                shape_denotation=None,  # type: Optional[List[Text]]
        ):
        '''
        target_output = target_node.output[0]
        identity_node = helper.make_node('Identity', inputs=[target_output], outputs=[output_name], name=output_name)
        self.model.graph.node.append(identity_node)
        self.model.graph.output.append(extra_output)
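If the element type and shape of the new output are known, the typed variant mentioned in the comment can be used instead of make_empty_tensor_value_info; a minimal sketch with assumed values:

from onnx import helper, TensorProto

# Name and shape are assumptions, purely for illustration.
typed_output = helper.make_tensor_value_info(
    'branch_feature', TensorProto.FLOAT, [1, 256, 14, 14])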
Example #7
 def _make_graph(self,
                 seed_values,  # type: Sequence[Union[Text, Tuple[Text, TensorProto.DataType, Any]]]
                 nodes,  # type: List[NodeProto]
                 value_info  # type: List[ValueInfoProto]
                 ):  # type: (...) -> GraphProto
     input_value_infos = []
     # introduce the starting values as the output of reshape,
     # so that the sizes are guaranteed to be unknown
     for seed_value in seed_values:
         if isinstance(seed_value, tuple):
             name = seed_value[0]
             value_info.append(make_tensor_value_info(*seed_value))
         else:
             name = seed_value
             value_info.append(make_empty_tensor_value_info(seed_value))
         input_value_infos.append(make_tensor_value_info('SEED_' + name, TensorProto.UNDEFINED, ()))
         input_value_infos.append(make_tensor_value_info('UNKNOWN_SHAPE_' + name, TensorProto.UNDEFINED, ()))
         nodes[:0] = [make_node("Reshape", ['SEED_' + name, 'UNKNOWN_SHAPE_' + name], [name])]
     return helper.make_graph(nodes, "test", input_value_infos, [], value_info=value_info)
Example #8
def generate_onnx_node(node, graph):
    """
    Convert an MDF node into an ONNX node.
    Takes an MDF node and MDF graph and returns the ONNX node, any inputs to the node coming from outside the graph,
    any outputs from the node going outside the graph, and an initializer for constants.
    """

    onnx_graph_inputs = []
    onnx_graph_outputs = []
    onnx_initializer = []

    sender_port_name = {}  # Names of ports that send values to this node

    # Go over all parameters
    # Assumption: there may be multiple parameters but there will only be one function.
    # If there are multiple functions, the code should change so that each function should become its own node.
    for param in node.parameters:
        # If this is a constant
        if param.value:
            # Create a constant onnx node
            name = node.id + "_" + param.id
            constant = helper.make_tensor(name,
                                          data_type=TensorProto.FLOAT,
                                          dims=[],
                                          vals=[param.value])
            onnx_initializer.append(constant)
            # The following will be the sender port from the constant onnx node for this parameter
            sender_port_name[param.id] = name
        elif param.function:
            # This is a function and will be part of an onnx node corresponding to this MDF node
            function_name = param.function

            onnx_function_prefix = "onnx::"
            pattern = re.compile(onnx_function_prefix)
            if re.match(pattern, function_name):
                # This is an onnx function
                function_name = function_name[len(onnx_function_prefix):]
                # Get the arguments that this onnx function expects
                schema = get_schema(function_name)
                # The MDF description would have specified all the expected arguments of this function
                function_input_names = [
                    param.args[arg.name] for arg in schema.inputs
                ]
            else:
                # Error
                raise "Cannot generate onnx function for the unknown function: {} specfied in the MDF node {}".format(
                    function_name,
                    node.id,
                )

    # Find the inputs to the new ONNX node. These are the senders of the in edges to this node
    node_in_edges = [edge for edge in graph.edges if edge.receiver == node.id]
    for in_edge in node_in_edges:
        sender_port_name[in_edge.receiver_port] = (in_edge.sender + "_" +
                                                   in_edge.sender_port)

    onnx_node_input_names = [
        sender_port_name[function_input_name]
        if function_input_name in sender_port_name else function_input_name
        for function_input_name in function_input_names
    ]

    # No parameters. Constants became their own nodes earlier
    onnx_node_parameters = {}

    # Find the outputs of the new ONNX node. These are the output ports of the node
    onnx_node_output_names = [
        node.id + "_" + port.id for port in node.output_ports
    ]

    # print(node.id, node_in_edges,node_out_edges)
    # print(function_name, onnx_node_input_names, onnx_node_output_names)

    # Create an ONNX node
    onnx_node = helper.make_node(
        function_name,
        onnx_node_input_names,
        onnx_node_output_names,
        name=node.id,
        **onnx_node_parameters,
    )

    # Check if any of the node's inputs are the inputs to the ONNX graph itself.
    # These are the node's inputs that don't have an incoming edge.
    input_ports_with_edge = [
        in_edge.receiver_port for in_edge in node_in_edges
    ]
    input_ports_without_edge = [
        input_port for input_port in node.input_ports
        if input_port.id not in input_ports_with_edge
    ]
    if input_ports_without_edge:
        # Create ONNX graph input ports
        for input_port in input_ports_without_edge:
            shape = literal_eval(input_port.shape)
            value_info = helper.make_tensor_value_info(input_port.id,
                                                       TensorProto.FLOAT,
                                                       shape)
            onnx_graph_inputs.append(value_info)

    # Check if any of the node's outputs are the outputs of the ONNX graph.
    # These are the node's outputs that don't have an outgoing edge
    node_out_edges = [edge for edge in graph.edges if edge.sender == node.id]

    output_ports_with_edge = [
        out_edge.sender_port for out_edge in node_out_edges
    ]
    output_ports_without_edge = [
        output_port for output_port in node.output_ports
        if output_port.id not in output_ports_with_edge
    ]
    if output_ports_without_edge:
        # Create ONNX graph output ports
        for output_port in output_ports_without_edge:
            # No need to create output shapes because they are inferred by ONNX
            value_info = helper.make_empty_tensor_value_info(node.id + "_" +
                                                             output_port.id)
            onnx_graph_outputs.append(value_info)
    # print("Graph ip op", input_ports_without_edge, output_ports_without_edge)

    return onnx_node, onnx_graph_inputs, onnx_graph_outputs, onnx_initializer
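A sketch (not from the original source) of how the values returned by generate_onnx_node could be assembled into a complete ONNX model; `mdf_graph` and its attributes are assumptions about the MDF graph object:

from onnx import helper

all_nodes, graph_inputs, graph_outputs, initializers = [], [], [], []
for mdf_node in mdf_graph.nodes:  # assumed MDF graph object with a .nodes attribute
    onnx_node, node_inputs, node_outputs, node_init = generate_onnx_node(mdf_node, mdf_graph)
    all_nodes.append(onnx_node)
    graph_inputs.extend(node_inputs)
    graph_outputs.extend(node_outputs)
    initializers.extend(node_init)

onnx_graph = helper.make_graph(all_nodes, "mdf_graph", graph_inputs,
                               graph_outputs, initializer=initializers)
onnx_model = helper.make_model(onnx_graph)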