Example 1
 def test_set_outputs_updates_old_outputs(self):
     dummy = Node(op="dummy")
     self.tensor.outputs = [dummy]
     assert len(self.output_node.inputs) == 0
     assert dummy.inputs[0] == self.tensor
Example 2
 def test_i(self):
     x = Variable(name="x")
     y = Variable(name="y")
     node = Node(op="Add", name="Input", inputs=[x], outputs=[y])
     assert y.i() == x
Example 3
 def test_o(self):
     intermediate_tensor = Variable(name="intermediate")
     input_node = Node(op="Add", name="Input", inputs=[self.input_tensor], outputs=[intermediate_tensor])
     output_node = Node(op="Add", name="Out", inputs=[intermediate_tensor], outputs=[self.output_tensor])
     assert input_node.o() == output_node
Example 4
 def setup_method(self, field_names):
     self.tensors = [Variable(name="test_tensor_{:}".format(i), dtype=np.float32, shape=(1, 3, 224, 224)) for i in range(10)]
     self.node = Node(op="Dummy")
Example 5
 def test_can_copy_inputs_from_other_node(self):
     node = Node(op="Subtract")
     node.inputs = self.node.inputs
     assert node.inputs == self.node.inputs
     # Contents should be the same, but it should not just be a reference to the existing SynchronizedList
     assert node.inputs is not self.node.inputs
Example 6
 def test_can_copy_outputs_from_other_node(self):
     node = Node(op="Subtract")
     node.outputs = self.node.outputs
     assert node.outputs == self.node.outputs
     assert node.outputs is not self.node.outputs
Example 7
 def test_equals_num_inputs_mismatch(self):
     node = Node(op="Subtract", name="Test")
     assert not self.node == node
Example 8
    def test_equals_inputs_mismatch(self):
        tensor = Variable(name="other_tensor")
        assert not self.input_tensor == tensor

        node = Node(op="Add", name="Test", inputs=[tensor])
        assert not self.node == node
Example 9
 def setup_method(self):
     self.input_tensor = Variable(name="x")
     self.output_tensor = Variable(name="y")
     self.node = Node(op="Add", name="Test", inputs=[self.input_tensor], outputs=[self.output_tensor])
Example 10
 def test_equals_name_mismatch(self):
     node = Node(op="Add", name="OtherTest")
     assert not self.node == node
Example 11
 def setup_method(self):
     self.tensor = Constant(name="test_tensor", values=np.ones((1, 3, 5, 5), dtype=np.float64))
     self.input_node = Node(op="Add", outputs=[self.tensor]) # Doesn't make sense for Constants, but needed to make base tests happy.
     self.output_node = Node(op="Add", inputs=[self.tensor])
Example 12
 def setup_method(self):
     self.tensor = Variable(name="test_tensor", dtype=np.float32, shape=(1, 3, 224, 224))
     self.input_node = Node(op="Add", outputs=[self.tensor])
     self.output_node = Node(op="Add", inputs=[self.tensor])
Example 13
    def import_node(onnx_node: onnx.NodeProto,
                    tensor_map: "OrderedDict[str, Tensor]",
                    subgraph_tensor_map: "OrderedDict[str, Tensor]") -> Node:
        def attrs_to_dict(attrs):
            attr_dict = OrderedDict()
            for attr in attrs:

                def process_attr(attr_str: str):
                    processed = getattr(attr,
                                        ONNX_PYTHON_ATTR_MAPPING[attr_str])
                    if attr_str == "STRING":
                        processed = processed.decode()
                    elif attr_str == "TENSOR":
                        processed = OnnxImporter.import_tensor(processed)
                    elif attr_str == "GRAPH":
                        processed = OnnxImporter.import_graph(
                            processed,
                            misc.combine_dicts(tensor_map,
                                               subgraph_tensor_map))
                    elif attr_str == "FLOATS" or attr_str == "INTS":
                        processed = list(processed)
                    elif attr_str == "STRINGS":
                        processed = [p.decode() for p in processed]
                    return processed

                if attr.type in ATTR_TYPE_MAPPING:
                    attr_str = ATTR_TYPE_MAPPING[attr.type]
                    if attr_str in ONNX_PYTHON_ATTR_MAPPING:
                        attr_dict[attr.name] = process_attr(attr_str)
                    else:
                        G_LOGGER.warning(
                            "Attribute of type {:} is currently unsupported. Skipping attribute."
                            .format(attr_str))
                else:
                    G_LOGGER.warning(
                        "Attribute type: {:} was not recognized. Was the graph generated with a newer IR version than the installed `onnx` package? Skipping attribute."
                        .format(attr.type))
            return attr_dict

        # Optional inputs/outputs are represented by empty tensors. All other tensors should already have been populated during shape inference.
        def get_tensor(name: str, check_outer_graph=True):
            # Prioritize the subgraph even if check_outer_graph is set
            if name in subgraph_tensor_map:
                return subgraph_tensor_map[name]

            if check_outer_graph and name in tensor_map:
                return tensor_map[name]

            if not name:
                # Empty tensors are not tracked by the graph, as these represent optional inputs/outputs that have been omitted.
                G_LOGGER.verbose("Generating empty tensor")
                return Variable.empty()

            G_LOGGER.verbose(
                "Tensor: {:} was not generated during shape inference, or shape inference was not run on this model. Creating a new Tensor."
                .format(name))
            subgraph_tensor_map[name] = Variable(name)
            return subgraph_tensor_map[name]

        # Retrieve Tensors for node inputs/outputs. Only empty tensors should need to be newly added.
        def retrieve_node_inputs() -> List[Tensor]:
            inputs = []  # List[Tensor]
            for input_name in onnx_node.input:
                inputs.append(get_tensor(input_name))
            return inputs

        def retrieve_node_outputs() -> List[Tensor]:
            outputs = []  # List[Tensor]
            for output_name in onnx_node.output:
                # Node outputs cannot come from the outer graph, they must be created within the inner graph.
                outputs.append(get_tensor(output_name,
                                          check_outer_graph=False))
            return outputs

        return Node(op=onnx_node.op_type,
                    name=onnx_node.name,
                    attrs=attrs_to_dict(onnx_node.attribute),
                    inputs=retrieve_node_inputs(),
                    outputs=retrieve_node_outputs())
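
For reference, here is a minimal usage sketch of `import_node` based on the signature shown above. The import path for `OnnxImporter`, the `Relu` node, and the tensor names "x"/"y" are illustrative assumptions rather than part of the example itself.

    from collections import OrderedDict

    from onnx import helper
    from onnx_graphsurgeon.importers.onnx_importer import OnnxImporter  # Assumed import path.

    # Build a bare ONNX node with no attributes; "x" and "y" are placeholder tensor names.
    onnx_node = helper.make_node("Relu", inputs=["x"], outputs=["y"], name="relu0")

    # With empty tensor maps, import_node creates new Variables for the unseen names.
    node = OnnxImporter.import_node(onnx_node, tensor_map=OrderedDict(), subgraph_tensor_map=OrderedDict())

    assert node.op == "Relu"
    assert [t.name for t in node.inputs] == ["x"]
    assert [t.name for t in node.outputs] == ["y"]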
Example 14
    def layer(self, inputs=[], outputs=[], *args, **kwargs):
        """
        Creates a node, adds it to this graph, and optionally creates its input and output tensors.

        The input and output lists can include various different types:

            - ``Tensor``: Any Tensors provided will be used as-is in the inputs/outputs of the node created.
            - ``str``:
                    If a string is provided, this function will generate a new tensor using
                    the string to generate a name. It will append an index to the end of the provided string
                    to attempt to avoid duplicate tensor names, but since this doesn't guarantee that the name will
                    be unique, you should try to ensure that the string provided is as unique as possible.
            - ``numpy.ndarray``:
                    If a NumPy array is provided, this function will generate a Constant tensor
                    using the name prefix: "onnx_graphsurgeon_constant"
            - ``Union[List[Number], Tuple[Number]]``:
                    If a list or tuple of numbers (int or float) is provided, this function will
                    generate a Constant tensor using the name prefix: "onnx_graphsurgeon_lst_constant"

        Args:
            inputs (List[Union[Tensor, str, numpy.ndarray]]): The list of inputs
            outputs (List[Union[Tensor, str, numpy.ndarray]]): The list of outputs
            args/kwargs: These are passed directly to the constructor of Node

        Returns:
            List[Tensor]: The output tensors of the node
        """
        def process_io(io):
            new_io = []
            for elem in io:
                if isinstance(elem, Tensor):
                    new_io.append(elem)
                elif isinstance(elem, str):
                    tensor = Variable(name=self._generate_name(elem))
                    new_io.append(tensor)
                elif isinstance(elem, np.ndarray):
                    new_io.append(
                        Constant(name=self._generate_name(
                            "onnx_graphsurgeon_constant"),
                                 values=elem))
                elif isinstance(elem, list) or isinstance(elem, tuple):
                    dtype = np.float32 if any(
                        [isinstance(x, float) for x in elem]) else np.int64
                    arr = np.array(elem, dtype=dtype)
                    new_io.append(
                        Constant(name=self._generate_name(
                            "onnx_graphsurgeon_lst_constant"),
                                 values=arr))
                else:
                    G_LOGGER.critical(
                        "Unrecognized type passed to Graph.layer: {:}.\n"
                        "\tHint: Did you forget to unpack a list with `*`?\n"
                        "\tPlease use Tensors, strings, or NumPy arrays.".
                        format(elem))
            return new_io

        inputs = process_io(inputs)
        outputs = process_io(outputs)

        if "name" not in kwargs:
            kwargs["name"] = self._generate_name("onnx_graphsurgeon_node")

        node = Node(*args, **kwargs, inputs=inputs, outputs=outputs)
        self.nodes.append(node)
        return node.outputs
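
A brief usage sketch of `layer()`, assuming the conventional top-level `onnx_graphsurgeon` import; the "Add" op and the tensor names are illustrative, not taken from the method itself.

    import onnx_graphsurgeon as gs

    graph = gs.Graph()

    # A string becomes a new Variable with a generated name; a list of ints becomes an int64 Constant.
    [summed] = graph.layer(op="Add", inputs=["input_tensor", [1, 2, 3]], outputs=["sum_out"])

    assert isinstance(summed, gs.Variable)
    assert isinstance(graph.nodes[0].inputs[1], gs.Constant)
    assert len(graph.nodes) == 1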