Example #1
 def MakeConst(self, name, np_val, skip_conversion=False, raw=True):
     """Make a new constant in the graph.
     Args:
         name: const node name, must be unique.
         np_val: value of type numpy ndarray.
         skip_conversion: bool, indicate whether this created node would be mapped during conversion.
         raw: whether to store data at field of raw_data or the specific field according to its dtype
     """
     if raw:
         onnx_tensor = util.TensorProtoFromNumpy(np_val, name)
     else:
         onnx_tensor = helper.make_tensor(
             name,
             util.Numpy2OnnxDtype(np_val.dtype),
             np_val.shape,
             np_val.flatten(),  # make_tensor expects a flat sequence of values
             raw=False,
         )
     dtype = onnx_tensor.data_type
     node = self.MakeNode(
         "Const",
         [],
         outputs=[name],
         name=name,
         attr={"value": onnx_tensor},
         skip_conversion=skip_conversion,
         dtypes=[dtype],
         infer_shape_dtype=False,
     )
     self.set_shape(name, np_val.shape)
     self.set_dtype(name, util.Numpy2OnnxDtype(np_val.dtype))
     return node
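A minimal usage sketch; the graph object `g` is hypothetical and stands for whatever Graph instance this converter builds, only MakeConst itself comes from the example above:

    import numpy as np

    # Assuming `g` is an existing Graph instance from this converter.
    weight = np.ones((3, 3), dtype=np.float32)
    const_node = g.MakeConst("my_weight", weight)  # stored via raw_data by default
    # The output tensor now tracks the numpy shape and dtype:
    # g.get_shape("my_weight") -> [3, 3]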
Example #2
 def set_tensor_value(self, new_val):
     """Set new value for existing onnx tensor.
     Args:
         new_val: value of type numpy ndarray
     """
     if not self.is_const():
         raise ValueError("set tensor value: {} must be Const".format(
             self.name))
     t = self.get_attr("value")
     if not t:
         raise ValueError("set tensor value: {} is None".format(self.name))
     t = helper.get_attribute_value(t)
     onnx_tensor = util.TensorProtoFromNumpy(new_val, t.name)
     del t
     self.set_attr("value", onnx_tensor)
     # track shapes in _output_shapes
     self._GraphCheck()
     self.graph.set_shape(onnx_tensor.name, onnx_tensor.dims)
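A hedged call-site sketch, assuming `const_node` is a Const node such as the one produced by MakeConst in Example #1:

    import numpy as np

    new_val = np.zeros((2, 4), dtype=np.float32)
    const_node.set_tensor_value(new_val)
    # The graph's tracked shape for this tensor is refreshed to [2, 4].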
Example #3
 def set_tensor_value(self, new_val):
     """Set new value for existing onnx tensor.
     Args:
         new_val: value of type numpy ndarray
     """
     if not self.is_const():
         raise ValueError("set tensor value: {} must be Const".format(self.name))
     t = self.attrs.get("value")
     if t is not None:
         t = helper.get_attribute_value(t)
     if self.op_type == "Const":
         tensor_name = t.name
     else:
         tensor_name = self.output_tensor_names[0]
     # release the old attribute value only after its name has been read
     del t
     onnx_tensor = util.TensorProtoFromNumpy(new_val, tensor_name)
     self.attrs["value"] = onnx_tensor
     # track shapes in _output_shapes
     self._GraphCheck()
     self.graph.set_shape(onnx_tensor.name, onnx_tensor.dims)
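This variant reads the value attribute through the `attrs` mapping and, for nodes whose op type is not "Const", names the new tensor after the node's first output. A hedged usage sketch, assuming `node` is such a constant-holding node:

    import numpy as np

    node.set_tensor_value(np.arange(6, dtype=np.int64).reshape(2, 3))
    # The replacement TensorProto is stored back into node.attrs["value"]
    # and the graph's tracked shape becomes [2, 3].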
Example #4
    def UpdateNodeShapeDtype(self, node, override=False):
        """Try the best to infer shapes and dtypes for outputs of the node,
        by default, we respect oneflow shapes and dtypes.
        """
        if node.is_const() or node.is_graph_input():
            return
        # NOTE: only support onnx node for now
        if not util.is_onnx_domain(node.domain):
            return

        logger.debug("Infer shape and dtype for [%s]", node.name)
        # NOTE: shape inference for some ops needs the input values of the op, e.g., the
        # Reshape op needs the "Shape" value to infer its output shape.
        initializers = []
        for i, inp in enumerate(node.inputs):
            if inp is None:
                if not self.is_empty_input(node.input[i]):
                    if logger.isEnabledFor(logging.INFO):
                        logger.warning(
                            "[%s] infer a inexistent node: [%s], please check the code",
                            node.name,
                            node.input[i],
                        )
                continue
            if inp.is_const():
                tensor = util.TensorProtoFromNumpy(
                    inp.get_tensor_value(as_list=False), name=inp.output[0])
                initializers.append(tensor)

        input_shapes = [self.get_shape(i) for i in node.input]
        input_dtypes = [self.get_dtype(i) for i in node.input]

        shapes, dtypes = InferOnnxShapeDtype(node, self._opset, input_shapes,
                                             input_dtypes, initializers)
        if not shapes or not dtypes:
            return

        for output, shape, dtype in zip(node.output, shapes, dtypes):
            if dtype == TensorProto.UNDEFINED:
                logger.debug(
                    "Inferred dtype for [%s, type: %s] is UNDEFINED, SKIP",
                    node.name,
                    node.type,
                )
            else:
                existing_dtype = self.get_dtype(output)
                if existing_dtype is not None and existing_dtype != dtype:
                    if override:
                        logger.warning(
                            "Override dtype of %s from %s to %s",
                            output,
                            existing_dtype,
                            dtype,
                        )
                    else:
                        dtype = existing_dtype
                self.set_dtype(output, dtype)
                logger.debug("Set dtype of [%s] to %s", output, dtype)

            if shape is None:
                logger.debug(
                    "Inferred shape for [%s, type: %s] is None, SKIP",
                    node.name,
                    node.type,
                )
            else:
                existing_shape = self.get_shape(output)
                if existing_shape is not None and not util.AreShapesEqual(
                        existing_shape, shape):
                    if override:
                        logger.warning(
                            "Override shape of %s from %s to %s",
                            output,
                            existing_shape,
                            shape,
                        )
                    else:
                        shape = existing_shape
                self.set_shape(output, shape)
                logger.debug("Set shape of [%s] to %s", output, shape)
Example #5
    def MakeGraph(self,
                  doc,
                  onnx_filename,
                  external_data=False,
                  graph_name="oneflow.python.onnx"):
        """
        Create GraphProto for onnx from internal graph.
        Args:
            optimize: optimize graph via onnx
            doc: text for doc string of the graph
        """
        self.DeleteUnusedNodes(self.outputs)
        self.TopologicalSort(self.get_nodes())
        self.UpdateProto()

        ops = []
        order_non_sensitive_placeholders = []
        order_sensitive_placeholders = self._order_sensitive_inputs
        const_ops = []
        output_ops = []
        for op in self.get_nodes():
            if op.is_const():
                const_ops.append(op)
                continue
            if op.is_graph_input():
                if op not in self._order_sensitive_inputs:
                    order_non_sensitive_placeholders.append(op)
                continue
            ops.append(op)
        placeholder_ops = (order_sensitive_placeholders +
                           order_non_sensitive_placeholders)

        initializers = []
        # create initializers for constant nodes
        for op in const_ops:
            tensor_name = op.output[0]
            tensor = util.TensorProtoFromNumpy(
                op.get_tensor_value(as_list=False),
                tensor_name,
                external_data=external_data,
                export_path=onnx_filename,
            )
            initializers.append(tensor)

        # create input_tensor_values
        input_ids = [op.output[0] for op in placeholder_ops]
        # onnx with IR version below 4 requires initializers to also be listed in the graph inputs.
        # here we check the opset version rather than the IR version for the reason explained in:
        # https://github.com/onnx/tensorflow-onnx/pull/557
        # opset 9 comes with IR 4.
        if self.opset < 9:
            input_ids += [op.output[0] for op in const_ops]

        input_tensor_values = self.MakeOnnxGraphIO(input_ids)

        # create output_tensor_values
        output_tensor_values = self.MakeOnnxGraphIO(self.outputs)

        # create graph proto
        graph = helper.make_graph(
            [op.op for op in ops],
            graph_name,
            input_tensor_values,
            output_tensor_values,
            initializer=initializers,
            doc_string=doc,
        )

        return graph
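A minimal sketch of turning the returned GraphProto into a saved model; helper.make_model, onnx.checker.check_model, and onnx.save are standard onnx APIs, while the graph object `g` and the file name are assumptions:

    import onnx
    from onnx import helper

    # Assuming `g` is a fully built Graph instance from this converter.
    graph_proto = g.MakeGraph("converted from oneflow", "model.onnx")
    model = helper.make_model(graph_proto, producer_name="oneflow.python.onnx")
    onnx.checker.check_model(model)
    onnx.save(model, "model.onnx")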