Example #1
    def convert(self, inputs: List["chainer.Variable"], outputs: List["chainer.Variable"]) -> Graph:
        """convert(inputs, outputs)

        Convert a Chainer computational graph into WebDNN IR.

        Args:
            inputs(list of chainer.Variable): input chainer variables
            outputs(list of chainer.Variable): output chainer variables

        .. admonition:: Example

            Convert pre-trained ResNet model

            .. code::

                model = chainer.links.model.vision.resnet.ResNet50Layers()

                # Forward propagation with dummy input to build computational graph
                x = chainer.Variable(np.empty((1, 3, 224, 224), dtype=np.float32))
                y = model(x, layers=["fc6"])["fc6"]

                graph = ChainerConverter().convert([x], [y])

        Returns:
            (:class:`~webdnn.Graph`): WebDNN Graph
        """

        for v in inputs:
            if isinstance(v, PlaceholderVariable):
                n_var = Variable(v.actual_shape, Order([None] * v.ndim))
                self.set_variable(to_variable_node(v), n_var)

        inputs = [to_variable_node(v) for v in inputs]
        outputs = [to_variable_node(v) for v in outputs]

        # Convert parameters into constant variable
        input_set = set(inputs)
        for node in chainer.computational_graph.build_computational_graph(outputs).nodes:
            if isinstance(node, T_VARIABLE) and not self.has_variable(node) and node.creator is None:
                # If "c_var.creator" is None, it's input variable or parameters.

                # NOTE(Kiikurage):
                # In Chainer v1, "Variable" doesn't support the "__eq__" method, so "list.__contains__" cannot be used for a list of variables.
                # However, "Variable.__hash__" is implemented and "set.__contains__" is available.
                self._convert_var(node, constant=node not in input_set)

        # Convert each Chainer function into WebDNN operators
        for c_opr in _listup_functions(inputs, outputs):
            self._convert_operator(c_opr)

        # Build graph
        graph = Graph([self.get_variable(c_var) for c_var in inputs], [self.get_variable(c_var) for c_var in outputs])

        for v in graph.inputs:
            v.attributes.add(Input(v))

        for v in graph.outputs:
            v.attributes.add(Output(v))

        return graph
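The graph produced above can then be handed to a WebDNN backend generator. A minimal follow-up sketch, assuming the standard webdnn.backend.generate_descriptor API and the ResNet example from the docstring:

import numpy as np
import chainer
from webdnn.backend import generate_descriptor
from webdnn.frontend.chainer import ChainerConverter

model = chainer.links.model.vision.resnet.ResNet50Layers()

# Forward propagation with a dummy input to build the computational graph
x = chainer.Variable(np.empty((1, 3, 224, 224), dtype=np.float32))
y = model(x, layers=["fc6"])["fc6"]

graph = ChainerConverter().convert([x], [y])

# Generate and save an executable descriptor for the WebGPU backend
exec_info = generate_descriptor("webgpu", graph)
exec_info.save("./output")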
Example #2
    def convert(self,
                model: "keras.models.Model",
                input_orders: List[Order] = None) -> Graph:
        """convert(model, input_orders=None)

        Convert a Keras model into WebDNN IR Graph.

        Args:
            model (`keras.models.Model`): keras model
            input_orders (list of :class:`~webdnn.graph.order.Order`): Order of input tensors. If `input_orders=None`, the default
                order (`OrderNC` for 2D, `OrderNTC` for 3D, `OrderNHWC` for 4D) is assigned to all input tensors. If only
                `input_orders[0]` is `None`, only the first input tensor is converted with the default order.

        .. admonition:: Example

            .. code::

                model = keras.models.load_model("pre_trained_model.h5")
                graph = KerasConverter(batch_size=1).convert(model)

        Returns:
            (:class:`~webdnn.graph.graph.Graph`): WebDNN IR Graph
        """
        if not model.built:
            model.build(None)

        self._convert_tensors(model.inputs, input_orders)

        for depth in sorted(list(model.nodes_by_depth.keys()), reverse=True):
            for node in model.nodes_by_depth[depth]:
                self._convert_operator(node.outbound_layer)

                # Check that all output tensors from the current layer are converted into WebDNN Variables
                for tensor in node.output_tensors:
                    if not self.has_variable(tensor):
                        raise AssertionError(
                            f"[KerasConverter] {node.outbound_layer} outputs {tensor}, but it was not converted into WebDNN Variable by "
                            f"{self._handler_map[self.__class__.__name__][self.serialize_operator_type(node.outbound_layer)]}"
                        )

        self._input_index_dict[model] -= 1
        self._output_index_dict[model] -= 1
        self._input_tensor_cache = None
        self._output_tensor_cache = None

        graph = Graph(
            [self.get_variable(t) for t in self.get_input_tensor(model)],
            [self.get_variable(t) for t in self.get_output_tensor(model)])

        self._input_tensor_cache = None
        self._output_tensor_cache = None

        for v in graph.inputs:
            v.attributes.add(Input(v))

        for v in graph.outputs:
            v.attributes.add(Output(v))

        return graph
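When the default orders described in the docstring do not match the model, input_orders can be passed explicitly. A small usage sketch, assuming OrderNHWC from webdnn.graph.order and a model with a single 4D image input:

import keras
from webdnn.frontend.keras import KerasConverter
from webdnn.graph.order import OrderNHWC

# File name taken from the docstring example above
model = keras.models.load_model("pre_trained_model.h5")

# Pin the input layout explicitly instead of relying on the default assignment
graph = KerasConverter(batch_size=1).convert(model, input_orders=[OrderNHWC])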
Example #3
    def convert(self, inputs: List["chainer.Variable"], outputs: List["chainer.Variable"]) -> Graph:
        """convert(inputs, outputs)

        Convert a Chainer computational graph into WebDNN IR.

        Args:
            inputs(list of chainer.Variable): input chainer variables
            outputs(list of chainer.Variable): output chainer variables

        .. admonition:: Example

            .. code::

                model = chainer.links.model.vision.resnet.ResNet50Layers()

                # Forward propagation with dummy input to build computational graph
                x = chainer.Variable(np.empty((1, 3, 224, 224), dtype=np.float32))
                y = model(x, layers=["fc6"])["fc6"]

                graph = ChainerConverter().convert([x], [y])

        Returns:
            (:class:`~webdnn.Graph`): WebDNN Graph
        """
        chainer_graph = chainer.computational_graph.build_computational_graph(outputs)

        # In Chainer v2, variables are represented as Variable and VariableNode objects, and
        # graph information such as edge connections is contained in the variable node.
        # Therefore all Chainer variables must be normalized into variable nodes.
        c_vars = list(map(_to_variable_node,
                          filter(lambda v: isinstance(v, VariableNode), chainer_graph.nodes)))  # type: List[VariableNode]
        inputs = [_to_variable_node(v) for v in inputs]
        outputs = [_to_variable_node(v) for v in outputs]
        input_set = set(inputs)

        for c_var in c_vars:
            if c_var.creator is None:
                # If :code:`creator is None` and it's not an input variable, it's a parameter.

                # NOTE(Kiikurage):
                # In Chainer v1.x and v2.x, `Variable` doesn't support the `__eq__` method, so `list.__contains__` cannot be used for a
                # list of Variables. However, `Variable.__hash__` is implemented, so `set.__contains__` is available.
                self._convert_var(c_var, constant=c_var not in input_set)

        for c_opr in _listup_functions(inputs, outputs):
            self._convert_operator(c_opr)

        graph = Graph([self.get_variable(c_var) for c_var in inputs],
                      [self.get_variable(c_var) for c_var in outputs])

        for v in graph.inputs:
            v.attributes.add(Input(v))

        for v in graph.outputs:
            v.attributes.add(Output(v))

        return graph
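The comments above rely on a private helper _to_variable_node that is not shown on this page. An illustrative sketch only, assuming it simply unwraps a chainer.Variable into its VariableNode as the Chainer v2 comment describes:

import chainer
from chainer.variable import VariableNode

def _to_variable_node(c_var):
    # In Chainer v2+, graph structure (creator, edges) lives on the VariableNode
    if isinstance(c_var, chainer.Variable):
        return c_var.node
    return c_var  # already a VariableNode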
Example #4
    def convert(self, model: IModelProto) -> Graph:
        """convert(model)

        Convert an ONNX computational graph into WebDNN IR.

        Args:
            model: Proto data of ONNX model

        .. admonition:: Example

            Convert a model stored in ONNX format as "model.proto".

            .. code::

                import onnx
                from webdnn.frontend.onnx import ONNXConverter

                # import model in onnx
                model = onnx.load("model.proto")

                # convert
                graph = ONNXConverter().convert(model)

        Returns:
            (:class:`~webdnn.Graph`): WebDNN Graph
        """
        onnx_graph = model.graph  # type: IGraphProto
        self.opset_version = model.opset_import[0].version

        # Convert constant parameters
        for proto in onnx_graph.initializer:
            self.set_variable(proto.name, _convert_tensor_proto(proto))

        # Convert input variables
        # In ONNX, both input variables and parameters are included in `graph.input`.
        inputs = []
        for proto in filter(lambda proto: not self.has_variable(proto.name),
                            onnx_graph.input):
            v = _convert_value_info_proto(proto)
            self.set_variable(proto.name, v)
            inputs.append(v)

        # Convert operators
        for onnx_op in _listup_functions(onnx_graph):
            self._convert_operator(onnx_op)

        webdnn_graph = Graph(
            inputs,
            [self.get_variable(proto.name) for proto in onnx_graph.output])

        for v in webdnn_graph.inputs:
            v.attributes.add(Input())

        for v in webdnn_graph.outputs:
            v.attributes.add(Output())

        return webdnn_graph
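As the comment above notes, ONNX lists parameters alongside true inputs in graph.input. A standalone sketch showing how the true inputs can be separated from the initializers using only the onnx package:

import onnx

model = onnx.load("model.proto")
initializer_names = {tensor.name for tensor in model.graph.initializer}

# graph.input contains both real inputs and parameters; keep only the real inputs
true_inputs = [vi for vi in model.graph.input if vi.name not in initializer_names]
print([vi.name for vi in true_inputs])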
Example #5
    def convert(self, chainer_computational_graph: chainer.computational_graph.ComputationalGraph,
                input_c_vars: List[chainer.Variable],
                output_c_vars: List[chainer.Variable]) -> Graph:
        # In Chainer v2, variables are represented as Variable and VariableNode objects, and
        # graph information such as edge connections is contained in the variable node.
        # Therefore all Chainer variables must be normalized into variable nodes.
        input_c_vars = [_to_variable_node(v) for v in input_c_vars]
        output_c_vars = [_to_variable_node(v) for v in output_c_vars]

        # Append the Input attribute to input variables
        input_n_vars = []
        for c_var in input_c_vars:
            n_var = self._convert_var(c_var)
            n_var.attributes.add(Input(n_var))
            input_n_vars.append(n_var)

        self._convert_weight_vars(chainer_computational_graph)

        pending_c_oprs = [
            c_opr for c_opr in chainer_computational_graph.nodes
            if isinstance(c_opr, chainer.Function)
        ]

        while len(pending_c_oprs) > 0:
            for c_opr in pending_c_oprs:
                if all(self.has_variable(_to_variable_node(c_var))
                       for c_var in c_opr.inputs):
                    # All input variables of `c_opr` are converted, so `c_opr` itself can be converted.
                    self.convert_operator(c_opr)
                    pending_c_oprs.remove(c_opr)
                    break  # for c_opr in pending_c_oprs
            else:
                console.debug(pending_c_oprs)
                raise ValueError("Inputs to functions cannot be resolved.")

        # Append the Output attribute to output variables
        output_n_vars = []
        for c_var in output_c_vars:
            if not self.has_variable(c_var):
                raise ValueError("Output variable is not generated by graph.")
            n_var = self.get_variable(c_var)
            n_var.attributes.add(Output(n_var))
            output_n_vars.append(n_var)

        # Convert variable order into typical one in Chainer
        self._transpose_vars()

        return Graph(input_n_vars, output_n_vars)
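The while loop above converts Chainer functions in dependency order: any pending function whose inputs have all been converted is handled next, and a full pass with no progress is an error. A standalone sketch of that pattern (an illustration, not part of the WebDNN API):

def resolve_in_dependency_order(pending_ops, is_ready, convert):
    """Convert ops one by one, always picking one whose inputs are already available."""
    pending_ops = list(pending_ops)
    while pending_ops:
        for op in pending_ops:
            if is_ready(op):
                convert(op)
                pending_ops.remove(op)
                break
        else:
            # A whole pass made no progress: the dependencies cannot be resolved
            raise ValueError("Inputs to functions cannot be resolved.")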
Example #6
    def _convert_fallback(self, model: "keras.models.Model") -> Graph:
        if not model.built:
            model.build(None)

        self._convert_tensors(model.inputs)
        for tensor in model.inputs:
            v = self.get_variable(tensor)
            if not Placeholder.check_resolved(v.shape[0]):
                v.shape[0].value = self._batch_size

        for depth in sorted(list(model._nodes_by_depth.keys()), reverse=True):
            for node in model._nodes_by_depth[depth]:
                self._convert_operator(node.outbound_layer)

                # Check that all output tensors from the current layer are converted into WebDNN Variables
                for tensor in node.output_tensors:
                    if not self.has_variable(tensor):
                        raise AssertionError(
                            f"[KerasConverter] {node.outbound_layer} outputs {tensor}, but it was not converted into WebDNN Variable by "
                            f"{self._handler_map[self.__class__.__name__][self.serialize_operator_type(node.outbound_layer)]}"
                        )

        self._input_index_dict[model] -= 1
        self._output_index_dict[model] -= 1
        self._input_tensor_cache = None
        self._output_tensor_cache = None

        graph = Graph(
            [self.get_variable(t) for t in self.get_input_tensor(model)],
            [self.get_variable(t) for t in self.get_output_tensor(model)])

        self._input_tensor_cache = None
        self._output_tensor_cache = None

        for v in graph.inputs:
            v.attributes.add(Input())

        for v in graph.outputs:
            v.attributes.add(Output())

        return graph
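_convert_fallback above resolves an unresolved batch-size placeholder by assigning it a concrete value. A small sketch of that mechanism, assuming the webdnn.graph.placeholder.Placeholder API that the code itself uses (check_resolved and the value setter):

from webdnn.graph.placeholder import Placeholder

N = Placeholder(label="N")                # symbolic batch size
assert not Placeholder.check_resolved(N)  # still unresolved

N.value = 1                               # same idea as `v.shape[0].value = self._batch_size`
assert Placeholder.check_resolved(N)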
Example #7
def test_input():
    """
    test_input

    v[Input] -+
              +-{Add}- y
           c -+
    """

    c = ConstantVariable(np.random.rand(2, 3, 4, 5), OrderNCHW)
    v = Variable(c.shape, c.order)
    v.attributes.add(Input())

    y = v + c
    op = y.output_from

    assert not op.has_attribute(Inplace)

    UpdateInplaceAttribute().optimize(Graph([v], [y]))

    assert not op.has_attribute(Inplace)
Example #8
    def convert(self, chainer_computational_graph:
                "chainer.computational_graph.ComputationalGraph",
                input_c_vars: List["chainer.Variable"],
                output_c_vars: List["chainer.Variable"]) -> Graph:
        """convert(chainer_computational_graph, input_c_vars, output_c_vars)

        Convert a Chainer computational graph into WebDNN IR.

        Instead of using this method directly, you should use
        :func:`convert_from_inout_vars<webdnn.frontend.chainer.ChainerConverter.convert_from_inout_vars>`.

        Args:
            chainer_computational_graph(chainer.computational_graph.ComputationalGraph): chainer computational graph
            input_c_vars(list of chainer.Variable): input chainer variables
            output_c_vars(list of chainer.Variable): output chainer variables

        Returns:
            (:class:`~webdnn.Graph`): WebDNN Graph
        """
        # In Chainer v2, variables are represented as Variable and VariableNode objects, and
        # graph information such as edge connections is contained in the variable node.
        # Therefore all Chainer variables must be normalized into variable nodes.
        input_c_vars = [_to_variable_node(v) for v in input_c_vars]
        output_c_vars = [_to_variable_node(v) for v in output_c_vars]

        # Append the Input attribute to input variables
        input_n_vars = []
        for c_var in input_c_vars:
            n_var = self._convert_var(c_var)
            n_var.attributes.add(Input(n_var))
            input_n_vars.append(n_var)

        self._convert_weight_vars(chainer_computational_graph)

        pending_c_oprs = [
            c_opr for c_opr in chainer_computational_graph.nodes
            if isinstance(c_opr, chainer.Function)
        ]

        while len(pending_c_oprs) > 0:
            for c_opr in pending_c_oprs:
                if all(self.has_variable(_to_variable_node(c_var))
                       for c_var in c_opr.inputs):
                    # All input variables of `c_opr` are converted, so `c_opr` itself can be converted.
                    self._convert_operator(c_opr)
                    pending_c_oprs.remove(c_opr)
                    break  # for c_opr in pending_c_oprs
            else:
                console.debug(pending_c_oprs)
                raise ValueError("Inputs to functions cannot be resolved.")

        # Append the Output attribute to output variables
        output_n_vars = []
        for c_var in output_c_vars:
            if not self.has_variable(c_var):
                raise ValueError("Output variable is not generated by graph.")
            n_var = self.get_variable(c_var)
            n_var.attributes.add(Output(n_var))
            output_n_vars.append(n_var)

        graph = Graph(input_n_vars, output_n_vars)
        # Convert variable order into typical one in Chainer
        self._transpose_vars(graph)

        return graph
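The docstring above recommends convert_from_inout_vars over calling convert directly. A hedged usage sketch, reusing the ResNet example from the other ChainerConverter docstrings on this page:

import numpy as np
import chainer
from webdnn.frontend.chainer import ChainerConverter

model = chainer.links.model.vision.resnet.ResNet50Layers()

# Forward propagation with a dummy input to build the computational graph
x = chainer.Variable(np.empty((1, 3, 224, 224), dtype=np.float32))
y = model(x, layers=["fc6"])["fc6"]

graph = ChainerConverter().convert_from_inout_vars([x], [y])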
Example #9
    def convert(
        self,
        inputs: List["tf.Tensor"],
        outputs: List["tf.Tensor"],
        order_hints: Optional[Dict[Union["tf.Tensor", "tf.Variable"],
                                   Order]] = None
    ) -> Graph:
        """convert(model, input_orders=None)

        Args:
            inputs (list of `tf.Tensor`): tensorflow input tensors
            outputs (list of `tf.Tensor`): tensorflow output tensors
            order_hints: Order annotations which help WebDNN's optimizer.

        .. admonition:: Example

            .. code::

                # y = x @ W + b
                x = tf.placeholder(tf.float32, [None, 784])
                W = tf.Variable(tf.zeros([784, 10]))
                b = tf.Variable(tf.zeros([10]))
                y = tf.nn.softmax(tf.matmul(x, W) + b)

                webdnn_graph = TensorFlowConverter().convert([x], [y])

        Returns:
            (:class:`~webdnn.graph.graph.Graph`): WebDNN IR Graph
        """

        for tensor in inputs:
            shape = [
                Placeholder() if dim.value is None else dim.value
                for dim in tensor.shape.dims
            ]
            if isinstance(shape[0], Placeholder):
                shape[0] = self._batch_size
            self.set_variable(tensor,
                              Variable(shape, Order([None] * len(shape))))

        ops = _listup_operations(inputs, outputs)
        for op in ops:
            self._convert_operator(op)

        if order_hints:
            for tensor, order in order_hints.items():
                if isinstance(tensor, tf.Variable):
                    tensor = tensor.value()

                variable = self.get_variable(tensor)
                for axis1, axis2 in zip(variable.order.axes, order.axes):
                    axis1.unify(axis2)

        # Remove redundant ReinterpretAxis operators
        graph = Graph([self.get_variable(tensor) for tensor in inputs],
                      [self.get_variable(tensor) for tensor in outputs])
        graph, _ = TensorFlowFrontendOptimizeRule().optimize(graph)

        for v in graph.inputs:
            v.attributes.add(Input(v))

        for v in graph.outputs:
            v.attributes.add(Output(v))

        return graph
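order_hints is a plain dict from TensorFlow tensors to WebDNN orders. A hedged usage sketch, assuming OrderNC from webdnn.graph.order and the x/y tensors from the docstring example above:

from webdnn.graph.order import OrderNC

# Hint that both the input batch and the softmax output are (batch, channel) tensors
webdnn_graph = TensorFlowConverter().convert(
    [x], [y], order_hints={x: OrderNC, y: OrderNC})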
Example #10
    def _convert_input_vars(self, input_vars: List[chainer.Variable]):
        for cvar in input_vars:
            nvar = self._convert_var(cvar)
            nvar.attributes.add(Input(nvar))
Example #11
    def convert(
        self,
        inputs: Sequence["tf.Tensor"],
        outputs: Sequence["tf.Tensor"],
        order_hints: Optional[Dict[Union["tf.Tensor", "tf.Variable"],
                                   Order]] = None
    ) -> Graph:
        """convert(model, input_orders=None)

        Args:
            inputs (list of `tf.Tensor`): tensorflow input tensors
            outputs (list of `tf.Tensor`): tensorflow output tensors
            order_hints: Order annotations which help WebDNN's optimizer.

        .. admonition:: Example

            Convert a TensorFlow model.

            .. code::

                import tensorflow as tf
                from webdnn.frontend.tensorflow import TensorFlowConverter

                # y = x @ W + b
                x = tf.placeholder(tf.float32, [None, 784])
                W = tf.Variable(tf.zeros([784, 10]))
                b = tf.Variable(tf.zeros([10]))
                y = tf.nn.softmax(tf.matmul(x, W) + b)

                graph = TensorFlowConverter().convert([x], [y])

        Returns:
            (:class:`~webdnn.graph.graph.Graph`): WebDNN IR Graph
        """

        for tensor in inputs:
            shape = [
                Placeholder() if dim.value is None else dim.value
                for dim in tensor.shape.dims
            ]
            if isinstance(shape[0], Placeholder):
                shape[0] = self._batch_size
            self.set_variable(tensor,
                              Variable(shape, Order([None] * len(shape))))

        ops = _listup_operations(inputs, outputs)
        for op in ops:
            self._convert_operator(op)
            sub_graph = Graph([
                self.get_variable(tf_tensor)
                for tf_tensor in op.inputs if self.has_variable(tf_tensor)
            ], [
                self.get_variable(tf_tensor)
                for tf_tensor in op.outputs if self.has_variable(tf_tensor)
            ])
            old_outputs = list(sub_graph.outputs)

            # Constant folding improves the chance of a successful conversion, because many tensors are used not only as the main
            # input of an operation but also as other parameters, such as indices, and WebDNN doesn't support dynamic index operations.
            OptimizeRuleGroup([ConstantFolding()],
                              repeat=True).optimize(sub_graph)

            # After constant folding, the old variables need to be replaced with the new constant variables
            for tf_tensor in op.outputs:
                if not self.has_variable(tf_tensor):
                    # This tensor is not converted (ignored)
                    continue

                old_v = self.get_variable(tf_tensor)
                new_v = sub_graph.outputs[old_outputs.index(old_v)]
                if old_v != new_v:
                    self.set_variable(tf_tensor, new_v, overwrite=True)

        if order_hints:
            for tensor, order in order_hints.items():
                if isinstance(tensor, tf.Variable):
                    tensor = tensor.value()

                variable = self.get_variable(tensor)
                for axis1, axis2 in zip(variable.order.axes, order.axes):
                    axis1.unify(axis2)

        # Remove redundant ReinterpretAxis operators
        graph = Graph([self.get_variable(tensor) for tensor in inputs],
                      [self.get_variable(tensor) for tensor in outputs])
        graph, _ = TensorFlowFrontendOptimizeRule().optimize(graph)

        for v in graph.inputs:
            v.attributes.add(Input(v))

        for v in graph.outputs:
            v.attributes.add(Output(v))

        return graph
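The per-operator constant folding above is what lets tensors that feed index-like parameters become compile-time constants. A standalone sketch of the constant-folding idea itself (an illustration under assumed data structures, not the WebDNN implementation):

def fold_constants(ops, constants):
    """ops: list of (fn, input_names, output_name); constants: dict name -> known value."""
    for fn, input_names, output_name in ops:
        # If every input is already a known constant, evaluate now and record the result
        if all(name in constants for name in input_names):
            constants[output_name] = fn(*(constants[name] for name in input_names))
    return constants

folded = fold_constants(
    [(lambda a, b: a + b, ("two", "three"), "five")],
    {"two": 2, "three": 3})
assert folded["five"] == 5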