Example #1
  def testTensorNames(self):
    fdef = self._build_function_def()
    g, tensor_name_map = function_def_to_graph.function_def_to_graph_def(fdef)

    # Verify that inputs of body nodes are correctly renamed.
    # foo_1
    self.assertSequenceEqual(g.node[3].input, ["x:0", "y:0", "z:0"])
    # foo_2
    self.assertSequenceEqual(g.node[5].input,
                             ["foo_1:0", "foo_1:1", "list_output:1"])

    # Verify that the `tensor_name_map` has the correct mapping.
    self.assertDictEqual(
        tensor_name_map, {
            "x": "x:0",
            "^x": "^x",
            "y": "y:0",
            "^y": "^y",
            "z": "z:0",
            "^z": "^z",
            "foo_1:d:0": "foo_1:0",
            "foo_1:e:0": "foo_1:1",
            "^foo_1": "^foo_1",
            "list_output:a:0": "list_output:0",
            "list_output:a:1": "list_output:1",
            "^list_output": "^list_output",
            "foo_2:d:0": "foo_2:0",
            "foo_2:e:0": "foo_2:1",
            "^foo_2": "^foo_2",
        })
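The `_build_function_def` helper is not shown in this example. For a self-contained experiment, a FunctionDef can also be obtained from a concrete function; a minimal sketch, using a hypothetical `my_fn` (the test above builds its FunctionDef by hand instead):

# Sketch: obtaining a FunctionDef from a tf.function instead of building one
# manually. `my_fn` is a hypothetical example, not part of the test above.
import tensorflow as tf
from tensorflow.python.framework import function_def_to_graph

@tf.function
def my_fn(x, y):
    return x + y, x * y

fdef = my_fn.get_concrete_function(
    tf.TensorSpec([], tf.float32),
    tf.TensorSpec([], tf.float32)).function_def
graph_def, tensor_name_map = function_def_to_graph.function_def_to_graph_def(fdef)
# Keys use FunctionDef tensor naming ("op:output_arg:index"); values use the
# flat GraphDef naming ("op:index") checked in the assertions above.
print(tensor_name_map)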
Example #4
  def testAttributesForArgDef(self):

    @function.defun
    def fn(x):
      return x

    inp = constant_op.constant(1.0)
    fdef = fn.get_concrete_function(inp).function_def
    fdef.arg_attr[0].attr["_test_attr"].s = "value".encode("ascii")
    graph_def = function_def_to_graph.function_def_to_graph_def(fdef)
    placeholders = [
        ndef for ndef in graph_def[0].node if ndef.op == "Placeholder"
    ]
    self.assertEqual(1, len(placeholders))
    self.assertEqual(placeholders[0].attr["_test_attr"].s,
                     "value".encode("ascii"))
Example #5
  def testShapes(self):
    fdef = self._build_function_def()
    g, _ = function_def_to_graph.function_def_to_graph_def(
        fdef,
        input_shapes=[tensor_shape.scalar(),
                      tensor_shape.vector(5), None])
    self.assertIn("shape", g.node[0].attr)
    self.assertSequenceEqual(
        tensor_shape.TensorShape(g.node[0].attr["shape"].shape).as_list(), [])
    self.assertEqual(g.node[0].attr["shape"].shape.unknown_rank, False)
    self.assertIn("shape", g.node[1].attr)
    self.assertSequenceEqual(
        tensor_shape.TensorShape(g.node[1].attr["shape"].shape).as_list(), [5])
    self.assertEqual(g.node[1].attr["shape"].shape.unknown_rank, False)
    self.assertNotIn("shape", g.node[2].attr)
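tensor_shape.scalar() and tensor_shape.vector() were removed in later TensorFlow releases; a hedged sketch of the same call on a newer TF, passing plain TensorShape objects (or None for an unknown shape) instead:

from tensorflow.python.framework import tensor_shape

# Sketch, assuming a TF version where the scalar()/vector() helpers are gone.
g, _ = function_def_to_graph.function_def_to_graph_def(
    fdef,
    input_shapes=[tensor_shape.TensorShape([]),   # scalar
                  tensor_shape.TensorShape([5]),  # length-5 vector
                  None])                          # shape left unknown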
Example #7
    def _partition_call_operator(self, inputs, attr):
        """
        Convert the Relay Partition call ops into Relay Function calls and
        function definitions from Tensorflow graph library attribute to Relay global
        functions

        Parameters
        ----------
        node: TensorFlow graph node object.
            A TensorFlow graph node object.

        inputs : List[tvm.relay.Expr]
            List of input symbols.

        attrs : Dict[tvm.Attrs]
            Dict of operator attributes.

        Returns
        -------
        op : tvm.relay.Expr
            Converted relay expression.
        """

        try:
            from tensorflow.python.framework import function_def_to_graph
        except ImportError as e:
            raise ImportError(
                "Unable to import tensorflow, which is required: {}".format(e))

        main_graph_proto = self._main_graph_proto
        outer_graph_def = main_graph_proto._graph

        node_func_name = attr.get("f").name
        func = next(
            (f for f in outer_graph_def.library.function
             if f.signature.name == node_func_name),
            None,
        )
        if func:
            devices = set(node.device for node in func.node_def)
            if len(devices) > 1:
                raise Exception("Found inconsistent device assignment in the "
                                "stateful partitioned subgraph; rejecting "
                                "the subgraph.")
            # Convert function definition to graph
            func_input_shapes = func.attr["_input_shapes"].list.shape
            subgraph, _ = function_def_to_graph.function_def_to_graph_def(
                func, func_input_shapes)

            # Computing subgraph's input shape dictionary
            subgraph_shape_dict, input_expr_dict = {}, {}
            for f_arg, input_expr in zip(func.signature.input_arg, inputs):
                input_expr_dict[f_arg.name] = input_expr
                subgraph_shape_dict[f_arg.name] = _infer_shape(
                    input_expr, main_graph_proto._mod)

            func_name = "func_{}".format(func.signature.name)
            try:
                global_func = main_graph_proto._mod[func_name]
                sub_func = global_func
                sub_params = main_graph_proto._params
            except ValueError:
                # Construct relay nodes from the subgraph
                g1 = SubGraphProto(main_graph_proto)
                sub_func, sub_params = g1.from_tensorflow(
                    subgraph, shape=subgraph_shape_dict)
                main_graph_proto._params.update(sub_params)
                func_expr = _function.Function(sub_func.params, sub_func.body)
                global_func = tvm.relay.GlobalVar(func_name)
                main_graph_proto._mod[global_func] = func_expr
                main_graph_proto._mod = InferType()(main_graph_proto._mod)

            param_exprs = []
            for param_expr in sub_func.params:
                # sub_params is subset of sub_func.params
                param_name = param_expr.vid.name_hint
                if param_name in input_expr_dict.keys():
                    param_exprs.append(input_expr_dict[param_name])
                elif param_name in sub_params.keys():
                    param_exprs.append(param_expr)
                else:
                    raise Exception(
                        "Input parameter {} not found".format(param_name))

            sb = tvm.relay.scope_builder.ScopeBuilder()
            loop_ret = global_func(*param_exprs)
            sb.ret(loop_ret)
            ret = sb.get()
        else:
            raise Exception("Function not found - {}".format(node_func_name))
        return ret
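For context on the attr.get("f") lookup above: a (Stateful)PartitionedCall node stores its callee as a NameAttrList under the "f" attribute, so the library functions a graph references can also be listed straight off the GraphDef. A small sketch, assuming outer_graph_def as in the code above:

# Sketch: enumerate partitioned-call nodes and the FunctionDefs they invoke.
for node in outer_graph_def.node:
    if node.op in ("PartitionedCall", "StatefulPartitionedCall"):
        # AttrValue.func is a NameAttrList; .name is the FunctionDef's name.
        print(node.name, "->", node.attr["f"].func.name)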
Example #8
def from_tensorflow(graph_def, layout="NHWC", shape=None, outputs=None):
    """convert tensorflow2.x graph into relay function.

    Parameters
    ----------
    graph_def : GraphDef
        Must be a frozen graph (no variables allowed). Placeholders are
        assumed to be inputs to the graph.

        tensorflow/core/framework/graph.proto
            message GraphDef {
              repeated NodeDef node = 1;
              FunctionDefLibrary library = 2;
            }
        tensorflow/core/framework/function.proto
            message FunctionDef {
              repeated NodeDef node_def = 3;
            }

    layout : str
        The layout for the model.

    shape : Dict[str, List[int]]
        Input shapes for the model: a mapping from input name to shape vector.
        Applies to placeholders.

    outputs : List[str]
        The list of output nodes. The last node is treated as the output if not
        specified.

    Returns
    -------
    mod : tvm.IRModule
        The module that optimizations will be performed on.

    params : dict of str to tvm.nd.NDArray
        Dict of converted parameters stored in tvm.nd.NDArray format.

    Examples
    --------
    "x+1" tf module where x has a shape of (2,2) is converted as follows:

    mod : tvm.IRModule
        def @func___inference_add_95(%x: Tensor[(2, 2), float32], %add/y: Tensor[(2, 2), float32])
        -> Tensor[(2, 2), float32] {
        add(%x, %add/y) /* Identity */ /* ty=Tensor[(2, 2), float32] */
        }

        def @main(%x1: Tensor[(2, 2), float32], %add/y1: Tensor[(2, 2), float32]) {
        @func___inference_add_95(%x1, %add/y1) /* Identity */
        }

    params : dict of str to tvm.nd.NDArray
        {'add/y': <tvm.nd.NDArray shape=(2, 2), cpu(0)>}

    """

    # Subgraph graph_defs are cached here to avoid a TF error when parsing after prelude init
    graph_def_library = {}
    for func in graph_def.library.function:
        inshape = func.attr["_input_shapes"].list.shape
        graph_def_library[func.signature.name], _ = function_def_to_graph.function_def_to_graph_def(
            func, inshape
        )
    module = RelayModule()
    g = GraphProto(module)
    func, params = g.from_tensorflow(graph_def, layout, shape, outputs, gdef_lib=graph_def_library)
    module.mod["main"] = func
    module.params.update(params)
    return module.mod, module.params
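A hypothetical usage sketch; the import path assumes TVM's TF2 frontend module (tvm.relay.frontend.tensorflow2), and the input/output names are illustrative:

from tvm.relay.frontend.tensorflow2 import from_tensorflow  # assumed location

# graph_def must already be a frozen TF2 GraphDef (e.g. produced with
# convert_variables_to_constants_v2 and exported via .graph.as_graph_def()).
mod, params = from_tensorflow(
    graph_def, layout="NHWC", shape={"x": [2, 2]}, outputs=["Identity"])
print(mod["main"])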