Exemple #1
0
    def test_contrib_op_with_loop(self):
        """Export a scripted model whose loop body uses a contrib operator.

        Registers a custom symbolic for ``aten::gelu`` that emits the
        ``com.microsoft::Gelu`` contrib op, then exports a scripted module
        containing a loop with a data-dependent branch and checks it via
        ``run_model_test``.
        """
        class LoopGelu(torch.nn.Module):
            def __init__(self):
                super().__init__()
                self.gelu = torch.nn.GELU()

            def forward(self, x):
                stored, gathered = [], []
                for _ in range(x.size(0)):
                    # First iteration: `stored` is empty, so the GELU branch
                    # runs; later iterations reuse the cached slice instead.
                    if len(stored) > 0:
                        gathered.append(stored[0])
                    else:
                        gathered.append(self.gelu(x[0]))
                    stored.append(x[0])
                return torch.stack(stored), torch.stack(gathered)

        def symbolic_custom_gelu(g, input):
            # Emit the contrib op and carry over the input's type information.
            return g.op("com.microsoft::Gelu", input).setType(input.type())

        from torch.onnx import register_custom_op_symbolic
        register_custom_op_symbolic("::gelu", symbolic_custom_gelu, 1)

        x = torch.randn(3, 3, 4, requires_grad=True)
        model = torch.jit.script(LoopGelu())
        run_model_test(self, model, input=(x, ))
Exemple #2
0
def exportTest(self, model, inputs, rtol=1e-2, atol=1e-7, opset_versions=None):
    """Export *model* under each requested ONNX opset and compare outputs.

    When *opset_versions* is falsy, opsets 7 through 12 are exercised.
    The current opset is recorded on ``self`` before each export so that
    ``run_model_test`` picks it up.
    """
    if not opset_versions:
        opset_versions = [7, 8, 9, 10, 11, 12]
    for version in opset_versions:
        self.opset_version = version
        run_model_test(self, model, False, input=inputs, rtol=rtol, atol=atol)
Exemple #3
0
def exportTest(self, model, inputs, rtol=1e-2, atol=1e-7, opset_versions=None):
    """Run the ONNX export round-trip test for each opset version.

    Defaults to opsets 7-14. Shape inference is enabled for every run.
    For opsets newer than 11 (and only when script tests are enabled),
    the TorchScript-compiled variant of the model is exported as well.
    """
    versions = opset_versions or [7, 8, 9, 10, 11, 12, 13, 14]

    for version in versions:
        self.opset_version = version
        self.onnx_shape_inference = True
        run_model_test(self, model, input_args=inputs, rtol=rtol, atol=atol)

        if self.is_script_test_enabled and version > 11:
            scripted = torch.jit.script(model)
            run_model_test(self, scripted, input_args=inputs, rtol=rtol, atol=atol)
def exportTest(self, model, inputs, rtol=1e-2, atol=1e-7, opset_versions=None):
    """Export *model* for each opset (default 7-12) and verify the result.

    Opsets above 11 additionally exercise the TorchScript export path,
    supplying the eager-mode outputs as ``example_outputs`` and enabling
    the new JIT passes.
    """
    if not opset_versions:
        opset_versions = [7, 8, 9, 10, 11, 12]

    for version in opset_versions:
        self.opset_version = version
        run_model_test(self, model, False,
                       input=inputs, rtol=rtol, atol=atol)

        if self.is_script_test_enabled and version > 11:
            eager_outputs = model(inputs)
            scripted = torch.jit.script(model)
            run_model_test(self, scripted, False, example_outputs=eager_outputs,
                           input=inputs, rtol=rtol, atol=atol, use_new_jit_passes=True)
Exemple #5
0
    def test_register_custom_op(self):
        """Export autograd.Function subclasses via a registered prim::PythonOp symbolic.

        Neither MyClip nor MyRelu defines a ``symbolic`` staticmethod, so the
        exporter routes them through the ``prim::PythonOp`` handler registered
        below, which dispatches on the Python class name it receives in
        ``kwargs["name"]``. NOTE: the string literals "MyClip"/"MyRelu" in the
        handler must therefore match these class names exactly.
        """
        class MyClip(torch.autograd.Function):
            @staticmethod
            def forward(ctx, input, scalar):
                ctx.save_for_backward(input)
                return input.clamp(min=scalar)

        class MyRelu(torch.autograd.Function):
            @staticmethod
            def forward(ctx, input):
                ctx.save_for_backward(input)
                # ReLU expressed as a clamp at zero.
                return input.clamp(min=0)

        class MyModule(torch.nn.Module):
            def __init__(self):
                super().__init__()
                self.clip = MyClip.apply
                self.relu = MyRelu.apply

            def forward(self, x):
                h = self.clip(x, 2)
                h = self.relu(h)
                return h

        def symbolic_pythonop(ctx: torch.onnx.SymbolicContext, g, *args,
                              **kwargs):
            # Map a prim::PythonOp node to an ONNX op based on the name of
            # the originating autograd.Function class.
            n = ctx.cur_node
            name = kwargs["name"]
            if name == "MyClip":
                # args[1] is the scalar clamp bound passed to MyClip.apply.
                return g.op("Clip",
                            args[0],
                            min_f=args[1],
                            outputs=n.outputsSize())
            elif name == "MyRelu":
                return g.op("Relu", args[0], outputs=n.outputsSize())
            else:
                # `_unimplemented` presumably comes from torch.onnx's symbolic
                # helpers via an import outside this view — TODO confirm.
                return _unimplemented("prim::PythonOp",
                                      "unknown node kind: " + name)

        from torch.onnx import register_custom_op_symbolic

        # Register for opset 1 so the handler applies to all later opsets.
        register_custom_op_symbolic("prim::PythonOp", symbolic_pythonop, 1)

        x = torch.randn(2, 3, 4, requires_grad=True)
        model = MyModule()
        run_model_test(self, model, input=(x, ))
Exemple #6
0
def exportTest(self, model, inputs, rtol=1e-2, atol=1e-7, opset_versions=None):
    """Export *model* under each opset (default 7-14) and compare outputs.

    For opsets above 11, when script tests are enabled, shape inference is
    switched on class-wide (``TestModels.onnx_shape_inference``) and the
    TorchScript-compiled model is exported with the eager outputs supplied
    as ``example_outputs``.
    """
    versions = opset_versions or [7, 8, 9, 10, 11, 12, 13, 14]

    for version in versions:
        self.opset_version = version
        run_model_test(self, model, False, input=inputs, rtol=rtol, atol=atol)

        if self.is_script_test_enabled and version > 11:
            TestModels.onnx_shape_inference = True

            eager_outputs = model(inputs)
            scripted = torch.jit.script(model)
            run_model_test(
                self,
                scripted,
                False,
                example_outputs=eager_outputs,
                input=inputs,
                rtol=rtol,
                atol=atol,
            )
Exemple #7
0
    def test_symbolic(self):
        """Export an autograd.Function that defines its own ``symbolic``.

        ``MyClip.symbolic`` maps the op directly to ONNX ``Clip``, so no
        custom-op registration is needed for export.
        """
        class MyClip(torch.autograd.Function):
            @staticmethod
            def forward(ctx, input, scalar):
                ctx.save_for_backward(input)
                return input.clamp(min=scalar)

            @staticmethod
            def symbolic(g, input, scalar):
                # Translate to the ONNX Clip op with a lower bound only.
                return g.op("Clip", input, min_f=scalar)

        class ClipModule(torch.nn.Module):
            def __init__(self):
                super().__init__()
                self.clip = MyClip.apply

            def forward(self, x):
                return self.clip(x, 2)

        x = torch.randn(2, 3, 4, requires_grad=True)
        run_model_test(self, ClipModule(), input=(x, ))