Example #1
    def test_custom_pythonop_pytorch(self):

        # register_custom_op_symbolic(
        #   '<namespace>::inverse', my_inverse, <opset_version>)
        register_custom_op_symbolic('::inverse', my_inverse, 1)

        x = torch.randn(3, 3)

        # Export model to ONNX
        f = io.BytesIO()
        torch.onnx.export(CustomInverse(), (x, ), f)
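        # parse the exported bytes and confirm the custom op was exported into the ai.onnx.contrib domain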
        onnx_model = load(io.BytesIO(f.getvalue()))
        self.assertIn('domain: "ai.onnx.contrib"', str(onnx_model))

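        # reference outputs from the original PyTorch module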
        model = CustomInverse()
        pt_outputs = model(x)

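        # register the custom op library with ONNX Runtime before creating the session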
        so = _ort.SessionOptions()
        so.register_custom_ops_library(_get_library_path())

        # Run the exported model with ONNX Runtime
        ort_sess = _ort.InferenceSession(f.getvalue(), so)
        ort_inputs = dict((ort_sess.get_inputs()[i].name, input.cpu().numpy())
                          for i, input in enumerate((x, )))
        ort_outputs = ort_sess.run(None, ort_inputs)

        # Validate PyTorch and ONNX Runtime results
        numpy.testing.assert_allclose(pt_outputs.cpu().numpy(),
                                      ort_outputs[0],
                                      rtol=1e-03,
                                      atol=1e-05)
    def test_string_sentencepiece_tokenizer_bin(self):
        so = _ort.SessionOptions()
        so.register_custom_ops_library(_get_library_path())
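        # load the serialized SentencePiece model, both as raw bytes and as a base64 string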
        model, model_b64 = load_piece('model__6')
        modelb = bytes(model)
        py_onnx_model = _create_test_model_sentencepiece('Py', None)
        self.assertIn('op_type: "PySentencepieceTokenizer"',
                      str(py_onnx_model))
        cc_onnx_model = _create_test_model_sentencepiece('', modelb)
        self.assertIn('op_type: "SentencepieceTokenizer"', str(cc_onnx_model))
        py_sess = _ort.InferenceSession(py_onnx_model.SerializeToString(), so)
        cc_sess = _ort.InferenceSession(cc_onnx_model.SerializeToString(), so)

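        # nbest_size/alpha control SentencePiece sampling; bools packs the add_bos/add_eos/reverse bit flags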
        alpha = 0
        nbest_size = 0
        bools = 0
        inputs = dict(model=model,
                      inputs=np.array(["Hello world", "Hello world louder"],
                                      dtype=object),
                      nbest_size=np.array([nbest_size], dtype=np.int64),
                      alpha=np.array([alpha], dtype=np.float32),
                      add_bos=np.array([bools & 1], dtype=np.bool_),
                      add_eos=np.array([bools & 2], dtype=np.bool_),
                      reverse=np.array([bools & 4], dtype=np.bool_))
        exp = self.SentencepieceTokenizer(**inputs)
        py_txout = py_sess.run(None, inputs)
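        # the compiled graph already embeds the SentencePiece model, so drop it from the runtime inputs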
        del inputs['model']
        cc_txout = cc_sess.run(None, inputs)
        for i in range(0, 2):
            assert_almost_equal(exp[i], py_txout[i])
            assert_almost_equal(exp[i], cc_txout[i])
    def test_string_ragged_string_to_dense_cc(self):
        so = _ort.SessionOptions()
        so.register_custom_ops_library(_get_library_path())
        model, model_b64 = load_piece('model__6')
        onnx_model = _create_test_model_ragged_to_dense('', model_b64)
        self.assertIn('op_type: "RaggedTensorToDense"', str(onnx_model))
        sess = _ort.InferenceSession(onnx_model.SerializeToString(), so)

        inputs = dict(model=model,
                      inputs=np.array(["Hello world", "Hello world louder"],
                                      dtype=object),
                      nbest_size=np.array([0], dtype=np.int64),
                      alpha=np.array([0], dtype=np.float32),
                      add_bos=np.array([0], dtype=np.bool_),
                      add_eos=np.array([0], dtype=np.bool_),
                      reverse=np.array([0], dtype=np.bool_))
        del inputs['model']
        txout = sess.run(None, inputs)
        assert_almost_equal(
            txout[0],
            np.array([[17486, 1017, -1, -1], [17486, 1017, 155, 21869]],
                     dtype=np.int64))
        assert_almost_equal(
            txout[1],
            np.array([17486, 1017, 17486, 1017, 155, 21869], dtype=np.int32))
Example #4
    def test_check_saved_model(self):
        this = os.path.dirname(__file__)
        so = _ort.SessionOptions()
        so.register_custom_ops_library(_get_library_path())
        onnx_content = _create_test_model_test()
        onnx_bytes = onnx_content.SerializeToString()
        with open(os.path.join(this, 'data', 'custom_op_test.onnx'),
                  'rb') as f:
            saved = f.read()
        assert onnx_bytes == saved
Example #5
    def test_python_join(self):
        so = _ort.SessionOptions()
        so.register_custom_ops_library(_get_library_path())
        onnx_model = _create_test_join()
        self.assertIn('op_type: "PyOpJoin"', str(onnx_model))
        sess = _ort.InferenceSession(onnx_model.SerializeToString(), so)
        arr = np.array([["a", "b"]], dtype=object)
        txout = sess.run(None, {'input_1': arr})
        exp = np.array(["a;b"], dtype=object)
        assert txout[0][0] == exp[0]
Example #6
    def test_cc_negpos(self):
        so = _ort.SessionOptions()
        so.register_custom_ops_library(_get_library_path())
        onnx_model = _create_test_model_2outputs("")
        self.assertIn('op_type: "NegPos"', str(onnx_model))
        sess = _ort.InferenceSession(onnx_model.SerializeToString(), so)
        x = np.array([[0., 1., 1.5], [7., 8., -5.5]]).astype(np.float32)
        neg, pos = sess.run(None, {'x': x})
        diff = x - (neg + pos)
        assert_almost_equal(diff, np.zeros(diff.shape))
Example #7
    def test_add_epsilon_python(self):
        so = _ort.SessionOptions()
        so.register_custom_ops_library(_get_library_path())
        onnx_model = _create_test_model_double('Py')
        self.assertIn('op_type: "PyAddEpsilon"', str(onnx_model))
        sess = _ort.InferenceSession(onnx_model.SerializeToString(), so)
        input_1 = np.array([[0., 1., 1.5], [7., 8., -5.5]])
        txout = sess.run(None, {'input_1': input_1})
        diff = txout[0] - input_1 - 1e-3
        assert_almost_equal(diff, np.zeros(diff.shape))
Example #8
    def test_python_operator(self):
        so = _ort.SessionOptions()
        so.register_custom_ops_library(_get_library_path())
        onnx_model = _create_test_model()
        self.assertIn('op_type: "PyReverseMatrix"', str(onnx_model))
        sess = _ort.InferenceSession(onnx_model.SerializeToString(), so)
        input_1 = np.array([1, 2, 3, 4, 5,
                            6]).astype(np.float32).reshape([3, 2])
        txout = sess.run(None, {'input_1': input_1})
        assert_almost_equal(txout[0], np.array([[5., 6.], [3., 4.], [1., 2.]]))
Example #9
def _run_string_length(input):
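    # builds a model around the custom string-length op and checks it against Python's len() per element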
    model = _create_test_model(input.ndim, input.ndim)

    so = _ort.SessionOptions()
    so.register_custom_ops_library(_get_library_path())
    sess = _ort.InferenceSession(model.SerializeToString(), so)
    result = sess.run(None, {'input_1': input})

    # verify
    output = np.array([len(elem) for elem in input.flatten()],
                      dtype=np.int64).reshape(input.shape)
    np.testing.assert_array_equal(result, [output])
Example #10
    def test_cc_operator(self):
        so = _ort.SessionOptions()
        so.register_custom_ops_library(_get_library_path())
        onnx_content = _create_test_model_test()
        self.assertIn('op_type: "CustomOpOne"', str(onnx_content))
        ser = onnx_content.SerializeToString()
        sess0 = _ort.InferenceSession(ser, so)
        res = sess0.run(
            None, {
                'input_1': np.random.rand(3, 5).astype(np.float32),
                'input_2': np.random.rand(3, 5).astype(np.float32)
            })
        self.assertEqual(res[0].shape, (3, 5))
Example #11
    def _run_tokenizer(self, test_sentence, padding_length=-1):
        model = _create_test_model(vocab_file=self.tokjson,
                                   merges_file=self.merges,
                                   max_length=padding_length)
        so = _ort.SessionOptions()
        so.register_custom_ops_library(_get_library_path())
        sess = _ort.InferenceSession(model.SerializeToString(), so)
        input_text = np.array(test_sentence)
        input_ids, attention_mask = sess.run(None,
                                             {'string_input': input_text})
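        # the custom tokenizer must match the reference tokenizer's ids and attention mask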
        expect_input_ids, expect_attention_mask = self.tokenizer.tokenizer_sentence(
            test_sentence, padding_length)
        np.testing.assert_array_equal(expect_input_ids, input_ids)
        np.testing.assert_array_equal(expect_attention_mask, attention_mask)

        del sess
        del so
Example #12
def _run_string_concat(input1, input2):
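    # the custom op output must match element-wise Python string concatenation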
    model = _create_test_model(input1.ndim, input1.ndim)

    so = _ort.SessionOptions()
    so.register_custom_ops_library(_get_library_path())
    sess = _ort.InferenceSession(model.SerializeToString(), so)
    result = sess.run(None, {'input_1': input1, 'input_2': input2})

    # verify
    output = []
    shape = input1.shape
    input1 = input1.flatten()
    input2 = input2.flatten()
    for i in range(len(input1)):
        output.append(input1[i] + input2[i])
    output = np.array(output).reshape(shape)
    np.testing.assert_array_equal(result, [output])
Example #13
    def test_pyop_hooking(self):  # type: () -> None
        model = torchvision.models.mobilenet_v2(pretrained=False)
        x = torch.rand(1, 3, 224, 224)
        with io.BytesIO() as f:
            torch.onnx.export(model, (x, ), f)
            model = onnx.load_model_from_string(f.getvalue())

            self.assertTrue(model.graph.node[5].op_type == 'Conv')
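            # hook the selected Conv node so TestPyTorchCustomOp.on_hook runs when it executes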
            hkd_model = hook_model_op(model, model.graph.node[5].name,
                                      TestPyTorchCustomOp.on_hook,
                                      [PyOp.dt_float] * 3)

            so = _ort.SessionOptions()
            so.register_custom_ops_library(_get_library_path())
            sess = _ort.InferenceSession(hkd_model.SerializeToString(), so)
            TestPyTorchCustomOp._hooked = False
            sess.run(None, {'input.1': x.numpy()})
            self.assertTrue(TestPyTorchCustomOp._hooked)
    def test_string_ragged_string_to_sparse_python(self):
        so = _ort.SessionOptions()
        so.register_custom_ops_library(_get_library_path())
        model, model_b64 = load_piece('model__6')
        onnx_model = _create_test_model_ragged_to_sparse('Py', None)
        self.assertIn('op_type: "PyRaggedTensorToSparse"', str(onnx_model))
        sess = _ort.InferenceSession(onnx_model.SerializeToString(), so)

        inputs = dict(model=model,
                      inputs=np.array(["Hello world", "Hello world louder"],
                                      dtype=object),
                      nbest_size=np.array([0], dtype=np.int64),
                      alpha=np.array([0], dtype=np.float32),
                      add_bos=np.array([0], dtype=np.bool_),
                      add_eos=np.array([0], dtype=np.bool_),
                      reverse=np.array([0], dtype=np.bool_))
        txout = sess.run(None, inputs)
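        # reference: tokenize with the Python implementation, then convert the ragged output to sparse form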
        temp = self.SentencepieceTokenizer(**inputs)
        exp = self.RaggedTensorToSparse(temp[1], temp[0])
        for i in range(0, 3):
            assert_almost_equal(exp[i], txout[i])