# ===== Example 1 (scraped sample separator) =====
import os
import sys
import unittest
from os.path import dirname, abspath

import numpy as np
import tensorflow as tf
from onnxconverter_common.onnx_ex import get_maximum_opset_supported

from keras2onnx.proto import keras
from keras2onnx.proto import is_tensorflow_older_than

sys.path.insert(0, os.path.join(dirname(abspath(__file__)), '../../tests/'))
from test_utils import run_onnx_runtime

# Full-size transformer tests are expensive, so they are opt-in: export
# ENABLE_FULL_TRANSFORMER_TEST to any value other than '0' to run them.
enable_full_transformer_test = False
if os.environ.get('ENABLE_FULL_TRANSFORMER_TEST', '0') != '0':
    # BUG FIX: the original assigned `enable_transformer_test`, a name that
    # does not exist in this module, so the flag declared above could never
    # actually be switched on.
    enable_full_transformer_test = True


@unittest.skipIf(is_tensorflow_older_than('2.1.0'),
                 "Transformers conversion need tensorflow 2.1.0+")
class TestTransformers(unittest.TestCase):
    """Conversion tests for HuggingFace transformer models.

    Each test records the files it writes in ``self.model_files`` so the
    fixture can clean them up afterwards.
    """

    # Shared sample sentence fed to the tokenizers under test.
    text_str = 'The quick brown fox jumps over lazy dog.'

    def setUp(self):
        # Paths of model files produced during a test; deleted in tearDown.
        self.model_files = []

    def tearDown(self):
        # Drain the list, removing each recorded model file from disk.
        while self.model_files:
            os.remove(self.model_files.pop())

    def _get_token_path(self, file_name):
        # Build the download URL for a hosted tokenizer asset.
        base_url = 'https://lotus.blob.core.windows.net/converter-models/transformer_tokenizer/'
        return base_url + file_name
# ===== Example 2 (scraped sample separator) =====

def test_auto_encoder(runner):
    """Convert a variational auto-encoder to ONNX and validate the graph."""
    import onnx

    tf.keras.backend.clear_session()
    input_dim = 20
    model = VariationalAutoEncoder(input_dim, 64, 32)
    sample = tf.random.normal((7, input_dim))
    expected = model.predict(sample)
    onnx_model = keras2onnx.convert_keras(model)
    # The random generator is not same between different engines, so only
    # the model structure is checked here:
    # assert runner('variational_auto_encoder', onnx_model, [sample.numpy()], expected)
    onnx.checker.check_model(onnx_model)


@pytest.mark.skipif(is_tensorflow_older_than('2.2.0'),
                    reason="only supported on tf 2.2 and above.")
def test_tf_where(runner):
    """Exercise tf.where with scalar and 1-D conditions through ONNX."""
    fallback = [0, 1, 2, 5, 7]

    def _tf_where(input_0):
        # Scalar condition: selects input_0 wholesale.
        picked = tf.where(True, input_0, fallback)
        # 1-D condition applied over an added leading axis.
        picked_batched = tf.where([True],
                                  tf.expand_dims(input_0, axis=0),
                                  tf.expand_dims(fallback, axis=0))
        return tf.logical_or(tf.cast(picked, tf.bool),
                             tf.cast(picked_batched, tf.bool))

    model = SimpleWrapperModel(_tf_where)
    const_in = [np.array([2, 4, 6, 8, 10]).astype(np.int32)]
    expected = model(const_in)
    model._set_inputs(const_in)
    oxml = keras2onnx.convert_keras(model)
    assert runner('where_test', oxml, const_in, expected)
# ===== Example 3 (scraped sample separator) =====

def test_auto_encoder(runner):
    """Round-trip a VAE through keras2onnx and check the resulting model."""
    tf.keras.backend.clear_session()
    feature_count = 20
    vae = VariationalAutoEncoder(feature_count, 64, 32)
    batch = tf.random.normal((7, feature_count))
    expected = vae.predict(batch)
    converted = keras2onnx.convert_keras(vae)
    # Output values differ across engines because the random generator is
    # not shared, so the runner comparison is disabled:
    # assert runner('variational_auto_encoder', converted, [batch.numpy()], expected)
    import onnx
    onnx.checker.check_model(converted)


@pytest.mark.skipif(is_tensorflow_older_than('2.2.0'), reason="only supported on tf 2.2 and above.")
def test_tf_where(runner):
    """Check tf.where (scalar and vector condition) converts correctly."""

    def _tf_where(input_0):
        # Scalar-condition select, then the same select with an explicit
        # leading axis, combined with a logical OR.
        selected = tf.where(True, input_0, [0, 1, 2, 5, 7])
        expanded_input = tf.expand_dims(input_0, axis=0)
        expanded_default = tf.expand_dims([0, 1, 2, 5, 7], axis=0)
        selected_batched = tf.where([True], expanded_input, expanded_default)
        return tf.logical_or(tf.cast(selected, tf.bool),
                             tf.cast(selected_batched, tf.bool))

    wrapper = SimpleWrapperModel(_tf_where)
    const_in = [np.array([2, 4, 6, 8, 10]).astype(np.int32)]
    expected = wrapper(const_in)
    wrapper._set_inputs(const_in)
    oxml = keras2onnx.convert_keras(wrapper)
    assert runner('where_test', oxml, const_in, expected)

# ===== Example 4 (scraped sample separator) =====
# Licensed under the MIT License. See License.txt in the project root for
# license information.
###############################################################################
import os
import sys
import unittest
from os.path import dirname, abspath
from keras2onnx.proto import keras, is_tensorflow_older_than

sys.path.insert(0, os.path.join(dirname(abspath(__file__)), '../../tests/'))
from test_utils import run_image

# Sample image shared by the conversion tests below; shipped in ../data.
img_path = os.path.join(os.path.dirname(__file__), '../data', 'street.jpg')


@unittest.skipIf(is_tensorflow_older_than('2.1.0'), "efficientnet needs tensorflow >= 2.1.0")
class TestEfn(unittest.TestCase):

    def setUp(self):
        # Model files produced during a test; removed again in tearDown.
        self.model_files = []

    def tearDown(self):
        """Remove every model file the test wrote to disk."""
        for path in list(self.model_files):
            os.remove(path)

    @unittest.skip("TODO: model discrepancy")
    def test_custom(self):
        # Builds an EfficientNetB0 backbone (truncated at the
        # `top_activation` layer) via the third-party `efficientnet`
        # package; skipped until the conversion discrepancy is resolved.
        from efficientnet import tfkeras as efn
        keras.backend.set_learning_phase(0)
        base_model = efn.EfficientNetB0(input_shape=(600, 600, 3), weights=None)
        backbone = keras.Model(base_model.input, base_model.get_layer("top_activation").output)
import sys
import unittest
import keras2onnx
import json
from os.path import dirname, abspath
sys.path.insert(0, os.path.join(dirname(abspath(__file__)), '../../tests/'))
from test_utils import run_onnx_runtime
from keras2onnx.proto import is_tensorflow_older_than

# Transformer tests are opt-in: ENABLE_TRANSFORMER_TEST set to anything
# other than '0' (or unset) switches them on.
enable_transformer_test = os.environ.get('ENABLE_TRANSFORMER_TEST', '0') != '0'


@unittest.skipIf(
    is_tensorflow_older_than('2.1.0') or not enable_transformer_test,
    "Need enable transformer test before Transformers conversion.")
class TestTransformers(unittest.TestCase):
    def setUp(self):
        # Track files created during a test so tearDown can delete them.
        self.model_files = []

    def tearDown(self):
        """Delete every model file recorded during the test."""
        while self.model_files:
            os.remove(self.model_files.pop())

    def _prepare_inputs(self, tokenizer):
        # Round-trip the sample sentence through JSON (mimicking how a
        # service payload would arrive), then tokenize it with the given
        # HuggingFace-style tokenizer.
        raw_data = json.dumps(
            {'text': 'The quick brown fox jumps over the lazy dog.'})
        text = json.loads(raw_data)['text']
        inputs = tokenizer.encode_plus(text,
                                       add_special_tokens=True,