def test_Bidirectional(self):
    """Convert Bidirectional(LSTM) models to ONNX and compare runtime output with Keras.

    Covers both Sequential models (return_sequences True/False) and functional
    models with different merge modes ('concat' and None).
    """
    input_dim = 10
    sequence_len = 5

    # Sequential model: bidirectional LSTM followed by a softmax classifier head.
    for return_sequences in (True, False):
        model = keras.Sequential()
        model.add(keras.layers.Bidirectional(
            keras.layers.LSTM(10, return_sequences=return_sequences),
            input_shape=(5, 10)))
        model.add(keras.layers.Dense(5))
        model.add(keras.layers.Activation('softmax'))
        model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
        onnx_model = keras2onnx.convert_keras(model, 'test')
        data = np.random.rand(input_dim, sequence_len).astype(np.float32).reshape((1, sequence_len, input_dim))
        expected = model.predict(data)
        self.assertTrue(self.run_onnx_runtime('bidirectional', onnx_model, data, expected))

    # Functional model: exercise merge_mode variations of the wrapper.
    for merge_mode in ('concat', None):
        # TODO: case return_sequences=False
        for return_sequences in (True,):
            sub_input1 = keras.layers.Input(shape=(sequence_len, input_dim))
            sub_mapped1 = keras.layers.Bidirectional(
                keras.layers.LSTM(10, return_sequences=return_sequences),
                input_shape=(5, 10),
                merge_mode=merge_mode)(sub_input1)
            keras_model = keras.Model(inputs=sub_input1, outputs=sub_mapped1)
            onnx_model = keras2onnx.convert_keras(keras_model, 'test_2')
            data = np.random.rand(input_dim, sequence_len).astype(np.float32).reshape((1, sequence_len, input_dim))
            expected = keras_model.predict(data)
            self.assertTrue(self.run_onnx_runtime('bidirectional', onnx_model, data, expected))
def test_timedistributed(self):
    """Convert TimeDistributed-wrapped Dense and Conv2D layers to ONNX and check outputs.

    Fix: dropped the leftover ``debug_mode=True`` from both ``convert_keras``
    calls — it only enables converter tracing output and no other test in this
    file passes it.
    """
    # TimeDistributed(Dense): keras_model.output_shape == (None, 10, 8)
    keras_model = keras.Sequential()
    keras_model.add(keras.layers.TimeDistributed(keras.layers.Dense(8), input_shape=(10, 16)))
    onnx_model = keras2onnx.convert_keras(keras_model, keras_model.name)
    x = np.random.rand(32, 10, 16).astype(np.float32)
    expected = keras_model.predict(x)
    self.assertTrue(self.run_onnx_runtime(onnx_model.graph.name, onnx_model, x, expected))

    # TimeDistributed(Conv2D) over a 5D input (batch, time, width, height, channels).
    keras_model = keras.Sequential()
    N, D, W, H, C = 5, 10, 15, 15, 3
    keras_model.add(keras.layers.TimeDistributed(keras.layers.Conv2D(64, (3, 3)), input_shape=(D, W, H, C)))
    onnx_model = keras2onnx.convert_keras(keras_model, keras_model.name)
    x = np.random.rand(N, D, W, H, C).astype(np.float32)
    expected = keras_model.predict(x)
    self.assertTrue(self.run_onnx_runtime(onnx_model.graph.name, onnx_model, x, expected))
def test_repeat_vector(self):
    """RepeatVector layer converts to ONNX and matches the Keras prediction."""
    keras_model = keras.Sequential()
    keras_model.add(keras.layers.core.RepeatVector(3, input_shape=(4,)))
    converted = keras2onnx.convert_keras(keras_model, keras_model.name)
    sample = self.asarray(1, 2, 3, 4)
    reference = keras_model.predict(sample)
    self.assertTrue(self.run_onnx_runtime('repeat_vector', converted, sample, reference))
def test_permute(self):
    """Permute layer (swap axes 1 and 2) converts to ONNX and matches Keras."""
    keras_model = keras.Sequential()
    keras_model.add(keras.layers.core.Permute((2, 1), input_shape=(3, 2)))
    converted = keras2onnx.convert_keras(keras_model, keras_model.name)
    sample = np.array([[[1, 2], [3, 4], [5, 6]]]).astype(np.float32)
    reference = keras_model.predict(sample)
    self.assertTrue(self.run_onnx_runtime('permute', converted, sample, reference))
def test_Bidirectional_with_bias(self):
    """Bidirectional LSTM with explicitly set weights converts and matches Keras."""
    model = keras.Sequential()
    model.add(keras.layers.Bidirectional(
        keras.layers.LSTM(1, return_sequences=False),
        input_shape=(1, 1)))
    # Set weights(kernel, recurrent_kernel, bias) for forward layer followed by the backward layer
    fixed_weights = [
        [[1, 2, 3, 4]], [[5, 6, 7, 8]], [1, 2, 3, 4],   # forward: kernel, recurrent, bias
        [[1, 2, 3, 4]], [[5, 6, 7, 8]], [1, 2, 3, 4],   # backward: kernel, recurrent, bias
    ]
    model.set_weights(fixed_weights)
    converted = keras2onnx.convert_keras(model, 'test')
    sample = np.random.rand(1, 1).astype(np.float32).reshape((1, 1, 1))
    reference = model.predict(sample)
    self.assertTrue(self.run_onnx_runtime('bidirectional', converted, sample, reference))
def test_embedding(self):
    """Embedding layer (1000-word vocab, 64-dim) converts and matches Keras output."""
    keras_model = keras.Sequential()
    keras_model.add(keras.layers.Embedding(1000, 64, input_length=10))
    keras_model.compile('rmsprop', 'mse')
    # Indices are fed as float32 — presumably to match the converter's expected
    # input dtype; TODO confirm against run_onnx_runtime.
    indices = np.random.randint(1000, size=(1, 10)).astype(np.float32)
    converted = keras2onnx.convert_keras(keras_model, keras_model.name)
    reference = keras_model.predict(indices)
    self.assertTrue(self.run_onnx_runtime(converted.graph.name, converted, indices, reference))
def test_dense(self):
    """Two stacked Dense layers (with and without bias) convert and match Keras.

    Fix: removed the ``input_shape=(5,)`` argument from the second Dense layer —
    Keras ignores ``input_shape`` on any layer that is not the first in a
    Sequential model, so it was a misleading no-op.
    """
    for use_bias in (True, False):
        model = keras.Sequential()
        model.add(keras.layers.Dense(5, input_shape=(4,), activation='sigmoid'))
        model.add(keras.layers.Dense(3, use_bias=use_bias))
        model.compile('sgd', 'mse')
        onnx_model = keras2onnx.convert_keras(model, model.name)
        data = self.asarray(1, 0, 0, 1)
        expected = model.predict(data)
        self.assertTrue(self.run_onnx_runtime('dense', onnx_model, data, expected))
def test_keras_with_tf2onnx(self):
    """Export a small Keras model as a frozen TF graph and convert it via tf2onnx."""
    try:
        import keras2onnx
    except (ImportError, AssertionError):
        warnings.warn("keras2onnx or one of its dependencies is missing.")
        return
    from keras2onnx.proto import keras
    from keras2onnx.proto.tfcompat import is_tf2
    if is_tf2:
        # tf2onnx is not available for tensorflow 2.0 yet.
        return
    model = keras.Sequential()
    model.add(keras.layers.Dense(units=4, input_shape=(10,), activation='relu'))
    model.compile(loss='binary_crossentropy',
                  optimizer='Adam',
                  metrics=['binary_accuracy'])
    graph_def = keras2onnx.export_tf_frozen_graph(model)
    onnx_model = onnxmltools.convert_tensorflow(
        graph_def, **keras2onnx.build_io_names_tf2onnx(model))
    self.assertTrue(len(onnx_model.graph.node) > 0)
def activationlayer_helper(self, layer, data_for_advanced_layer=None, op_version=None):
    """Build a one-layer model from *layer*, convert it, and compare with Keras.

    When ``data_for_advanced_layer`` is None, *layer* is treated as an activation
    name/function and wrapped in ``keras.layers.Activation`` with a fixed sample;
    otherwise *layer* is used as-is with the supplied data.
    """
    op_version = get_opset_number_from_onnx() if op_version is None else op_version
    if data_for_advanced_layer is not None:
        data = data_for_advanced_layer
    else:
        data = self.asarray(-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5)
        layer = keras.layers.Activation(layer, input_shape=(data.size,))

    model = keras.Sequential()
    model.add(layer)
    converted = keras2onnx.convert_keras(model, model.name, target_opset=op_version)
    reference = model.predict(data)
    self.assertTrue(self.run_onnx_runtime(converted.graph.name, converted, data, reference))
def _pooling_test_helper(self, layer, ishape, data_format='channels_last'):
    """Instantiate a pooling *layer* class over *ishape*, convert, and compare.

    Global pooling layers take no pool size; others get a pool size of 2.
    The ``data_format`` kwarg is only forwarded on Python >= 3.6 — presumably a
    workaround for older Keras/Python combinations; TODO confirm.
    """
    is_global = layer.__name__.startswith("Global")
    model = keras.Sequential()
    if sys.version_info >= (3, 6):
        if is_global:
            pooling = layer(data_format=data_format, input_shape=ishape)
        else:
            pooling = layer(2, data_format=data_format, input_shape=ishape)
    else:
        if is_global:
            pooling = layer(input_shape=ishape)
        else:
            pooling = layer(2, input_shape=ishape)
    model.add(pooling)
    converted = keras2onnx.convert_keras(model, model.name)
    sample = np.random.uniform(-0.5, 0.5, size=(1,) + ishape).astype(np.float32)
    reference = model.predict(sample)
    self.assertTrue(self.run_onnx_runtime(converted.graph.name, converted, sample, reference))
def _batch_norm_helper(self, data, gamma, beta, scale, center, axis):
    """Convert a single BatchNormalization layer and compare with Keras output.

    Moving mean/variance are initialized from the statistics of *data* so the
    (untrained) layer normalizes it meaningfully.
    """
    bn_kwargs = dict(
        axis=axis,
        input_shape=data.shape[1:],
        moving_mean_initializer=keras.initializers.constant(np.mean(data)),
        moving_variance_initializer=keras.initializers.constant(np.var(data)),
        gamma_initializer=gamma,
        beta_initializer=beta,
        center=center,
        scale=scale,
    )
    model = keras.Sequential()
    model.add(keras.layers.BatchNormalization(**bn_kwargs))
    converted = keras2onnx.convert_keras(model, model.name)
    reference = model.predict(data)
    self.assertTrue(self.run_onnx_runtime(converted.graph.name, converted, data, reference))
def _conv_helper(self, layer_type, input_channels, output_channels, kernel_size, strides,
                 input_size, activation, rtol, atol, bias, channels_first=False, padding='valid'):
    """Build a single convolution layer, convert it to ONNX, and compare outputs.

    Parameters mirror the Keras Conv* constructor; *layer_type* is the layer
    CLASS (e.g. ``keras.layers.Conv2D``), *input_size* is an int or tuple of
    spatial dims, and *rtol*/*atol* are the comparison tolerances.

    Fix: the original used ``isinstance(layer_type, keras.layers.Conv1D)``, which
    is always False because *layer_type* is a class object, not an instance — so
    ``data_format='channels_first'`` was passed even for Conv1D, defeating the
    guard. ``issubclass`` expresses the intended check.
    """
    model = keras.Sequential()
    input_size_seq = (input_size,) if isinstance(input_size, int) else input_size
    kwargs = {}
    if channels_first:
        input_shape = (input_channels,) + input_size_seq
        # Conv1D does not take data_format='channels_first'; skip the kwarg for it.
        if not issubclass(layer_type, keras.layers.Conv1D):
            kwargs['data_format'] = 'channels_first'
    else:
        input_shape = input_size_seq + (input_channels,)

    model.add(layer_type(output_channels, kernel_size,
                         input_shape=input_shape, strides=strides, padding=padding,
                         dilation_rate=1, activation=activation, use_bias=bias, **kwargs))
    data = np.random.uniform(-0.5, 0.5, size=(1,) + input_shape).astype(np.float32)
    onnx_model = keras2onnx.convert_keras(model, model.name)
    expected = model.predict(data)
    self.assertTrue(self.run_onnx_runtime(onnx_model.graph.name, onnx_model, data, expected,
                                          rtol=rtol, atol=atol))