    def test_model_creation(self):
        N, C, H, W = 2, 3, 5, 5
        input1 = keras.layers.Input(shape=(H, W, C))
        x1 = keras.layers.Dense(8, activation='relu')(input1)
        input2 = keras.layers.Input(shape=(H, W, C))
        x2 = keras.layers.Dense(8, activation='relu')(input2)
        maximum_layer = keras.layers.Maximum()([x1, x2])

        out = keras.layers.Dense(8)(maximum_layer)
        model = keras.models.Model(inputs=[input1, input2], outputs=out)

        trial1 = np.random.rand(N, H, W, C).astype(np.float32, copy=False)
        trial2 = np.random.rand(N, H, W, C).astype(np.float32, copy=False)

        predicted = model.predict([trial1, trial2])
        self.assertIsNotNone(predicted)

        converted_model_7 = onnxmltools.convert_keras(model, target_opset=7)
        converted_model_5 = onnxmltools.convert_keras(model, target_opset=5)

        self.assertIsNotNone(converted_model_7)
        self.assertIsNotNone(converted_model_5)

        opset_comparison = converted_model_7.opset_import[0].version > converted_model_5.opset_import[0].version

        self.assertTrue(opset_comparison)
    def _test_one_to_one_operator_coreml(self, keras_model, x):
        # Verify Keras-to-CoreML-to-ONNX path
        coreml_model = None
        try:
            coreml_model = coremltools.converters.keras.convert(keras_model)
        except (AttributeError, ImportError) as e:
            warnings.warn(
                "Unable to test due to an error in coremltools '{0}'".format(
                    e))

        onnx_model = None if coreml_model is None else onnxmltools.convert_coreml(
            coreml_model)
        self.assertTrue(onnx_model or coreml_model is None)

        if self._no_available_inference_engine():
            return

        y_reference = keras_model.predict(x)
        # Skip the CoreML path check if the CoreML conversion failed above.
        if onnx_model is not None:
            y_produced = evaluate_deep_model(onnx_model, x)
            self.assertTrue(np.allclose(y_reference, y_produced))

        # Verify Keras-to-ONNX path
        onnx_model = onnxmltools.convert_keras(keras_model)
        y_produced = evaluate_deep_model(onnx_model, x)

        self.assertTrue(np.allclose(y_reference, y_produced))
def convert_keras2onnx(model, output):
    import onnxmltools
    from keras.models import load_model

    keras_model = load_model(model)
    onnx_model = onnxmltools.convert_keras(keras_model)
    onnxmltools.utils.save_model(onnx_model, output)
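# A minimal usage sketch for the helper above (hypothetical file names, not part
# of the original snippet):
#
#     convert_keras2onnx('model.h5', 'model.onnx')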
    def save(self, using='dnn'):
        if using == 'sklearn':
            filename = 'scikit_model'
            pickle.dump(self.model, open(filename, 'wb'))
        else:
            onnx_model = onnxmltools.convert_keras(self.model)
            return onnx_model
Example #5
    def test_custom_op(self):
        N, C, H, W = 2, 3, 5, 5
        x = np.random.rand(N, H, W, C).astype(np.float32, copy=False)

        model = Sequential()
        model.add(
            Conv2D(2,
                   kernel_size=(1, 2),
                   strides=(1, 1),
                   padding='valid',
                   input_shape=(H, W, C),
                   data_format='channels_last'))
        model.add(ScaledTanh(0.9, 2.0))
        model.add(
            MaxPooling2D((2, 2), strides=(2, 2), data_format='channels_last'))

        model.compile(optimizer='sgd', loss='mse')
        converted_model = onnxmltools.convert_keras(
            model, custom_conversion_functions={ScaledTanh: custom_activation})

        actual = model.predict(x)
        self.assertIsNotNone(actual)

        self.assertIsNotNone(converted_model)
        dump_data_and_model(x.astype(np.float32),
                            model,
                            converted_model,
                            basename="KerasCustomOp-Out0",
                            context=dict(ScaledTanh=ScaledTanh))
    def _test_one_to_one_operator_core_keras(self, keras_model, x):
        y_reference = keras_model.predict(x)

        onnx_model = onnxmltools.convert_keras(keras_model)
        y_produced = _evaluate(onnx_model, x)

        self.assertTrue(np.allclose(y_reference, y_produced))
Example #7
    def test_channel_last(self):
        N, C, H, W = 2, 3, 5, 5
        x = np.random.rand(N, H, W, C).astype(np.float32, copy=False)

        model = Sequential()
        model.add(
            Conv2D(2,
                   kernel_size=(1, 2),
                   strides=(1, 1),
                   padding='valid',
                   input_shape=(H, W, C),
                   data_format='channels_last'))  # , activation='softmax')
        model.add(
            MaxPooling2D((2, 2), strides=(2, 2), data_format='channels_last'))

        model.compile(optimizer='sgd', loss='mse')
        converted_model = onnxmltools.convert_keras(
            model, channel_first_inputs=[model.inputs[0].name])

        expected = model.predict(x)
        self.assertIsNotNone(expected)
        self.assertIsNotNone(converted_model)

        try:
            import onnxruntime
            sess = onnxruntime.InferenceSession(
                converted_model.SerializeToString())
            actual = sess.run(
                [], {
                    sess.get_inputs()[0].name:
                    np.transpose(x.astype(np.float32), [0, 3, 1, 2])
                })
            self.assertTrue(np.allclose(expected, actual, rtol=1.e-3))
        except ImportError:
            pass
Example #8
def from_keras(
    model: keras.models.Model,
    opset: int = DEFAULT_OPSET,
):
    onnx_model = onnxmltools.convert_keras(model, target_opset=opset)
    logger.info('keras to onnx converted successfully')
    return onnx_model
Example #9
def convertToKerasAndBack(fileName, outputFileName, inputs):
    ret = None
    onnxModel = onnx.load(fileName)
    kwArgs = dict()
    if "shuffle" in fileName:
        kwArgs["input_shapes"] = [1, 3, 224, 224]
        import shufflenet
        import keras
        kerasModel = shufflenet.ShuffleNet(groups=3)
        kerasModel.load_weights(
            "keras-shufflenet/weights/ShuffleNet_1X_g3_br_0.25_373.hdf5")
        kerasModel.compile(optimizer=keras.optimizers.SGD(lr=.05,
                                                          decay=5e-4,
                                                          momentum=0.9),
                           metrics=['accuracy'],
                           loss='categorical_crossentropy')
        ret = kerasModel.predict(inputs[0][0].transpose(0, 2, 3, 1))
    else:
        kerasModel = onnx_to_keras(onnxModel,
                                   getOnnxInputNames(fileName),
                                   verbose=False,
                                   **kwArgs)

    #tf.keras.utils.plot_model(kerasModel, show_shapes=True)
    backconvOnnxModel = onnxmltools.convert_keras(kerasModel)
    onnxmltools.utils.save_model(backconvOnnxModel, outputFileName)
    return ret
    def testRunModelConv(self):

        # keras model
        N, C, H, W = 2, 3, 5, 5
        x = np.random.rand(N, H, W, C).astype(np.float32, copy=False)

        model = Sequential()
        model.add(Conv2D(2, kernel_size=(1, 2), strides=(1, 1), padding='valid', input_shape=(H, W, C),
                         data_format='channels_last'))
        model.add(ScaledTanh(0.9, 2.0))
        model.add(MaxPooling2D((2, 2), strides=(2, 2), data_format='channels_last'))

        model.compile(optimizer='sgd', loss='mse')
        actual = model.predict(x)
        self.assertIsNotNone(actual)

        # conversion
        converted_model = onnxmltools.convert_keras(model, custom_conversion_functions={ScaledTanh: custom_activation})
        self.assertIsNotNone(converted_model)
        
        # runtime
        content = converted_model.SerializeToString()
        rt = onnxrt.InferenceSession(content)
        input = {rt.get_inputs()[0].name: x}
        actual_rt = rt.run(None, input)
        self.assertEqual(len(actual_rt), 1)
        np.testing.assert_allclose(actual, actual_rt[0], rtol=1e-05, atol=1e-08)
Example #11
def main(args):
    spec = importlib.util.spec_from_file_location("model", args.dave_path)
    model = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(model)

    onnx_model = onnxmltools.convert_keras(model.DAVE())
    onnxmltools.utils.save_model(onnx_model, args.output_path)
    def _test_one_to_one_operator_coreml_channels_last(self, keras_model, x):
        '''
        There are two test paths. One is Keras-->CoreML-->ONNX and the other one is Keras-->ONNX.

        Keras-->CoreML-->ONNX:

        Keras computation path:
            [N, C, H, W] ---> numpy transpose ---> [N, H, W, C] ---> keras convolution --->
            [N, H, W, C] ---> numpy transpose ---> [N, C, H, W]

        ONNX computation path:
            [N, C, H, W] ---> ONNX convolution ---> [N, C, H, W]

        The reason for having extra transposes in the Keras path is that CoreMLTools does not handle the channels_last
        flag properly. Precisely, CoreMLTools always converts Conv2D under channels_first mode.

        Keras-->ONNX

        Keras computation path:
            [N, C, H, W] ---> numpy transpose ---> [N, H, W, C] ---> keras convolution --->
            [N, H, W, C]

        ONNX computation path:
            [N, C, H, W] ---> numpy transpose ---> [N, H, W, C] ---> ONNX convolution ---> [N, H, W, C]

        '''
        # Verify Keras-to-CoreML-to-ONNX path
        coreml_model = None
        try:
            coreml_model = coremltools.converters.keras.convert(keras_model)
        except (AttributeError, ImportError) as e:
            warnings.warn(
                "Unable to test due to an error in coremltools '{0}'.".format(
                    e))

        onnx_model_p1 = None if coreml_model is None else onnxmltools.convert_coreml(
            coreml_model)
        onnx_model_p2 = onnxmltools.convert_keras(keras_model)

        self.assertTrue(onnx_model_p1 or coreml_model is None)
        self.assertTrue(onnx_model_p2)

        if self._no_available_inference_engine():
            return

        if isinstance(x, list):
            x_t = [np.transpose(_, [0, 2, 3, 1]) for _ in x]
        else:
            x_t = np.transpose(x, [0, 2, 3, 1])
        y_reference = np.transpose(keras_model.predict(x_t), [0, 3, 1, 2])
        # Skip the CoreML path check if the CoreML conversion failed above.
        if onnx_model_p1 is not None:
            y_produced = evaluate_deep_model(onnx_model_p1, x)
            self.assertTrue(np.allclose(y_reference, y_produced))

        # Verify Keras-to-ONNX path
        y_reference = np.transpose(y_reference, [0, 2, 3, 1])
        y_produced = evaluate_deep_model(onnx_model_p2, x_t)

        self.assertTrue(np.allclose(y_reference, y_produced, atol=1e-6))
Example #13
def export_onnx(keras_model, output_path):
    """Export the model to the ONNX format.

    Args:
      keras_model: the loaded yolo/tiny-yolo model.
      output_path: output path of the ONNX model
    """
    onnx_model = onnxmltools.convert_keras(keras_model)
    onnxmltools.utils.save_model(onnx_model, output_path)
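# A minimal usage sketch for export_onnx (hypothetical paths and a hypothetical
# load step, not part of the original snippet):
#
#     from keras.models import load_model
#     keras_model = load_model('yolo.h5')
#     export_onnx(keras_model, 'yolo.onnx')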
Example #14
    def _test_one_to_one_operator_core_keras(self, keras_model, x):
        y_reference = keras_model.predict(x)

        onnx_model = onnxmltools.convert_keras(keras_model)
        if find_keras_backend():
            y_produced = evaluate_deep_model(onnx_model, x)
            self.assertTrue(np.allclose(y_reference, y_produced))
        else:
            warnings.warn("cntk or caffe2 are not available")
Example #15
def main(args):
    os.makedirs(os.path.dirname(args.output_path), exist_ok=True)

    with open(args.model_path) as json_file:
        keras_model = model_from_json(json_file.read())
    keras_model.load_weights(args.weights_path)

    onnx_model = onnxmltools.convert_keras(keras_model)
    optimized_onnx_model = concat_outputs(onnx_model)
    onnxmltools.utils.save_model(optimized_onnx_model, args.output_path)
Example #16
def get_onnx_model(model_format,
                   model,
                   initial_types: list = None,
                   final_types: list = None):
    if model_format == ModelFormat.KERAS:
        return onnxmltools.convert_keras(model)
    if model_format == ModelFormat.SK_LEARN:
        return onnxmltools.convert_sklearn(model, initial_types=initial_types)
    if model_format == ModelFormat.TENSORFLOW:
        return onnxmltools.convert_tensorflow(model)
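# A minimal usage sketch for get_onnx_model (assumes a fitted scikit-learn model
# `clf` with 4 input features; the FloatTensorType import path is the usual
# onnxmltools location, stated here as an assumption, not part of the original snippet):
#
#     from onnxmltools.convert.common.data_types import FloatTensorType
#     onnx_model = get_onnx_model(ModelFormat.SK_LEARN, clf,
#                                 initial_types=[('input', FloatTensorType([None, 4]))])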
def log_model(model, artifact_path, model_name=None):
    import mlflow
    import mlflow.onnx
    import onnx
    import onnxmltools
    onnx_model = onnxmltools.convert_keras(model, artifact_path)
    print("onnx_model.type:",type(onnx_model))
    mlflow.onnx.log_model(onnx_model, artifact_path, registered_model_name=model_name)
    mlflow.set_tag("version.onnx",onnx.__version__)
    mlflow.set_tag("version.onnxtools",onnxmltools.__version__)
Example #18
    def save(self, filename=None):
        if filename is None:
            raise ValueError(
                'To save the model you need to specify a filename.')
        pickle.dump(self.final_model, open(filename + '.pkl', 'wb'))

        self.final_model.save(filename + '.h5')

        onnx_model = onnxmltools.convert_keras(self.final_model)
        onnxmltools.utils.save_model(onnx_model, filename + '.onnx')
Example #19
def save_onnx(model_name):
    keras_path = "tmp.h5"
    onnx_path = f'{model_name}.onnx'

    keras_model = load_model(keras_path)
    onnx_model = onnxmltools.convert_keras(keras_model)
    onnx_model.graph.input[0].type.tensor_type.shape.dim[0].dim_value = 1
    onnx_model.graph.output[0].type.tensor_type.shape.dim[0].dim_value = 1
    onnx.checker.check_model(onnx_model)
    onnx.save(onnx_model, onnx_path)
def log_model(model, artifact_path):
    import mlflow
    import mlflow.onnx
    import onnx
    import onnxmltools
    onnx_model = onnxmltools.convert_keras(model, artifact_path)
    print("onnx_model.type:", type(onnx_model))
    mlflow.onnx.log_model(onnx_model, artifact_path)
    mlflow.set_tag("onnx_version", onnx.__version__)
    mlflow.set_tag("onnxtools_version", onnxmltools.__version__)
Example #21
    def create_onnx_model(self):
        self.construct_matfile()
        W, b, a = self.matDict["W"], self.matDict["b"], self.matDict[
            "act_fcns"]
        model = create_nn(W, b, a)
        self.final_output_path = os.path.join(self.outputFilePath,
                                              self.originalFilename) + '.onnx'
        onnx_model = onnxmltools.convert_keras(model)
        onnxmltools.utils.save_model(onnx_model, self.final_output_path)
        self.originalFile.close()
Example #22
def evaluate_model(trainX, trainy):
    verbose, epochs, batch_size = 0, 10, 32
    # random seed for reproducibility
    seed = 7
    np.random.seed(seed)
    # k-fold validation
    X = trainX
    Y = trainy
    n_split = 10
    # evaluation metrics
    accuracylist = list()
    precisionlist = list()
    recalllist = list()
    f1scorelist = list()
    # fold counter
    number = 0
    for train_index, test_index in KFold(n_split).split(X, Y):
        x_train, x_test = X[train_index], X[test_index]
        y_train, y_test = Y[train_index], Y[test_index]
        # create model
        model = create_model(trainX, trainy)
        # fit network
        model.fit(x_train, y_train, epochs=epochs, batch_size=batch_size, verbose=verbose)
        # save model
        model.save('./models/model' + str(number+1) + '.h5')
        keras_model = load_model('./models/model' + str(number+1) + '.h5')
        onnx_model = onnxmltools.convert_keras(keras_model)
        onnxmltools.utils.save_model(onnx_model, './models/model' + str(number+1) + '.onnx')
        # evaluate model
        y_predict = model.predict(x_test, batch_size=batch_size, verbose=verbose)
        print('#' + str(number+1) + '#' + 'y_predict:', y_predict)
        y_predict = np.argmax(y_predict, axis=1)
        y_test = np.argmax(y_test, axis=1)
        y_true = np.reshape(y_test, [-1])
        y_pred = np.reshape(y_predict, [-1])
        accuracy = accuracy_score(y_true, y_pred)
        accuracylist.append(accuracy)
        precision = precision_score(y_true, y_pred, average='macro')
        precisionlist.append(precision)
        recall = recall_score(y_true, y_pred, average='macro')
        recalllist.append(recall)
        f1score = f1_score(y_true, y_pred, average='macro')
        f1scorelist.append(f1score)
        number = number + 1
    # summarize results
    accuracy_mean, accuracy_std = summarize_results(accuracylist)
    print('K-fold Accuracy: %.2f%% (+/-%.2f)' % (accuracy_mean*100, accuracy_std))
    precision_mean, precision_std = summarize_results(precisionlist)
    print('K-fold Precision: %.2f%% (+/-%.2f)' % (precision_mean*100, precision_std))
    recall_mean, recall_std = summarize_results(recalllist)
    print('K-fold Recall: %.2f%% (+/-%.2f)' % (recall_mean*100, recall_std))
    f1score_mean, f1score_std = summarize_results(f1scorelist)
    print('K-fold F1 Score: %.2f%% (+/-%.2f)' % (f1score_mean*100, f1score_std))
    def _test_one_to_one_operator_keras(self, keras_model, x):
        y_reference = keras_model.predict(x)

        onnx_model = onnxmltools.convert_keras(keras_model)
        if not self._no_available_inference_engine():
            y_produced = evaluate_deep_model(onnx_model, x)
            self.assertTrue(np.allclose(y_reference, y_produced))
        else:
            self.assertIsNotNone(onnx_model)
            warnings.warn("No ONNX inference engine is available.")
    def create_onnx_model(self):
        # Convert the Keras model into ONNX
        if self.no_json:
            model = models.load_model(self.pathToOriginalFile)
        else:
            model = self.load_files(self.jsonFile, self.pathToOriginalFile)
        self.final_output_path = os.path.join(self.outputFilePath,
                                              self.originalFilename) + '.onnx'
        onnx_model = onnxmltools.convert_keras(model)
        # Save as protobuf
        onnxmltools.utils.save_model(onnx_model, self.final_output_path)
        self.originalFile.close()
Example #25
def keras2onnx(args):
    import keras
    # Load your Keras model
    keras_model = keras.models.load_model(args.model)

    # Convert the Keras model into ONNX
    onnx_model = onnxmltools.convert_keras(keras_model,
                                           initial_types=args.initial_types,
                                           target_opset=int(args.target_opset))

    # Save as protobuf
    onnxmltools.utils.save_model(onnx_model, args.output_onnx_path)
    def save(self, filename=None):
        if filename is None:
            raise ValueError(
                'To save the model you need to specify a filename.')

        if isinstance(self.proxy_model, (sklearn.DimensionalityReductionModel)):
            raise AttributeError("'SklearnKerasRegressor' object has no attribute 'save'")
        pickle.dump(self.final_model, open(filename + '.pkl', 'wb'))

        self.final_model.save(filename + '.h5')

        onnx_model = onnxmltools.convert_keras(self.final_model)
        onnxmltools.utils.save_model(onnx_model, filename + '.onnx')
Example #27
def convert_model_to_onnx(model: Any):
    """
    Helper function to convert a ML model to onnx format
    """
    if isinstance(model, model_classes_keras):
        return onnxmltools.convert_keras(model)
    if isinstance(model, model_classes_sklearn):
        return onnxmltools.convert_sklearn(model)
    if 'xgboost' in model.__repr__():
        return onnxmltools.convert_sklearn(model)
    if isinstance(model, model_classes_scipy):
        raise Exception("Pytorch models not yet supported to onnx")
    else:
        raise Exception(
            f"Attempt to convert unsupported model to onnx: {model}")
    def _test_one_to_one_operator_core(self, keras_model, x):
        # Verify Keras-to-CoreML-to-ONNX path
        coreml_model = coremltools.converters.keras.convert(keras_model)
        onnx_model = onnxmltools.convert_coreml(coreml_model)

        y_reference = keras_model.predict(x)
        y_produced = _evaluate(onnx_model, x)

        self.assertTrue(np.allclose(y_reference, y_produced))

        # Verify Keras-to-ONNX path
        onnx_model = onnxmltools.convert_keras(keras_model)
        y_produced = _evaluate(onnx_model, x)

        self.assertTrue(np.allclose(y_reference, y_produced))
def saveModel(model, label_persona_dict):
    onnx_model = onnxmltools.convert_keras(model)
    onnxmltools.utils.save_model(onnx_model, 'example.onnx')

    model_json = model.to_json()
    with open("model.json", "w") as json_file:
        json_file.write(model_json)
    # serialize weights to HDF5
    model.save_weights("model.h5")
    logging.info("Saved model to disk")
    
    with open("./label_persona_dict.json", 'w') as dict_writer:
        serialized_dict = json.dumps(label_persona_dict)
        dict_writer.write(serialized_dict)
    logging.info("Saved dict to disk")
Example #30
def training(model, train_set, test_set):
    history = model.fit_generator(train_set,
                                  steps_per_epoch=100,
                                  epochs=100,
                                  validation_data=test_set,
                                  validation_steps=100)

    model.save("saved_model.h5", True, True)

    onnx = onnxmltools.convert_keras(model)
    onnxmltools.save_model(onnx, "converted.onnx")

    with open("history.json", "w+") as file:
        file.write(str(history.history))

    acc_plot(history)
    loss_plot(history)