def convert_model(model, name, input_types):
    """
    Runs the appropriate conversion method for the given model.

    Dispatch order: LightGBM (class name starts with ``LGBM``), then
    scikit-learn (``BaseEstimator``), then Keras (``keras.models.Model``),
    and finally coremltools as the fallback.

    :param model: model, *scikit-learn*, *lightgbm*, *keras*, or
        *coremltools* object
    :param name: graph name handed to the underlying converter
    :param input_types: initial input types for the ONNX graph
    :return: tuple ``(onnx_model, prefix)`` where *prefix* identifies the
        converter used (``"LightGbm"``, ``"Sklearn"``, ``"Keras"``, ``"Cml"``)
    :raises RuntimeError: if the selected converter returned ``None``
    """
    from sklearn.base import BaseEstimator

    # BUG FIX: `model` is rebound to the converter's result below, so the
    # original error message reported the *converted* object's type
    # (i.e. "NoneType" whenever conversion failed). Capture the real type
    # up front so the exception names the model the caller passed in.
    original_type = type(model)

    if model.__class__.__name__.startswith("LGBM"):
        from onnxmltools.convert import convert_lightgbm
        model, prefix = convert_lightgbm(model, name, input_types), "LightGbm"
    elif isinstance(model, BaseEstimator):
        from onnxmltools.convert import convert_sklearn
        model, prefix = convert_sklearn(model, name, input_types), "Sklearn"
    else:
        from keras.models import Model
        if isinstance(model, Model):
            from onnxmltools.convert import convert_keras
            model, prefix = convert_keras(model, name, input_types), "Keras"
        else:
            from onnxmltools.convert import convert_coreml
            model, prefix = convert_coreml(model, name, input_types), "Cml"
    if model is None:
        raise RuntimeError(
            "Unable to convert model of type '{0}'.".format(original_type))
    return model, prefix
def keras_model(keras_file_path,
                onnx_file_path='.path/to/save/onnx/model.onnx'):
    """
    Load a Keras model from disk, convert it to ONNX, and save it as protobuf.

    :param keras_file_path: path to the serialized Keras model (e.g. ``.h5``)
    :param onnx_file_path: destination path for the saved ONNX file.
        Defaults to the original hard-coded placeholder path for backward
        compatibility; callers should pass a real path.
    :return: the converted *onnx* model object
    """
    # NOTE(review): `load_model` / `convert_keras` / `onnxmltools` are assumed
    # to be imported at module level elsewhere in this file — confirm.
    keras_load_model = load_model(keras_file_path)
    keras2onnx = convert_keras(keras_load_model)
    # BUG FIX: `save_model` returns None; the original code rebound the
    # converted model to that return value, so the function returned None.
    # Save as a side effect and return the actual model instead.
    onnxmltools.utils.save_model(keras2onnx, onnx_file_path)
    return keras2onnx
def keras_converter(args):
    """
    Convert the Keras model stored at ``args.source`` into an ONNX model.

    ``args`` must expose ``source`` (path to the serialized Keras model),
    ``name`` (ONNX graph name) and ``ONNXVersion`` (translated into a target
    opset through the module-level ``get_opset`` helper).

    :return: the converted *onnx* model
    """
    from keras.models import load_model
    from onnxmltools.convert import convert_keras

    target_opset = get_opset(args.ONNXVersion)
    keras_net = load_model(args.source)
    return convert_keras(keras_net, name=args.name, target_opset=target_opset)
# Tutorial-style script: train a tiny Keras classifier, convert it to ONNX,
# and score the test set with onnxruntime.
# NOTE(review): `X`, `y_multi`, `train_test_split`, `Sequential`, `Dense`,
# `FloatTensorType`, `convert_keras`, `rt` (onnxruntime) and `numpy` are all
# assumed to be imported/defined earlier in the file — confirm.
X_train, X_test, y_train, y_test = train_test_split(X, y_multi)
# 4 input features -> 10 ReLU units -> 3-way softmax (multi-class output).
model = Sequential()
model.add(Dense(units=10, activation='relu', input_dim=4))
model.add(Dense(units=3, activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])
model.fit(X_train, y_train, epochs=5, batch_size=16)
###########################
# Convert a model into ONNX
# +++++++++++++++++++++++++
# Declare the graph input: a float tensor with a dynamic batch dimension
# and 4 features, matching `input_dim=4` above.
initial_type = [('float_input', FloatTensorType([None, 4]))]
# NOTE(review): `initial_types` is the sklearn-converter idiom; whether the
# Keras converter honours it (vs. inferring inputs from the model) should be
# verified against the onnxmltools `convert_keras` documentation.
onx = convert_keras(model, initial_types=initial_type)
###################################
# Compute the predictions with onnxruntime
# ++++++++++++++++++++++++++++++++++++++++
sess = rt.InferenceSession(onx.SerializeToString())
# Use the graph's declared input/output names rather than hard-coding them.
input_name = sess.get_inputs()[0].name
label_name = sess.get_outputs()[0].name
# onnxruntime expects float32; cast the test features before feeding them.
pred_onx = sess.run(
    [label_name], {input_name: X_test.astype(numpy.float32)})[0]
print(pred_onx)
##################################
# Display the ONNX graph
# ++++++++++++++++++++++