Code example #1
# Excerpt from a tkinter GUI; infile_name, outfile_name, and status_variable
# are Tk StringVars defined elsewhere in the application.
import os

from coremltools.models.utils import load_spec
from winmltools import convert_coreml
from winmltools.utils import save_model

def convertModel():
    coreML_name = infile_name.get()
    onnx_name = outfile_name.get()

    if coreML_name == "":
        status_variable.set("\t (!) Input file doesn't exist.")
        return
    if onnx_name == "":
        status_variable.set("\t (!) Output file doesn't exist.")
        return

    ## check that coreML_name ends with ".mlmodel"
    if len(coreML_name) <= len(".mlmodel") or not coreML_name.endswith(".mlmodel"):
        status_variable.set(
            "\t (!) File name error: \"" + coreML_name +
            "\" is not a Core ML model; it does not end with \".mlmodel\".")
        return

    ## check that the input file exists
    if not os.path.isfile(coreML_name):
        status_variable.set("\t (!) File not found: \"" + coreML_name +
                            "\" does not exist; please provide another one.")
        return

    status_variable.set(">> load model from:" + coreML_name + "...")
    model_coreml = load_spec(coreML_name)

    status_variable.set(">> covert model...")
    model_onnx = convert_coreml(model_coreml)

    status_variable.set(">> save model to:" + onnx_name + "...")
    save_model(model_onnx, onnx_name)

    status_variable.set(">> Convert Finished! ")
Code example #2
def coreml_converter(args):
    # When imported, CoreML tools checks for the current version of Keras and TF and prints warnings if they are
    # outside its expected range. We don't want it to import these packages (since they are big and take seconds to
    # load) and we don't want to clutter the console with unrelated Keras warnings when converting from CoreML.
    import sys
    sys.modules['keras'] = None
    import coremltools
    import winmltools  # needed for convert_coreml below

    source_model = coremltools.utils.load_spec(args.source)
    # get_opset (defined elsewhere in this project) maps the requested ONNX version to an opset number.
    onnx_model = winmltools.convert_coreml(source_model, get_opset(args.ONNXVersion), args.name)
    return onnx_model
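For orientation, here is a minimal, hypothetical way to call coreml_converter. The attribute names source, ONNXVersion, and name come from the function body above, while the argparse wiring and the get_opset stand-in are illustrative assumptions, not code from the original project.

import argparse

def get_opset(onnx_version):
    # Hypothetical stand-in for the project's real helper: ONNX 1.2 maps to
    # opset 7 and ONNX 1.3 maps to opset 8.
    return {'1.2': 7, '1.3': 8}.get(onnx_version, 7)

parser = argparse.ArgumentParser()
parser.add_argument('--source', help='path to the input .mlmodel file')
parser.add_argument('--ONNXVersion', default='1.2')
parser.add_argument('--name', default='ExampleModel')
args = parser.parse_args(['--source', 'example.mlmodel'])

onnx_model = coreml_converter(args)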
Code example #3
File: mlconvert.py  Project: keonlee/iotedgesample
from coremltools.models.utils import load_spec

# Load model file
model_coreml = load_spec('example.mlmodel')
from winmltools import convert_coreml

# Convert it!
# The automatic code generator (mlgen) uses the name parameter to generate class names.
model_onnx = convert_coreml(model_coreml, name='ExampleModel') 


from winmltools.utils import save_model
# Save the produced ONNX model in binary format
save_model(model_onnx, 'example.onnx')
Code example #4
from winmltools.utils import save_model
from winmltools import convert_coreml
from coremltools.models.utils import load_spec

input_coreml_model = '../webservice/pretrain_models/water_meter_yolov2_tiny/yolo-obj-416-water_yolov2_tiny_15000.mlmodel'
output_onnx_model = '../webservice/pretrain_models/water_meter_yolov2_tiny/yolo-obj-416-water_yolov2_tiny_15000_winmltools.onnx'

model_coreml = load_spec(input_coreml_model)
# 8 is the target ONNX opset for the exported model
model_onnx = convert_coreml(
    model_coreml, 8, name='yolo-obj-416-water_yolov2_tiny_15000_winmltools')

save_model(model_onnx, output_onnx_model)
Code example #5
from coremltools.models.utils import load_spec
from winmltools import convert_coreml
from winmltools.utils import save_model, save_text

# Load the Core ML model
model_coreml = load_spec("MobileNet.mlmodel")
# Convert the Core ML model to ONNX
model_onnx = convert_coreml(model_coreml, name="mobilenet")

# Save the converted model in ONNX binary and text formats
save_model(model_onnx, "mobilenet.onnx")
save_text(model_onnx, "mobilenet.txt")
Code example #6
from coremltools.models.utils import load_spec
from winmltools import convert_coreml
from winmltools.utils import save_model

# Load model file
model_coreml = load_spec('Fruit.mlmodel')

# Convert it!
# The automatic code generator (mlgen) uses the name parameter to generate class names.
model_onnx = convert_coreml(model_coreml, name='Fruit')

# Save the produced ONNX model in binary format
save_model(model_onnx, 'Fruit.onnx')   

Code example #7
# Excerpt: `ml_path` and `ml_file` are defined earlier in the original script.
folder_path = ml_path[0]
ml_format = ml_path[-1].split('.')[-1]
if ml_format == 'onnx':
    model_onnx = load_model(ml_file)
else:
    # Load model file
    model_coreml = load_spec(ml_file)

    print('ONNX model name:')
    ml_name = str(input())

    # Convert it!
    # The automatic code generator (mlgen) uses the name parameter to generate
    # class names.

    model_onnx = convert_coreml(model_coreml, name=ml_name)

onnx_file_name = os.path.join(folder_path, ml_path[1].split('.')[0])

print('Convert to floating point 16? (y/n)')
need_float_16 = input()
if need_float_16 == 'y':
    model_onnx = convert_float_to_float16(model_onnx)
    # tag the output file name only when the model was actually converted
    onnx_file_name += '_16bit'

# Save the produced ONNX model in binary format
onnx_path = onnx_file_name + '.onnx'
save_model(model_onnx, onnx_path)
print(onnx_path)

# Save as text
# save_text(model_onnx, 'example.txt')
Code example #8
from coremltools.models.utils import load_spec
from winmltools import convert_coreml
from winmltools.utils import save_model

def main():
    model_coreml = load_spec(
        'D:/Project/ShuffleNet/ncnn/save1/mobilenetv1.mlmodel')
    # 7 is the target ONNX opset for the exported model
    model_onnx = convert_coreml(model_coreml, 7, name='ExampleModel')
    save_model(model_onnx, 'D:/Project/ShuffleNet/ncnn/save1/mobilenetv1.onnx')
Code example #9
from coremltools.models.utils import load_spec
from winmltools import convert_coreml
from winmltools.utils import save_model

# Load model file
model_coreml = load_spec('ContosoIT.mlmodel')

# Convert it!
# The automatic code generator (mlgen) uses the name parameter to generate class names.
model_onnx = convert_coreml(model_coreml, name='ContosoIT')

# Save the produced ONNX model in binary format
save_model(model_onnx, 'ContosoIT.onnx')  
Code example #10
from winmltools.utils import save_model
from winmltools import convert_coreml
from coremltools.models.utils import load_spec

input_coreml_model = '../../pretrain_models/yolov4/yolov4.mlmodel'
output_onnx_model = '../../pretrain_models/yolov4/yolov4_winmltools.onnx'

model_coreml = load_spec(input_coreml_model)
model_onnx = convert_coreml(model_coreml, 8, name='yolov4_winmltools')

save_model(model_onnx, output_onnx_model)
Code example #11
import sys

if len(sys.argv) != 3:
    print(
        "\t (!) Usage: python convertCoreML.py [coreML model name] [onnx model name]"
    )
    print(
        "\t\t * coreML_model : The name of the model you want to convert, ended with \".mlmodel\"."
    )
    print("\t\t * onnx_model \t: The output name of the onnx model.")
    exit()
else:
    coreML_name = sys.argv[1]
    onnx_name = sys.argv[2]

if len(coreML_name) <= len(".mlmodel") or not coreML_name.endswith(".mlmodel"):
    print("\t (!) File name error: ", coreML_name,
          "is not a Core ML model; it does not end with \".mlmodel\".")
    exit()

print(">> load model from:", coreML_name, "...")
from coremltools.models.utils import load_spec
model_coreml = load_spec(coreML_name)

print(">> covert model...")
from winmltools import convert_coreml
model_onnx = convert_coreml(model_coreml)

print(">> save model to:", onnx_name, "...")
from winmltools.utils import save_model
save_model(model_onnx, onnx_name)

print("Finished!")
Code example #12
from coremltools.models.utils import load_spec
from winmltools import convert_coreml
from winmltools.utils import save_model  #, save_text

from pathlib import Path

for mlmodel in Path("./").glob("*.mlmodel"):
    # Load the Core ML model
    model_coreml = load_spec(mlmodel)
    # Convert the Core ML model to ONNX, targeting opset 8
    tgt_opset = 8
    model_onnx = convert_coreml(model_coreml,
                                tgt_opset,
                                name=mlmodel.stem.lower())

    # Save the converted model in ONNX binary format
    save_model(model_onnx, f"{mlmodel.stem.lower()}.onnx")
    # save_text(model_onnx, f"{mlmodel.stem.lower()}.txt")