Example #1
def test_models(model_name, input_shape, output_shape):
    """ Tests Googlenet model for both onnx import and export"""
    model_path, inputs, outputs = get_test_files(model_name)
    logging.info("Translating model from ONNX model zoo to Mxnet")
    sym, arg_params, aux_params = onnx_mxnet.import_model(model_path)
    params = {}
    params.update(arg_params)
    params.update(aux_params)

    dir_path = os.path.dirname(model_path)
    new_model_name = "exported_" + model_name + ".onnx"
    onnx_file = os.path.join(dir_path, new_model_name)

    logging.info("Translating converted model from mxnet to ONNX")
    converted_model_path = onnx_mxnet.export_model(sym, params, [input_shape], np.float32, onnx_file)

    sym, arg_params, aux_params = onnx_mxnet.import_model(converted_model_path)

    metadata = onnx_mxnet.get_model_metadata(converted_model_path)
    assert len(metadata) == 2
    assert metadata.get('input_tensor_data')
    assert metadata.get('input_tensor_data')[0][1] == input_shape
    assert metadata.get('output_tensor_data')
    assert metadata.get('output_tensor_data')[0][1] == output_shape
    data_names = [input_name[0] for input_name in metadata.get('input_tensor_data')]

    logging.info("Running inference on onnx re-import model in mxnet")
    # run test for each test file
    for input_data, output_data in zip(inputs, outputs):
        result = forward_pass(sym, arg_params, aux_params, data_names, input_data)

        # verify the results
        npt.assert_equal(result.shape, output_data.shape)
        npt.assert_almost_equal(output_data, result, decimal=3)
    logging.info(model_name + " conversion successful")
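The test relies on a forward_pass helper that is not shown in this listing. Below is a minimal sketch of what it plausibly does, assuming the same Module-based inference pattern used in the R-CNN examples further down; the helper name and signature come from the calls above, but the body is an assumption.
from collections import namedtuple
import mxnet as mx

def forward_pass(sym, arg_params, aux_params, data_names, input_data):
    """Hypothetical helper: bind the imported symbol and run one inference batch."""
    mod = mx.mod.Module(symbol=sym, data_names=data_names, context=mx.cpu(),
                        label_names=None)
    mod.bind(for_training=False,
             data_shapes=[(data_names[0], input_data.shape)], label_shapes=None)
    mod.set_params(arg_params=arg_params, aux_params=aux_params,
                   allow_missing=True, allow_extra=True)
    batch = namedtuple('Batch', ['data'])
    mod.forward(batch([mx.nd.array(input_data)]), is_train=False)
    # return the first output as a NumPy array so callers can compare shapes and values
    return mod.get_outputs()[0].asnumpy()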
Example #2
def model_fn(model_dir):
    """
    Load the ONNX model. Called once when the hosting service starts.

    :param model_dir: The directory where model files are stored.
    :return: a Gluon SymbolBlock model
    """
    onnx_path = os.path.join(model_dir, "model.onnx")
    ctx = mx.cpu() # todo: pass into function
    # load onnx model symbol and parameters
    sym, arg_params, aux_params = onnx_mxnet.import_model(onnx_path)
    model_metadata = onnx_mxnet.get_model_metadata(onnx_path)
    # first index is name, second index is shape
    input_names = [inputs[0] for inputs in model_metadata.get('input_tensor_data')]
    input_symbols = [mx.sym.var(i) for i in input_names]
    net = gluon.nn.SymbolBlock(outputs=sym, inputs=input_symbols)
    net_params = net.collect_params()
    # set parameters (on correct context)
    for param in arg_params:
        if param in net_params:
            net_params[param]._load_init(arg_params[param], ctx=ctx)
    for param in aux_params:
        if param in net_params:
            net_params[param]._load_init(aux_params[param], ctx=ctx)
    # hybridize for better performance
    net.hybridize()
    return net
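This model_fn follows the SageMaker MXNet hosting convention, where the returned network is later handed to a request handler. A minimal companion sketch, assuming JSON request and response bodies (the handler name and content-type handling follow the usual convention and are not part of the original example):
import json
import mxnet as mx

def transform_fn(net, data, input_content_type, output_content_type):
    """Hypothetical request handler: decode the input, run the SymbolBlock, encode the result."""
    # the request body is assumed to be a JSON-encoded nested list of input values
    nda = mx.nd.array(json.loads(data))
    output = net(nda)
    return json.dumps(output.asnumpy().tolist()), output_content_type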
Example #3
def test_bvlc_rcnn_ilsvrc13():
    """Tests the bvlc rcnn model"""
    model_path, inputs, outputs = get_test_files('bvlc_reference_rcnn_ilsvrc13')
    logging.info("Translating rcnn_ilsvrc13 model from ONNX to Mxnet")
    sym, arg_params, aux_params = onnx_mxnet.import_model(model_path)
    metadata = onnx_mxnet.get_model_metadata(model_path)
    assert len(metadata) == 2
    assert metadata.get('input_tensor_data')
    assert metadata.get('input_tensor_data') == [(u'data_0', (1, 3, 224, 224))]
    assert metadata.get('output_tensor_data')
    assert metadata.get('output_tensor_data') == [(u'fc-rcnn_1', (1, 200))]
    data_names = [input_name[0] for input_name in metadata.get('input_tensor_data')]

    # run test for each test file
    for input_data, output_data in zip(inputs, outputs):
        # create module
        mod = mx.mod.Module(symbol=sym, data_names=data_names, context=mx.cpu(), label_names=None)
        mod.bind(for_training=False, data_shapes=[(data_names[0], input_data.shape)], label_shapes=None)
        mod.set_params(arg_params=arg_params, aux_params=aux_params,
                       allow_missing=True, allow_extra=True)
        # run inference
        batch = namedtuple('Batch', ['data'])
        mod.forward(batch([mx.nd.array(input_data)]), is_train=False)

        # verify the results
        npt.assert_equal(mod.get_outputs()[0].shape, output_data.shape)
        npt.assert_almost_equal(output_data, mod.get_outputs()[0].asnumpy(), decimal=3)
    logging.info("rcnn_ilsvrc13 model conversion Successful")
Example #4
def model_fn(model_dir):
    """
    Load the ONNX model. Called once when the hosting service starts.

    :param model_dir: The directory where model files are stored.
    :return: a Gluon SymbolBlock model
    """
    onnx_path = os.path.join(model_dir, "model.onnx")
    ctx = mx.cpu() # todo: pass into function
    # load onnx model symbol and parameters
    sym, arg_params, aux_params = onnx_mxnet.import_model(onnx_path)
    model_metadata = onnx_mxnet.get_model_metadata(onnx_path)
    # first index is name, second index is shape
    input_names = [inputs[0] for inputs in model_metadata.get('input_tensor_data')]
    input_symbols = [mx.sym.var(i) for i in input_names]
    net = gluon.nn.SymbolBlock(outputs=sym, inputs=input_symbols)
    net_params = net.collect_params()
    # set parameters (on correct context)
    for param in arg_params:
        if param in net_params:
            net_params[param]._load_init(arg_params[param], ctx=ctx)
    for param in aux_params:
        if param in net_params:
            net_params[param]._load_init(aux_params[param], ctx=ctx)
    # hybridize for better performance
    net.hybridize()
    return net
Example #5
def test_bvlc_rcnn_ilsvrc13():
    """Tests the bvlc rcnn model"""
    model_path, inputs, outputs = get_test_files('bvlc_reference_rcnn_ilsvrc13')
    logging.info("Translating rcnn_ilsvrc13 model from ONNX to Mxnet")
    sym, arg_params, aux_params = onnx_mxnet.import_model(model_path)
    metadata = onnx_mxnet.get_model_metadata(model_path)
    assert len(metadata) == 2
    assert metadata.get('input_tensor_data')
    assert metadata.get('input_tensor_data') == [(u'data_0', (1, 3, 224, 224))]
    assert metadata.get('output_tensor_data')
    assert metadata.get('output_tensor_data') == [(u'fc-rcnn_1', (1, 200))]
    data_names = [input_name[0] for input_name in metadata.get('input_tensor_data')]

    # run test for each test file
    for input_data, output_data in zip(inputs, outputs):
        # create module
        mod = mx.mod.Module(symbol=sym, data_names=data_names, context=mx.cpu(), label_names=None)
        mod.bind(for_training=False, data_shapes=[(data_names[0], input_data.shape)], label_shapes=None)
        mod.set_params(arg_params=arg_params, aux_params=aux_params,
                       allow_missing=True, allow_extra=True)
        # run inference
        batch = namedtuple('Batch', ['data'])
        mod.forward(batch([mx.nd.array(input_data)]), is_train=False)

        # verify the results
        npt.assert_equal(mod.get_outputs()[0].shape, output_data.shape)
        npt.assert_almost_equal(output_data, mod.get_outputs()[0].asnumpy(), decimal=3)
    logging.info("rcnn_ilsvrc13 model conversion Successful")
Example #6
def perform_inference(sym, arg_params, aux_params, input_img, img_cb, img_cr):
    """Perform inference on image using mxnet"""
    metadata = onnx_mxnet.get_model_metadata('super_resolution.onnx')
    data_names = [
        input_name[0] for input_name in metadata.get('input_tensor_data')
    ]
    # create module
    mod = mx.mod.Module(symbol=sym, data_names=data_names, label_names=None)
    mod.bind(for_training=False,
             data_shapes=[(data_names[0], input_img.shape)])
    mod.set_params(arg_params=arg_params, aux_params=aux_params)

    # run inference
    batch = namedtuple('Batch', ['data'])
    mod.forward(batch([mx.nd.array(input_img)]))

    # Save the result
    img_out_y = Image.fromarray(
        np.uint8(mod.get_outputs()[0][0][0].asnumpy().clip(0, 255)), mode='L')

    result_img = Image.merge("YCbCr", [
        img_out_y,
        img_cb.resize(img_out_y.size, Image.BICUBIC),
        img_cr.resize(img_out_y.size, Image.BICUBIC)
    ]).convert("RGB")
    output_img_dim = 672
    assert result_img.size == (output_img_dim, output_img_dim)
    LOGGER.info("Super Resolution example success.")
    result_img.save("super_res_output.jpg")
    return result_img
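perform_inference expects the luminance (Y) channel of a YCbCr image as a 4-D float array, plus the Cb and Cr channels for recombination; the 672x672 assertion implies a 3x upscaling of a 224x224 input. A rough preparation sketch, assuming the symbol and parameters were imported from super_resolution.onnx and that the input file name and size below are placeholders:
import numpy as np
from PIL import Image

# sym, arg_params, aux_params are assumed to come from
# onnx_mxnet.import_model('super_resolution.onnx')
img = Image.open("input.jpg").resize((224, 224))
img_y, img_cb, img_cr = img.convert("YCbCr").split()
# the network consumes the luminance channel as a (1, 1, 224, 224) float32 array
input_img = np.array(img_y)[np.newaxis, np.newaxis, :, :].astype(np.float32)
result_img = perform_inference(sym, arg_params, aux_params, input_img, img_cb, img_cr)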
Example #7
def perform_inference(sym, arg_params, aux_params, input_img, img_cb, img_cr):
    """Perform inference on image using mxnet"""
    metadata = onnx_mxnet.get_model_metadata('super_resolution.onnx')
    data_names = [input_name[0] for input_name in metadata.get('input_tensor_data')]
    # create module
    mod = mx.mod.Module(symbol=sym, data_names=data_names, label_names=None)
    mod.bind(for_training=False, data_shapes=[(data_names[0], input_img.shape)])
    mod.set_params(arg_params=arg_params, aux_params=aux_params)

    # run inference
    batch = namedtuple('Batch', ['data'])
    mod.forward(batch([mx.nd.array(input_img)]))

    # Save the result
    img_out_y = Image.fromarray(
        np.uint8(mod.get_outputs()[0][0][0].asnumpy().clip(0, 255)), mode='L')

    result_img = Image.merge(
        "YCbCr", [img_out_y,
                  img_cb.resize(img_out_y.size, Image.BICUBIC),
                  img_cr.resize(img_out_y.size, Image.BICUBIC)]).convert("RGB")
    output_img_dim = 672
    assert result_img.size == (output_img_dim, output_img_dim)
    LOGGER.info("Super Resolution example success.")
    result_img.save("super_res_output.jpg")
    return result_img
Example #8
def test_model_accuracy(model_name, input_shape):
    """ Imports ONNX model, runs inference, exports and imports back
        run inference, compare result with the previous inference result"""
    model_path, inputs, outputs = get_test_files(model_name)
    logging.info("Translating model from ONNX model zoo to Mxnet")
    sym, arg_params, aux_params = onnx_mxnet.import_model(model_path)

    metadata = onnx_mxnet.get_model_metadata(model_path)
    data_names = [
        input_name[0] for input_name in metadata.get('input_tensor_data')
    ]

    expected_result = []
    for input_data, output_data in zip(inputs, outputs):
        result = forward_pass(sym, arg_params, aux_params, data_names,
                              input_data)
        expected_result.append(result)

    params = {}
    params.update(arg_params)
    params.update(aux_params)

    dir_path = os.path.dirname(model_path)
    new_model_name = "exported_" + model_name + ".onnx"
    onnx_file = os.path.join(dir_path, new_model_name)

    logging.info("Translating converted model from mxnet to ONNX")
    converted_model_path = onnx_mxnet.export_model(sym, params, [input_shape],
                                                   np.float32, onnx_file)

    sym, arg_params, aux_params = onnx_mxnet.import_model(converted_model_path)

    metadata = onnx_mxnet.get_model_metadata(converted_model_path)
    data_names = [
        input_name[0] for input_name in metadata.get('input_tensor_data')
    ]

    actual_result = []
    for input_data, output_data in zip(inputs, outputs):
        result = forward_pass(sym, arg_params, aux_params, data_names,
                              input_data)
        actual_result.append(result)

    # verify the results
    for expected, actual in zip(expected_result, actual_result):
        npt.assert_equal(expected.shape, actual.shape)
        npt.assert_almost_equal(expected, actual, decimal=3)
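Like test_models in Example #1, this round-trip test is meant to be driven with a concrete model name and input shape, for instance as in the sketch below (the model name and shape are illustrative, not taken from the original test suite):
# illustrative invocation; the model name and input shape are assumptions
test_model_accuracy("bvlc_googlenet", (1, 3, 224, 224))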
Example #9
def test_nodims_import():
    # Download test model without dims mentioned in params
    test_model = download(test_model_path, dirname=CURR_PATH.__str__())
    input_data = np.array([0.2, 0.5])
    nd_data = mx.nd.array(input_data).expand_dims(0)
    sym, arg_params, aux_params = onnx_mxnet.import_model(test_model)
    model_metadata = onnx_mxnet.get_model_metadata(test_model)
    input_names = [inputs[0] for inputs in model_metadata.get('input_tensor_data')]
    output_data = forward_pass(sym, arg_params, aux_params, input_names, nd_data)
    assert output_data.shape == (1, 1)
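This snippet depends on module-level setup (CURR_PATH, test_model_path, and the download helper) that is not shown. A plausible version follows, with a placeholder URL since the real model location is not given here; the test input and output shapes imply a small model with a (1, 2) input and a (1, 1) output.
from pathlib import Path
from mxnet.test_utils import download

CURR_PATH = Path(__file__).resolve().parent
# placeholder URL (assumption): a small ONNX model whose params omit explicit dims
test_model_path = "https://example.com/test_nodims.onnx"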
Example #10
    def get_model_results(modelpath):
        symbol, args, aux = onnx_mxnet.import_model(modelpath)

        data = onnx_mxnet.get_model_metadata(modelpath)
        data_names = [input_name[0] for input_name in data.get('input_tensor_data')]

        result = []
        for input_data, output_data in zip(inputs, outputs):
            output = forward_pass(symbol, args, aux, data_names, input_data)
            result.append(output)
        return symbol, args, aux, result, data
def test_model_accuracy(model_name, input_shape):
    """ Imports ONNX model, runs inference, exports and imports back
        run inference, compare result with the previous inference result"""
    model_path, inputs, outputs = get_test_files(model_name)
    logging.info("Translating model from ONNX model zoo to Mxnet")
    sym, arg_params, aux_params = onnx_mxnet.import_model(model_path)

    metadata = onnx_mxnet.get_model_metadata(model_path)
    data_names = [input_name[0] for input_name in metadata.get('input_tensor_data')]

    expected_result = []
    for input_data, output_data in zip(inputs, outputs):
        result = forward_pass(sym, arg_params, aux_params, data_names, input_data)
        expected_result.append(result)

    params = {}
    params.update(arg_params)
    params.update(aux_params)

    dir_path = os.path.dirname(model_path)
    new_model_name = "exported_" + model_name + ".onnx"
    onnx_file = os.path.join(dir_path, new_model_name)

    logging.info("Translating converted model from mxnet to ONNX")
    converted_model_path = onnx_mxnet.export_model(sym, params, [input_shape], np.float32,
                                                   onnx_file)

    sym, arg_params, aux_params = onnx_mxnet.import_model(converted_model_path)

    metadata = onnx_mxnet.get_model_metadata(converted_model_path)
    data_names = [input_name[0] for input_name in metadata.get('input_tensor_data')]

    actual_result = []
    for input_data, output_data in zip(inputs, outputs):
        result = forward_pass(sym, arg_params, aux_params, data_names, input_data)
        actual_result.append(result)

    # verify the results
    for expected, actual in zip(expected_result, actual_result):
        npt.assert_equal(expected.shape, actual.shape)
        npt.assert_almost_equal(expected, actual, decimal=3)