Пример #1
0
def _main():
    """Compile the bundled MNIST ONNX model into an OpenVINO model
    repository under ./model_repos and print the compiler's result."""
    here = os.path.dirname(__file__)
    # Build the compilation request; keys/values match the
    # model_compiler.compile_model request schema.
    request = dict(
        serving_type="openvino",
        input_model=os.path.join(here, "./model/mnist.onnx"),
        export_path=os.path.join(here, "model_repos"),
        input_names=["input.1"],
        input_formats=["channels_first"],
        output_names=["20"],
        input_signatures=["image"],
        output_signatures=["label"],
        model_name="mnist",
        job_id="mnist_pytorch",
        callback="",
        max_batch_size=128,
    )
    print(model_compiler.compile_model(request))
Пример #2
0
    def test_success_result(self):
        """Compiling a saved .h5 Keras model to 'tf' serving format
        succeeds and reports the <model_name>_<version>.zip path."""
        # Exercise both the standalone keras package and tf.keras.
        for keras_mod in (keras, tf.keras):
            with self.subTest(k=keras_mod), \
                    NamedTemporaryFile(suffix='.h5') as model_file, \
                    TemporaryDirectory() as target_dir:
                # Save a tiny one-layer model inside a fresh TF1 session.
                with tf.compat.v1.Session(graph=tf.Graph()):
                    model = keras_mod.Sequential(
                        [keras_mod.layers.Dense(units=4, input_shape=[8])])
                    model.save(model_file.name)

                result = model_compiler.compile_model({
                    'serving_type': 'tf',
                    'model_name': 'foobar',
                    'version': 4,
                    'max_batch_size': 7,
                    'input_model': model_file.name,
                    'input_signatures': ['x'],
                    'output_signatures': ['y'],
                    'export_path': target_dir,
                })

                expected = {
                    'status': 'success',
                    'path': path.join(target_dir, 'foobar_4.zip'),
                }
                self.assertEqual(result, expected)
Пример #3
0
    def test_error_result(self):
        """An empty request returns a failure result with an
        explanatory message instead of raising."""
        expected = {
            'status': 'failure',
            'error_msg': 'Unable to determine the source model type.',
        }
        self.assertEqual(model_compiler.compile_model({}), expected)
Пример #4
0
def compile_model(args):
    """
    Compile the model described by a serving-config JSON file.

    Args:
        args: Parsed CLI arguments providing ``test_model_path`` (the
            directory holding the model), ``serving_model_json`` (the
            request JSON file name inside that directory), and
            ``model_path`` (used only in the error message).

    Prints the compilation result, or an error message when the
    request file does not exist.
    """
    # Bind this BEFORE the try block: previously it was assigned inside
    # the `with` body, so a missing request file raised NameError while
    # formatting the error message instead of printing it.
    test_model_dir = args.test_model_path
    request_dir = os.path.join(test_model_dir, args.serving_model_json)
    try:
        with open(request_dir, 'r') as request_file:
            request = _get_request(request_file, test_model_dir)
            result = model_compiler.compile_model(request)
            print(result)
    except FileNotFoundError:
        print(f"Can not compile the model in {os.path.join(test_model_dir, args.model_path)}")
Пример #5
0
def _main():
    """Compile the bundled ResNet50 Keras (.h5) model to TFLite and
    print the compiler's result."""
    here = os.path.dirname(__file__)
    request = dict(
        serving_type="tflite",  # or openvino, tensorrt
        input_model=os.path.join(here, "./model/resnet50.h5"),
        export_path=os.path.join(here, "model_repos"),
        input_formats=["channels_last"],
        input_signatures=["image"],
        output_signatures=["label"],
        optimization=True,
        model_name="resnet50",
        max_batch_size=1,
    )
    print(model_compiler.compile_model(request))
Пример #6
0
def _main():
    """Compile the bundled MNIST Keras (.h5) model to TensorFlow
    serving format and print the compiler's result."""
    here = os.path.dirname(__file__)
    request = dict(
        serving_type="tf",  # or openvino, tensorrt
        input_model=os.path.join(here, "./model/mnist.h5"),
        export_path=os.path.join(here, "model_repos"),
        input_layer_names=["conv2d_1"],
        output_layer_names=["dense_2"],
        input_formats=["channels_last"],
        input_signatures=["image"],
        output_signatures=["label"],
        model_name="mnist",
        max_batch_size=128,
    )
    print(model_compiler.compile_model(request))
Пример #7
0
def _main():
    """Compile the bundled ResNet50 ONNX model (PaddlePaddle export)
    into an OpenVINO model repository and print the result."""
    here = os.path.dirname(__file__)
    request = dict(
        serving_type="openvino",
        input_model=os.path.join(here, "./model/resnet50.onnx"),
        export_path=os.path.join(here, "model_repos"),
        input_names=["image"],
        input_formats=["channels_first"],
        output_names=["save_infer_model/scale_0.tmp_0"],
        input_signatures=["image"],
        output_signatures=["label"],
        model_name="ResNet50_vd_ssld",
        job_id="ResNet50_PaddlePaddle",
        callback="",
        max_batch_size=128,
    )
    print(model_compiler.compile_model(request))