Example no. 1
    def test_load_func_model_with_error(self, patches):
        patches.mock_open.side_effect = [
            mock.mock_open(read_data=self.mock_manifest).return_value
        ]
        sys.path.append(os.path.abspath('ts/tests/unit_tests/test_utils/'))
        patches.os_path.return_value = True
        handler = 'dummy_func_model_service:wrong'
        model_loader = ModelLoaderFactory.get_model_loader()
        with pytest.raises(ValueError, match=r"Expected only one class .*"):
            model_loader.load(self.model_name, self.model_dir, handler, 0, 1)
Example no. 2
    def test_load_class_model(self, patches):
        patches.mock_open.side_effect = [
            mock.mock_open(read_data=self.mock_manifest).return_value
        ]
        sys.path.append(os.path.abspath('ts/tests/unit_tests/test_utils/'))
        patches.os_path.return_value = True
        handler = 'dummy_class_model_service'
        model_loader = ModelLoaderFactory.get_model_loader()
        service = model_loader.load(self.model_name, self.model_dir, handler,
                                    0, 1)

        assert inspect.ismethod(service._entry_point)
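
The fixture module dummy_class_model_service referenced above is not reproduced on this page; the test only tells us that the loader resolves the handler to a bound method. The following is a hypothetical sketch of a class-style handler, assuming the loader instantiates the module's single class and binds one of its methods as service._entry_point. The class name, method names, and signatures are assumptions, not the actual fixture.

# Hypothetical class-style handler module (e.g. dummy_class_model_service.py).
# Assumed layout only; the real test fixture may differ.

class DummyClassModelService:
    """Single handler class; the loader is expected to instantiate it."""

    def __init__(self):
        self.initialized = False

    def initialize(self, context):
        # Assumed one-time setup hook receiving the worker context.
        self.initialized = True

    def handle(self, data, context):
        # Assumed inference entry point; echoes the input unchanged.
        return data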
Example no. 3
    def load_model(load_model_request):
        """
        Expected command
        {
            "command" : "load", string
            "modelPath" : "/path/to/model/file", string
            "modelName" : "name", string
            "gpu" : None if CPU else gpu_id, int
            "handler" : service handler entry point if provided, string
            "envelope" : name of wrapper/unwrapper of request data if provided, string
            "batchSize" : batch size, int
            "limitMaxImagePixels": limit pillow image max_image_pixels, bool
        }

        :param load_model_request:
        :return:
        """
        try:
            model_dir = load_model_request["modelPath"].decode("utf-8")
            model_name = load_model_request["modelName"].decode("utf-8")
            handler = load_model_request["handler"].decode(
                "utf-8") if load_model_request["handler"] else None
            envelope = load_model_request["envelope"].decode(
                "utf-8") if "envelope" in load_model_request else None
            envelope = envelope if envelope is not None and len(
                envelope) > 0 else None

            batch_size = None
            if "batchSize" in load_model_request:
                batch_size = int(load_model_request["batchSize"])
            logging.info("model_name: %s, batchSize: %d", model_name,
                         batch_size)

            gpu = None
            if "gpu" in load_model_request:
                gpu = int(load_model_request["gpu"])

            limit_max_image_pixels = True
            if "limitMaxImagePixels" in load_model_request:
                limit_max_image_pixels = bool(
                    load_model_request["limitMaxImagePixels"])

            model_loader = ModelLoaderFactory.get_model_loader()
            service = model_loader.load(model_name, model_dir, handler, gpu,
                                        batch_size, envelope,
                                        limit_max_image_pixels)

            logging.debug("Model %s loaded.", model_name)

            return service, "loaded model {}".format(model_name), 200
        except MemoryError:
            return None, "System out of memory", 507
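
Per the "Expected command" docstring above, the string-valued fields arrive as bytes (note the repeated .decode("utf-8") calls). Below is a minimal sketch of assembling such a request and calling load_model; the model path, name, and handler are placeholder values, and load_model is shown as a plain function although in the source it belongs to the worker class.

# Hypothetical load request; all values below are placeholders.
load_model_request = {
    "command": b"load",
    "modelPath": b"/tmp/models/example-model",  # placeholder path
    "modelName": b"example-model",              # placeholder name
    "handler": b"example_handler:handle",       # optional module:function entry point
    "envelope": b"",                            # empty string is normalized to None above
    "batchSize": 1,
    # "gpu" omitted for a CPU worker; pass an integer gpu_id for a GPU worker
    "limitMaxImagePixels": True,
}

service, message, status_code = load_model(load_model_request)
print(status_code, message)  # 200, "loaded model example-model" on success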
Example no. 4
    def test_load_func_model(self, patches):
        patches.mock_open.side_effect = [
            mock.mock_open(read_data=self.mock_manifest).return_value
        ]
        sys.path.append(os.path.abspath('ts/tests/unit_tests/test_utils/'))
        patches.os_path.return_value = True
        handler = 'dummy_func_model_service:infer'
        model_loader = ModelLoaderFactory.get_model_loader()
        service = model_loader.load(self.model_name, self.model_dir, handler,
                                    0, 1)

        assert isinstance(service._entry_point, types.FunctionType)
        assert service._entry_point.__name__ == 'infer'
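
Here the handler string 'dummy_func_model_service:infer' names a module and a function inside it, and the assertions confirm that the loader resolves it to a plain function called infer. A hypothetical sketch of such a function-style handler module follows; the signature is an assumption, and the actual fixture is not shown on this page.

# Hypothetical function-style handler module (e.g. dummy_func_model_service.py).
# Assumed signature only; the real test fixture may differ.

def infer(data, context):
    # Module-level function resolved from the 'module:function' handler string
    # and used directly as service._entry_point.
    return data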
Example no. 5
    def test_load_model_legacy(self, patches):
        patches.mock_open.side_effect = [
            mock.mock_open(read_data=self.mock_manifest).return_value
        ]
        patches.open_signature.side_effect = [
            mock.mock_open(read_data='{}').return_value
        ]
        patches.is_file.return_value = True
        patches.os_path.side_effect = [False, True]
        sys.path.append(self.model_dir)
        handler = 'dummy_model_service'
        model_loader = ModelLoaderFactory.get_model_loader(self.model_dir)
        assert isinstance(model_loader, LegacyModelLoader)
        service = model_loader.load(self.model_name, self.model_dir, handler,
                                    0, 1)

        assert inspect.ismethod(service._entry_point)
Example no. 6
    def load_model(load_model_request):
        """
        Expected command
        {
            "command" : "load", string
            "modelPath" : "/path/to/model/file", string
            "modelName" : "name", string
            "gpu" : None if CPU else gpu_id, int
            "handler" : service handler entry point if provided, string
            "batchSize" : batch size, int
        }

        :param load_model_request:
        :return:
        """
        try:
            model_dir = load_model_request["modelPath"].decode("utf-8")
            model_name = load_model_request["modelName"].decode("utf-8")
            handler = load_model_request["handler"].decode(
                "utf-8") if load_model_request["handler"] else None
            batch_size = None
            if "batchSize" in load_model_request:
                batch_size = int(load_model_request["batchSize"])

            gpu = None
            if "gpu" in load_model_request:
                gpu = int(load_model_request["gpu"])

            model_loader = ModelLoaderFactory.get_model_loader()
            service = model_loader.load(model_name, model_dir, handler, gpu,
                                        batch_size)

            logging.debug("Model %s loaded.", model_name)

            return service, "loaded model {}".format(model_name), 200
        except MemoryError:
            return None, "System out of memory", 507
Example no. 7
    def test_model_loader_factory(self):
        model_loader = ModelLoaderFactory.get_model_loader(
            os.path.abspath('ts/tests/unit_tests/test_utils/'))

        assert isinstance(model_loader, TsModelLoader)
Example no. 8
    def test_model_loader_factory_legacy(self):
        model_loader = ModelLoaderFactory.get_model_loader(
            os.path.abspath('ts/tests/unit_tests/model_service/dummy_model'))

        assert isinstance(model_loader, LegacyModelLoader)
Example no. 9
    def test_model_loader_factory(self):
        model_loader = ModelLoaderFactory.get_model_loader()

        assert isinstance(model_loader, TsModelLoader)