Code example #1
0
def model_service_worker(socket_patches):
    """Build a TorchModelServiceWorker test fixture bound to a unix socket.

    The worker's socket is replaced with the patched test socket and a
    placeholder Service is attached so handler code can run without a
    real model archive.
    """
    worker = TorchModelServiceWorker('unix', 'my-socket', None, None)
    worker.sock = socket_patches.socket
    worker.service = Service('name', 'mpath', 'testmanifest', None, 0, 1)
    return worker
Code example #2
0
    def load(self,
             model_name,
             model_dir,
             handler,
             gpu_id,
             batch_size,
             envelope=None):
        """
        Load TorchServe 1.0 model from file.

        :param model_name: name the model is registered under
        :param model_dir: directory containing the extracted model archive
        :param handler: handler spec ("module[:function]" or a default
            handler name)
        :param gpu_id: GPU device id to bind the model to
        :param batch_size: inference batch size recorded on the service
        :param envelope: optional name of a request envelope to wrap the
            entry point with
        :return: initialized Service instance
        :raises ValueError: if the handler module cannot be loaded
        """
        logging.debug("Loading model - working dir: %s", os.getcwd())
        # TODO: Request ID is not given. UUID is a temp UUID.
        metrics = MetricsStore(uuid.uuid4(), model_name)
        manifest_file = os.path.join(model_dir, "MAR-INF/MANIFEST.json")
        manifest = None
        if os.path.exists(manifest_file):
            with open(manifest_file) as f:
                manifest = json.load(f)

        function_name = None
        try:
            # Prefer the user-supplied handler file; fall back to a
            # bundled default handler if it cannot be imported.
            module, function_name = self._load_handler_file(handler)
        except ImportError:
            module = self._load_default_handler(handler)

        if module is None:
            # BUG FIX: the original formatted `module_name`, a name that is
            # never defined in this method, so this path raised NameError
            # instead of the intended ValueError. Report the handler spec.
            raise ValueError(
                "Unable to load module {}, make sure it is added to python path"
                .format(handler))

        envelope_class = None
        if envelope is not None:
            envelope_class = self._load_default_envelope(envelope)

        # A handler file may expose a plain function or a service class.
        function_name = function_name or "handle"
        if hasattr(module, function_name):
            entry_point, initialize_fn = self._get_function_entry_point(
                module, function_name)
        else:
            entry_point, initialize_fn = self._get_class_entry_point(module)

        if envelope_class is not None:
            # Wrap the raw entry point so requests/responses pass through
            # the envelope's handle().
            envelope_instance = envelope_class(entry_point)
            entry_point = envelope_instance.handle

        service = Service(model_name, model_dir, manifest, entry_point, gpu_id,
                          batch_size)
        service.context.metrics = metrics
        # Initialize the model at load time so it is ready for inference.
        initialize_fn(service.context)

        return service
Code example #3
0
def model_service_worker(socket_patches):
    """Build a platform-appropriate TorchModelServiceWorker test fixture.

    Windows has no unix-domain sockets, so a TCP worker is created there;
    every other platform gets a unix-socket worker. The worker's socket is
    replaced with the patched test socket and a placeholder Service is
    attached.
    """
    if sys.platform.startswith("win"):
        worker = TorchModelServiceWorker('tcp', 'my-socket', None, port_num=9999)
    else:
        worker = TorchModelServiceWorker('unix', 'my-socket', None, None)
    worker.sock = socket_patches.socket
    worker.service = Service('name', 'mpath', 'testmanifest', None, 0, 1)
    return worker
Code example #4
0
    def load(self, model_name, model_dir, handler, gpu_id, batch_size):
        """
        Load TorchServe 0.3 model from file.

        :param model_name: name the model is registered under
        :param model_dir: directory containing the extracted model archive
        :param handler: handler file name, with or without the ".py" suffix
        :param gpu_id: GPU device id to bind the model to
        :param batch_size: inference batch size recorded on the service
        :return: initialized Service instance
        :raises ValueError: if the handler module cannot be loaded or does
            not define a SingleNodeService subclass
        """
        manifest_file = os.path.join(model_dir, "MANIFEST.json")

        manifest = None
        if os.path.isfile(manifest_file):
            with open(manifest_file) as f:
                manifest = json.load(f)
        if not handler.endswith(".py"):
            handler = handler + ".py"

        service_file = os.path.join(model_dir, handler)
        name = os.path.splitext(os.path.basename(service_file))[0]
        # Load the handler source file directly from the model directory;
        # the import mechanism differs between Python 2 and 3.
        if sys.version_info[0] > 2:
            from importlib import util

            spec = util.spec_from_file_location(name, service_file)
            module = util.module_from_spec(spec)
            spec.loader.exec_module(module)
        else:
            import imp
            module = imp.load_source(name, service_file)

        if module is None:
            raise ValueError("Unable to load module {}".format(service_file))

        from ts.model_service.model_service import SingleNodeService
        from .utils.util import list_classes_from_module
        model_class_definitions = list_classes_from_module(
            module, SingleNodeService)
        # BUG FIX: indexing an empty list raised a bare IndexError when the
        # handler module defines no SingleNodeService subclass; fail with an
        # explicit error message instead.
        if not model_class_definitions:
            raise ValueError(
                "No SingleNodeService subclass found in module {}".format(
                    service_file))
        module_class = model_class_definitions[0]

        # Instantiate the user's service class and serve via its handle().
        module = module_class(model_name, model_dir, manifest, gpu_id)
        service = Service(model_name, model_dir, manifest, module.handle,
                          gpu_id, batch_size)

        # Initialize the model at load time so it is ready for inference.
        module.initialize(service.context)

        return service
Code example #5
0
    def load(self, model_name, model_dir, handler, gpu_id, batch_size):
        """
        Load TorchServe 1.0 model from file.

        :param model_name: name the model is registered under
        :param model_dir: directory containing the extracted model archive
        :param handler: handler spec, either "module[:function]" (custom
            handler) or the name of a default handler under ts.torch_handler
        :param gpu_id: GPU device id to bind the model to
        :param batch_size: inference batch size recorded on the service
        :return: initialized Service instance
        :raises ValueError: if the handler module cannot be loaded, defines
            more than one candidate class, or lacks handle/initialize
        """
        logging.debug("Loading model - working dir: %s", os.getcwd())
        # TODO: Request ID is not given. UUID is a temp UUID.
        metrics = MetricsStore(uuid.uuid4(), model_name)
        manifest_file = os.path.join(model_dir, "MAR-INF/MANIFEST.json")
        manifest = None
        if os.path.exists(manifest_file):
            with open(manifest_file) as f:
                manifest = json.load(f)

        try:
            # handler is "module[:function]"; strip a ".py" suffix and any
            # leading path components before importing the module by name.
            temp = handler.split(":", 1)
            module_name = temp[0]
            function_name = None if len(temp) == 1 else temp[1]
            if module_name.endswith(".py"):
                module_name = module_name[:-3]
            module_name = module_name.split("/")[-1]
            module = importlib.import_module(module_name)
            # pylint: disable=unused-variable
        except ImportError as e:
            # Fall back to a default handler shipped under ts.torch_handler.
            module_name = ".{0}".format(handler)
            module = importlib.import_module(module_name, 'ts.torch_handler')
            function_name = None

        if module is None:
            raise ValueError(
                "Unable to load module {}, make sure it is added to python path"
                .format(module_name))
        if function_name is None:
            function_name = "handle"
        if hasattr(module, function_name):
            # Function-style handler: the module exposes the entry point.
            entry_point = getattr(module, function_name)
            service = Service(model_name, model_dir, manifest, entry_point,
                              gpu_id, batch_size)

            service.context.metrics = metrics
            # initialize model at load time
            entry_point(None, service.context)
        else:
            # Class-style handler: expect exactly one candidate class.
            model_class_definitions = list_classes_from_module(module)
            if len(model_class_definitions) != 1:
                raise ValueError(
                    "Expected only one class in custom service code or a function entry point {}"
                    .format(model_class_definitions))

            model_class = model_class_definitions[0]
            model_service = model_class()
            # BUG FIX: getattr without a default raises AttributeError when
            # the attribute is missing, so the `is None` checks below were
            # unreachable; pass None as the default so the intended
            # ValueError is raised instead.
            handle = getattr(model_service, "handle", None)
            if handle is None:
                raise ValueError("Expect handle method in class {}".format(
                    str(model_class)))

            service = Service(model_name, model_dir, manifest,
                              model_service.handle, gpu_id, batch_size)
            initialize = getattr(model_service, "initialize", None)
            if initialize is not None:
                model_service.initialize(service.context)
            else:
                raise ValueError("Expect initialize method in class {}".format(
                    str(model_class)))

        return service