def main(args):
    parameters = parse_parameters(json.loads(args.parameters))
    model_uri = parameters["model_uri"]

    # TODO: Cache downloaded model for MLFlowServer.py
    log.info(f"Downloading model from {model_uri}")
    model_folder = Storage.download(model_uri)
    setup_env(model_folder)
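
For context, a hypothetical entrypoint sketch showing how main() above might be invoked; the --parameters flag and the Seldon-style name/value/type JSON payload are assumptions for illustration, not taken from the original module.

# Hypothetical wiring (flag name and payload shape are assumptions): the model
# URI arrives as a JSON-encoded parameter list and is passed through to main().
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--parameters",
        type=str,
        default='[{"name": "model_uri", "value": "gs://my-bucket/mlflow-model", "type": "STRING"}]',
        help="JSON-encoded list of parameters consumed by parse_parameters()",
    )
    main(parser.parse_args())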
Example #2
def load(self):
    logger.info(f"Downloading model from {self.model_uri}")
    model_folder = Storage.download(self.model_uri)
    self._model = pyfunc.load_model(model_folder)
    self.ready = True
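
Example #2 looks like the load() hook of a KFServing-style model server. Below is a minimal sketch of a class that could surround it; the MLFlowModel class name, the kfserving.KFModel base class, and the "instances" request format are assumptions for illustration, not the original source.

# Sketch of a surrounding model-server class (a plausible reconstruction):
# the model URI is injected at construction time and predict() delegates to
# the MLflow pyfunc model loaded by load() above.
import logging

import kfserving
import pandas as pd

logger = logging.getLogger(__name__)


class MLFlowModel(kfserving.KFModel):
    def __init__(self, name: str, model_uri: str):
        super().__init__(name)
        self.model_uri = model_uri
        self.ready = False

    # load() as shown above would sit here: it downloads the model folder
    # with Storage.download and wraps it with mlflow's pyfunc.load_model.

    def predict(self, request):
        # MLflow pyfunc models accept a pandas DataFrame; the output is
        # assumed here to be an ndarray or Series (hence .tolist()).
        inputs = pd.DataFrame(request["instances"])
        predictions = self._model.predict(inputs)
        return {"predictions": predictions.tolist()}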
Example #3
def load(self):
    model_file = os.path.join(Storage.download(self.model_uri),
                              MODEL_FILE_NAME)
    self._model = torch.load(model_file)
    self.ready = True
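
Example #3 deserializes a full PyTorch model object with torch.load, so the model's class must be importable at load time. A brief, hypothetical companion predict() method is sketched below; the request shape and float32 input dtype are assumptions.

import torch

def predict(self, request):
    # Companion method sketch for the class that owns load() above
    # (hypothetical request format): run the deserialized nn.Module in
    # eval mode without gradient tracking.
    self._model.eval()
    inputs = torch.tensor(request["instances"], dtype=torch.float32)
    with torch.no_grad():
        outputs = self._model(inputs)
    return {"predictions": outputs.tolist()}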