def save_model(python_model,
               path='./model/',
               conda_env=None,
               dependencies=None,
               github=None,
               module_path=None,
               model_class=None):
    """
    Save a generic python model to a path on the local file system.

    :param python_model: Python model to be saved; must expose ``save(path)``
        and a ``predict`` method.

    :param path: Path to a directory saving model data.

    :param conda_env: Either a dictionary representation of a Conda environment or the path to a conda environment yaml file.

    :param dependencies: artifacts to be copied to model path.

    :param github: optional github info recorded in the model spec.

    :param module_path: optional module path recorded in the model spec.

    :param model_class: optional model class name recorded in the model spec.

    :raises RuntimeError: if the model has no ``predict`` method.
    """
    # None instead of a mutable [] default avoids sharing state across calls.
    if dependencies is None:
        dependencies = []

    if not path.endswith('/'):
        path += '/'
    # exist_ok avoids a race between the existence check and the creation.
    os.makedirs(path, exist_ok=True)

    # call model save function
    python_model.save(path)

    if conda_env is None:
        conda_env = _get_default_conda_env()
    utils.save_conda_env(path, conda_env)

    for dependency in dependencies:
        shutil.copy(dependency, path)

    # Without a default, getattr raises AttributeError and the None-check
    # below would be dead code; pass None so the check actually fires.
    func = getattr(python_model, 'predict', None)
    if func is None:
        raise RuntimeError('Cannot find predict function in model')

    # inspect.getargspec was removed in Python 3.11; getfullargspec
    # exposes the same .args attribute.
    args = inspect.getfullargspec(func).args
    if 'self' in args:
        args.remove('self')

    spec = utils.generate_default_model_spec(FLAVOR_NAME,
                                             MODEL_FILE_NAME,
                                             input_args=args)
    pySpec = spec[FLAVOR_NAME]
    if github is not None: pySpec["github"] = github
    if module_path is not None: pySpec["module_path"] = module_path
    if model_class is not None: pySpec["model_class"] = model_class
    utils._save_model_spec(path, spec)
    utils.generate_ilearner_files(path)  # temp solution, to remove later
# Example #2
# 0
def save_model(sess,
               input_tensor_list,
               output_tensor_list,
               graph_tags=None,
               signature_name=None,
               conda_env=None,
               path='./model/'):
    """
    Save a Tensorflow model to a path on the local file system.

    :param sess: Tensorflow session.

    :param input_tensor_list: list of input tensors.

    :param output_tensor_list: list of output tensors.

    :param graph_tags: list of graph tags (optional), if not specified, default its value would be [tf.saved_model.tag_constants.SERVING].

    :param signature_name: signature name (optional), if not specified, default its value would be 'signature_name'.

    :param conda_env: Either a dictionary representation of a Conda environment or the path to a conda environment yaml file (optional).

    :param path: Path to a directory containing model, spec, conda yaml data (optional).
    """
    if not path.endswith('/'):
        path += '/'
    # exist_ok avoids a race between the existence check and the creation.
    os.makedirs(path, exist_ok=True)

    # PEP 8: never compare to None with ==; truthiness covers both
    # None and an empty list, matching the original intent.
    if not graph_tags:
        graph_tags = [tf.saved_model.tag_constants.SERVING]

    if not signature_name:
        signature_name = 'signature_name'

    model_file_path = 'model'  # sub-directory containing the tensorflow model
    _save_model(os.path.join(path, model_file_path), sess, input_tensor_list,
                output_tensor_list, graph_tags, signature_name)

    if conda_env is None:
        conda_env = _get_default_conda_env()
    utils.save_conda_env(path, conda_env)

    _save_model_spec(path, model_file_path, graph_tags, signature_name)
    utils.generate_ilearner_files(path)  # temp solution, to remove later
# Example #3
# 0
def save_model(keras_model, path='./model/', conda_env=None):
    """
    Save a Keras model to a path on the local file system.

    :param keras_model: Keras model to be saved.

    :param path: Path to a directory containing model data.

    :param conda_env: Either a dictionary representation of a Conda environment or the path to a conda environment yaml file.
    """
    # Normalise the directory path and make sure it exists.
    path = path if path.endswith('/') else path + '/'
    if not os.path.exists(path):
        os.makedirs(path)

    # Delegate serialisation of the model itself to Keras.
    keras_model.save(os.path.join(path, model_file_name))

    # Fall back to the default environment when none was supplied.
    env = _get_default_conda_env() if conda_env is None else conda_env
    utils.save_conda_env(path, env)

    utils.save_model_spec(path, FLAVOR_NAME, model_file_name)
    utils.generate_ilearner_files(path)  # temp solution, to remove later
# Example #4
# 0
def save_model(pytorch_model,
               path='./model/',
               conda_env=None,
               dependencies=None):
    """
    Save a PyTorch model to a path on the local file system.

    :param pytorch_model: PyTorch model to be saved.

    :param path: Path to a directory containing model data.

    :param conda_env: Either a dictionary representation of a Conda environment or the path to a conda environment yaml file.

    :param dependencies: artifacts to be copied to the model path.
    """
    # None instead of a mutable [] default avoids sharing state across calls.
    if dependencies is None:
        dependencies = []

    if not path.endswith('/'):
        path += '/'
    # exist_ok avoids a race between the existence check and the creation.
    os.makedirs(path, exist_ok=True)

    # only save cpu version; compute the target file name once.
    model_file = os.path.join(path, MODEL_FILE_NAME)
    _save_model(pytorch_model.to('cpu'), model_file)

    if conda_env is None:
        conda_env = _get_default_conda_env()
    utils.save_conda_env(path, conda_env)

    for dependency in dependencies:
        shutil.copy(dependency, path)

    forward_func = getattr(pytorch_model, 'forward')
    # inspect.getargspec was removed in Python 3.11; getfullargspec
    # exposes the same .args attribute.
    args = inspect.getfullargspec(forward_func).args
    if 'self' in args:
        args.remove('self')

    utils.save_model_spec(path, FLAVOR_NAME, MODEL_FILE_NAME, input_args=args)
    utils.generate_ilearner_files(path)  # temp solution, to remove later