def test_unified_model(model_instance: UnifiedModel, data=None, conda_environment=False):
    """
    Helps to test whether your model instance can be successfully loaded in another python environment.
    This method saves the model instance, loads the model file in another python process,
    and (optionally) calls `predict()` with the provided test data.

    # Arguments
        model_instance (UnifiedModel): Unified model instance.
        data (string or bytes): Input data to test the model (optional).
        conda_environment (bool): If `True`, a clean conda environment will be created for the test (optional).
    """

    import sys
    import os
    import tempfile
    import subprocess
    import shutil

    log.info("Starting model test.")
    temp_test_folder = tempfile.mkdtemp()

    CONDA_ENV = "model-test-env"
    # Track whether the env was actually created so cleanup only runs when needed.
    conda_env_created = False

    try:
        saved_model_path = model_instance.save(
            os.path.join(temp_test_folder, "test_model"))

        python_runtime = sys.executable

        if conda_environment:
            log.info("Creating clean conda environment.")
            try:
                log.info(subprocess.check_output("conda create -n " + CONDA_ENV + " python=3.6 cython -y",
                                                 stderr=subprocess.STDOUT, shell=True).decode("utf-8"))
                conda_env_created = True
                log.info("Installing unified model.")
                log.info(
                    subprocess.check_output("/opt/conda/envs/"
                                            + CONDA_ENV
                                            + "/bin/pip install --upgrade unified-model",
                                            stderr=subprocess.STDOUT,
                                            shell=True).decode("utf-8"))

                python_runtime = "/opt/conda/envs/" + CONDA_ENV + "/bin/python"
            except subprocess.CalledProcessError as e:
                # Best-effort: fall back to the current interpreter on failure.
                log.info("Failed to create conda environment: \n" + e.output.decode("utf-8"))

        # SECURITY NOTE: the command is assembled as a shell string; `data`
        # containing quotes or shell metacharacters can break out of the
        # quoting. Only pass trusted test data here.
        test_command = python_runtime + " " + saved_model_path + ' predict'
        if data:
            test_command += ' --input-data "' + str(data) + '"'

        log.info("Executing " + test_command)

        try:
            log.info(subprocess.check_output(test_command, stderr=subprocess.STDOUT, shell=True).decode("utf-8"))
            log.info("Finished model test successfully!")
        except subprocess.CalledProcessError as e:
            log.info("Test failed: \n" + e.output.decode("utf-8"))
    finally:
        # BUG FIX: previously the temp folder leaked (and the conda env was
        # removed unconditionally) whenever save()/the test raised.
        shutil.rmtree(temp_test_folder, ignore_errors=True)
        if conda_env_created:
            log.info("Removing conda environment.")
            subprocess.call("conda remove --name " + CONDA_ENV + " --all -y", shell=True)
# Beispiel #2
    def _init_model(self, **kwargs):
        """Load the first-stage model and every second-stage model from files.

        `_install_requirements` may be missing on instances restored from an
        older serialized state, so it defaults to `False` here.
        """
        # Idiomatic replacement for the try/except AttributeError probe.
        self._install_requirements = getattr(self, "_install_requirements", False)

        self.second_stage_models = {}

        self.first_stage_model = UnifiedModel.load(
            self.get_file(self.first_stage_model_key),
            install_requirements=self._install_requirements)

        # One second-stage model per category key.
        self.second_stage_models = {
            category: UnifiedModel.load(
                self.get_file(key),
                install_requirements=self._install_requirements)
            for category, key in self.second_stage_model_keys.items()
        }
    def __setstate__(self, d):
        """Restore pickled state and rehydrate the wrapped unified model.

        The pickled dict carries the raw model bytes in `virtual_model_file`;
        they are spilled to a temp file so `UnifiedModel.load` can read a path,
        then dropped from the instance to free memory.
        """
        self.__dict__ = d

        # BUG FIX: use a context manager so the temp file is closed (and
        # deleted) even if UnifiedModel.load raises.
        with tempfile.NamedTemporaryFile() as temp_file:
            temp_file.write(self.virtual_model_file)
            temp_file.flush()
            os.fsync(temp_file.fileno())  # ensure bytes hit disk before load

            self.unified_model = UnifiedModel.load(temp_file.name)
            del self.virtual_model_file
def get_model(model_key: str = None) -> UnifiedModel:
    """
    Get the model instance for the given key.

    # Arguments
        model_key (string): Key of the model. If 'None', return the default model.

    # Returns
    Unified model instance.

    # Raises
        Exception: Model failed to load.
    """

    global loaded_resources
    global resource_loading
    global install_req

    if not model_key:
        if default_model is None:
            raise Exception(
                "Model key not provided and no default model is set.")
        return default_model

    # Fast path: already loaded.
    if model_key in loaded_resources:
        return loaded_resources[model_key]

    if model_key in resource_loading and resource_loading[model_key]:
        raise Exception("Model is currently loading. Please try again.")

    model_path = key_resolver(model_key)
    if not model_path:
        raise Exception("Model could not be loaded: " + model_key)

    if not os.path.exists(model_path):
        raise Exception("Could not find model at path: " + model_path)

    resource_loading[model_key] = True
    try:
        model_instance = UnifiedModel.load(model_path,
                                           install_requirements=install_req)
    finally:
        # BUG FIX: if load() raised, the flag previously stayed True forever,
        # permanently blocking this key with "currently loading".
        resource_loading[model_key] = False

    if model_instance is None:
        raise Exception("Failed to load model from path: " + model_path)

    loaded_resources[model_key] = model_instance
    return model_instance
# Beispiel #5
    def _init_model(self):
        """Load every model referenced by `self.model_keys` into `self.models`.

        `_install_requirements` may be missing on instances restored from an
        older serialized state, so it defaults to `False` here.
        """
        # Idiomatic replacement for the try/except AttributeError probe.
        self._install_requirements = getattr(self, "_install_requirements", False)

        self.models = []
        self.models = [
            UnifiedModel.load(
                self.get_file(model_key),
                install_requirements=self._install_requirements)
            for model_key in self.model_keys
        ]
def convert_to_pex(unified_model, output_file_path: str) -> str:
    """
    Convert the given unified model into an executable PEX file.

    # Arguments
        unified_model (UnifiedModel or str): Unified model instance or path to model file
        output_file_path (string): Path to save the model.

    # Returns
    Full path to the converted model

    # Raises
        Exception: No model could be found for the given path/instance.
    """
    # https://gist.github.com/simeonf/062af826e79259bc7686
    log.info("Start conversion to PEX")

    if isinstance(unified_model, UnifiedModel):
        pass
    elif os.path.exists(str(unified_model)):
        # load model instance
        unified_model = UnifiedModel.load(str(unified_model))
    else:
        raise Exception("Could not find model for: " + str(unified_model))

    from git import Repo
    from pex.bin import pex

    model_temp_folder = tempfile.mkdtemp()

    try:
        # Build a tiny installable package that embeds the saved model file
        # as package data and exposes the CLI as its entry point.
        model_instance_name = "model_instance"
        model_instance_folder = os.path.join(model_temp_folder,
                                             model_instance_name)
        os.makedirs(model_instance_folder)

        with open(os.path.join(model_instance_folder, "setup.py"),
                  "w") as text_file:
            text_file.write("from distutils.core import setup " +
                            "\nsetup(name='" + model_instance_name + "'," +
                            "\n\tpackages=['" + model_instance_name + "']," +
                            "\n\tversion='1.0'," + "\n\tpackage_data={'" +
                            model_instance_name + "': ['" + model_instance_name +
                            "']})")

        model_instance_package = os.path.join(model_instance_folder,
                                              model_instance_name)
        os.makedirs(model_instance_package)

        # __init__.py points the CLI at the bundled model file and runs it.
        with open(os.path.join(model_instance_package, "__init__.py"),
                  "w") as text_file:
            text_file.write(
                "import os, pkg_resources" +
                "\nfrom unified_model import cli_handler" +
                "\nos.environ[cli_handler.DEFAULT_MODEL_PATH_ENV] = pkg_resources.resource_filename(__name__, '"
                + model_instance_name + "')" + "\ncli_handler.cli()")

        with open(os.path.join(model_instance_package, "__main__.py"),
                  "w") as text_file:
            text_file.write("")

        unified_model.save(
            os.path.join(model_instance_package, model_instance_name))

        # The unified-model library itself is vendored from its git repo.
        lib_repo_folder = os.path.join(model_temp_folder, "unified-model")
        Repo.clone_from(UNIFIED_MODEL_REPO_URL, lib_repo_folder)

        parser, resolver_options_builder = pex.configure_clp()
        args = [lib_repo_folder, model_instance_folder, "--disable-cache"]
        # Only plain string requirements can be forwarded to pex.
        for req in unified_model._requirements:
            if isinstance(req, six.string_types):
                args.append(req)
        options, reqs = parser.parse_args(args=args)
        pex_builder = pex.build_pex(reqs, options, resolver_options_builder)
        pex_builder.set_entry_point(model_instance_name)
        pex_builder.build(output_file_path)
    finally:
        # BUG FIX: previously the scratch folder leaked whenever the clone
        # or the pex build raised.
        shutil.rmtree(model_temp_folder, ignore_errors=True)

    log.info("Conversion to PEX successful: " + output_file_path)
    return output_file_path
def convert_to_pipelineai(unified_model, output_path: str) -> str:
    """
    Convert the given unified model into a pipelineai model.

    # Arguments
        unified_model (UnifiedModel or str): Unified model instance or path to model file
        output_path (string): Path to save the model.

    # Returns
    Full path to the converted model
    """
    # https://github.com/PipelineAI/pipeline/tree/master/docs/quickstart/docker
    # https://github.com/PipelineAI/models/tree/master/scikit/mnist/model
    log.info("Start conversion to PipelineAI")

    # Accept either an already-loaded model or a filesystem path to one.
    if isinstance(unified_model, UnifiedModel):
        pass
    elif os.path.exists(str(unified_model)):
        # load model instance
        unified_model = UnifiedModel.load(str(unified_model))
    else:
        raise Exception("Could not find model for: " + str(unified_model))

    # Refuse to overwrite a non-empty output directory (best-effort guard).
    if os.path.isdir(output_path) and len(os.listdir(output_path)) > 0:
        log.warning("Aborting conversion. Output directory is not empty: " +
                    output_path)
        return output_path
    else:
        os.makedirs(output_path)

    # Save model into folder
    # NOTE(review): str(unified_model) is used as the on-disk filename —
    # presumably the model's display name; confirm it is filesystem-safe.
    unified_model_filename = str(unified_model)
    unified_model.save(os.path.join(output_path, unified_model_filename))

    # Required, but Empty is OK.
    with open(os.path.join(output_path, "pipeline_condarc"), "w") as text_file:
        text_file.write("")

    # Required, but Empty is OK.
    with open(os.path.join(output_path, "pipeline_modelserver.properties"),
              "w") as text_file:
        text_file.write("")

    # Required, but Empty is OK.
    # The model's optional setup script is forwarded verbatim.
    setup_script = ""
    if unified_model._setup_script:
        setup_script = unified_model._setup_script
    with open(os.path.join(output_path, "pipeline_setup.sh"),
              "w") as text_file:
        text_file.write(setup_script)

    # Collect plain string requirements as YAML list items for the conda env.
    requirement_string = ""
    for requirement in unified_model._requirements:
        if isinstance(requirement, six.string_types):
            requirement_string += "\n - " + str(requirement)

    # Conda environment spec: python>=3.6 plus the unified-model repo and
    # the model's own pip requirements.
    with open(os.path.join(output_path, "pipeline_conda_environment.yaml"),
              "w") as text_file:
        text_file.write("name: " +
                        str(unified_model).lower().replace(" ", "_") +
                        "\nchannels:\n- conda-forge\n- defaults" +
                        "\ndependencies: " + "\n- python>=3.6 " +
                        "\n- pip:\n - git+" + UNIFIED_MODEL_REPO_URL +
                        requirement_string)

    # Entry script PipelineAI invokes: loads the saved model next to it and
    # delegates each request to predict().
    with open(os.path.join(output_path, "pipeline_invoke_python.py"),
              "w") as text_file:
        text_file.write(
            "import os" + "\nfrom unified_model import UnifiedModel" +
            "\n\n_model = UnifiedModel.load(os.path.join(os.path.dirname(os.path.abspath(__file__)), '"
            + unified_model_filename + "'))" +
            "\n\ndef invoke(request):\n\treturn _model.predict(request)")

    log.info("Conversion to PipelineAI successful: " + output_path)
    return output_path
def convert_to_mlflow(unified_model, output_path: str) -> str:
    """
    Convert the given unified model into a mlflow model.

    # Arguments
        unified_model (UnifiedModel or str): Unified model instance or path to model file
        output_path (string): Path to save the model.

    # Returns
    Full path to the converted model

    # Raises
        Exception: No model could be found for the given path/instance.
    """
    # https://mlflow.org/docs/latest/models.html
    log.info("Start conversion to MLFlow")

    # Accept either an already-loaded model or a filesystem path to one.
    if isinstance(unified_model, UnifiedModel):
        pass
    elif os.path.exists(str(unified_model)):
        # load model instance
        unified_model = UnifiedModel.load(str(unified_model))
    else:
        raise Exception("Could not find model for: " + str(unified_model))

    # Refuse to overwrite a non-empty output directory (best-effort guard).
    if os.path.isdir(output_path) and len(os.listdir(output_path)) > 0:
        log.warning("Aborting conversion. Output directory is not empty: " +
                    output_path)
        return output_path
    else:
        os.makedirs(output_path)

    model_temp_folder = tempfile.mkdtemp()
    # Save model to temp
    model_zip_path = unified_model.save(
        os.path.join(model_temp_folder, str(unified_model)))

    # extract to output path (context manager guarantees the zip is closed)
    with zipfile.ZipFile(model_zip_path, 'r') as unified_model_zip:
        unified_model_zip.extractall(output_path)

    # unified_model_data: move everything except the code dir under the
    # MLflow "data" folder.
    UNIFIED_MODEL_FOLDER = "unified_model"
    unified_model_folder_path = os.path.join(output_path, UNIFIED_MODEL_FOLDER)
    if not os.path.isdir(unified_model_folder_path):
        os.makedirs(unified_model_folder_path)

    for f in os.listdir(output_path):
        if f != UnifiedModel._CODE_BASE_DIR:
            shutil.move(os.path.join(output_path, f),
                        unified_model_folder_path)

    # create necessary files
    CONDA_ENV = "conda.yml"
    LOADER_MODULE = "unified_model_loader"

    # MLmodel manifest wiring code/data/env/loader for the pyfunc flavor.
    with open(os.path.join(output_path, "MLmodel"), "w") as text_file:
        text_file.write("flavors:"
                        "\n  python_function:"
                        "\n    code: " + UnifiedModel._CODE_BASE_DIR +
                        "\n    data: " + UNIFIED_MODEL_FOLDER + "\n    env: " +
                        CONDA_ENV + "\n    loader_module: " + LOADER_MODULE)

    # Collect plain string requirements as YAML list items for the conda env.
    requirement_string = ""
    for requirement in unified_model._requirements:
        if isinstance(requirement, six.string_types):
            requirement_string += "\n - " + str(requirement)

    with open(os.path.join(output_path, CONDA_ENV), "w") as text_file:
        text_file.write("name: " +
                        str(unified_model).lower().replace(" ", "_") +
                        "\nchannels:\n- conda-forge\n- defaults" +
                        "\ndependencies: " + "\n- python>=3.6 " +
                        "\n- pip:\n - git+" + UNIFIED_MODEL_REPO_URL +
                        requirement_string)

    # BUG FIX: everything below was accidentally indented inside the
    # `with open(...)` block above; it belongs at function level.
    code_folder = os.path.join(output_path, UnifiedModel._CODE_BASE_DIR)

    if not os.path.isdir(code_folder):
        os.makedirs(code_folder)

    # TODO adapt script
    with open(os.path.join(code_folder, LOADER_MODULE + ".py"),
              "w") as text_file:
        text_file.write(
            "from unified_model import UnifiedModel\n\n" +
            "def load_pyfunc(path):\n\treturn UnifiedModel.load(path, install_requirements=False)\n"
        )

    shutil.rmtree(model_temp_folder)
    log.info("Conversion to MLFlow successful: " + output_path)
    return output_path