Example 1
def test_model_load_from_remote_uri_succeeds(sklearn_knn_model,
                                             main_scoped_model_class, tmpdir,
                                             mock_s3_bucket, iris_data):
    artifact_root = "s3://{bucket_name}".format(bucket_name=mock_s3_bucket)
    artifact_repo = S3ArtifactRepository(artifact_root)

    sklearn_model_path = os.path.join(str(tmpdir), "sklearn_model")
    mlflow.sklearn.save_model(sk_model=sklearn_knn_model,
                              path=sklearn_model_path)
    sklearn_artifact_path = "sk_model"
    artifact_repo.log_artifacts(sklearn_model_path,
                                artifact_path=sklearn_artifact_path)

    def test_predict(sk_model, model_input):
        return sk_model.predict(model_input) * 2

    pyfunc_model_path = os.path.join(str(tmpdir), "pyfunc_model")
    mlflow.pyfunc.save_model(
        path=pyfunc_model_path,
        artifacts={"sk_model": sklearn_model_path},
        python_model=main_scoped_model_class(test_predict))

    pyfunc_artifact_path = "pyfunc_model"
    artifact_repo.log_artifacts(pyfunc_model_path,
                                artifact_path=pyfunc_artifact_path)

    model_uri = artifact_root + "/" + pyfunc_artifact_path
    loaded_pyfunc_model = mlflow.pyfunc.load_pyfunc(model_uri=model_uri)
    np.testing.assert_array_equal(
        loaded_pyfunc_model.predict(model_input=iris_data[0]),
        test_predict(sk_model=sklearn_knn_model, model_input=iris_data[0]))
Example 2
def test_load_model_from_remote_uri_succeeds(saved_tf_iris_model, model_path,
                                             mock_s3_bucket):
    mlflow.tensorflow.save_model(
        tf_saved_model_dir=saved_tf_iris_model.path,
        tf_meta_graph_tags=saved_tf_iris_model.meta_graph_tags,
        tf_signature_def_key=saved_tf_iris_model.signature_def_key,
        path=model_path)

    artifact_root = "s3://{bucket_name}".format(bucket_name=mock_s3_bucket)
    artifact_path = "model"
    artifact_repo = S3ArtifactRepository(artifact_root)
    artifact_repo.log_artifacts(model_path, artifact_path=artifact_path)

    model_uri = artifact_root + "/" + artifact_path
    tf_graph = tf.Graph()
    tf_sess = tf.Session(graph=tf_graph)
    with tf_graph.as_default():
        signature_def = mlflow.tensorflow.load_model(model_uri=model_uri,
                                                     tf_sess=tf_sess)

        for _, input_signature in signature_def.inputs.items():
            t_input = tf_graph.get_tensor_by_name(input_signature.name)
            assert t_input is not None

        for _, output_signature in signature_def.outputs.items():
            t_output = tf_graph.get_tensor_by_name(output_signature.name)
            assert t_output is not None
Example 3
def from_artifact_uri(artifact_uri, store):
    """
    Given an artifact URI for an Experiment Run (e.g., /local/file/path or s3://my/bucket),
    returns an ArtifactRepository instance capable of logging and downloading artifacts
    on behalf of this URI.
    :param store: An instance of AbstractStore in which the artifacts are registered.
    """
    if artifact_uri.startswith("s3:/"):
        # Import these locally to avoid creating a circular import loop
        from mlflow.store.s3_artifact_repo import S3ArtifactRepository
        return S3ArtifactRepository(artifact_uri)
    elif artifact_uri.startswith("gs:/"):
        from mlflow.store.gcs_artifact_repo import GCSArtifactRepository
        return GCSArtifactRepository(artifact_uri)
    elif artifact_uri.startswith("wasbs:/"):
        from mlflow.store.azure_blob_artifact_repo import AzureBlobArtifactRepository
        return AzureBlobArtifactRepository(artifact_uri)
    elif artifact_uri.startswith("sftp:/"):
        from mlflow.store.sftp_artifact_repo import SFTPArtifactRepository
        return SFTPArtifactRepository(artifact_uri)
    elif artifact_uri.startswith("dbfs:/"):
        from mlflow.store.dbfs_artifact_repo import DbfsArtifactRepository
        if not isinstance(store, DatabricksStore):
            raise MlflowException(
                '`store` must be an instance of DatabricksStore.')
        return DbfsArtifactRepository(artifact_uri,
                                      store.http_request_kwargs)
    else:
        from mlflow.store.local_artifact_repo import LocalArtifactRepository
        return LocalArtifactRepository(artifact_uri)
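
A minimal usage sketch (the URI and local paths are hypothetical; `store` is only consulted for dbfs:/ URIs, so None suffices for the other schemes):

# Scheme-based dispatch: an s3:// URI yields an S3ArtifactRepository.
repo = from_artifact_uri("s3://my-bucket/experiment-artifacts", store=None)
# The returned object exposes the common ArtifactRepository interface.
repo.log_artifacts("/tmp/exported_model", artifact_path="model")
local_copy = repo.download_artifacts("model")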
Example 4
def test_cli_build_image_with_remote_uri_calls_expected_azure_routines(
        sklearn_model, model_path):
    mlflow.sklearn.save_model(sk_model=sklearn_model, path=model_path)
    artifact_path = "model"
    s3_artifact_repo = S3ArtifactRepository("s3://test_bucket")
    s3_artifact_repo.log_artifacts(model_path, artifact_path=artifact_path)
    model_uri = "s3://test_bucket/{artifact_path}".format(
        artifact_path=artifact_path)

    with AzureMLMocks() as aml_mocks:
        result = CliRunner(env={
            "LC_ALL": "en_US.UTF-8",
            "LANG": "en_US.UTF-8"
        }).invoke(mlflow.azureml.cli.commands, [
            'build-image',
            '-m',
            model_uri,
            '-w',
            'test_workspace',
            '-i',
            'image_name',
            '-n',
            'model_name',
        ])
        assert result.exit_code == 0

        assert aml_mocks["register_model"].call_count == 1
        assert aml_mocks["create_image"].call_count == 1
        assert aml_mocks["load_workspace"].call_count == 1
Example 5
def test_deploy_cli_creates_sagemaker_and_s3_resources_with_expected_names_from_s3(
        pretrained_model, sagemaker_client):
    local_model_path = _download_artifact_from_uri(pretrained_model.model_uri)
    artifact_path = "model"
    region_name = sagemaker_client.meta.region_name
    default_bucket = mfs._get_default_s3_bucket(region_name)
    s3_artifact_repo = S3ArtifactRepository('s3://{}'.format(default_bucket))
    s3_artifact_repo.log_artifacts(local_model_path,
                                   artifact_path=artifact_path)
    model_s3_uri = 's3://{bucket_name}/{artifact_path}'.format(
        bucket_name=default_bucket, artifact_path=artifact_path)

    app_name = "test-app"
    result = CliRunner(env={
        "LC_ALL": "en_US.UTF-8",
        "LANG": "en_US.UTF-8"
    }).invoke(mfscli.commands, [
        'deploy',
        '-a',
        app_name,
        '-m',
        model_s3_uri,
        '--mode',
        mfs.DEPLOYMENT_MODE_CREATE,
    ])
    assert result.exit_code == 0

    s3_client = boto3.client("s3", region_name=region_name)
    endpoint_description = sagemaker_client.describe_endpoint(
        EndpointName=app_name)
    endpoint_production_variants = endpoint_description["ProductionVariants"]
    assert len(endpoint_production_variants) == 1
    model_name = endpoint_production_variants[0]["VariantName"]
    assert model_name in [
        model["ModelName"]
        for model in sagemaker_client.list_models()["Models"]
    ]
    object_names = [
        entry["Key"]
        for entry in s3_client.list_objects(Bucket=default_bucket)["Contents"]
    ]
    assert any([model_name in object_name for object_name in object_names])
    assert any([
        app_name in config["EndpointConfigName"] for config in
        sagemaker_client.list_endpoint_configs()["EndpointConfigs"]
    ])
    assert app_name in [
        endpoint["EndpointName"]
        for endpoint in sagemaker_client.list_endpoints()["Endpoints"]
    ]
Example 6
def test_model_load_from_remote_uri_succeeds(model, model_path, mock_s3_bucket,
                                             data, predicted):
    x, _ = data
    mlflow.keras.save_model(model, model_path)

    artifact_root = "s3://{bucket_name}".format(bucket_name=mock_s3_bucket)
    artifact_path = "model"
    artifact_repo = S3ArtifactRepository(artifact_root)
    artifact_repo.log_artifacts(model_path, artifact_path=artifact_path)

    model_uri = artifact_root + "/" + artifact_path
    model_loaded = mlflow.keras.load_model(model_uri=model_uri)
    assert all(model_loaded.predict(x) == predicted)
Example 7
def test_sparkml_model_load_from_remote_uri_succeeds(spark_model_iris, model_path, mock_s3_bucket):
    sparkm.save_model(spark_model=spark_model_iris.model, path=model_path)

    artifact_root = "s3://{bucket_name}".format(bucket_name=mock_s3_bucket)
    artifact_path = "model"
    artifact_repo = S3ArtifactRepository(artifact_root)
    artifact_repo.log_artifacts(model_path, artifact_path=artifact_path)

    model_uri = artifact_root + "/" + artifact_path
    reloaded_model = sparkm.load_model(model_uri=model_uri)
    preds_df = reloaded_model.transform(spark_model_iris.spark_df)
    preds = [x.prediction for x in preds_df.select("prediction").collect()]
    assert spark_model_iris.predictions == preds
Example 8
def test_load_model_from_remote_uri_succeeds(sequential_model, model_path,
                                             mock_s3_bucket, data,
                                             sequential_predicted):
    mlflow.pytorch.save_model(sequential_model, model_path)

    artifact_root = "s3://{bucket_name}".format(bucket_name=mock_s3_bucket)
    artifact_path = "model"
    artifact_repo = S3ArtifactRepository(artifact_root)
    artifact_repo.log_artifacts(model_path, artifact_path=artifact_path)

    model_uri = artifact_root + "/" + artifact_path
    sequential_model_loaded = mlflow.pytorch.load_model(model_uri=model_uri)
    np.testing.assert_array_equal(_predict(sequential_model_loaded, data),
                                  sequential_predicted)
Example 9
def test_model_load_from_remote_uri_succeeds(sklearn_knn_model, model_path,
                                             mock_s3_bucket):
    mlflow.sklearn.save_model(sk_model=sklearn_knn_model.model,
                              path=model_path)

    artifact_root = "s3://{bucket_name}".format(bucket_name=mock_s3_bucket)
    artifact_path = "model"
    artifact_repo = S3ArtifactRepository(artifact_root)
    artifact_repo.log_artifacts(model_path, artifact_path=artifact_path)

    model_uri = artifact_root + "/" + artifact_path
    reloaded_knn_model = mlflow.sklearn.load_model(model_uri=model_uri)
    np.testing.assert_array_equal(
        sklearn_knn_model.model.predict(sklearn_knn_model.inference_data),
        reloaded_knn_model.predict(sklearn_knn_model.inference_data))
Example 10
def test_build_image_with_remote_uri_calls_expected_azure_routines(
        sklearn_model, model_path):
    mlflow.sklearn.save_model(sk_model=sklearn_model, path=model_path)
    artifact_path = "model"
    s3_artifact_repo = S3ArtifactRepository("s3://test_bucket")
    s3_artifact_repo.log_artifacts(model_path, artifact_path=artifact_path)
    model_uri = "s3://test_bucket/{artifact_path}".format(
        artifact_path=artifact_path)

    with AzureMLMocks() as aml_mocks:
        workspace = get_azure_workspace()
        mlflow.azureml.build_image(model_uri=model_uri, workspace=workspace)

        assert aml_mocks["register_model"].call_count == 1
        assert aml_mocks["create_image"].call_count == 1
Example 11
def from_artifact_uri(artifact_uri):
    """
    Given an artifact URI for an Experiment Run (e.g., /local/file/path or s3://my/bucket),
    returns an ArtifactRepository instance capable of logging and downloading artifacts
    on behalf of this URI.
    """
    if artifact_uri.startswith("s3:/"):
        # Import these locally to avoid creating a circular import loop
        from mlflow.store.s3_artifact_repo import S3ArtifactRepository
        return S3ArtifactRepository(artifact_uri)
    elif artifact_uri.startswith("gs:/"):
        from mlflow.store.gcs_artifact_repo import GCSArtifactRepository
        return GCSArtifactRepository(artifact_uri)
    elif artifact_uri.startswith("wasbs:/"):
        from mlflow.store.azure_blob_artifact_repo import AzureBlobArtifactRepository
        return AzureBlobArtifactRepository(artifact_uri)
    else:
        from mlflow.store.local_artifact_repo import LocalArtifactRepository
        return LocalArtifactRepository(artifact_uri)
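
For this earlier, store-free signature, a brief hypothetical call; a plain filesystem path falls through to LocalArtifactRepository:

# No recognized scheme prefix, so the local repository handles the path.
repo = from_artifact_uri("/tmp/mlruns/0/run_id/artifacts")
repo.log_artifacts("/tmp/exported_model", artifact_path="model")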
Example 12
def test_load_model_from_remote_uri_succeeds(saved_tf_iris_model, model_path, mock_s3_bucket):
    mlflow.tensorflow.save_model(tf_saved_model_dir=saved_tf_iris_model.path,
                                 tf_meta_graph_tags=saved_tf_iris_model.meta_graph_tags,
                                 tf_signature_def_key=saved_tf_iris_model.signature_def_key,
                                 path=model_path)

    artifact_root = "s3://{bucket_name}".format(bucket_name=mock_s3_bucket)
    artifact_path = "model"
    artifact_repo = S3ArtifactRepository(artifact_root)
    artifact_repo.log_artifacts(model_path, artifact_path=artifact_path)

    model_uri = artifact_root + "/" + artifact_path
    infer = mlflow.tensorflow.load_model(model_uri=model_uri)
    feed_dict = {
        df_column_name: tf.constant(
            saved_tf_iris_model.inference_df[df_column_name])
        for df_column_name in list(saved_tf_iris_model.inference_df)
    }
    raw_preds = infer(**feed_dict)
    pred_dict = {column_name: raw_preds[column_name].numpy() for column_name in raw_preds.keys()}
    for col in pred_dict:
        assert np.allclose(np.array(pred_dict[col], dtype=float),
                           np.array(saved_tf_iris_model.raw_results[col], dtype=float))