def test_autologging_dedups_multiple_reads_of_same_datasource(
        spark_session, format_to_file_path):
    kiwi.spark.autolog()
    data_format = list(format_to_file_path.keys())[0]
    file_path = format_to_file_path[data_format]
    df = (spark_session.read.format(data_format)
          .option("header", "true")
          .option("inferSchema", "true")
          .load(file_path))
    with kiwi.start_run():
        run_id = kiwi.active_run().info.run_id
        df.collect()
        df.filter("number1 > 0").collect()
        df.limit(2).collect()
        df.collect()
        time.sleep(1)
    run = kiwi.get_run(run_id)
    _assert_spark_data_logged(run=run, path=file_path, data_format=data_format)

    # Test context provider flow
    df.filter("number1 > 0").collect()
    df.limit(2).collect()
    df.collect()
    with kiwi.start_run():
        run_id2 = kiwi.active_run().info.run_id
    time.sleep(1)
    run2 = kiwi.get_run(run_id2)
    _assert_spark_data_logged(run=run2, path=file_path, data_format=data_format)

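# The spark autologging tests in this file call an `_assert_spark_data_logged`
# helper that this excerpt does not define. A minimal sketch of what it could
# check, assuming autologging records the datasource path and format in a run
# tag (the tag key below is an assumption, not confirmed by this excerpt):
def _assert_spark_data_logged(run, path, data_format):
    # Hypothetical tag key; the real helper may compare an exact expected row.
    table_info_tag = run.data.tags.get("sparkDatasourceInfo", "")
    assert path in table_info_tag
    assert data_format in table_info_tag
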
def test_pyfunc_model_log_load_no_active_run(sklearn_knn_model, main_scoped_model_class,
                                             iris_data):
    sklearn_artifact_path = "sk_model_no_run"
    with kiwi.start_run():
        kiwi.sklearn.log_model(sk_model=sklearn_knn_model,
                               artifact_path=sklearn_artifact_path)
        sklearn_model_uri = "runs:/{run_id}/{artifact_path}".format(
            run_id=kiwi.active_run().info.run_id, artifact_path=sklearn_artifact_path)

    def test_predict(sk_model, model_input):
        return sk_model.predict(model_input) * 2

    pyfunc_artifact_path = "pyfunc_model"
    assert kiwi.active_run() is None
    kiwi.pyfunc.log_model(artifact_path=pyfunc_artifact_path,
                          artifacts={"sk_model": sklearn_model_uri},
                          python_model=main_scoped_model_class(test_predict))
    pyfunc_model_uri = "runs:/{run_id}/{artifact_path}".format(
        run_id=kiwi.active_run().info.run_id, artifact_path=pyfunc_artifact_path)
    loaded_pyfunc_model = kiwi.pyfunc.load_pyfunc(model_uri=pyfunc_model_uri)
    np.testing.assert_array_equal(
        loaded_pyfunc_model.predict(iris_data[0]),
        test_predict(sk_model=sklearn_knn_model, model_input=iris_data[0]))
    kiwi.end_run()

def test_log_model_calls_register_model(sklearn_knn_model, main_scoped_model_class):
    # Patch register_model on the `kiwi` package itself so that the
    # `kiwi.register_model` assertion below observes the mock.
    register_model_patch = mock.patch("kiwi.register_model")
    with register_model_patch:
        sklearn_artifact_path = "sk_model_no_run"
        with kiwi.start_run():
            kiwi.sklearn.log_model(sk_model=sklearn_knn_model,
                                   artifact_path=sklearn_artifact_path)
            sklearn_model_uri = "runs:/{run_id}/{artifact_path}".format(
                run_id=kiwi.active_run().info.run_id,
                artifact_path=sklearn_artifact_path)

        def test_predict(sk_model, model_input):
            return sk_model.predict(model_input) * 2

        pyfunc_artifact_path = "pyfunc_model"
        assert kiwi.active_run() is None
        kiwi.pyfunc.log_model(
            artifact_path=pyfunc_artifact_path,
            artifacts={"sk_model": sklearn_model_uri},
            python_model=main_scoped_model_class(test_predict),
            registered_model_name="AdsModel1")
        model_uri = "runs:/{run_id}/{artifact_path}".format(
            run_id=kiwi.active_run().info.run_id, artifact_path=pyfunc_artifact_path)
        kiwi.register_model.assert_called_once_with(model_uri, "AdsModel1")
        kiwi.end_run()

def test_log_model_without_specified_conda_env_uses_default_env_with_expected_dependencies(
        sklearn_knn_model, main_scoped_model_class):
    sklearn_artifact_path = "sk_model"
    with kiwi.start_run():
        kiwi.sklearn.log_model(sk_model=sklearn_knn_model,
                               artifact_path=sklearn_artifact_path)
        sklearn_run_id = kiwi.active_run().info.run_id

    pyfunc_artifact_path = "pyfunc_model"
    with kiwi.start_run():
        kiwi.pyfunc.log_model(
            artifact_path=pyfunc_artifact_path,
            artifacts={
                "sk_model": utils_get_artifact_uri(
                    artifact_path=sklearn_artifact_path, run_id=sklearn_run_id)
            },
            python_model=main_scoped_model_class(predict_fn=None))
        pyfunc_model_path = _download_artifact_from_uri(
            "runs:/{run_id}/{artifact_path}".format(
                run_id=kiwi.active_run().info.run_id,
                artifact_path=pyfunc_artifact_path))

    pyfunc_conf = _get_flavor_configuration(
        model_path=pyfunc_model_path, flavor_name=kiwi.pyfunc.FLAVOR_NAME)
    conda_env_path = os.path.join(pyfunc_model_path, pyfunc_conf[kiwi.pyfunc.ENV])
    with open(conda_env_path, "r") as f:
        conda_env = yaml.safe_load(f)

    assert conda_env == kiwi.pyfunc.model.get_default_conda_env()

def test_model_log_load(sklearn_knn_model, main_scoped_model_class, iris_data):
    sklearn_artifact_path = "sk_model"
    with kiwi.start_run():
        kiwi.sklearn.log_model(sk_model=sklearn_knn_model,
                               artifact_path=sklearn_artifact_path)
        sklearn_model_uri = "runs:/{run_id}/{artifact_path}".format(
            run_id=kiwi.active_run().info.run_id, artifact_path=sklearn_artifact_path)

    def test_predict(sk_model, model_input):
        return sk_model.predict(model_input) * 2

    pyfunc_artifact_path = "pyfunc_model"
    with kiwi.start_run():
        kiwi.pyfunc.log_model(
            artifact_path=pyfunc_artifact_path,
            artifacts={"sk_model": sklearn_model_uri},
            python_model=main_scoped_model_class(test_predict))
        pyfunc_model_uri = "runs:/{run_id}/{artifact_path}".format(
            run_id=kiwi.active_run().info.run_id, artifact_path=pyfunc_artifact_path)
        pyfunc_model_path = _download_artifact_from_uri(pyfunc_model_uri)

    model_config = Model.load(os.path.join(pyfunc_model_path, "MLmodel"))
    loaded_pyfunc_model = kiwi.pyfunc.load_pyfunc(model_uri=pyfunc_model_uri)
    assert model_config.to_yaml() == loaded_pyfunc_model.metadata.to_yaml()
    np.testing.assert_array_equal(
        loaded_pyfunc_model.predict(iris_data[0]),
        test_predict(sk_model=sklearn_knn_model, model_input=iris_data[0]))

def test_with_startrun():
    run_id = None
    t0 = int(time.time() * 1000)
    with kiwi.start_run() as active_run:
        assert kiwi.active_run() == active_run
        run_id = active_run.info.run_id
    t1 = int(time.time() * 1000)
    run_info = kiwi.tracking._get_store().get_run(run_id).info
    assert run_info.status == "FINISHED"
    assert t0 <= run_info.end_time <= t1
    assert kiwi.active_run() is None

def test_spark_autologging_with_keras_autologging(spark_session, data_format, file_path):
    assert kiwi.active_run() is None
    kiwi.spark.autolog()
    kiwi.keras.autolog()

    df = (spark_session.read.format(data_format)
          .option("header", "true")
          .option("inferSchema", "true")
          .load(file_path)
          .select("number1", "number2"))
    pandas_df = df.toPandas()
    run = _fit_keras_model(pandas_df, epochs=1)
    _assert_spark_data_logged(run, file_path, data_format)
    assert kiwi.active_run() is None

def test_fastai_autolog_persists_manually_created_run(iris_data, fit_variant):
    kiwi.fastai.autolog()

    with kiwi.start_run() as run:
        model = fastai_model(iris_data)

        if fit_variant == 'fit_one_cycle':
            model.fit_one_cycle(NUM_EPOCHS)
        else:
            model.fit(NUM_EPOCHS)

        assert kiwi.active_run()
        assert kiwi.active_run().info.run_id == run.info.run_id

def test_tf_keras_autolog_persists_manually_created_run(
        random_train_data, random_one_hot_labels, fit_variant):
    kiwi.tensorflow.autolog()

    with kiwi.start_run() as run:
        data = random_train_data
        labels = random_one_hot_labels

        model = create_tf_keras_model()
        model.fit(data, labels, epochs=10)

        assert kiwi.active_run()
        assert kiwi.active_run().info.run_id == run.info.run_id

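# `create_tf_keras_model` is assumed by the test above. A minimal sketch that
# would satisfy it, assuming four input features and three one-hot classes
# (both shapes are assumptions inferred from the fixture names):
def create_tf_keras_model():
    model = tf.keras.Sequential([
        tf.keras.layers.Dense(16, activation="relu", input_shape=(4,)),
        tf.keras.layers.Dense(3, activation="softmax"),
    ])
    model.compile(optimizer="adam", loss="categorical_crossentropy",
                  metrics=["accuracy"])
    return model
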
def test_log_and_load_model_persists_and_restores_model_successfully(saved_tf_iris_model):
    artifact_path = "model"
    with kiwi.start_run():
        kiwi.tensorflow.log_model(
            tf_saved_model_dir=saved_tf_iris_model.path,
            tf_meta_graph_tags=saved_tf_iris_model.meta_graph_tags,
            tf_signature_def_key=saved_tf_iris_model.signature_def_key,
            artifact_path=artifact_path)
        model_uri = "runs:/{run_id}/{artifact_path}".format(
            run_id=kiwi.active_run().info.run_id, artifact_path=artifact_path)

    tf_graph = tf.Graph()
    tf_sess = tf.Session(graph=tf_graph)
    with tf_graph.as_default():
        signature_def = kiwi.tensorflow.load_model(model_uri=model_uri, tf_sess=tf_sess)

        for _, input_signature in signature_def.inputs.items():
            t_input = tf_graph.get_tensor_by_name(input_signature.name)
            assert t_input is not None

        for _, output_signature in signature_def.outputs.items():
            t_output = tf_graph.get_tensor_by_name(output_signature.name)
            assert t_output is not None

def test_cli_build_image_with_runs_uri_calls_expected_azure_routines(sklearn_model):
    artifact_path = "model"
    with kiwi.start_run():
        kiwi.sklearn.log_model(sk_model=sklearn_model, artifact_path=artifact_path)
        run_id = kiwi.active_run().info.run_id
    model_uri = "runs:/{run_id}/{artifact_path}".format(
        run_id=run_id, artifact_path=artifact_path)

    with AzureMLMocks() as aml_mocks:
        result = CliRunner(env={
            "LC_ALL": "en_US.UTF-8",
            "LANG": "en_US.UTF-8"
        }).invoke(kiwi.azureml.cli.commands, [
            'build-image',
            '-m', model_uri,
            '-w', 'test_workspace',
            '-i', 'image_name',
            '-n', 'model_name',
        ])
        assert result.exit_code == 0

        assert aml_mocks["register_model"].call_count == 1
        assert aml_mocks["create_image"].call_count == 1
        assert aml_mocks["load_workspace"].call_count == 1

def test_build_image_includes_default_metadata_in_azure_image_and_model_tags(sklearn_model):
    artifact_path = "model"
    with kiwi.start_run():
        kiwi.sklearn.log_model(sk_model=sklearn_model, artifact_path=artifact_path)
        run_id = kiwi.active_run().info.run_id
    model_uri = "runs:/{run_id}/{artifact_path}".format(
        run_id=run_id, artifact_path=artifact_path)
    model_config = Model.load(
        os.path.join(_download_artifact_from_uri(artifact_uri=model_uri), "MLmodel"))

    with AzureMLMocks() as aml_mocks:
        workspace = get_azure_workspace()
        kiwi.azureml.build_image(model_uri=model_uri, workspace=workspace)

        register_model_call_args = aml_mocks["register_model"].call_args_list
        assert len(register_model_call_args) == 1
        _, register_model_call_kwargs = register_model_call_args[0]
        called_tags = register_model_call_kwargs["tags"]
        assert called_tags["model_uri"] == model_uri
        assert called_tags["python_version"] == \
            model_config.flavors[pyfunc.FLAVOR_NAME][pyfunc.PY_VERSION]

        create_image_call_args = aml_mocks["create_image"].call_args_list
        assert len(create_image_call_args) == 1
        _, create_image_call_kwargs = create_image_call_args[0]
        image_config = create_image_call_kwargs["image_config"]
        assert image_config.tags["model_uri"] == model_uri
        assert image_config.tags["python_version"] == \
            model_config.flavors[pyfunc.FLAVOR_NAME][pyfunc.PY_VERSION]

def test_model_log(h2o_iris_model):
    h2o_model = h2o_iris_model.model
    old_uri = kiwi.get_tracking_uri()
    # should_start_run tests whether or not calling log_model() automatically starts a run.
    for should_start_run in [False, True]:
        with TempDir(chdr=True, remove_on_exit=True):
            try:
                artifact_path = "gbm_model"
                kiwi.set_tracking_uri("test")
                if should_start_run:
                    kiwi.start_run()
                kiwi.h2o.log_model(h2o_model=h2o_model, artifact_path=artifact_path)
                model_uri = "runs:/{run_id}/{artifact_path}".format(
                    run_id=kiwi.active_run().info.run_id,
                    artifact_path=artifact_path)

                # Load model
                h2o_model_loaded = kiwi.h2o.load_model(model_uri=model_uri)
                assert all(
                    h2o_model_loaded.predict(h2o_iris_model.inference_data).as_data_frame() ==
                    h2o_model.predict(h2o_iris_model.inference_data).as_data_frame())
            finally:
                kiwi.end_run()
                kiwi.set_tracking_uri(old_uri)

def test_default_conda_env_strips_dev_suffix_from_pyspark_version(spark_model_iris, model_path):
    mock_version_standard = mock.PropertyMock(return_value="2.4.0")
    with mock.patch("pyspark.__version__", new_callable=mock_version_standard):
        default_conda_env_standard = sparkm.get_default_conda_env()

    for dev_version in ["2.4.0.dev0", "2.4.0.dev", "2.4.0.dev1", "2.4.0dev.a", "2.4.0.devb"]:
        mock_version_dev = mock.PropertyMock(return_value=dev_version)
        with mock.patch("pyspark.__version__", new_callable=mock_version_dev):
            default_conda_env_dev = sparkm.get_default_conda_env()
            assert default_conda_env_dev == default_conda_env_standard

            with kiwi.start_run():
                sparkm.log_model(
                    spark_model=spark_model_iris.model, artifact_path="model", conda_env=None)
                model_uri = "runs:/{run_id}/{artifact_path}".format(
                    run_id=kiwi.active_run().info.run_id, artifact_path="model")

            model_path = _download_artifact_from_uri(artifact_uri=model_uri)
            pyfunc_conf = _get_flavor_configuration(
                model_path=model_path, flavor_name=pyfunc.FLAVOR_NAME)
            conda_env_path = os.path.join(model_path, pyfunc_conf[pyfunc.ENV])
            with open(conda_env_path, "r") as f:
                persisted_conda_env_dev = yaml.safe_load(f)
            assert persisted_conda_env_dev == default_conda_env_standard

    for unaffected_version in ["2.0", "2.3.4", "2"]:
        mock_version = mock.PropertyMock(return_value=unaffected_version)
        with mock.patch("pyspark.__version__", new_callable=mock_version):
            assert unaffected_version in yaml.safe_dump(sparkm.get_default_conda_env())

def test_sparkml_estimator_model_log(tmpdir, spark_model_estimator):
    old_tracking_uri = kiwi.get_tracking_uri()
    cnt = 0
    # should_start_run tests whether or not calling log_model() automatically starts a run.
    for should_start_run in [False, True]:
        for dfs_tmp_dir in [None, os.path.join(str(tmpdir), "test")]:
            print("should_start_run =", should_start_run, "dfs_tmp_dir =", dfs_tmp_dir)
            try:
                tracking_dir = os.path.abspath(str(tmpdir.join("mlruns")))
                kiwi.set_tracking_uri("file://%s" % tracking_dir)
                if should_start_run:
                    kiwi.start_run()
                artifact_path = "model%d" % cnt
                cnt += 1
                sparkm.log_model(
                    artifact_path=artifact_path,
                    spark_model=spark_model_estimator.model,
                    dfs_tmpdir=dfs_tmp_dir)
                model_uri = "runs:/{run_id}/{artifact_path}".format(
                    run_id=kiwi.active_run().info.run_id,
                    artifact_path=artifact_path)

                # Test the reloaded model
                reloaded_model = sparkm.load_model(model_uri=model_uri, dfs_tmpdir=dfs_tmp_dir)
                preds_df = reloaded_model.transform(spark_model_estimator.spark_df)
                preds = [x.prediction for x in preds_df.select("prediction").collect()]
                assert spark_model_estimator.predictions == preds
            finally:
                kiwi.end_run()
                kiwi.set_tracking_uri(old_tracking_uri)
                x = dfs_tmp_dir or sparkm.DFS_TMP
                shutil.rmtree(x)
                shutil.rmtree(tracking_dir)

def test_autolog_ends_auto_created_run():
    kiwi.gluon.autolog()

    data = DataLoader(LogsDataset(), batch_size=128, last_batch="discard")

    model = HybridSequential()
    model.add(Dense(64, activation="relu"))
    model.add(Dense(64, activation="relu"))
    model.add(Dense(10))
    model.initialize()
    model.hybridize()
    trainer = Trainer(model.collect_params(), "adam",
                      optimizer_params={"learning_rate": .001, "epsilon": 1e-07})
    est = estimator.Estimator(net=model, loss=SoftmaxCrossEntropyLoss(),
                              metrics=Accuracy(), trainer=trainer)

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        est.fit(data, epochs=3)

    assert kiwi.active_run() is None

def test_log_fn_args_as_params(args, kwargs, expected, start_run):  # pylint: disable=W0613
    log_fn_args_as_params(dummy_fn, args, kwargs)
    client = kiwi.tracking.MlflowClient()
    params = client.get_run(kiwi.active_run().info.run_id).data.params
    for arg, value in zip(['arg1', 'arg2', 'arg3'], expected):
        assert arg in params
        assert params[arg] == value

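# Both `log_fn_args_as_params` tests pass a module-level `dummy_fn` that this
# excerpt does not show. A minimal sketch of the assumed target; only the
# parameter names matter to the assertions, and the defaults are assumptions:
def dummy_fn(arg1, arg2='value2', arg3='value3'):  # pylint: disable=W0613
    pass
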
def test_log_model_without_specified_conda_env_uses_default_env_with_expected_dependencies(
        sklearn_knn_model, tmpdir):
    sk_model_path = os.path.join(str(tmpdir), "knn.pkl")
    with open(sk_model_path, "wb") as f:
        pickle.dump(sklearn_knn_model, f)

    pyfunc_artifact_path = "pyfunc_model"
    with kiwi.start_run():
        kiwi.pyfunc.log_model(artifact_path=pyfunc_artifact_path,
                              data_path=sk_model_path,
                              loader_module=os.path.basename(__file__)[:-3],
                              code_path=[__file__])
        run_id = kiwi.active_run().info.run_id

    pyfunc_model_path = _download_artifact_from_uri(
        "runs:/{run_id}/{artifact_path}".format(
            run_id=run_id, artifact_path=pyfunc_artifact_path))

    pyfunc_conf = _get_flavor_configuration(
        model_path=pyfunc_model_path, flavor_name=kiwi.pyfunc.FLAVOR_NAME)
    conda_env_path = os.path.join(pyfunc_model_path, pyfunc_conf[kiwi.pyfunc.ENV])
    with open(conda_env_path, "r") as f:
        conda_env = yaml.safe_load(f)

    assert conda_env == kiwi.pyfunc.model.get_default_conda_env()

def test_log_model_persists_specified_conda_env_in_mlflow_model_directory(
        saved_tf_iris_model, tf_custom_env):
    artifact_path = "model"
    with kiwi.start_run():
        kiwi.tensorflow.log_model(
            tf_saved_model_dir=saved_tf_iris_model.path,
            tf_meta_graph_tags=saved_tf_iris_model.meta_graph_tags,
            tf_signature_def_key=saved_tf_iris_model.signature_def_key,
            artifact_path=artifact_path,
            conda_env=tf_custom_env)
        model_uri = "runs:/{run_id}/{artifact_path}".format(
            run_id=kiwi.active_run().info.run_id, artifact_path=artifact_path)

    model_path = _download_artifact_from_uri(artifact_uri=model_uri)
    pyfunc_conf = _get_flavor_configuration(model_path=model_path, flavor_name=pyfunc.FLAVOR_NAME)
    saved_conda_env_path = os.path.join(model_path, pyfunc_conf[pyfunc.ENV])
    assert os.path.exists(saved_conda_env_path)
    assert saved_conda_env_path != tf_custom_env

    with open(tf_custom_env, "r") as f:
        tf_custom_env_text = f.read()
    with open(saved_conda_env_path, "r") as f:
        saved_conda_env_text = f.read()
    assert saved_conda_env_text == tf_custom_env_text

def test_autologging_of_datasources_with_different_formats(spark_session, format_to_file_path):
    kiwi.spark.autolog()
    for data_format, file_path in format_to_file_path.items():
        base_df = (spark_session.read.format(data_format)
                   .option("header", "true")
                   .option("inferSchema", "true")
                   .load(file_path))
        base_df.createOrReplaceTempView("temptable")
        table_df0 = spark_session.table("temptable")
        table_df1 = spark_session.sql("SELECT number1, number2 from temptable LIMIT 5")
        dfs = [
            base_df, table_df0, table_df1,
            base_df.filter("number1 > 0"),
            base_df.select("number1"),
            base_df.limit(2),
            base_df.filter("number1 > 0").select("number1").limit(2)
        ]
        for df in dfs:
            with kiwi.start_run():
                run_id = kiwi.active_run().info.run_id
                df.collect()
                time.sleep(1)
            run = kiwi.get_run(run_id)
            _assert_spark_data_logged(run=run, path=file_path, data_format=data_format)

def test_log_model_persists_specified_conda_env_dict_in_mlflow_model_directory(
        sklearn_knn_model, tmpdir, pyfunc_custom_env_dict):
    sk_model_path = os.path.join(str(tmpdir), "knn.pkl")
    with open(sk_model_path, "wb") as f:
        pickle.dump(sklearn_knn_model, f)

    pyfunc_artifact_path = "pyfunc_model"
    with kiwi.start_run():
        kiwi.pyfunc.log_model(artifact_path=pyfunc_artifact_path,
                              data_path=sk_model_path,
                              loader_module=os.path.basename(__file__)[:-3],
                              code_path=[__file__],
                              conda_env=pyfunc_custom_env_dict)
        run_id = kiwi.active_run().info.run_id

    pyfunc_model_path = _download_artifact_from_uri(
        "runs:/{run_id}/{artifact_path}".format(
            run_id=run_id, artifact_path=pyfunc_artifact_path))

    pyfunc_conf = _get_flavor_configuration(
        model_path=pyfunc_model_path, flavor_name=kiwi.pyfunc.FLAVOR_NAME)
    saved_conda_env_path = os.path.join(pyfunc_model_path, pyfunc_conf[kiwi.pyfunc.ENV])
    assert os.path.exists(saved_conda_env_path)

    with open(saved_conda_env_path, "r") as f:
        saved_conda_env_parsed = yaml.safe_load(f)
    assert saved_conda_env_parsed == pyfunc_custom_env_dict

def test_log_fn_args_as_params_ignores_unwanted_parameters(start_run):  # pylint: disable=W0613
    args, kwargs, unlogged = ('arg1', {'arg2': 'value'}, ['arg1', 'arg2', 'arg3'])
    log_fn_args_as_params(dummy_fn, args, kwargs, unlogged)
    client = kiwi.tracking.MlflowClient()
    params = client.get_run(kiwi.active_run().info.run_id).data.params
    assert len(params.keys()) == 0

def test_delete_tag():
    """
    Confirm that the fluent-API `delete_tag` actually deletes tags.
    """
    kiwi.set_tag('a', 'b')
    run = MlflowClient().get_run(kiwi.active_run().info.run_id)
    print(run.info.run_id)
    assert 'a' in run.data.tags
    kiwi.delete_tag('a')
    run = MlflowClient().get_run(kiwi.active_run().info.run_id)
    assert 'a' not in run.data.tags
    # Deleting a tag twice, or deleting a tag that never existed, should raise.
    with pytest.raises(MlflowException):
        kiwi.delete_tag('a')
    with pytest.raises(MlflowException):
        kiwi.delete_tag('b')
    kiwi.end_run()

def test_fastai_autolog_ends_auto_created_run(iris_data, fit_variant):
    kiwi.fastai.autolog()
    model = fastai_model(iris_data)

    if fit_variant == 'fit_one_cycle':
        model.fit_one_cycle(1)
    else:
        model.fit(1)

    assert kiwi.active_run() is None

def test_tf_core_autolog_logs_scalars(tf_core_random_tensors):
    assert 'a' in tf_core_random_tensors.data.metrics
    assert tf_core_random_tensors.data.metrics['a'] == 3.0
    assert 'b' in tf_core_random_tensors.data.metrics
    assert tf_core_random_tensors.data.metrics['b'] == 4.0
    client = kiwi.tracking.MlflowClient()
    all_a = client.get_metric_history(tf_core_random_tensors.info.run_id, 'a')
    assert all((x.step - 1) % 4 == 0 for x in all_a)
    assert kiwi.active_run() is None

def test_log_model_calls_register_model(onnx_model, onnx_custom_env):
    import kiwi.onnx
    artifact_path = "model"
    # Patch register_model on the `kiwi` package so the assertion below
    # observes the mock.
    register_model_patch = mock.patch("kiwi.register_model")
    with kiwi.start_run(), register_model_patch:
        kiwi.onnx.log_model(onnx_model=onnx_model,
                            artifact_path=artifact_path,
                            conda_env=onnx_custom_env,
                            registered_model_name="AdsModel1")
        model_uri = "runs:/{run_id}/{artifact_path}".format(
            run_id=kiwi.active_run().info.run_id, artifact_path=artifact_path)
        kiwi.register_model.assert_called_once_with(model_uri, "AdsModel1")

def test_log_model_calls_register_model(model):
    artifact_path = "model"
    register_model_patch = mock.patch("kiwi.register_model")
    with kiwi.start_run(), register_model_patch:
        kiwi.keras.log_model(model, artifact_path=artifact_path,
                             registered_model_name="AdsModel1")
        model_uri = "runs:/{run_id}/{artifact_path}".format(
            run_id=kiwi.active_run().info.run_id, artifact_path=artifact_path)
        kiwi.register_model.assert_called_once_with(model_uri, "AdsModel1")

def test_start_deleted_run():
    run_id = None
    with kiwi.start_run() as active_run:
        run_id = active_run.info.run_id
    tracking.MlflowClient().delete_run(run_id)
    # `match` (not `matches`) is the pytest.raises keyword for checking the message.
    with pytest.raises(MlflowException, match='because it is in the deleted state.'):
        with kiwi.start_run(run_id=run_id):
            pass
    assert kiwi.active_run() is None

def _log_event(event):
    """
    Extracts metric information from the event protobuf.
    """
    if not kiwi.active_run():
        try_mlflow_log(kiwi.start_run)
        global _AUTOLOG_RUN_ID
        _AUTOLOG_RUN_ID = kiwi.active_run().info.run_id
    if event.WhichOneof('what') == 'summary':
        summary = event.summary
        for v in summary.value:
            if v.HasField('simple_value'):
                # Log scalar summaries only every _LOG_EVERY_N_STEPS steps;
                # steps are treated as 1-indexed, hence the `- 1`.
                if (event.step - 1) % _LOG_EVERY_N_STEPS == 0:
                    _thread_pool.submit(_add_to_queue, key=v.tag,
                                        value=v.simple_value, step=event.step,
                                        time=int(time.time() * 1000),
                                        run_id=kiwi.active_run().info.run_id)

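# `_log_event` hands metrics off to a background `_thread_pool` via an
# `_add_to_queue` helper that this excerpt does not show. A minimal sketch of
# such a helper, assuming a module-level buffer drained by a hypothetical
# `_flush_queue` routine (e.g. via `MlflowClient().log_batch`); the import
# location, names, and buffer size here are assumptions:
from kiwi.entities import Metric  # assumed location of the Metric entity

_metric_queue = []
_MAX_METRIC_QUEUE_SIZE = 500


def _add_to_queue(key, value, step, time, run_id):
    """Buffer one metric; flush once the buffer exceeds its size limit."""
    _metric_queue.append((run_id, Metric(key=key, value=value,
                                         timestamp=time, step=step)))
    if len(_metric_queue) > _MAX_METRIC_QUEUE_SIZE:
        _flush_queue()  # hypothetical drain into the tracking store
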
def pretrained_model():
    model_path = "model"
    with kiwi.start_run():
        X = np.array([-2, -1, 0, 1, 2, 1]).reshape(-1, 1)
        y = np.array([0, 0, 1, 1, 1, 0])
        lr = LogisticRegression(solver='lbfgs')
        lr.fit(X, y)
        kiwi.sklearn.log_model(lr, model_path)
        run_id = kiwi.active_run().info.run_id
        model_uri = "runs:/" + run_id + "/" + model_path
        return TrainedModel(model_path, run_id, model_uri)

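# `pretrained_model` returns a `TrainedModel` record defined elsewhere. A
# minimal sketch of the assumed container, with field names inferred from the
# positional construction above:
from collections import namedtuple

TrainedModel = namedtuple("TrainedModel", ["model_path", "run_id", "model_uri"])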