Example #1
import datetime as dt
from unittest.mock import patch

from databricks_cli.sdk import JobsService
from mlflow import ActiveRun
from mlflow.entities import Experiment
from mlflow.entities.run import Run, RunInfo, RunData
from requests import HTTPError

from dbx.commands.configure import configure
from dbx.commands.deploy import deploy, _update_job  # noqa
from dbx.utils.common import write_json, DEFAULT_DEPLOYMENT_FILE_PATH
from .utils import DbxTest, invoke_cli_runner, test_dbx_config

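# Static MLflow run objects reused as mock fixtures in the tests below.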
run_info = RunInfo(
    run_uuid="1",
    experiment_id="1",
    user_id="dbx",
    status="STATUS",
    start_time=dt.datetime.now(),
    end_time=dt.datetime.now(),
    lifecycle_stage="STAGE",
    artifact_uri="dbfs:/Shared/dbx-testing",
)
run_data = RunData()
run_mock = ActiveRun(Run(run_info, run_data))


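# The @patch decorators stub out the Databricks DBFS API and the CLI profile lookup,
# so the deploy test runs entirely against mocks.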
class DeployTest(DbxTest):
    @patch("databricks_cli.sdk.service.DbfsService.get_status", return_value=None)
    @patch(
        "databricks_cli.configure.provider.ProfileConfigProvider.get_config",
        return_value=test_dbx_config,
    )
    @patch(
Example #2
import base64
import datetime as dt
import json

import pandas as pd
from mlflow import ActiveRun
from mlflow.entities import Experiment
from mlflow.entities.run import Run, RunInfo, RunData

from dbx.commands.configure import configure
from dbx.commands.deploy import deploy
from dbx.commands.launch import launch
from dbx.utils.common import write_json, DEFAULT_DEPLOYMENT_FILE_PATH
from .utils import DbxTest, invoke_cli_runner, test_dbx_config

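# Same mock run fixture as in Example #1, here without an artifact_uri.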
run_info = RunInfo(
    run_uuid="1",
    experiment_id="1",
    user_id="dbx",
    status="STATUS",
    start_time=dt.datetime.now(),
    end_time=dt.datetime.now(),
    lifecycle_stage="STAGE",
)
run_data = RunData()
run_mock = ActiveRun(Run(run_info, run_data))

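# Base64-encoded JSON payloads shaped like DBFS read responses, used as mock API data.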
DEFAULT_DATA_MOCK = {
    "data": base64.b64encode(json.dumps({"sample": "1"}).encode("utf-8"))
}
RUN_SUBMIT_DATA_MOCK = {
    "data":
    base64.b64encode(