Example #1
    def test_list_model(self, created_entities):
        client = Client()
        model1 = client.get_or_create_registered_model()
        created_entities.append(model1)
        label = model1._msg.name + "label1"
        model1.add_label(label)
        model1.add_label("label2")
        model2 = client.get_or_create_registered_model()
        created_entities.append(model2)
        model = client.get_or_create_registered_model()
        created_entities.append(model)
        model.add_label(label)
        runner = CliRunner()
        result = runner.invoke(
            cli,
            [
                'registry', 'list', 'registeredmodel', '--filter',
                "labels == \"{}\"".format(label)
            ],
        )

        assert not result.exception
        assert model1.name in result.output
        assert model.name in result.output

        result = runner.invoke(
            cli,
            ['registry', 'list', 'registeredmodel', "--output=json"],
        )

        assert not result.exception
        assert model1.name in result.output
        assert model.name in result.output
        assert model2.name in result.output
Example #2
    def test_list_version(self, created_entities):
        client = Client()
        runner = CliRunner()

        model1 = client.get_or_create_registered_model()
        created_entities.append(model1)
        version1_name = "version1"
        version2_name = "version2"
        model1.get_or_create_version(version1_name)
        version2 = model1.get_or_create_version(version2_name)
        label = model1._msg.name + "label1"
        result = runner.invoke(
            cli,
            ['registry', 'list', 'registeredmodelversion', model1._msg.name])

        assert not result.exception
        assert version1_name in result.output
        assert version2_name in result.output

        version2.add_label(label)
        model2 = client.get_or_create_registered_model()
        created_entities.append(model2)
        version2_1_name = "version2_1"
        version2_2_name = "version2_2"
        version21 = model2.get_or_create_version(version2_1_name)
        version21.add_label(label)
        model2.get_or_create_version(version2_2_name)
        result = runner.invoke(cli, [
            'registry', 'list', 'registeredmodelversion', '--filter',
            "labels == \"{}\"".format(label), "--output=json"
        ])

        assert not result.exception
        assert version2_1_name in result.output
        assert version2_name in result.output
Example #3
def downloadArtifact(proj, exp_name, exp_run, serialization):
    client = Client("http://localhost:3000")
    proj = client.set_project(proj)
    expt = client.set_experiment(exp_name)
    run = client.set_experiment_run(exp_run)
    if serialization.lower() == 'pickle':
        run.download_model('model.pkl')
Example #4
def get_modeldb_client(
        experiment_name: str
) -> "verta._tracking.experimentrun.ModelDBExperiment":
    modeldb_job = f"{get_project_name()}-modeldb-frontend"
    my_user = get_neuro_user()
    cluster = get_neuro_cluster()
    uri = f"http://{modeldb_job}--{my_user}.platform-jobs:3000"
    print(f"Connecting to ModelDB client {uri}")
    client = Client(uri)
    exp = client.set_experiment(experiment_name)

    return client
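A hypothetical usage sketch (not part of the original snippet) for the helper above; the experiment name and metric are illustrative only.

# Hypothetical usage sketch; names and values are illustrative.
client = get_modeldb_client("my-experiment")  # hypothetical experiment name
run = client.set_experiment_run()             # create a run under the experiment set above
run.log_metric("accuracy", 0.9)               # standard verta ExperimentRun logging call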
Example #5
def client(host, port, email, dev_key):
    client = Client(host, port, email, dev_key, debug=True)

    yield client

    if client.proj is not None:
        utils.delete_project(client.proj.id, client._conn)
Example #6
def client(host, port, email, dev_key, created_entities):
    client = Client(host, port, email, dev_key, debug=True)

    yield client

    proj = client._ctx.proj
    if proj is not None and proj.id not in {entity.id for entity in created_entities}:
        proj.delete()
Example #7
def client_3(host, port, email_3, dev_key_3, created_entities):
    """For collaboration tests."""
    if not (email_3 and dev_key_3):
        pytest.skip("second account credentials not present")

    client = Client(host, port, email_3, dev_key_3, debug=True)

    return client
Example #8
    def test_list_endpoint(self, created_endpoints):
        client = Client()
        path = _utils.generate_default_name()
        path2 = _utils.generate_default_name()
        endpoint1 = client.get_or_create_endpoint(path)
        endpoint2 = client.get_or_create_endpoint(path2)
        created_endpoints.append(endpoint1)
        created_endpoints.append(endpoint2)
        runner = CliRunner()
        result = runner.invoke(
            cli,
            ['deployment', 'list', 'endpoint'],
        )

        assert not result.exception
        assert path in result.output
        assert path2 in result.output
Example #9
def client(host, port, email, dev_key):
    print("[TEST LOG] test setup begun {} UTC".format(
        datetime.datetime.utcnow()))
    client = Client(host, port, email, dev_key, debug=True)

    yield client

    if client.proj is not None:
        utils.delete_project(client.proj.id, client._conn)
    print("[TEST LOG] test teardown completed {} UTC".format(
        datetime.datetime.utcnow()))
Example #10
def download_docker_context(run_id, model_version_id, output):
    """Download a registered model version's or experiment run's context entry.
    """
    client = Client()

    if run_id:
        try:
            entity = client.get_experiment_run(id=run_id)
        except ValueError:
            raise click.BadParameter(
                "experiment run {} not found".format(run_id))
    elif model_version_id:
        try:
            entity = client.get_registered_model_version(id=model_version_id)
        except ValueError:
            raise click.BadParameter(
                "version {} not found".format(model_version_id))
    else:
        raise click.BadParameter(
            "--run-id or --model-version-id should be specified")
    entity.download_docker_context(output)
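A minimal sketch of the same flow through the client API directly, assuming a hypothetical experiment run ID and output path.

# Minimal sketch of what the command above wraps; the ID and path are placeholders.
client = Client()
run = client.get_experiment_run(id="<experiment-run-id>")  # hypothetical run ID
run.download_docker_context("docker_context.tgz")          # hypothetical output filename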
Example #11
    def test_create_workspace_config(self, client, organization, in_tempdir,
                                     created_entities):
        model_name = _utils.generate_default_name()
        version_name = _utils.generate_default_name()

        client_config = {"workspace": organization.name}

        filepath = "verta_config.json"
        with open(filepath, "w") as f:
            json.dump(client_config, f)

        runner = CliRunner()
        runner.invoke(
            cli,
            ['registry', 'create', 'registeredmodel', model_name],
        )

        client = Client()
        model = client.get_registered_model(model_name)
        created_entities.append(model)
        assert model.workspace == organization.name
Example #12
    def test_list_endpoint(self, organization, created_entities):
        client = Client()
        path = _utils.generate_default_name()
        path2 = _utils.generate_default_name()
        endpoint1 = client.get_or_create_endpoint(path,
                                                  workspace=organization.name)
        endpoint2 = client.get_or_create_endpoint(path2,
                                                  workspace=organization.name)
        created_entities.append(endpoint1)
        created_entities.append(endpoint2)
        runner = CliRunner()
        result = runner.invoke(
            cli,
            [
                'deployment', 'list', 'endpoint', '--workspace',
                organization.name
            ],
        )

        assert not result.exception
        assert path in result.output
        assert path2 in result.output
Example #13
def client(host, port, email, dev_key):
    print("[TEST LOG] test setup begun {} UTC".format(
        datetime.datetime.utcnow()))
    client = Client(host, port, email, dev_key, debug=True)

    yield client

    proj = client._ctx.proj
    if proj is not None:
        proj.delete()

    print("[TEST LOG] test teardown completed {} UTC".format(
        datetime.datetime.utcnow()))
Example #14
def client_2(host, port, email_2, dev_key_2):
    """For collaboration tests."""
    if not (email_2 and dev_key_2):
        pytest.skip("second account credentials not present")
    print("[TEST LOG] test setup begun {} UTC".format(
        datetime.datetime.utcnow()))

    client = Client(host, port, email_2, dev_key_2, debug=True)

    yield client

    print("[TEST LOG] test teardown completed {} UTC".format(
        datetime.datetime.utcnow()))
Example #15
    def test_create_workspace_config(self, client, organization, in_tempdir,
                                     created_endpoints):
        client_config = {"workspace": organization.name}

        filepath = "verta_config.json"
        with open(filepath, "w") as f:
            json.dump(client_config, f)

        endpoint_name = _utils.generate_default_name()

        runner = CliRunner()
        result = runner.invoke(
            cli,
            ['deployment', 'create', 'endpoint', endpoint_name],
        )

        assert not result.exception

        client = Client()
        endpoint = client.get_endpoint(endpoint_name)
        assert endpoint.workspace == organization.name

        created_endpoints.append(endpoint)
Example #16
def client(host, port, email, dev_key):
    print("[TEST LOG] test setup begun {} UTC".format(
        datetime.datetime.utcnow()))
    client = Client(host, port, email, dev_key, debug=True)

    yield client

    proj = client._ctx.proj
    if proj is not None:
        utils.delete_project(proj.id, client._conn)

    model = client._ctx.registered_model
    if model is not None:
        utils.delete_registered_model(model.id, client._conn)

    print("[TEST LOG] test teardown completed {} UTC".format(
        datetime.datetime.utcnow()))
Example #17
def https_client(host, email, dev_key, created_entities):
    """A Client that is guaranteed to be using HTTPS for its connection.

    Our test suite uses HTTP by default to make faster intra-cluster requests.

    """
    https_verta_url = os.environ.get(constants.HTTPS_VERTA_URL_ENV_VAR)
    if not https_verta_url and ".verta.ai" in host and not host.startswith("http://"):
        https_verta_url = host
    if not https_verta_url:
        pytest.skip("no HTTPS Verta URL available")

    client = Client(https_verta_url, email=email, dev_key=dev_key, debug=True)

    yield client

    proj = client._ctx.proj
    if proj is not None and proj.id not in {entity.id for entity in created_entities}:
        proj.delete()
Example #18
def get_clients():
    """Return test account clients using available env var credentials.

    Returns
    -------
    list of :class:`~verta.Client`

    """
    credentials = [
        (constants.EMAIL, constants.DEV_KEY),
        (constants.EMAIL_2, constants.DEV_KEY_2),
        (constants.EMAIL_3, constants.DEV_KEY_3),
    ]

    clients = []
    for email, dev_key in credentials:
        if email and dev_key:
            clients.append(
                Client(constants.HOST, email=email, dev_key=dev_key)
            )

    return clients
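A hypothetical usage sketch: run the same setup against every test account returned by the helper above; the project name is illustrative.

# Hypothetical usage sketch for get_clients(); the project name is illustrative.
for client in get_clients():
    proj = client.set_project("collaboration-smoke-test")
    print(proj.name)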
Example #19
def get_client(url):
    return Client(url)
Example #20
from verta import Client

client = Client('https://dev.verta.ai')
client.set_project('Demo - Jenkins+Prometheus')
client.set_experiment('Demo')
run = client.set_experiment_run()


class Predictor(object):
    def __init__(self):
        pass

    def predict(self, X):
        return X


run.log_model(Predictor())
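As a hedged follow-up (not part of the original snippet), the logged Predictor can later be fetched back from the run and called; Example #25 below uses the same get_model call.

# Hypothetical follow-up: retrieve the logged model from the run and invoke it.
model = run.get_model()
print(model.predict([1, 2, 3]))  # the pass-through Predictor returns its input unchanged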
Example #21
from nntoolbox.utils import get_device
from nntoolbox.callbacks import *
from nntoolbox.metrics import Accuracy, Loss
from nntoolbox.losses import SmoothedCrossEntropy

from verta import Client
from verta.client import ExperimentRun
from experii.verta import ModelDBCB
from experii.ax import AxTuner

torch.backends.cudnn.benchmark = True
EXPERIMENT_NAME = "Hyperparameter Tuning"

# Set up ModelDB:
client = Client(
    CLIENT_PARA
)  # supply your own ModelDB client parameters here (see VertaAI's notebooks)
proj = client.set_project("My second ModelDB project")
exp = client.set_experiment(EXPERIMENT_NAME)


# Define model generating function:
def model_fn(parameterization: Dict[str, Any]) -> nn.Module:
    model = Sequential(
        ConvolutionalLayer(in_channels=3,
                           out_channels=16,
                           kernel_size=3,
                           activation=nn.ReLU),
        ResidualBlockPreActivation(in_channels=16, activation=nn.ReLU),
        ConvolutionalLayer(in_channels=16,
                           out_channels=32,
Example #22
from verta import Client
import cloudpickle
import os

verta_host = os.environ['VERTA_HOST']
run_id = os.environ['RUN_ID']

client = Client(verta_host)
run = client.set_experiment_run(id=run_id)
commit, _ = run.get_commit()

# retrieve model
model = run.get_model()
with open('model.pkl', 'wb') as f:
    cloudpickle.dump(model, f)

# retrieve Python version
env_ver = commit.get("env/python")
python_ver = '.'.join(
    map(str, [
        env_ver._msg.python.version.major,
        env_ver._msg.python.version.minor,
    ]))
with open("Dockerfile", 'r+') as f:
    contents = [line.strip() for line in f.readlines()]
    contents[0] = "FROM python:{}".format(python_ver)  # overwrite the base-image line
    f.seek(0)
    f.write('\n'.join(contents))

# retrieve Python package version pins
requirements = '\n'.join([
Example #23
import verta
from verta import Client
from verta.utils import ModelAPI

HOST = "http://localhost:3009"
PROJECT_NAME = "readmission_shared_data_preprocess_v0"
EXPERIMENT_NAME = "readmission_shared_data_preprocess_v0_first_run"

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Template Main Function')
    parser.add_argument('--input', type=str, help='input folder')
    parser.add_argument('--output', type=str, help='output folder')
    parser.add_argument('--vis', type=str, help='vis folder')
    args = parser.parse_args()

    client = Client(HOST)
    proj = client.set_project(PROJECT_NAME)
    expt = client.set_experiment(EXPERIMENT_NAME)
    run = client.set_experiment_run()

    cmd = "readmission_shared_data_preprocess_trainvalidationtest.sh"  # modify
    lib_param = {}
    lib_param["--input"] = args.input
    lib_param["--output"] = args.output
    lib_param["--vis"] = args.vis

    for k, v in lib_param.items():
        cmd = cmd + " " + str(k) + " " + str(v)

    cmd = "bash " + cmd
    print("executing cmd: \n", cmd)
Example #24
import os
import random
import multiprocessing

from verta import Client
from verta.utils import ModelAPI

os.environ['VERTA_EMAIL'] = '*****@*****.**'
os.environ['VERTA_DEV_KEY'] = '3e078522-e479-4cd2-b78c-04ffcacae3f4'

HOST = "dev.verta.ai"
EXPERIMENT_NAME = "Scaling"

client = Client(HOST)
proj = client.set_project('Scaling Test 100 jobs of 500k models')
expt = client.set_experiment(EXPERIMENT_NAME)

# Hyperparam random choice of values
c_list = [0.0001, 0.0002, 0.0004]
solver_list = ['lgfgs', 'grad']
max_iter_list = [7, 15, 28]

# results into 30 metric or hyp keys
paramKeyLimit = 10


def getMetrics(key_limit):
    metric_obj = {}
    for i in range(key_limit):
        metric_obj['val_acc' + str(i)] = random.uniform(0.5, 0.9)
        metric_obj['loss' + str(i)] = random.uniform(0.6, 0.8)
Example #25
from verta import Client
import cloudpickle

client = Client('https://dev.verta.ai')
proj = client.set_project('Demo - Jenkins+Prometheus')
run = proj.expt_runs[0]

model = run.get_model()
with open('model.pkl', 'wb') as f:
    cloudpickle.dump(model, f)
Example #26
from verta import Client
client = Client("http://localhost:3000")


def uploadSerializedObject(proj, exp_name, exp_run, objName, serialization,
                           library):
    proj = client.set_project(proj)
    expt = client.set_experiment(exp_name)
    run = client.set_experiment_run(exp_run)
    run.log_model('./' + objName, overwrite=True)
    print("Username:"******"Experiment:", exp_name)
    print("Experiment Run:", exp_run)
    print("Serialization:", serialization)
    print("Library:", library)


uploadSerializedObject("MajorII", "CovidPredictor", "Version 1", "model.pkl",
                       "pickle", "pytorch")
Example #27
def _create_client():
    return Client(host, email=email, dev_key=dev_key, debug=True)