def downloadArtifact(proj, exp_name, exp_run, serialization):
    """Fetch the model artifact for one experiment run from a local Verta backend.

    Resolves the project -> experiment -> run hierarchy on a client pointed
    at ``http://localhost:3000`` and saves the run's model to ``model.pkl``.
    Only ``serialization == 'pickle'`` (case-insensitive) is handled; any
    other value is a silent no-op.
    """
    client = Client("http://localhost:3000")
    project = client.set_project(proj)
    experiment = client.set_experiment(exp_name)
    run = client.set_experiment_run(exp_run)
    # Download only when the caller asked for pickle serialization.
    if serialization.lower() == 'pickle':
        run.download_model('model.pkl')
def get_modeldb_client(
    experiment_name: str,
) -> "verta._tracking.experimentrun.ModelDBExperiment":
    """Connect to this project's in-cluster ModelDB frontend and select an experiment.

    Builds the platform-jobs URI for the ``<project>-modeldb-frontend`` job
    owned by the current Neuro user, connects a Verta client to it, sets
    *experiment_name* on the client (a stateful side effect), and returns
    the connected client.

    Args:
        experiment_name: Name of the experiment to select on the client.

    Returns:
        The connected client with the experiment already selected.
    """
    modeldb_job = f"{get_project_name()}-modeldb-frontend"
    my_user = get_neuro_user()
    # NOTE(review): result unused — confirm get_neuro_cluster() has no
    # required side effect before removing this call entirely.
    cluster = get_neuro_cluster()
    uri = f"http://{modeldb_job}--{my_user}.platform-jobs:3000"
    print(f"Connecting to ModelDB client {uri}")
    # BUG FIX: original called undefined ``ModelDBlient`` (typo), which would
    # raise NameError. The Verta client class used elsewhere in this file is
    # ``Client`` — presumably the intended name; confirm against imports.
    client = Client(uri)
    client.set_experiment(experiment_name)  # select experiment as a side effect
    return client
import os
import random
import multiprocessing
from verta import Client
from verta.utils import ModelAPI

# NOTE(review): credentials hard-coded in source — move the dev key to a
# secret store / environment injection rather than committing it.
os.environ['VERTA_EMAIL'] = '*****@*****.**'
os.environ['VERTA_DEV_KEY'] = '3e078522-e479-4cd2-b78c-04ffcacae3f4'

HOST = "dev.verta.ai"
EXPERIMENT_NAME = "Scaling"

client = Client(HOST)
proj = client.set_project('Scaling Test 100 jobs of 500k models')
expt = client.set_experiment(EXPERIMENT_NAME)

# Hyperparam random choice of values
c_list = [0.0001, 0.0002, 0.0004]
# NOTE(review): 'lgfgs' looks like a typo for 'lbfgs' — confirm before changing,
# since the string is recorded verbatim as a hyperparameter value.
solver_list = ['lgfgs', 'grad']
max_iter_list = [7, 15, 28]
# results into 30 metric or hyp keys
paramKeyLimit = 10


def getMetrics(key_limit):
    """Return a dict of 2 * key_limit random metric values.

    Keys are ``val_acc0..val_acc{key_limit-1}`` drawn uniformly from
    [0.5, 0.9] and ``loss0..loss{key_limit-1}`` drawn uniformly from
    [0.6, 0.8].

    Args:
        key_limit: Number of (val_acc, loss) key pairs to generate.

    Returns:
        dict mapping metric-key strings to random float values.
    """
    metric_obj = {}
    for i in range(key_limit):
        metric_obj['val_acc' + str(i)] = random.uniform(0.5, 0.9)
        metric_obj['loss' + str(i)] = random.uniform(0.6, 0.8)
    # BUG FIX: original built the dict but never returned it, so callers
    # always received None.
    return metric_obj
from verta import Client

# Wire up the demo project/experiment hierarchy on the Verta dev host.
client = Client('https://dev.verta.ai')
client.set_project('Demo - Jenkins+Prometheus')
client.set_experiment('Demo')
run = client.set_experiment_run()


class Predictor(object):
    """Trivial identity model, logged to Verta for demo purposes."""

    def __init__(self):
        pass

    def predict(self, X):
        # Identity predictor: echo the input back unchanged.
        return X


run.log_model(Predictor())