Example #1
File: model_run.py Project: geffy/ebonite
def main():
    #  create remote ebonite client from saved configuration
    ebnt = Ebonite.from_config_file('client_config.json')
    model = ebnt.get_model('add_one_model', 'my_task', 'my_project')

    # run flask service with this model
    run_test_model_server(model)
Example #2
def main():
    #  create remote ebonite client. This client stores metadata in postgres and artifacts in s3
    ebnt = Ebonite.custom_client(
        'sqlalchemy',
        's3',
        meta_kwargs={
            'db_uri': 'postgresql://*****:*****@localhost:5435/ebonite'
        },
        artifact_kwargs={
            'endpoint': 'http://localhost:8008',
            'bucket_name': 'ebonite'
        })
    # save client configuration for later use
    ebnt.save_client_config('client_config.json')
    #  obtain Task
    task = ebnt.get_or_create_task('my_project', 'my_task')

    #  remove model if it exists (for demo purposes)
    if task.models.contains('add_one_model'):
        model = task.models('add_one_model')
        task.delete_model(model)

    #  create model from function add_one and numpy array as data sample
    model = ebonite.create_model(add_one, np.array([0]), 'add_one_model')

    #  persist model
    task.push_model(model)
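Note: the snippet above uses add_one and np without showing where they come from. A minimal sketch of the missing pieces, assuming add_one is a plain Python function that increments its numpy input (the function body here is a guess, not the original project's code):

import numpy as np

import ebonite
from ebonite import Ebonite


def add_one(data):
    # assumed model function: adds 1 to every element of the input array
    return data + 1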
Example #3
def image_to_delete(ebnt: Ebonite, model, mock_env):
    # fixture-style helper: creates an image in the metadata repository and yields it for the test
    task = ebnt.get_or_create_task('Project', 'Task')
    ebnt.meta_repo.create_environment(mock_env)
    image = Image('image', BuildableMock(), id=None, task_id=task.id, params=Image.Params())
    image.environment = mock_env
    image.params.name = 'image'
    image.bind_builder(mock_env.params.get_builder())
    image = ebnt.meta_repo.create_image(image)
    yield image
Example #4
def main():
    #  create remote ebonite client from saved configuration
    os.environ['S3_ACCESS_KEY'] = 'eboniteAccessKey'
    os.environ['S3_SECRET_KEY'] = 'eboniteSecretKey'
    ebnt = Ebonite.from_config_file('client_config.json')
    model = ebnt.get_model('add_one_model', 'my_task', 'my_project')

    # run flask service with this model
    run_model_server(model)
Example #5
def main():
    #  obtain trained model and data sample
    reg, data = train_model()

    ebnt = Ebonite.inmemory()
    #  this changes the docker image builder behaviour so that ebonite is taken from the local installation instead of pip
    with use_local_installation():
        ebnt.create_instance_from_model('my_model', reg, data, task_name='my_task',
                                        instance_name='magic-one-line-ebnt-service', run_instance=True, detach=False)
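train_model in the example above is a user-defined helper that is not shown. Assuming it returns a fitted estimator together with a representative data sample, a hypothetical stand-in could look like this:

import pandas as pd
from sklearn.linear_model import LinearRegression


def train_model():
    # assumed helper: fit a toy regression and return it with a data sample
    data = pd.DataFrame({'x': [1.0, 2.0, 3.0], 'y': [2.0, 4.0, 6.0]})
    reg = LinearRegression().fit(data[['x']], data['y'])
    return reg, data[['x']]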
Example #6
def main():
    #  create local ebonite client. This client stores metadata and artifacts on local fs.
    ebnt = Ebonite.local()

    model = ebnt.get_model(project='my_project',
                           task='regression_is_my_profession',
                           model_name='mymodel')

    # run flask service with this model
    run_model_server(model)
Example #7
def main():
    #  create local ebonite client. This client stores metadata and artifacts on local fs.
    #  clear=True means it will erase previous data (this is for demo purposes)
    ebnt = Ebonite.local(clear=True)

    #  create sample data
    data = pd.DataFrame([{'value': 1}])
    #  create model with name 'custom_code_model' from function 'run_my_model' and pandas data sample
    #  and push this model to the repository
    ebnt.create_model('custom_code_model', run_my_model, data,
                      project_name='custom_code_project', task_name='custom_code_task')
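run_my_model is the user's own code and is not part of the snippet; any callable that accepts the pandas data sample will do. A hypothetical version:

import pandas as pd


def run_my_model(data: pd.DataFrame) -> pd.DataFrame:
    # assumed custom-code model: doubles the 'value' column
    return data.assign(value=data['value'] * 2)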
Example #8
def main():
    #  create local ebonite client. This client stores metadata and artifacts on local fs.
    ebnt = Ebonite.local()

    task = ebnt.get_or_create_task('my_project', 'regression_is_my_profession')
    model = task.models('mymodel')

    #  this changes the docker image builder behaviour so that ebonite is taken from the local installation instead of pip
    #  1. for development purposes 2. we don't have ebonite on pip yet
    with use_local_installation():
        #  build docker image from model and run it
        ebnt.build_and_run_service("sklearn_model_service", model, detach=False, force_overwrite=True)
Example #9
def main():
    #  create local ebonite client. This client stores metadata and artifacts on local fs.
    ebnt = Ebonite.local()

    task = ebnt.get_or_create_task('my_project', 'regression_is_my_profession')
    model = task.models('mymodel')

    #  build docker image from model and run it
    ebnt.build_and_run_instance("sklearn_model_service",
                                model,
                                runner_kwargs={'detach': False},
                                builder_kwargs={'force_overwrite': True})
Example #10
def main():
    #  create local ebonite client. This client stores metadata and artifacts on local fs.
    #  clear=True means it will erase previous data (this is for demo purposes)
    ebnt = Ebonite.local(clear=True)

    #  create a Task, a container for models
    task = ebnt.get_or_create_task('custom_code_project', 'custom_code_task')

    #  create sample data
    data = pd.DataFrame([{'value': 1}])
    #  create model with name 'custom_code_model' from function 'run_my_model' and pandas data sample
    #  and push this model to the repository
    task.create_and_push_model(run_my_model, data, 'custom_code_model')
Example #11
def main():
    ebnt = Ebonite.local(clear=True)
    task = ebnt.get_or_create_task('local_deployment', 'local_deployment')

    model = create_model(model_function, 0, model_name='dummy_function')
    task.add_model(model)

    image = ebnt.build_image('dummy_image', model, force_overwrite=True)

    instance = ebnt.create_instance('dummy_service', image)
    instance.run()
    for log in instance.logs(stream=True):
        try:
            print(log, end='')
        except KeyboardInterrupt:  # FIXME: does not work because we're stuck in the generator
            break

    ebnt.stop_instance(instance)
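model_function here is again a user-supplied callable, and the data sample 0 only needs to be representative of the input type it expects. A hypothetical stand-in:

def model_function(x: int) -> int:
    # assumed dummy model: increments its integer input
    return x + 1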