def docker_builder_remote_registry():
    """Yield a DockerBuilder wired to a throwaway `registry:latest` container.

    The registry container exposes REGISTRY_PORT on a random host port; the
    builder's image is configured with a RemoteDockerRegistry pointing at it.
    """
    registry_container = DockerContainer('registry:latest').with_exposed_ports(REGISTRY_PORT)
    with use_local_installation(), registry_container as running_registry:
        registry_host = f'localhost:{running_registry.get_exposed_port(REGISTRY_PORT)}'
        image = DockerImage(IMAGE_NAME, registry=RemoteDockerRegistry(registry_host))
        yield DockerBuilder(ProviderMock(), image)
def docker_builder_remote_registry():
    """Yield a DockerBuilder targeting a registry container bound to a fixed port.

    Unlike the exposed-port variant, the registry is published on
    REGISTRY_PORT itself, so the well-known REGISTRY_HOST can be used.
    """
    registry_container = DockerContainer('registry:latest').with_bind_ports(REGISTRY_PORT, REGISTRY_PORT)
    with use_local_installation(), registry_container:
        image = DockerImage(IMAGE_NAME, registry=RemoteDockerRegistry(REGISTRY_HOST))
        yield DockerBuilder(ProviderMock(), image)
def test_create_service_from_model(service_name):
    """End-to-end: train a model, build + start a service for it, and check the container is up."""
    model_object, sample_input = train_model()
    with use_local_installation():
        create_service_from_model(model_name='test_model',
                                  model_object=model_object,
                                  model_input=sample_input,
                                  service_name=service_name,
                                  run_service=True)
    assert is_container_running(service_name)
def main():
    """Demo: serve a freshly trained model with a single in-memory client call."""
    # obtain trained model and data sample
    model_object, sample_input = train_model()

    client = Ebonite.inmemory()

    # this changes docker image builder behaviour to get ebonite
    # from local installation instead of pip
    with use_local_installation():
        client.create_instance_from_model('my_model', model_object, sample_input,
                                          task_name='my_task',
                                          instance_name='magic-one-line-ebnt-service',
                                          run_instance=True, detach=False)
def main():
    """Demo: build and run a docker service for a previously saved sklearn model."""
    # create local ebonite client. This client stores metadata and artifacts on local fs.
    client = Ebonite.local()

    task = client.get_or_create_task('my_project', 'regression_is_my_profession')
    model = task.models('mymodel')

    # this changes docker image builder behaviour to get ebonite from local
    # installation instead of pip
    # 1. for developing reasons 2. we dont have ebonite on pip yet
    with use_local_installation():
        # build docker image from model and run it
        client.build_and_run_service("sklearn_model_service", model,
                                     detach=False, force_overwrite=True)
def run_model(name):
    """This function creates a model using myext extension"""
    client = ebonite.Ebonite.local(clear=True)
    task = client.get_or_create_task('project', 'task')
    model = task.create_and_push_model('ahaha', 1, 'model')
    with use_local_installation():
        build_model_docker(name, model)
        run_docker_img(name, name)
def test_python_builder__distr_loadable(tmpdir, python_builder, created_model, pandas_data, request):
    """Write a python distribution and verify a model loaded back from it predicts identically."""
    python_builder: PythonBuilder = request.getfixturevalue(python_builder)
    expected = created_model.wrapper.call_method('predict', pandas_data)
    with use_local_installation():
        python_builder._write_distribution(tmpdir)
    interface = _load(ModelLoader(), tmpdir)
    actual = interface.execute('predict', {'vector': pandas_data})
    np.testing.assert_almost_equal(expected, actual)
def test_python_build_context__distr_contents_local(tmpdir, python_build_context_mock):
    """A distribution written with a local installation must bundle the ebonite
    sources and list both setup.py and builder requirements."""
    with use_local_installation():
        python_build_context_mock._write_distribution(tmpdir)

    _check_basic_distr_contents(tmpdir)
    # local installation mode copies the ebonite package itself into the distribution
    assert os.path.isdir(os.path.join(tmpdir, 'ebonite'))

    from setup import setup_args
    expected_reqs = {*setup_args['install_requires'],
                     *_get_builder_requirements(python_build_context_mock)}
    _check_requirements(tmpdir, expected_reqs)
def main():
    """Demo: turn a freshly trained model into a running service in one call."""
    # obtain trained model and data sample
    model_object, sample_input = train_model()

    # this changes docker image builder behaviour to get ebonite from local
    # installation instead of pip
    # 1. for developing reasons 2. we dont have ebonite on pip yet
    with use_local_installation():
        create_service_from_model('my_model', model_object, sample_input,
                                  task_name='my_task',
                                  service_name='magic-one-line-ebnt-service',
                                  run_service=True)
def test_python_multi_builder__distr_loadable(tmpdir, python_multi_build_context, created_model, pandas_data):
    """Write a multi-model distribution and verify the reloaded model predicts identically."""
    expected = created_model.wrapper.call_method('predict', pandas_data)
    with use_local_installation():
        python_multi_build_context._write_distribution(tmpdir)
    interface = _load(MultiModelLoader(), tmpdir)
    # multi-model interfaces namespace methods by model name
    actual = interface.execute(f'{created_model.name}_predict', {'vector': pandas_data})
    np.testing.assert_almost_equal(expected, actual)
def remote_ebnt(tmpdir, postgres_server, postgres_meta, s3server, s3_artifact):  # noqa
    """Yield an Ebonite client backed by Postgres metadata and S3 artifacts,
    round-tripped through a saved config file so config code is exercised."""
    with use_local_installation():
        # we reconstruct all objects here to ensure that config-related code is covered by tests
        client = Ebonite.custom_client(
            metadata="sqlalchemy",
            meta_kwargs={"db_uri": postgres_meta.db_uri},
            artifact="s3",
            artifact_kwargs={"bucket_name": s3_artifact.bucket_name,
                             "endpoint": s3_artifact.endpoint,
                             "region": s3_artifact.region})
        cfg_path = os.path.join(tmpdir, 'config.json')
        client.save_client_config(cfg_path)
        yield Ebonite.from_config_file(cfg_path)
def test_python_build_context__distr_loadable(tmpdir, python_build_context, created_model, pandas_data, request):
    """Write a distribution, check the pinned python version, and verify the
    reloaded model predicts the same as the original."""
    python_build_context: PythonBuildContext = request.getfixturevalue(python_build_context)
    expected = created_model.wrapper.call_method('predict', pandas_data)
    with use_local_installation():
        python_build_context._write_distribution(tmpdir)
    # the provider must capture the interpreter version of the current environment
    assert python_build_context.provider.get_python_version() == platform.python_version()
    interface = _load(ModelLoader(), tmpdir)
    actual = interface.execute('predict', {'vector': pandas_data})
    np.testing.assert_almost_equal(expected, actual)
def main():
    """Demo: build a docker image for the saved custom-code model and run it on port 9000."""
    # create local ebonite client
    client = ebonite.Ebonite.local()

    # get task
    task: Task = client.get_or_create_task('custom_code_project', 'custom_code_task')

    # get saved model
    model: Model = client.get_model(task=task, model_name='custom_code_model')

    # this changes docker image builder behaviour to get ebonite from local
    # installation instead of pip
    # 1. for developing reasons 2. we dont have ebonite on pip yet
    with use_local_installation():
        # build docker container from model
        build_model_docker('custom_code_model_container', model, force_overwrite=True)
        # run docker container
        run_docker_img('custom_code_model_container', 'custom_code_model_container',
                       port_mapping={9000: 9000})
def main():
    """Demo: register a model via a custom extension and serve it from docker."""
    # load extension
    # you just use plain module name, if it's installed from pip
    # or, you can just directly import your classes
    # to automatically load extension on startup, set EBONITE_EXTENSIONS env variable
    ebonite.load_extensions('myext.extension_source')

    # set up client and task
    client = ebonite.Ebonite.local(clear=True)
    task = client.get_or_create_task('project', 'task')

    # create a model using myext extension
    model = task.create_and_push_model('ahaha', 1, 'model')

    with use_local_installation():
        # your extension code will be inside docker image in form of files
        # if you have local files, or requirement if you installed it from pip
        build_model_docker('local_ext_model', model, force_overwrite=True)
        run_docker_img('local_ext_model', 'local_ext_model')
def main():
    """Demo: build an image for the saved custom-code model and run an instance of it."""
    # create local ebonite client
    client = ebonite.Ebonite.local()

    # get task
    task: Task = client.get_or_create_task('custom_code_project', 'custom_code_task')

    # get saved model
    model: Model = client.get_model(task=task, model_name='custom_code_model')

    # this changes docker image builder behaviour to get ebonite from local
    # installation instead of pip
    with use_local_installation():
        # build docker container from model
        image = client.build_image('custom_code_model_container', model, force_overwrite=True)
        # run docker container
        client.run_instance('custom_code_model_container', image, detach=False)
def _prepare_distribution(target_dir, python_builder):
    """Write the builder's distribution into target_dir and return (args, env)
    suitable for launching its run.sh with the current test interpreter."""
    with use_local_installation():
        python_builder._write_distribution(target_dir)

    # prevent escaping from interpreter installation used for running tests
    run_sh = os.path.join(target_dir, 'run.sh')
    with open(run_sh, 'r') as fh:
        script = fh.read()
    # windows paths are deadly for shell scripts under Cygwin
    interpreter = sys.executable.replace('\\', '/')
    script = script.replace(' python ', f' {interpreter} ')
    with open(run_sh, 'w') as fh:
        fh.write(script)

    args = ['sh', run_sh]
    # prevent leak of PYTHONPATH used for running tests
    env = os.environ.copy()
    env['PYTHONPATH'] = str(target_dir)
    return args, env
def docker_builder_local_registry():
    """Yield a DockerBuilder that targets the default (local) docker registry."""
    with use_local_installation():
        builder = DockerBuilder(ProviderMock(), DockerImage(IMAGE_NAME))
        yield builder
def _generate_dockerfile(**kwargs):
    """Render a Dockerfile with the given generator options under a local installation."""
    with use_local_installation():
        generator = _DockerfileGenerator(**kwargs)
        return generator.generate({})
def dockerenv_local():
    """Yield a default DockerEnv (local daemon, no registry) under a local installation."""
    with use_local_installation():
        yield DockerEnv()
def _generate_dockerfile(unix_packages=None, **kwargs):
    """Render a Dockerfile (optionally with extra unix packages) and strip empty lines."""
    with use_local_installation():
        generator = _DockerfileGenerator(**kwargs)
        rendered = generator.generate({}, unix_packages)
    return _cut_empty_lines(rendered)
def test_build_model_docker(model, server, img_name, container_name):
    """Build a flask-serving docker image for the model, run it detached, and
    check the container comes up."""
    with use_local_installation():
        build_model_flask_docker(img_name, model, force_overwrite=True)
        run_docker_img(container_name, img_name, detach=True)
    assert is_container_running(container_name)
def dockerenv_remote(docker_registry, docker_daemon):
    """Yield a DockerEnv pointed at the provided remote registry and daemon."""
    with use_local_installation():
        yield DockerEnv(registry=docker_registry, daemon=docker_daemon)
def test_build_model_docker(model, server, img_name):
    """Build a docker image for the model with the given server and check it exists locally."""
    with use_local_installation():
        build_model_docker(img_name, model, server)
    assert has_local_image(img_name)
def ebnt(tmpdir):
    """Yield a local Ebonite client rooted at a temporary directory."""
    with use_local_installation():
        yield Ebonite.local(str(tmpdir))
def img_name():
    """Yield a fixed image name for helper tests, removing the image on teardown."""
    name = "helper-test-image"
    with use_local_installation():
        yield name
        rm_image(name + ":latest")  # FIXME later
def docker_builder():
    """Yield a DockerBuilder over a mock provider for the shared test image name."""
    with use_local_installation():
        yield DockerBuilder(ProviderMock(), IMAGE_NAME)