def test_remote_flow_local_executors(mocker, replicas):
    """Start remote flows with local executors (glg and gllg topologies)
    and verify each one serves `/` requests end to end.
    """
    client = JinaDClient(host=__default_host__, port=8000)
    workspace_id = client.workspaces.create(
        paths=[os.path.join(cur_dir, 'yamls')]
    )
    # gateway-local-gateway and gateway-local-local-gateway topologies
    for yaml_file in ('flow_glg.yml', 'flow_gllg.yml'):
        mock_cb = mocker.Mock()
        flow_id = client.flows.create(
            workspace_id=workspace_id,
            filename=yaml_file,
            envs={'REPLICAS': replicas},
        )
        # gateway connection details are known only after creation
        flow_args = client.flows.get(flow_id)['arguments']['object']['arguments']
        doc_stream = (
            Document(blob=np.random.random([1, 100])) for _ in range(NUM_DOCS)
        )
        Client(
            host=__default_host__,
            port=flow_args['port_expose'],
            protocol=flow_args['protocol'],
        ).post(
            on='/',
            inputs=doc_stream,
            on_done=mock_cb,
            show_progress=True,
        )
        mock_cb.assert_called()
        assert client.flows.delete(flow_id)
    assert client.workspaces.delete(workspace_id)
def test_update_custom_container():
    """Updating a workspace with new requirements must rebuild both the
    docker image and the container (fresh ids for each).
    """
    client = JinaDClient(host=__default_host__, port=8000)
    workspace_id = client.workspaces.create(
        paths=[
            os.path.join(cur_dir, '../../daemon/unit/models/good_ws/.jinad'),
            os.path.join(cur_dir, 'flow_app_ws/requirements.txt'),
        ]
    )
    details = WorkspaceItem(**client.workspaces.get(id=workspace_id))
    old_container_id = details.metadata.container_id
    assert old_container_id
    old_image_id = details.metadata.image_id
    assert old_image_id
    # two requirements expected from flow_app_ws/requirements.txt
    assert len(details.arguments.requirements.split()) == 2

    # swap in a different requirements file -> must trigger a rebuild
    workspace_id = client.workspaces.update(
        id=workspace_id,
        paths=[os.path.join(cur_dir, 'sklearn_encoder_ws/requirements.txt')],
    )
    details = WorkspaceItem(**client.workspaces.get(id=workspace_id))
    assert details.metadata.container_id
    assert details.metadata.container_id != old_container_id
    assert details.metadata.image_id
    assert details.metadata.image_id != old_image_id
def test_remote_flow_containerized_executors(docker_image, mocker):
    """Start remote flows with containerized executors in each supported
    gateway/executor topology and verify every one serves `/` requests.

    Topologies (encoded in the yaml file names):
      gcg   = gateway-container-gateway
      gclg  = gateway-container-local-gateway
      glcg  = gateway-local-container-gateway
      gclcg = gateway-container-local-container-gateway
    """
    client = JinaDClient(host=__default_host__, port=8000)
    workspace_id = client.workspaces.create(paths=[os.path.join(cur_dir, 'yamls')])
    GATEWAY_CONTAINER_GATEWAY = 'flow_gcg.yml'
    GATEWAY_CONTAINER_LOCAL_GATEWAY = 'flow_gclg.yml'
    # BUG FIX: was 'flow_gclg.yml' (copy-paste duplicate of the previous
    # constant), so the gateway-local-container topology was never exercised.
    GATEWAY_LOCAL_CONTAINER_GATEWAY = 'flow_glcg.yml'
    GATEWAY_CONTAINER_LOCAL_CONTAINER_GATEWAY = 'flow_gclcg.yml'
    for flow_yaml in [
        GATEWAY_CONTAINER_GATEWAY,
        GATEWAY_CONTAINER_LOCAL_GATEWAY,
        GATEWAY_LOCAL_CONTAINER_GATEWAY,
        GATEWAY_CONTAINER_LOCAL_CONTAINER_GATEWAY,
    ]:
        response_mock = mocker.Mock()
        flow_id = client.flows.create(workspace_id=workspace_id, filename=flow_yaml)
        # gateway connection details are known only after creation
        args = client.flows.get(flow_id)['arguments']['object']['arguments']
        Client(
            host=__default_host__,
            port=args['port_expose'],
            protocol=args['protocol'],
        ).post(
            on='/',
            inputs=(
                Document(blob=np.random.random([1, 100])) for _ in range(NUM_DOCS)
            ),
            on_done=response_mock,
        )
        response_mock.assert_called()
        assert client.flows.delete(flow_id)
    assert client.workspaces.delete(workspace_id)
def test_port_expose_env_var(port_expose, func):
    """Env vars passed at flow creation (PORT_EXPOSE / FUNC) must be honored
    by the remote flow, and jinad must inject its workspace env defaults.
    """
    daemon = JinaDClient(host='localhost', port=8000)
    workspace_id = daemon.workspaces.create(
        paths=[os.path.join(cur_dir, 'envvars_ws')]
    )
    flow_id = daemon.flows.create(
        workspace_id=workspace_id,
        filename='flow.yml',
        envs={'PORT_EXPOSE': port_expose, 'FUNC': func},
    )
    results = Client(host='localhost', port=port_expose, protocol='http').post(
        on='/blah',
        inputs=(Document(text=f'text {i}') for i in range(2)),
        return_results=True,
    )
    # the executor appends FUNC to every document's text
    assert all(doc.text.endswith(func) for doc in results[0].data.docs)

    status_response = requests.get(f'http://localhost:{port_expose}/status')
    assert status_response.status_code == 200
    envs = status_response.json()['envs']
    # workspace-scoped env defaults injected by jinad
    assert envs['JINA_LOG_WORKSPACE'] == '/workspace/logs'
    assert envs['JINA_HUB_CACHE_DIR'] == '/workspace/.cache/jina'
    assert envs['JINA_HUB_ROOT'] == '/workspace/.jina/hub-packages'
    assert daemon.flows.delete(flow_id)
    assert daemon.workspaces.delete(workspace_id)
def test_flow_error_in_partial_daemon():
    """Creating a flow from a broken yaml must surface the whole exception
    chain (runtime failure -> executor load failure -> missing file).
    """
    client = JinaDClient(host=__default_host__, port=8000)
    workspace_id = client.workspaces.create(
        paths=[os.path.join(cur_dir, 'wrong_flow.yml')]
    )
    error_msg = client.flows.create(
        workspace_id=workspace_id, filename='wrong_flow.yml'
    )
    expected_fragments = (
        'jina.excepts.RuntimeFailToStart',
        'jina.excepts.ExecutorFailToLoad',
        'FileNotFoundError: can not find executor_ex.yml',
    )
    for fragment in expected_fragments:
        assert fragment in error_msg
    assert client.workspaces.delete(id=workspace_id)
def test_workspace_clear():
    """A created workspace becomes ACTIVE, shows up in the listing, and can
    be removed via `clear` — repeated twice to prove clear leaves jinad usable.
    """
    client = JinaDClient(host=__default_host__, port=8000)
    for _ in range(2):
        workspace_id = client.workspaces.create(
            paths=[os.path.join(cur_dir, 'empty_flow.yml')]
        )
        assert DaemonID(workspace_id).type == 'workspace'
        item = WorkspaceItem(**client.workspaces.get(id=workspace_id))
        assert item.state == RemoteWorkspaceState.ACTIVE
        assert workspace_id in client.workspaces.list()
        assert client.workspaces.clear()
def test_remote_flow():
    """Full lifecycle of a remote flow: create workspace, create flow,
    inspect, list, hit the gateway status endpoint, and tear down.
    """
    client = JinaDClient(host=__default_host__, port=8000)
    workspace_id = client.workspaces.create(
        paths=[os.path.join(cur_dir, 'empty_flow.yml')]
    )
    assert DaemonID(workspace_id).type == 'workspace'

    flow_id = client.flows.create(
        workspace_id=workspace_id, filename='empty_flow.yml'
    )
    assert DaemonID(flow_id).type == 'flow'
    assert client.flows.get(flow_id)
    assert flow_id in client.flows.list()
    # 23456 — presumably the port_expose configured in empty_flow.yml; confirm in yaml
    assert_request('get', url='http://localhost:23456/status/', expect_rcode=200)
    assert client.flows.delete(flow_id)
    assert client.workspaces.delete(workspace_id)
def test_pea_error_in_partial_daemon():
    """A pea referencing a missing py_module must fail to start and report
    the exception chain in the returned error message.
    """
    client = JinaDClient(host=__default_host__, port=8000)
    workspace_id = client.workspaces.create()
    status, error_msg = client.peas.create(
        workspace_id=workspace_id,
        payload={'name': 'blah-pea', 'py_modules': ['abc.py']},
    )
    assert not status
    assert 'jina.excepts.RuntimeFailToStart' in error_msg
    assert 'FileNotFoundError: can not find abc.py' in error_msg
    assert client.workspaces.delete(id=workspace_id)
def test_create_custom_container():
    """A workspace built from the good_ws `.jinad` config spawns a container
    named after the workspace; `custom_workspace_no_run` spawns none.
    """
    client = JinaDClient(host=__default_host__, port=8000)

    workspace_id = client.workspaces.create(
        paths=[os.path.join(cur_dir, '../../daemon/unit/models/good_ws/.jinad')]
    )
    details = WorkspaceItem(**client.workspaces.get(id=workspace_id))
    assert details.metadata.container_id
    # the spawned container is named after the workspace id
    container = docker.from_env().containers.get(details.metadata.container_id)
    assert container.name == workspace_id

    workspace_id = client.workspaces.create(
        paths=[os.path.join(cur_dir, 'custom_workspace_no_run')]
    )
    details = WorkspaceItem(**client.workspaces.get(id=workspace_id))
    assert not details.metadata.container_id
def test_remote_workspace_value():
    """Validate `self.workspace` inside a remote Flow's Executor.

    The echoed text must start with
    `{__partial_workspace__}/WorkspaceValidator/0`.
    """
    host = __default_host__
    client = JinaDClient(host=host, port=8000)
    workspace_id = client.workspaces.create(
        paths=[os.path.join(cur_dir, 'yamls')]
    )
    flow_id = client.flows.create(
        workspace_id=workspace_id, filename='flow_workspace_validate.yml'
    )
    # gateway connection details are known only after creation
    args = client.flows.get(flow_id)['arguments']['object']['arguments']
    response = Client(
        host=host, port=args['port_expose'], protocol=args['protocol']
    ).post(on='/', inputs=[Document()], show_progress=True, return_results=True)
    echoed = response[0].data.docs[0].text
    assert echoed.startswith(f'{__partial_workspace__}/WorkspaceValidator/0')
    assert client.flows.delete(flow_id)
    assert client.workspaces.delete(workspace_id)
def test_delete_custom_container():
    """Deleting only the container (not files/network/workspace) must clear
    the workspace's container_id while the workspace itself survives.
    """
    client = JinaDClient(host=__default_host__, port=8000)
    workspace_id = client.workspaces.create(
        paths=[os.path.join(cur_dir, 'custom_workspace_blocking')]
    )

    # a container must exist right after workspace creation
    details = WorkspaceItem(**client.workspaces.get(id=workspace_id))
    assert details.metadata.container_id

    # remove just the container; keep network, files, and the workspace
    client.workspaces.delete(
        id=workspace_id, container=True, network=False, files=False, everything=False
    )

    # the container id must be gone afterwards
    details = WorkspaceItem(**client.workspaces.get(id=workspace_id))
    assert not details.metadata.container_id
import os
from contextlib import contextmanager

from daemon.clients import JinaDClient
from jina.types.request import Response
from jina.helper import random_identity
from jina import Document, Client, __default_host__, Flow

cur_dir = os.path.dirname(os.path.abspath(__file__))
HOST = __default_host__
# module-level jinad client shared by the helpers/tests in this file
client = JinaDClient(host=HOST, port=8000)


@contextmanager
def RemoteFlow(workspace_id) -> Response:
    """Start `flow_cache_validator.yml` in the given remote workspace, yield
    the response of a single `/` request against its gateway, and always
    delete the flow on exit.
    """
    flow_id = client.flows.create(
        workspace_id=workspace_id, filename='flow_cache_validator.yml'
    )
    # gateway connection details are only known after the flow is created
    args = client.flows.get(flow_id)['arguments']['object']['arguments']
    yield Client(
        host=HOST, port=args['port_expose'], protocol=args['protocol']
    ).post(on='/', inputs=[Document()], show_progress=True, return_results=True)
    # flow deletion doubles as a teardown assertion
    assert client.flows.delete(flow_id)


def test_cache_validate_remote_flow():
    """ This test validates that files (e.g.- pre-trained model) downloaded in an Executor in a remote Flow should be available to be accessed during the 2nd time an
import os
from contextlib import contextmanager
from typing import Dict

import pytest

from daemon.clients import JinaDClient
from jina import Client, Document, __default_host__

cur_dir = os.path.dirname(os.path.abspath(__file__))
# module-level jinad client shared by the helpers/tests in this file
jinad_client = JinaDClient(host=__default_host__, port=8000)


@contextmanager
def RemoteFlow(directory, filename: str, envs: Dict[str, str] = {}):
    """Create a remote workspace from `directory`, start the flow described
    by `filename` in it (with optional env vars), and always terminate the
    flow on exit.

    NOTE(review): the mutable default `envs={}` is shared across calls —
    harmless only while no caller mutates it; confirm before relying on it.
    """
    flow_id = None
    try:
        workspace_id = jinad_client.workspaces.create(
            paths=[os.path.join(cur_dir, directory)]
        )
        flow_id = jinad_client.flows.create(
            workspace_id=workspace_id, filename=filename, envs=envs
        )
        yield
    finally:
        # only attempt teardown when the flow actually got created
        if flow_id:
            assert jinad_client.flows.delete(
                flow_id), 'Flow termination failed'
            print(f'Remote Flow {flow_id} successfully terminated')


@pytest.mark.parametrize('filename',
from jina import Document, Client, __default_host__
from jina.logging.logger import JinaLogger
from daemon.clients import JinaDClient

cur_dir = os.path.dirname(os.path.abspath(__file__))
compose_yml = os.path.join(cur_dir, 'docker-compose.yml')
HOST = __default_host__
JINAD_PORT = 8003
REST_PORT_DBMS = 9000
REST_PORT_QUERY = 9001
DUMP_PATH = '/jinad_workspace/dump'
logger = JinaLogger('test-dump')
client = JinaDClient(host=HOST, port=JINAD_PORT)
SHARDS = 3
EMB_SIZE = 10


@pytest.fixture
def executor_images():
    """Build the local docker images for the dbms and query executors.

    NOTE(review): no yield/return is visible in this chunk — presumably the
    fixture ends after building (or its tail is outside this view); confirm
    against the full file.
    """
    import docker

    # local `client` shadows the module-level JinaDClient on purpose:
    # here it is the docker SDK client, used only inside this fixture
    client = docker.from_env()
    dbms_dir = os.path.join(cur_dir, 'deployments', 'dbms')
    query_dir = os.path.join(cur_dir, 'deployments', 'query')
    client.images.build(path=dbms_dir, tag='dbms-executor')
    client.images.build(path=query_dir, tag='query-executor')
from jina.logging.logger import JinaLogger
from daemon.clients import JinaDClient

cur_dir = os.path.dirname(os.path.abspath(__file__))
compose_yml = os.path.join(cur_dir, 'docker-compose.yml')
JINAD_PORT = '8000'
JINAD_PORT_DBMS = '8001'
JINAD_PORT_QUERY = '8001'
REST_PORT_DBMS = '9000'
REST_PORT_QUERY = '9001'
DUMP_PATH_DOCKER = '/workspace/dump'
logger = JinaLogger('test-dump')
client = JinaDClient(host='localhost', port=JINAD_PORT)
SHARDS = 3
EMB_SIZE = 10


def _path_size_remote(this_dump_path, container_id):
    """Return the numeric prefix of `du -sh` for `this_dump_path` inside the
    given docker container.

    NOTE(review): `dump_size.txt` is opened without being closed and never
    removed, and stripping both 'K' and 'M' discards the unit — sizes
    reported in different units are not directly comparable; confirm callers
    only compare same-unit values.
    """
    # run du inside the container, redirecting output to a local temp file
    os.system(
        f'docker exec {container_id} /bin/bash -c "du -sh {this_dump_path}" > dump_size.txt'
    )
    contents = open('dump_size.txt').readline()
    # keep only the leading number, dropping a trailing K or M unit
    dir_size = float(contents.split('K')[0].split('M')[0])
    return dir_size


def _create_flows():
def test_timeout():
    """JinaDClient defaults to a 10-minute total timeout; an explicit
    `timeout` argument overrides it.
    """
    default_client = JinaDClient(host='1.2.3.4', port=8000)
    assert default_client.peas.timeout.total == 10 * 60

    custom_client = JinaDClient(host='1.2.3.4', port=8000, timeout=10)
    assert custom_client.peas.timeout.total == 10
def jinad_client():
    """Return a JinaDClient bound to the module-level HOST and PORT."""
    daemon_client = JinaDClient(host=HOST, port=PORT)
    return daemon_client
HOST = __default_host__ # change this if you are using remote jinad JINAD_PORT = 8000 # change this if you start jinad on a different port DUMP_PATH = '/jinad_workspace/dump' # the path where to dump SHARDS = 1 # change this if you change pods/query_indexer.yml DUMP_RELOAD_INTERVAL = 10 # time between dump - rolling update calls DATA_FILE = 'data/toy.txt' # change this if you get the full data DOCS_PER_ROUND = 5 # nr of documents to index in each round STORAGE_FLOW_YAML_FILE = 'storage.yml' # indexing Flow yaml name QUERY_FLOW_YAML_FILE = 'query.yml' # querying Flow yaml name STORAGE_REST_PORT = 9000 # REST port of storage Flow, defined in flows/storage.yml QUERY_REST_PORT = 9001 # REST port of Query Flow, defined in flows/query.yml logger = JinaLogger('jina') cur_dir = os.path.dirname(os.path.abspath(__file__)) jinad_client = JinaDClient(host=HOST, port=JINAD_PORT, timeout=10 * 60) def docarray_from_file(filename): docs = [] with open(filename) as f: for line in f: docs.append(Document(text=line)) return DocumentArray(docs) def query_restful(): while True: text = input('please type a sentence: ') if not text: break