def test_flow_not_returned(inspect, protocol):
    """Docs indexed through the flow must carry no evaluations for this inspect mode."""

    def _assert_no_evaluations(resp):
        for doc in resp.data.docs:
            assert len(doc.evaluations) == 0

    flow = (
        Flow(protocol=protocol, inspect=inspect, port=exposed_port)
        .add()
        .inspect(uses=AddEvaluationExecutor)
    )
    with flow:
        responses = Client(
            protocol=protocol, port=exposed_port, return_responses=True
        ).index(inputs=docs)
        _assert_no_evaluations(responses[0])
def test_single_executor():
    """One MatchAdder executor (root + match level) should produce valid results."""
    executor_conf = {'jtype': 'MatchAdder', 'with': {'traversal_paths': 'r,m'}}
    flow = Flow(port=exposed_port).add(uses=executor_conf)
    with flow:
        responses = Client(port=exposed_port).post(
            on='index', inputs=Document(), return_responses=True
        )
        validate_results(responses)
def test_remote_executor_via_pymodules(upload_files, uses, py_modules):
    """An executor shipped to a remote host via py_modules should double the doc text."""
    flow = Flow(port_expose=exposed_port).add(
        host=CLOUD_HOST,
        uses=uses,
        py_modules=py_modules,
        upload_files=upload_files,
    )
    with flow:
        responses = Client(port=exposed_port).post(
            on='/',
            inputs=Document(text=py_modules),
            return_results=True,
        )
        assert responses[0].data.docs[0].text == py_modules * 2
def test_func_simple_routing():
    """A handler bound to '/search' runs only for that endpoint; others pass through."""

    class MyExecutor(Executor):
        @requests(on='/search')
        def foo(self, **kwargs):
            # the runtime must forward both docs and parameters to the handler
            for key in ('docs', 'parameters'):
                assert key in kwargs
            assert len(kwargs['docs']) == 3
            assert kwargs['parameters']['hello'] == 'world'
            assert kwargs['parameters']['topk'] == 10
            kwargs['docs'][0].tags['hello'] = 'world'

    f = Flow(port=1234).add(uses=MyExecutor)

    # bound endpoint: handler runs and mutates the first doc
    with f:
        results = Client(port=1234).post(
            on='/search',
            inputs=[(Document(), Document()) for _ in range(3)],
            parameters={'hello': 'world', 'topk': 10},
            return_responses=True,
        )
        assert results[0].header.status.code == 0
        assert results[0].data.docs[0].tags['hello'] == 'world'

    # unbound endpoint: request still succeeds, handler is skipped
    with f:
        results = Client(port=1234).post(
            on='/random',
            inputs=[Document() for _ in range(3)],
            parameters={'hello': 'world', 'topk': 10},
            return_responses=True,
        )
        assert results[0].header.status.code == 0
def test_uses_before_reduce():
    """With only BaseExecutor as uses_before, the merge pod must reduce the branches."""
    flow = (
        Flow(port=exposed_port)
        .add(uses=Executor1, name='pod0')
        .add(uses=Executor2, needs='gateway', name='pod1')
        .add(uses=Executor3, needs='gateway', name='pod2')
        .add(needs=['pod0', 'pod1', 'pod2'], name='pod3', uses_before='BaseExecutor')
    )
    with flow:
        inputs = DocumentArray([Document() for _ in range(5)])
        resp = Client(port=exposed_port, return_responses=True).post('/', inputs=inputs)
        # reduce happened because there is only BaseExecutor as uses_before
        assert len(resp[0].docs) == 5
async def test_pods_trivial_topology(port_generator):
    """A gateway -> head -> worker chain should route and answer every request."""
    worker_port = port_generator()
    head_port = port_generator()
    gateway_port = port_generator()
    graph_description = '{"start-gateway": ["pod0"], "pod0": ["end-gateway"]}'
    pod_addresses = f'{{"pod0": ["0.0.0.0:{head_port}"]}}'

    # one worker, one head that knows about it, and one gateway
    worker_pod = _create_worker_pod(worker_port)
    head_pod = _create_head_pod(head_port, {'0': [f'127.0.0.1:{worker_port}']})
    gateway_pod = _create_gateway_pod(graph_description, pod_addresses, gateway_port)

    with gateway_pod, head_pod, worker_pod:
        # normally the Pod object does this wiring of worker into head
        head_pod.wait_start_success()
        worker_pod.wait_start_success()
        gateway_pod.wait_start_success()

        client = Client(host='localhost', port=gateway_port, asyncio=True)
        responses = client.post(
            '/', inputs=async_inputs, request_size=1, return_responses=True
        )
        collected = []
        async for response in responses:
            collected.append(response)
        assert len(collected) == 20
        assert len(collected[0].docs) == 1
def test_uvicorn_ssl_with_flow(cert_pem, key_pem, protocol, capsys):
    """A self-signed certificate must make the https client fail verification."""
    uvicorn_options = [
        f'ssl_certfile: {cert_pem}',
        f'ssl_keyfile: {key_pem}',
        'ssl_keyfile_password: abcd',
    ]
    with Flow(protocol=protocol, uvicorn_kwargs=uvicorn_options) as f:
        os.environ['JINA_LOG_LEVEL'] = 'ERROR'
        Client(protocol=protocol, port=f.port_expose, https=True).index([Document()])
        assert (
            '''certificate verify failed: self signed certificate'''
            in capsys.readouterr().out
        )
def test_sharding_tail_pod(num_replicas, num_shards):
    """TODO(Maximilian): Make (1, 2) and (2, 1) also workable"""
    flow = Flow(port=1234).add(
        uses=DummyExecutor,
        replicas=num_replicas,
        shards=num_shards,
        uses_after=MatchMerger,
    )
    with flow:
        responses = Client(port=1234, return_responses=True).post(
            on='/search',
            inputs=Document(matches=[Document()]),
        )
        # the merger collects one match per shard
        assert len(responses[0].docs[0].matches) == num_shards
def test_multi_executor():
    """Two chained MatchAdders (root level, then match level) should both apply."""
    flow = (
        Flow(port_expose=exposed_port)
        .add(uses={'jtype': 'MatchAdder', 'with': {'traversal_paths': 'r'}})
        .add(uses={'jtype': 'MatchAdder', 'with': {'traversal_paths': 'm'}})
    )
    with flow:
        responses = Client(port=exposed_port).post(
            on='index',
            inputs=Document(),
            return_results=True,
        )
        validate_results(responses)
def test_upload_via_yaml(replicas):
    """An executor defined in a local YAML file should run on the remote host."""
    flow = (
        Flow(port_expose=exposed_port)
        .add()
        .add(
            uses='mwu_encoder.yml',
            host=CLOUD_HOST,
            replicas=replicas,
            upload_files=cur_dir,
        )
        .add()
    )
    with flow:
        responses = Client(port=exposed_port).index(
            inputs=(
                Document(tensor=np.random.random([1, 100])) for _ in range(NUM_DOCS)
            ),
            return_results=True,
        )
        assert len(responses) > 0
        assert len(responses[0].docs) > 0
def test_func_failure():
    """An executor that raises must surface status code 3 (error) to the client."""

    class FailingExecutor(Executor):
        @requests(on='/search')
        def foo(self, **kwargs):
            raise Exception()

    flow = Flow(port_expose=1234).add(uses=FailingExecutor)
    with flow:
        responses = Client(port=1234).post(
            on='/search',
            inputs=[(Document(), Document()) for _ in range(3)],
            return_results=True,
        )
        assert responses[0].header.status.code == 3
def test_uses_before_no_reduce_real_executor_uses():
    """A real executor on the merge pod must suppress the automatic reduce."""
    flow = (
        Flow(port=exposed_port)
        .add(uses=Executor1, name='pod0')
        .add(uses=Executor2, needs='gateway', name='pod1')
        .add(uses=Executor3, needs='gateway', name='pod2')
        .add(needs=['pod0', 'pod1', 'pod2'], name='pod3', uses=DummyExecutor)
    )
    with flow:
        inputs = DocumentArray([Document() for _ in range(5)])
        resp = Client(port=exposed_port, return_responses=True).post('/', inputs=inputs)
        # no reduce happened: the DummyExecutor replaced the docs with its own
        assert len(resp[0].docs) == 1
        assert resp[0].docs[0].id == 'fake_document'
def test_override_requests():
    """`uses_requests` must rebind endpoints to the named methods of the executor."""

    class MyExec(Executor):
        @requests
        def foo(self, docs, **kwargs):
            for d in docs:
                d.text = 'foo'

        def bar(self, docs, **kwargs):
            for d in docs:
                d.text = 'bar'

        @requests(on=['/1', '/2'])
        def foobar(self, docs, **kwargs):
            for d in docs:
                d.text = 'foobar'

    def _text_for(endpoint):
        # post a single doc and return the text written by the bound handler
        responses = Client(port=exposed_port, return_responses=True).post(
            endpoint, Document()
        )
        return responses[0].docs[0].text

    # original binding: default handler is foo()
    with Flow(port=exposed_port).add(uses=MyExec):
        assert _text_for('/index') == 'foo'

    # rebind '/index' to bar(); decorator-bound endpoints stay intact
    with Flow(port=exposed_port).add(uses=MyExec, uses_requests={'/index': 'bar'}):
        assert _text_for('/index') == 'bar'
        assert _text_for('/1') == 'foobar'

    # rebind '/index' to foobar(); unknown endpoints fall back to the default foo()
    with Flow(port=exposed_port).add(uses=MyExec, uses_requests={'/index': 'foobar'}):
        assert _text_for('/index') == 'foobar'
        assert _text_for('/index-blah') == 'foo'

    # rebind the default endpoint itself to bar()
    with Flow(port=exposed_port).add(uses=MyExec, uses_requests={'/default': 'bar'}):
        assert _text_for('/index') == 'bar'
def test_uses_after_no_reduce(n_shards, n_docs):
    """A real `uses_after` executor must suppress the automatic shard reduce."""
    search_flow = Flow(port=exposed_port).add(
        uses=ShardsExecutor,
        shards=n_shards,
        uses_after=DummyExecutor,
        polling='all',
        uses_with={'n_docs': n_docs},
    )
    with search_flow:
        query = DocumentArray([Document() for _ in range(5)])
        resp = Client(port=exposed_port, return_responses=True).post(
            '/search', inputs=query
        )
        # no reduce happened: the DummyExecutor replaced the docs with its own
        assert len(resp[0].docs) == 1
        assert resp[0].docs[0].id == 'fake_document'
def test_client_on_error_deprecation(protocol):
    """The deprecated single-argument on_error callback must still be invoked."""

    class OnError:
        def __init__(self):
            self.is_called = False

        def __call__(self, response):  # deprecated single-argument signature
            self.is_called = True

    callback = OnError()
    # nothing is listening on this port, so the request fails and on_error fires
    Client(host='0.0.0.0', protocol=protocol, port=12345).post(
        '/blah',
        inputs=DocumentArray.empty(10),
        on_error=callback,
    )
    assert callback.is_called
def _request(status_codes, durations, index):
    """Send one batch of 256 index requests and validate every response.

    ``status_codes``/``durations`` are shared containers filled in by
    ``_validate``; ``index`` identifies this request batch.
    """
    # `reraise` propagates assertion failures out of this (presumably
    # worker-thread) context back to the main test -- TODO confirm it is the
    # pytest helper context manager from the enclosing file.
    with reraise:
        start = time.time()
        on_done = functools.partial(
            _validate,
            start=start,
            status_codes=status_codes,
            durations=durations,
            index=index,
        )
        results = Client(port=port, protocol=protocol, return_responses=True).index(
            inputs=(Document() for _ in range(256)),
            # NOTE(review): `_size` looks like it should be `request_size`;
            # verify this keyword is actually honored by Client.index.
            _size=16,
        )
        assert len(results) > 0
        # validate each returned response with the pre-bound callback
        for result in results:
            on_done(result)
def test_client_on_always_after_exception(protocol):
    """The on_always callback must fire even when the request fails."""

    class OnAlways:
        def __init__(self):
            self.is_called = False

        def __call__(self, response):
            self.is_called = True

    callback = OnAlways()
    # nothing is listening on this port, so the request fails; on_always still runs
    Client(host='0.0.0.0', protocol=protocol, port=12345).post(
        '/blah',
        inputs=DocumentArray.empty(10),
        on_always=callback,
    )
    assert callback.is_called
def test_override_config_params_shards():
    """uses_with/uses_metas must override the YAML config values for every shard."""
    flow = Flow(port=exposed_port).add(
        uses=os.path.join(cur_dir, 'default_config.yml'),
        uses_with={'param1': 50, 'param2': 30},
        uses_metas={'workspace': 'different_workspace'},
        shards=2,
    )
    with flow:
        responses = Client(port=exposed_port, return_responses=True).search(
            inputs=[Document()]
        )
        doc = responses[0].docs[0]
        assert doc.tags['param1'] == 50
        assert doc.tags['param2'] == 30
        assert doc.tags['param3'] == 10  # not overridden
        assert doc.tags['name'] == 'name'  # not overridden
        assert doc.tags['workspace'] == 'different_workspace'
def test_func_return_():
    """A handler returning a fresh DocumentArray should be accepted by the flow."""

    class ReturningExecutor(Executor):
        @requests
        def foo(self, **kwargs):
            return DocumentArray([Document(), Document()])

    flow = Flow(port_expose=1234).add(uses=ReturningExecutor)
    with flow:
        Client(port=1234).post(
            on='/some_endpoint',
            inputs=[Document() for _ in range(3)],
            parameters={'hello': 'world', 'topk': 10},
            on_done=print,
        )
def test_reduce_needs():
    """Merging three parallel pods must reduce docs and keep each pod's fields."""
    flow = (
        Flow(port=exposed_port)
        .add(uses=Executor1, name='pod0')
        .add(uses=Executor2, needs='gateway', name='pod1')
        .add(uses=Executor3, needs='gateway', name='pod2')
        .add(needs=['pod0', 'pod1', 'pod2'], name='pod3')
    )
    with flow:
        inputs = DocumentArray([Document() for _ in range(5)])
        resp = Client(port=exposed_port, return_responses=True).post('/', inputs=inputs)
        assert len(resp[0].docs) == 5
        # fields written by the different branches all survive the reduce
        for doc in resp[0].docs:
            assert doc.text == 'exec1'
            assert doc.tags == {'a': 'b'}
            assert doc.modality == 'image'
            assert (doc.embedding == np.zeros(3)).all()
def test_remote_flow_local_executors(replicas, jinad_client):
    """Env vars passed to a remote flow must be visible inside its executors."""
    with RemoteFlow(
        filename='flow_with_env.yml',
        envs={
            'context_var_1': 'val1',
            'context_var_2': 'val2',
            'num_replicas': replicas,
        },
        jinad_client=jinad_client,
    ):
        resp = Client(host=HOST, port=FLOW_PORT).post(
            on='/',
            inputs=[Document(id=str(idx)) for idx in range(NUM_DOCS)],
            return_results=True,
        )
        for doc in resp[0].data.docs:
            assert doc.tags['key1'] == 'val1'
            assert doc.tags['key2'] == 'val2'
            assert doc.tags['replicas'] == replicas
def test_override_config_params_shards(docker_image):
    """uses_with/uses_metas must override the docker executor's YAML config for every shard.

    Fix: `return_results=True` is a Client.post/search argument, not a Flow
    constructor argument -- the stray kwarg on Flow(...) was dropped; the
    search call below already requests results explicitly.
    """
    flow = Flow(port_expose=exposed_port).add(
        uses='docker://override-config-test',
        uses_with={'param1': 50, 'param2': 30},
        uses_metas={'workspace': 'different_workspace'},
        shards=2,
    )
    with flow:
        resps = Client(port=exposed_port).search(inputs=[Document()], return_results=True)
        doc = resps[0].docs[0]
        assert doc.tags['param1'] == 50
        assert doc.tags['param2'] == 30
        assert doc.tags['param3'] == 10  # not overridden
        assert doc.tags['name'] == 'name'  # not overridden
        assert doc.tags['workspace'] == 'different_workspace'
def test_func_default_routing():
    """A default-bound handler must receive docs and parameters for any endpoint."""

    class CheckingExecutor(Executor):
        @requests
        def foo(self, **kwargs):
            for key in ('docs', 'parameters'):
                assert key in kwargs
            assert len(kwargs['docs']) == 3

    flow = Flow(port_expose=1234).add(uses=CheckingExecutor)
    with flow:
        Client(port=1234).post(
            on='/some_endpoint',
            inputs=[Document() for _ in range(3)],
            parameters={'hello': 'world', 'topk': 10},
        )
def test_func_joiner(mocker):
    """Docs from two parallel branches are merged and post-processed by Joiner."""

    class Joiner(Executor):
        @requests
        def foo(self, docs, **kwargs):
            for d in docs:
                d.text += '!!!'
            return docs

    class M1(Executor):
        @requests
        def foo(self, docs, **kwargs):
            for idx, d in enumerate(docs):
                d.text = f'hello {idx}'

    class M2(Executor):
        @requests
        def foo(self, docs, **kwargs):
            for idx, d in enumerate(docs):
                d.text = f'world {idx}'

    flow = (
        Flow(port_expose=1234)
        .add(uses=M1)
        .add(uses=M2, needs='gateway')
        .add(uses=Joiner, needs=['executor0', 'executor1'])
    )
    mock = mocker.Mock()

    def validate(req):
        # 3x 'hello i!!!' plus 3x 'world i!!!' -> six distinct texts
        texts = {d.text for d in req.docs}
        assert len(texts) == 6
        mock()

    with flow:
        Client(port=1234).post(
            on='/some_endpoint',
            inputs=[Document() for _ in range(3)],
            parameters={'hello': 'world', 'topk': 10},
            on_done=validate,
        )
    mock.assert_called_once()
def test_client_on_error_raise_exception(protocol, exception):
    """The two-argument on_error callback must receive the raised exception."""

    class OnError:
        def __init__(self):
            self.is_called = False

        def __call__(self, response, exception_param: Optional[Exception] = None):
            self.is_called = True
            # the exact exception type raised by the failed request is forwarded
            assert type(exception_param) == exception

    callback = OnError()
    # nothing is listening on this port, so the request fails and on_error fires
    Client(host='0.0.0.0', protocol=protocol, port=12345).post(
        '/blah',
        inputs=DocumentArray.empty(10),
        on_error=callback,
    )
    assert callback.is_called
def test_flow_returned_collect(protocol):
    """With inspect='COLLECT' exactly one evaluation must be attached in total."""
    # TODO(Joan): This test passes because we pass the `SlowExecutor` but I do
    # not know how to make the `COLLECT` deployment use an specific executor.

    def _assert_single_evaluation(resp):
        num_evaluations = 0
        scores = set()
        for doc in resp.data.docs:
            num_evaluations += len(doc.evaluations)
            scores.add(doc.evaluations['evaluate'].value)
        assert num_evaluations == 1
        assert 10.0 in scores

    flow = (
        Flow(protocol=protocol, inspect='COLLECT', port=exposed_port)
        .add()
        .inspect(uses=AddEvaluationExecutor)
    )
    with flow:
        responses = Client(
            port=exposed_port, protocol=protocol, return_responses=True
        ).index(inputs=docs)
        _assert_single_evaluation(responses[0])
def test_remote_workspace_value():
    """
    This tests the value set in `self.workspace` in a remote Flow.
    It should always be `/workspace/ExecutorName/...
    """
    host = __default_host__
    jinad = JinaDClient(host=host, port=8000)
    workspace_id = jinad.workspaces.create(paths=[os.path.join(cur_dir, 'yamls')])
    flow_id = jinad.flows.create(
        workspace_id=workspace_id, filename='flow_workspace_validate.yml'
    )
    flow_args = jinad.flows.get(flow_id)['arguments']['object']['arguments']
    response = Client(
        host=host,
        port=flow_args['port_expose'],
        protocol=flow_args['protocol'],
    ).post(
        on='/',
        inputs=[Document()],
        show_progress=True,
        return_results=True,
    )
    assert response[0].data.docs[0].text.startswith(
        f'{__partial_workspace__}/WorkspaceValidator/0'
    )
    # tear down the remote flow and its workspace
    assert jinad.flows.delete(flow_id)
    assert jinad.workspaces.delete(workspace_id)
def test_target_executor_with_overlaped_name(mocker):
    """Targeting executor 'foo' must succeed even though another executor's
    name contains 'foo' as a substring."""

    class FailExecutor(Executor):
        @requests
        def fail(self, **kwargs):
            raise RuntimeError

    class PassExecutor(Executor):
        @requests
        def success(self, **kwargs):
            pass

    flow = (
        Flow(port_expose=1234)
        .add(uses=FailExecutor, name='foo_with_what_ever_suffix')
        .add(uses=PassExecutor, name='foo')
    )
    with flow:
        # both deployments are called, create no error
        mock = mocker.Mock()
        Client(port=1234).post(
            on='/foo', target_executor='foo', inputs=Document(), on_done=mock
        )
        mock.assert_called()
def test_reduce_status():
    """Per-shard parameters must be collected under '__results__' without
    clobbering the user's own parameters."""
    n_shards = 2
    flow = Flow(port_expose=exposed_port).add(
        uses=ExecutorStatus, name='pod0', shards=n_shards, polling='all'
    )
    with flow:
        inputs = DocumentArray([Document() for _ in range(5)])
        resp = Client(port=exposed_port).post(
            '/status',
            parameters={'foo': 'bar'},
            inputs=inputs,
            return_results=True,
        )
        # user parameters survive the reduce
        assert resp[0].parameters['foo'] == 'bar'
        # one entry per shard is collected
        assert len(resp[0].parameters['__results__']) == n_shards
        for _, shard_params in resp[0].parameters['__results__'].items():
            assert 'shard_id' in shard_params.keys()
            assert 'happy_status' in shard_params.keys()
        for doc in resp[0].docs:
            assert doc.text == 'exec-status'
def test_upload_via_pymodule(replicas):
    """A locally defined executor class should be uploadable via py_modules."""
    from .mwu_encoder import MWUEncoder

    flow = (
        Flow(port_expose=exposed_port)
        .add()
        .add(
            uses=MWUEncoder,
            uses_with={'greetings': 'hi'},
            host=CLOUD_HOST,
            replicas=replicas,
            py_modules='mwu_encoder.py',
            upload_files=cur_dir,
        )
        .add()
    )
    with flow:
        responses = Client(port=exposed_port).index(
            inputs=(
                Document(tensor=np.random.random([1, 100])) for _ in range(NUM_DOCS)
            ),
            return_results=True,
        )
        assert len(responses) > 0
        assert len(responses[0].docs) > 0
        # the uses_with override must reach every encoded document
        for doc in responses[0].docs:
            assert doc.tags['greetings'] == 'hi'