def from_hub(
    cls: Type[T],
    uri: str,
    context: Optional[Dict[str, Any]] = None,
    uses_with: Optional[Dict] = None,
    uses_metas: Optional[Dict] = None,
    uses_requests: Optional[Dict] = None,
    **kwargs,
) -> T:
    """Build a native Executor instance from a Hub URI.

    :param uri: a hub Executor scheme starts with `jinahub://`
    :param context: context replacement variables in a dict, the value of the dict is the replacement.
    :param uses_with: dictionary of parameters to overwrite from the default config's with field
    :param uses_metas: dictionary of parameters to overwrite from the default config's metas field
    :param uses_requests: dictionary of parameters to overwrite from the default config's requests field
    :param kwargs: other kwargs accepted by the CLI ``jina hub pull``
    :return: the Hub Executor object.

    .. highlight:: python
    .. code-block:: python

        from jina import Executor
        from docarray import Document, DocumentArray

        executor = Executor.from_hub(
            uri='jinahub://CLIPImageEncoder', install_requirements=True
        )
    """
    from jina.hubble.helper import is_valid_huburi

    executor_source = None
    if is_valid_huburi(uri):
        from jina.hubble.hubio import HubIO
        from jina.parsers.hubble import set_hub_pull_parser

        # build a `jina hub pull` namespace from the extra kwargs and fetch locally
        pull_args = ArgNamespace.kwargs2namespace(
            {'no_usage': True, **kwargs},
            set_hub_pull_parser(),
            positional_args=(uri,),
        )
        executor_source = HubIO(args=pull_args).pull()

    # a docker-scheme result cannot be instantiated as a native Executor
    if not executor_source or executor_source.startswith('docker://'):
        raise ValueError(
            f'Can not construct a native Executor from {uri}. Looks like you want to use it as a '
            f'Docker container, you may want to use it in the Flow via `.add(uses={uri})` instead.'
        )

    return cls.load_config(
        executor_source,
        context=context,
        uses_with=uses_with,
        uses_metas=uses_metas,
        uses_requests=uses_requests,
    )
def test_peastore_add(partial_pea_store):
    """Adding a default Pea yields a stored item whose arguments carry the expected defaults."""
    item = partial_pea_store.add(
        args=ArgNamespace.kwargs2namespace(PeaModel().dict(), set_pea_parser())
    )
    assert item
    assert partial_pea_store.object
    expected_args = {
        'runtime_cls': 'ZEDRuntime',
        'host_in': __default_host__,
        'host_out': __default_host__,
    }
    for key, value in expected_args.items():
        assert item.arguments[key] == value
async def _create(pea: 'PeaModel'):
    """Register a Pea built from the given model in the partial store.

    .. #noqa: DAR101
    .. #noqa: DAR201"""
    try:
        namespace = ArgNamespace.kwargs2namespace(pea.dict(), set_pea_parser())
        return store.add(namespace)
    except Exception as ex:
        # surface any failure as a daemon-level 400, keeping the original cause chained
        raise PartialDaemon400Exception from ex
async def _create(pod: 'PodModel', envs: Optional[Dict] = None):
    """Register a Pod built from the given model in the partial store.

    :param pod: model describing the Pod to create
    :param envs: environment variables forwarded to the store; defaults to an
        empty dict per call
    .. #noqa: DAR101
    .. #noqa: DAR201"""
    # BUGFIX: the default used to be a mutable `{}`, which Python shares across
    # all calls of this coroutine; use the None sentinel and substitute a fresh
    # empty dict each call so callers see identical behavior without the shared state.
    if envs is None:
        envs = {}
    try:
        args = ArgNamespace.kwargs2namespace(pod.dict(), set_pod_parser())
        return store.add(args, envs)
    except Exception as ex:
        raise PartialDaemon400Exception from ex
async def _create(flow: 'FlowModel', ports: Optional[PortMappings] = None):
    """Register a Flow built from the given model in the partial store.

    .. #noqa: DAR101
    .. #noqa: DAR201"""
    try:
        namespace = ArgNamespace.kwargs2namespace(flow.dict(), set_flow_parser())
        return store.add(namespace, ports)
    except Exception as ex:
        # surface any failure as a daemon-level 400, keeping the original cause chained
        raise PartialDaemon400Exception from ex
def test_flowstore_add(monkeypatch, partial_flow_store):
    """A Flow loaded from flow.yml is stored and exposes its configured port."""
    model = FlowModel()
    model.uses = f'{cur_dir}/flow.yml'
    item = partial_flow_store.add(
        ArgNamespace.kwargs2namespace(model.dict(), set_flow_parser())
    )
    assert item
    assert isinstance(partial_flow_store.object, Flow)
    assert 'executor1' in item.yaml_source
    assert partial_flow_store.object.port_expose == 12345
async def test_concurrent_requests():
    """Streaming a single request through a mocked zmqlet echoes it back."""
    gateway_args = ArgNamespace.kwargs2namespace({}, set_gateway_parser())
    streamer = ZmqGatewayStreamer(gateway_args, ZmqletMock())
    request = _generate_request()
    async for reply in streamer.stream(iter([request])):
        assert reply.proto == request
def test_podstore_add(partial_pod_store):
    """Adding a default Pod stores it with expected runtime args and forwarded envs."""
    item = partial_pod_store.add(
        args=ArgNamespace.kwargs2namespace(PodModel().dict(), set_pod_parser()),
        envs={'key1': 'val1'},
    )
    assert item
    assert partial_pod_store.object
    assert partial_pod_store.object.env['key1'] == 'val1'
    expected_args = {
        'runtime_cls': 'WorkerRuntime',
        'host_in': __default_host__,
        'host': __default_host__,
    }
    for key, value in expected_args.items():
        assert item.arguments[key] == value
def test_flowstore_scale(partial_flow_store, mocker):
    """Scaling a pod delegates to the stored Flow's ``scale``."""
    model = FlowModel()
    model.uses = f'{cur_dir}/flow.yml'
    partial_flow_store.add(
        ArgNamespace.kwargs2namespace(model.dict(), set_flow_parser())
    )
    scale_spy = mocker.Mock()
    partial_flow_store.object.scale = scale_spy
    partial_flow_store.scale(pod_name='executor1', replicas=2)
    scale_spy.assert_called()
def test_flowstore_rolling_update(partial_flow_store, mocker):
    """A rolling update delegates to the stored Flow's ``rolling_update``."""
    model = FlowModel()
    model.uses = f'{cur_dir}/flow.yml'
    partial_flow_store.add(
        ArgNamespace.kwargs2namespace(model.dict(), set_flow_parser())
    )
    update_spy = mocker.Mock()
    partial_flow_store.object.rolling_update = update_spy
    partial_flow_store.rolling_update(pod_name='executor1', uses_with={})
    update_spy.assert_called()
async def test_concurrent_requests():
    """Sending a single request through a mocked zmqlet echoes it back, then closes."""
    caller = PrefetchCaller(
        ArgNamespace.kwargs2namespace({}, set_gateway_parser()), ZmqletMock()
    )
    request = _generate_request()
    async for reply in caller.send(iter([request])):
        assert reply.proto == request
    await caller.close()
async def test_flowstore_scale(partial_flow_store, mocker):
    """Awaiting ``scale`` on the store resolves the patched future and returns a truthy item."""
    model = FlowModel()
    model.uses = f'{cur_dir}/flow.yml'
    partial_flow_store.add(
        ArgNamespace.kwargs2namespace(model.dict(), set_flow_parser())
    )
    done = asyncio.Future()
    done.set_result(PartialStoreItem())
    mocker.patch(
        'daemon.stores.partial.PartialFlowStore.scale', return_value=done
    )
    assert await partial_flow_store.scale(deployment_name='executor1', replicas=2)
def __init__(self, args: Optional[argparse.Namespace] = None, **kwargs):
    """Initialize from a pre-parsed namespace or from raw CLI kwargs.

    :param args: a pre-parsed CLI namespace; when absent, one is built from
        ``kwargs`` via the hub parser
    :param kwargs: keyword form of the hub CLI arguments, used only when
        ``args`` is not given
    """
    if args and isinstance(args, argparse.Namespace):
        self.args = args
    else:
        self.args = ArgNamespace.kwargs2namespace(kwargs, set_hub_parser())
    # BUGFIX: was `**vars(args)` — on the kwargs-only path `args` is None and
    # `vars(None)` raises TypeError. Use `self.args`, which is set on both
    # branches (matching the sibling Client.__init__).
    self.logger = JinaLogger(self.__class__.__name__, **vars(self.args))

    with ImportExtensions(required=True):
        import rich
        import cryptography
        import filelock

        assert rich  #: prevent pycharm auto remove the above line
        assert cryptography
        assert filelock
def test_flowstore_update(partial_flow_store, mocker):
    """A ROLLING_UPDATE operation delegates to the stored Flow's ``rolling_update``."""
    model = FlowModel()
    model.uses = f'{cur_dir}/flow.yml'
    exposed_port = helper.random_port()
    partial_flow_store.add(
        ArgNamespace.kwargs2namespace(model.dict(), set_flow_parser()),
        exposed_port,
    )
    update_spy = mocker.Mock()
    partial_flow_store.object.rolling_update = update_spy
    partial_flow_store.update(
        kind=UpdateOperation.ROLLING_UPDATE,
        dump_path='',
        pod_name='pod1',
        shards=1,
    )
    update_spy.assert_called()
def __init__(
    self,
    args: Optional['argparse.Namespace'] = None,
    **kwargs,
):
    """Construct from a pre-parsed namespace or raw CLI kwargs.

    :param args: a pre-parsed CLI namespace; when absent, one is built from
        ``kwargs`` via the client CLI parser (unknown keys trigger a warning)
    :param kwargs: keyword form of the client CLI arguments, used only when
        ``args`` is not given
    """
    if args and isinstance(args, argparse.Namespace):
        self.args = args
    else:
        self.args = ArgNamespace.kwargs2namespace(
            kwargs, set_client_cli_parser(), warn_unknown=True
        )
    self.logger = JinaLogger(self.__class__.__name__, **vars(self.args))

    # (Han 2020 12.12): gRPC channels run over HTTP/2, which does not work
    # through an HTTP proxy. Since many enterprise users sit behind one, the
    # quick workaround is to temporarily unset the proxy env vars for this
    # process only — the user's OS-level environment is NOT affected.
    # Not attempted on Windows (os.name == 'nt').
    if not self.args.proxy and os.name != 'nt':
        os.unsetenv('http_proxy')
        os.unsetenv('https_proxy')

    self._inputs = None
async def _create(pod: 'PodModel'):
    """Register a Pod built from the given model in the store.

    Any failure is surfaced as ``Runtime400Exception`` with the cause chained.
    """
    try:
        namespace = ArgNamespace.kwargs2namespace(pod.dict(), set_pod_parser())
        return store.add(namespace)
    except Exception as ex:
        raise Runtime400Exception from ex