def test_exception_invalid_yaml():
    cur_dir = os.path.dirname(os.path.abspath(__file__))
    yaml = os.path.join(cur_dir, 'invalid.yml')
    with pytest.raises(BadConfigSource):
        BaseExecutor.load_config(yaml)
    with pytest.raises(BadConfigSource):
        Flow.load_config(yaml)

def test_runtime_args():
    b = BaseExecutor.load_config(
        'BaseExecutor', metas={'name': 'b123'}, runtime_args={'hello': 'world'}
    )
    assert b.runtime_args.hello == 'world'
    assert b.metas.name == 'b123'

def test_import_with_new_module_structure_should_pass():
    """
    This is a valid and **RECOMMENDED** structure:
        - python code for the executor organized in a package structure inside the ``executor/`` folder
        - core logic in ``executor/my_executor.py``
        - the ``executor/__init__.py`` contains ``from .my_executor import GoodCrafterNew``, which makes sure the
          custom executor class gets registered
        - all imports are relative - so in ``executor/my_executor.py`` the ``helper`` module is imported as
          ``from .helper import foo``

    File structure:

         my_cust_module/
           |- executor/
              |- __init__.py
              |- my_executor.py
              |- helper.py
           |- config.yml
                |- py_modules
                       |- executor/__init__.py
    """
    b = BaseExecutor.load_config('good_new/crafter.yml')
    assert b.__class__.__name__ == 'GoodCrafterNew'

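# For illustration only: a hedged sketch of what the ``config.yml`` in the recommended
# structure above might contain -- ``py_modules`` points at ``executor/__init__.py`` so the
# package (and the ``GoodCrafterNew`` class it registers) is imported when the config is
# loaded. The exact file shipped with this test may differ:
#
#   jtype: GoodCrafterNew
#   metas:
#     py_modules:
#       - executor/__init__.py
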
def test_import_with_old_module_structure_should_pass():
    """
    This is a valid structure, but not recommended:
        - "my_cust_module" is a python module
        - all core logic of your customized executor goes to ``__init__.py``
        - to import ``foo.py``, you should use relative import, e.g. ``from .foo import bar``

    This is not a recommended structure because:
        - putting core logic inside ``__init__.py`` is not how python packages are usually written
        - importing from the workspace prevents you from trying out the executor in the console, or in test files
          at the root of the workspace, making development more cumbersome
        - the main directory is now cluttered with python files - extracting all python files to a separate
          directory is how python packages are usually composed

    File structure:

         my_cust_module
           |- __init__.py
           |- helper.py
           |- config.yml
                |- py_modules
                       |- __init__.py
    """
    b = BaseExecutor.load_config('good_old/crafter.yml')
    assert b.__class__.__name__ == 'GoodCrafterOld'

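# Hedged sketch of the corresponding ``config.yml`` for the flat layout above; with all the
# logic in ``__init__.py``, ``py_modules`` would point at that file directly. Names are
# illustrative and may differ from the actual fixture:
#
#   jtype: GoodCrafterOld
#   metas:
#     py_modules:
#       - __init__.py
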
def test_load_from_dict():
    # !BaseEncoder
    # metas:
    #   name: ${{BE_TEST_NAME}}
    #   batch_size: ${{BATCH_SIZE}}
    #   pod_id: ${{pod_id}}
    #   workspace: ${{this.name}}-${{this.batch_size}}
    d1 = {
        'jtype': __default_executor__,
        'metas': {
            'name': '${{ BE_TEST_NAME }}',
            'workspace': '${{this.name}}',
        },
    }

    # !CompoundExecutor
    # components:
    #   - !BinaryPbIndexer
    #     with:
    #       index_filename: tmp1
    #     metas:
    #       name: test1
    #   - !BinaryPbIndexer
    #     with:
    #       index_filename: tmp2
    #     metas:
    #       name: test2
    # metas:
    #   name: compound1
    d = {'BE_TEST_NAME': 'hello123'}
    b1 = BaseExecutor.load_config(d1, context=d)
    assert isinstance(b1, BaseExecutor)
    assert b1.metas.name == 'hello123'

def test_default_args_from_load_config():
    b = BaseExecutor.load_config('!BaseExecutor {}')
    assert isinstance(b.runtime_args, SimpleNamespace)
    assert isinstance(b.metas, SimpleNamespace)
    # name is always auto-assigned
    assert b.metas.name

def test_default_args_from_python():
    b = BaseExecutor()
    assert isinstance(b.runtime_args, SimpleNamespace)
    assert isinstance(b.metas, SimpleNamespace)
    # name is always auto-assigned
    assert b.metas.name

def _load_executor(self):
    """Load the executor to this runtime, specified by ``uses`` CLI argument."""
    try:
        self._executor: BaseExecutor = BaseExecutor.load_config(
            self.args.uses,
            uses_with=self.args.uses_with,
            uses_metas=self.args.uses_metas,
            uses_requests=self.args.uses_requests,
            runtime_args={
                'workspace': self.args.workspace,
                'shard_id': self.args.shard_id,
                'shards': self.args.shards,
                'replicas': self.args.replicas,
                'name': self.args.name,
                'py_modules': self.args.py_modules,
            },
            extra_search_paths=self.args.extra_search_paths,
        )
    except BadConfigSource as ex:
        self.logger.error(
            f'failed to load config from {self.args.uses}; if you are using a docker image for --uses, '
            'please use "docker://YOUR_IMAGE_NAME"'
        )
        raise ExecutorFailToLoad from ex
    except FileNotFoundError as ex:
        self.logger.error('failed to load a file dependency')
        raise ExecutorFailToLoad from ex
    except Exception as ex:
        self.logger.critical(f'cannot load the executor from {self.args.uses}')
        raise ExecutorFailToLoad from ex

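# A minimal sketch (outside the runtime, for illustration only) of how the override
# arguments used above behave: entries in ``uses_metas`` are merged on top of the YAML
# config, and ``runtime_args`` become attributes on ``executor.runtime_args``. The YAML
# string and values here are assumptions for demonstration, not fixtures in this repo.
def _example_override_load():
    y = '''
jtype: BaseExecutor
metas:
  name: from-yaml
'''
    e = BaseExecutor.load_config(
        y,
        uses_metas={'name': 'overridden-at-load-time'},
        runtime_args={'shard_id': 0},
    )
    # the uses_metas override is expected to win over the value in the YAML
    assert e.metas.name == 'overridden-at-load-time'
    assert e.runtime_args.shard_id == 0
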
def test_runtime_args_not_serialisable():
    param = NotSerialisable()
    b = BaseExecutor.load_config(
        'BaseExecutor',
        runtime_args={'hello': 'world', 'not_seri': param},
    )
    assert b.runtime_args.hello == 'world'
    assert b.runtime_args.not_seri is param

def test_jtype(tmpdir):
    flow_path = os.path.join(tmpdir, 'flow.yml')
    exec_path = os.path.join(tmpdir, 'exec.yml')

    f = Flow()
    f.save_config(flow_path)
    with open(flow_path, 'r') as file:
        conf = yaml.safe_load(file)
        assert 'jtype' in conf
        assert conf['jtype'] == 'Flow'

    e = BaseExecutor()
    e.save_config(exec_path)
    with open(exec_path, 'r') as file:
        conf = yaml.safe_load(file)
        assert 'jtype' in conf
        assert conf['jtype'] == 'BaseExecutor'

    assert type(BaseExecutor.load_config(exec_path)) == BaseExecutor
    assert type(Flow.load_config(flow_path)) == Flow

def test_runtime_args_from_load_config():
    y = '''
!BaseExecutor
metas:
  name: my-mwu-encoder
  workspace: ./
'''
    b = BaseExecutor.load_config(y)
    assert b.metas.workspace == './'
    assert b.metas.name == 'my-mwu-encoder'

def test_load_dataclass_executor():
    executor_yaml = '''
jtype: MyDataClassExecutor
with:
  my_field: this is my field
metas:
  name: test-name-updated
  workspace: test-work-space-updated
requests:
  /foo: baz
'''
    exec = BaseExecutor.load_config(executor_yaml)
    assert exec.my_field == 'this is my field'
    assert exec.requests['/foo'] == MyDataClassExecutor.baz
    assert exec.metas.name == 'test-name-updated'
    assert exec.metas.workspace == 'test-work-space-updated'

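# For orientation only: a hedged approximation of the kind of Executor class the YAML above
# targets. The real ``MyDataClassExecutor`` is defined elsewhere in the test suite and may
# differ; this sketch just illustrates how ``with``, ``metas`` and ``requests`` map onto it:
#
#   class MyDataClassExecutor(Executor):
#       def __init__(self, my_field: str = '', **kwargs):
#           super().__init__(**kwargs)
#           self.my_field = my_field
#
#       @requests
#       def baz(self, docs, **kwargs):
#           ...
#
# The ``requests: /foo: baz`` entry in the YAML binds the ``baz`` method to the ``/foo``
# endpoint at load time, which is what ``exec.requests['/foo'] == MyDataClassExecutor.baz``
# verifies.
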
def _load_executor(self, metrics_registry: Optional['CollectorRegistry'] = None):
    """
    Load the executor to this runtime, specified by ``uses`` CLI argument.

    :param metrics_registry: Optional prometheus metrics registry that will be passed to the executor so that
        it can expose metrics
    """
    try:
        self._executor: BaseExecutor = BaseExecutor.load_config(
            self.args.uses,
            uses_with=self.args.uses_with,
            uses_metas=self.args.uses_metas,
            uses_requests=self.args.uses_requests,
            runtime_args={
                # these are not read from the yaml config file but are passed directly during init
                'workspace': self.args.workspace,
                'shard_id': self.args.shard_id,
                'shards': self.args.shards,
                'replicas': self.args.replicas,
                'name': self.args.name,
                'metrics_registry': metrics_registry,
            },
            py_modules=self.args.py_modules,
            extra_search_paths=self.args.extra_search_paths,
        )
        self.logger.debug(f'{self._executor} is successfully loaded!')
    except BadConfigSource:
        self.logger.error(
            f'failed to load config from {self.args.uses}; if you are using a docker image for --uses, '
            'please use `docker://YOUR_IMAGE_NAME`'
        )
        raise
    except FileNotFoundError:
        self.logger.error('failed to load a file dependency')
        raise
    except Exception:
        self.logger.critical(f'cannot load the executor from {self.args.uses}')
        raise

def test_encoder_name_env_replace():
    os.environ['BE_TEST_NAME'] = 'hello123'
    with BaseExecutor.load_config('yaml/test-encoder-env.yml') as be:
        assert be.metas.name == 'hello123'

def test_exec_from_python():
    be = BaseExecutor(metas={'name': 'hello', 'random_name': 'random_value'})
    assert be.metas.name == 'hello'
    assert be.metas.random_name == 'random_value'

def test_encoder_name_dict_replace():
    d = {'BE_TEST_NAME': 'hello123'}
    with BaseExecutor.load_config('yaml/test-encoder-env.yml', context=d) as be:
        assert be.metas.name == 'hello123'
        assert be.metas.workspace == 'hello123'

def test_encoder_inject_config_via_kwargs():
    with BaseExecutor.load_config(
        'yaml/test-encoder-env.yml', metas={'shard_id': 345}
    ) as be:
        assert be.metas.shard_id == 345

def test_import_casual_structure_should_fail():
    # this structure is a copy-paste from
    # https://github.com/jina-ai/jina/issues/1546#issuecomment-751481422
    with pytest.raises(ImportError):
        BaseExecutor.load_config('bad1/crafter.yml')

def test_load_external_success():
    with BaseExecutor.load_config('yaml/dummy_ext_exec_success.yml') as e:
        assert e.__class__.__name__ == 'DummyExternalIndexer'

def test_use_from_local_dir_exe_level():
    with BaseExecutor.load_config('dummyhub/config.yml'):
        pass

def test_load_external_fail():
    with pytest.raises(yaml.constructor.ConstructorError):
        BaseExecutor.load_config('yaml/dummy_ext_exec.yml')