def _build_lookup_table():
    """Build the CLI keyword lookup tables.

    Walks the full CLI argument tree (``api_to_dict(show_all_args=True)``) and
    returns a 2-tuple:

    - ``nkw2kw``: maps every "noisy" spelling of a keyword (dashes replaced by
      spaces, dashes stripped, leading ``--`` removed, trailing ``s`` dropped)
      back to its canonical keyword;
    - ``all_keywords``: maps each canonical keyword to the list of command /
      option records it refers to, each tagged with its CLI ``usage`` string.
    """
    import copy

    all_keywords = {}

    def build_invert_index(d, usage='jina'):
        # Recursively index every (sub)command and its options under the
        # usage string that reaches it, e.g. 'jina flow'.
        for method in d['methods']:
            usg = f'{usage} {method["name"]}'
            if 'methods' in method:
                build_invert_index(method, usage=usg)
            all_keywords.setdefault(method['name'], []).append(
                {
                    'name': method['name'],
                    'type': 'command',
                    'usage': usg,
                    'help': method['help'],
                }
            )
            for opt in method.get('options', []):
                entry = copy.deepcopy(opt)
                entry['usage'] = usg
                all_keywords.setdefault(opt['name'], []).append(entry)

    def build_noisy_index(d):
        # Map every tolerated misspelling/variant of a keyword back to the
        # canonical keyword it belongs to.
        noise2key = {}
        for key, records in d.items():
            for record in records:
                variants = [key]
                variants.append(record.get('name', []))
                variants.extend(record.get('option_strings', []))
                spaced = [v.replace('-', ' ').replace('_', ' ') for v in variants]
                squashed = [v.replace('-', '').replace('_', '') for v in variants]
                undashed = [v.replace('--', '') for v in variants]
                variants += spaced + squashed + undashed
                # also accept singular forms of everything collected so far
                variants += [v[:-1] if v.endswith('s') else v for v in variants]
                for noisy in set(variants):
                    noise2key[noisy] = key
        return noise2key

    build_invert_index(api_to_dict(show_all_args=True))
    nkw2kw = build_noisy_index(all_keywords)
    return nkw2kw, all_keywords
def export_api(args: 'Namespace'):
    """
    Export the API

    :param args: arguments coming from the CLI.
    """
    import json

    from cli.export import api_to_dict
    from jina import __version__
    from jina.jaml import JAML
    from jina.logging.predefined import default_logger
    from jina.schemas import get_full_schema

    def _dump_to(paths, payload, writer):
        # Write `payload` to every requested path with the given serializer.
        # A '%s' placeholder in a path is expanded with the current version.
        for p in paths:
            f_name = (p % __version__) if '%s' in p else p
            with open(f_name, 'w', encoding='utf8') as fp:
                writer(payload, fp)
            default_logger.info(f'API is exported to {f_name}')

    def _json_writer(payload, fp):
        json.dump(payload, fp, sort_keys=True)

    if args.yaml_path:
        _dump_to(args.yaml_path, api_to_dict(), JAML.dump)
    if args.json_path:
        _dump_to(args.json_path, api_to_dict(), _json_writer)
    if args.schema_path:
        _dump_to(args.schema_path, get_full_schema(), _json_writer)
def fill_overload(
    cli_entrypoint,
    doc_str_title,
    doc_str_return,
    return_type,
    filepath,
    overload_fn,
    class_method,
    indent=' ' * 4,
    regex_tag=None,
):
    """Inject a generated ``@overload`` stub for a CLI entrypoint into a source file.

    The stub (signature, docstring params and ``# noqa`` markers derived from the
    CLI schema) replaces whatever currently sits between the
    ``# overload_inject_start_<tag>`` / ``# overload_inject_end_<tag>`` markers
    in ``filepath``; the file is rewritten in place.

    :param cli_entrypoint: CLI entrypoint whose args populate the stub.
    :param doc_str_title: first line of the generated docstring.
    :param doc_str_return: text of the ``:return:`` docstring entry.
    :param return_type: return annotation for the stub, or falsy for none.
    :param filepath: file to rewrite in place.
    :param overload_fn: name of the generated function/method.
    :param class_method: if True, generate a method (extra indent + ``self``).
    :param indent: one indentation unit used in the generated code.
    :param regex_tag: marker tag to search for; defaults to ``cli_entrypoint``.
    """
    a = _cli_to_schema(api_to_dict(), cli_entrypoint)

    # Methods live one indentation level deeper than module-level functions.
    arg_indent = indent * 2 if class_method else indent

    cli_args = [
        f'{arg_indent}{k[0]}: Optional[{k[1]["type"]}] = {k[1]["default_literal"]}'
        for k in a
    ]
    args_str = ', \n'.join(cli_args + [f'{arg_indent}**kwargs'])
    if class_method:
        signature_str = f'def {overload_fn}(\n{indent}{indent}self,*,\n{args_str})'
    else:
        signature_str = f'def {overload_fn}(*, \n{args_str})'

    if return_type:
        signature_str += f' -> {return_type}:'
        return_str = f'\n{arg_indent}:return: {doc_str_return}'
    else:
        signature_str += ':'
        return_str = ''

    doc_str = '\n'.join(
        f'{arg_indent}:param {k[0]}: {k[1]["description"]}' for k in a
    )
    # Silence darglint checks that cannot apply to a stub body.
    noqa_str = '\n'.join(
        f'{arg_indent}.. # noqa: DAR{j}' for j in ['202', '101', '003']
    )

    if class_method:
        final_str = (
            f'@overload\n{indent}{signature_str}\n{indent}{indent}"""{doc_str_title}'
            f'\n\n{doc_str}{return_str}\n\n{noqa_str}\n{indent}{indent}"""'
        )
        replacement = f'\\1\n{indent}{final_str}\n{indent}\\2'
    else:
        final_str = (
            f'@overload\n{signature_str}\n{indent}"""{doc_str_title}'
            f'\n\n{doc_str}{return_str}\n\n{noqa_str}\n{indent}"""'
        )
        replacement = f'\\1\n{final_str}\n{indent}\\2'

    tag = regex_tag or cli_entrypoint
    pattern = rf'(# overload_inject_start_{tag}).*(# overload_inject_end_{tag})'

    # Read once via a context manager — the previous code leaked the handle
    # returned by a bare open() inside re.sub, and opened the file twice.
    with open(filepath) as fp:
        original_code = fp.read()
    final_code = re.sub(pattern, replacement, original_code, 0, re.DOTALL)
    with open(filepath, 'w') as fp:
        fp.write(final_code)
def test_export_api(tmpdir):
    """Smoke-test that the API dict can be serialized to both YAML and JSON."""
    yml_target = tmpdir / 'test.yml'
    json_target = tmpdir / 'test.json'
    with open(yml_target, 'w', encoding='utf8') as yaml_fp:
        JAML.dump(api_to_dict(), yaml_fp)
    with open(json_target, 'w', encoding='utf8') as json_fp:
        json.dump(api_to_dict(), json_fp)
from cli.export import api_to_dict from jina.schemas.helper import _cli_to_schema _schema_flow_with = _cli_to_schema( api_to_dict(), 'flow', allow_addition=False, description='The config of Flow, unrecognized config arguments will be applied to all Pods')['Jina::Flow'] schema_flow = { 'Jina::Flow': { 'properties': { 'with': _schema_flow_with, 'jtype': { 'description': 'The type of Jina object (Flow, Executor, Driver).\n' 'A Flow is made up of several sub-tasks, and it manages the states and context of these sub-tasks.\n' 'The input and output data of Flows are Documents.', 'type': 'string', 'default': 'Flow', 'enum': ['Flow', 'AsyncFlow'] }, 'version': { 'description': 'The YAML version of this Flow.', 'type': 'string', 'default': '\'1\'', }, 'pods': { 'description': 'Define the steps in the Flow.\n' 'A Pod is a container and interface for one or multiple Peas that have the same properties.', 'type': 'array', 'items': {