def generate(which):
    """Generate (or clean) TLObjects, RPC error classes and documentation.

    ``which`` is a mutable list of targets: 'tl', 'errors', 'docs', plus
    the modifiers 'clean' (remove output instead of generating) and 'all'
    (expand to every target).  Recognized entries are consumed from the
    list; anything left over is reported as not understood.  An empty
    list defaults to ('tl', 'errors').
    """
    from telethon_generator.parsers import parse_errors, parse_tl, find_layer
    from telethon_generator.generators import (
        generate_errors, generate_tlobjects, generate_docs, clean_tlobjects)

    # Method names that must not be exposed to bots, as listed in a
    # JSON file shipped with the generator data.
    with open(INVALID_BM_IN) as fp:
        bad_bot_methods = set(json.load(fp))

    # Parse every input up-front; the individual targets below reuse them.
    layer = find_layer(TLOBJECT_IN_TL)
    errors = list(parse_errors(ERRORS_IN_JSON, ERRORS_IN_DESC))
    tlobjects = list(itertools.chain(
        parse_tl(TLOBJECT_IN_CORE_TL, layer=layer,
                 invalid_bot_methods=bad_bot_methods),
        parse_tl(TLOBJECT_IN_TL, layer=layer,
                 invalid_bot_methods=bad_bot_methods)))

    if not which:
        which.extend(('tl', 'errors'))

    cleaning = 'clean' in which
    verb = 'Cleaning' if cleaning else 'Generating'
    if cleaning:
        which.remove('clean')

    # 'all' expands to every known target not already requested.
    if 'all' in which:
        which.remove('all')
        for target in ('tl', 'errors', 'docs'):
            if target not in which:
                which.append(target)

    def requested(name, message):
        # Consume *name* from the target list, announcing it if present.
        if name not in which:
            return False
        which.remove(name)
        print(verb, message)
        return True

    if requested('tl', 'TLObjects...'):
        if cleaning:
            clean_tlobjects(TLOBJECT_OUT)
        else:
            generate_tlobjects(tlobjects, layer, IMPORT_DEPTH, TLOBJECT_OUT)

    if requested('errors', 'RPCErrors...'):
        if cleaning:
            if os.path.isfile(ERRORS_OUT):
                os.remove(ERRORS_OUT)
        else:
            with open(ERRORS_OUT, 'w', encoding='utf-8') as out:
                generate_errors(errors, out)

    if requested('docs', 'documentation...'):
        if cleaning:
            if os.path.isdir(DOCS_OUT):
                shutil.rmtree(DOCS_OUT)
        else:
            generate_docs(tlobjects, errors, layer, DOCS_IN_RES, DOCS_OUT)

    # Whatever remains in the list was not a recognized target.
    if which:
        print('The following items were not understood:', which)
        print(' Consider using only "tl", "errors" and/or "docs".')
        print(' Using only "clean" will clean them. "all" to act on all.')
        print(' For instance "gen tl errors".')
def generate(which):
    """Generate or clean TLObjects, RPC error classes and documentation.

    ``which`` is a mutable list of targets ('tl', 'errors', 'docs') plus
    the modifiers 'clean' and 'all'.  Recognized entries are removed from
    the list; leftovers are reported.  Empty defaults to ('tl', 'errors').
    """
    from telethon_generator.parsers import parse_errors, parse_tl, find_layer
    from telethon_generator.generators import (
        generate_errors, generate_tlobjects, generate_docs, clean_tlobjects)

    # Parse every input the targets below may need up-front.
    tlobjects = [*parse_tl(TLOBJECT_IN_CORE_TL), *parse_tl(TLOBJECT_IN_TL)]
    errors = list(parse_errors(ERRORS_IN_JSON, ERRORS_IN_DESC))
    layer = find_layer(TLOBJECT_IN_TL)

    if not which:
        which.extend(('tl', 'errors'))

    cleaning = 'clean' in which
    verb = 'Cleaning' if cleaning else 'Generating'
    if cleaning:
        which.remove('clean')

    # 'all' expands to every known target not already requested.
    if 'all' in which:
        which.remove('all')
        for target in ('tl', 'errors', 'docs'):
            if target not in which:
                which.append(target)

    if 'tl' in which:
        which.remove('tl')
        print(verb, 'TLObjects...')
        if cleaning:
            clean_tlobjects(TLOBJECT_OUT)
        else:
            generate_tlobjects(tlobjects, layer, IMPORT_DEPTH, TLOBJECT_OUT)

    if 'errors' in which:
        which.remove('errors')
        print(verb, 'RPCErrors...')
        if not cleaning:
            with open(ERRORS_OUT, 'w', encoding='utf-8') as out:
                generate_errors(errors, out)
        elif os.path.isfile(ERRORS_OUT):
            os.remove(ERRORS_OUT)

    if 'docs' in which:
        which.remove('docs')
        print(verb, 'documentation...')
        if not cleaning:
            generate_docs(tlobjects, errors, layer, DOCS_IN_RES, DOCS_OUT)
        elif os.path.isdir(DOCS_OUT):
            shutil.rmtree(DOCS_OUT)

    # Whatever remains in the list was not a recognized target.
    if which:
        print('The following items were not understood:', which)
        print(' Consider using only "tl", "errors" and/or "docs".')
        print(' Using only "clean" will clean them. "all" to act on all.')
        print(' For instance "gen tl errors".')
def generate(which, action='gen'):
    """Generate or clean the requested code-generator artifacts.

    Parameters:
        which: mutable list of targets ('tl', 'errors', 'docs', 'json',
            or 'all' to expand to tl/errors/docs).  Recognized entries
            are removed from the list; leftovers are reported as not
            understood.  An empty list defaults to ('tl', 'errors').
        action: 'gen' to generate output, 'clean' to remove it.
    """
    from telethon_generator.parsers import \
        parse_errors, parse_methods, parse_tl, find_layer
    from telethon_generator.generators import \
        generate_errors, generate_tlobjects, generate_docs, clean_tlobjects

    # The first input .tl file that declares a layer number wins.
    layer = next(filter(None, map(find_layer, TLOBJECT_IN_TLS)))
    errors = list(parse_errors(ERRORS_IN))
    methods = list(parse_methods(
        METHODS_IN, FRIENDLY_IN, {e.str_code: e for e in errors}))
    tlobjects = list(itertools.chain(*(
        parse_tl(file, layer, methods) for file in TLOBJECT_IN_TLS)))

    if not which:
        which.extend(('tl', 'errors'))

    clean = action == 'clean'
    action = 'Cleaning' if clean else 'Generating'

    # 'all' expands to every main target not already requested.
    if 'all' in which:
        which.remove('all')
        for x in ('tl', 'errors', 'docs'):
            if x not in which:
                which.append(x)

    if 'tl' in which:
        which.remove('tl')
        print(action, 'TLObjects...')
        if clean:
            clean_tlobjects(TLOBJECT_OUT)
        else:
            generate_tlobjects(tlobjects, layer, IMPORT_DEPTH, TLOBJECT_OUT)

    if 'errors' in which:
        which.remove('errors')
        print(action, 'RPCErrors...')
        if clean:
            if ERRORS_OUT.is_file():
                ERRORS_OUT.unlink()
        else:
            # FIX: explicit encoding so the generated file does not depend
            # on the locale's default (matches the utf-8 used elsewhere).
            with ERRORS_OUT.open('w', encoding='utf-8') as file:
                generate_errors(errors, file)

    if 'docs' in which:
        which.remove('docs')
        print(action, 'documentation...')
        if clean:
            if DOCS_OUT.is_dir():
                shutil.rmtree(str(DOCS_OUT))
        else:
            # Resolve the input path to an absolute one first — presumably
            # TempWorkDir changes the CWD to DOCS_OUT, which would break a
            # relative DOCS_IN_RES (TODO confirm TempWorkDir semantics).
            in_path = DOCS_IN_RES.resolve()
            with TempWorkDir(DOCS_OUT):
                generate_docs(tlobjects, methods, layer, in_path)

    if 'json' in which:
        which.remove('json')
        print(action, 'JSON schema...')
        # One .json file alongside each input .tl file.
        json_files = [x.with_suffix('.json') for x in TLOBJECT_IN_TLS]
        if clean:
            for file in json_files:
                if file.is_file():
                    file.unlink()
        else:
            def gen_json(fin, fout):
                # Split the parsed TLObjects into methods vs constructors.
                meths = []
                constructors = []
                for tl in parse_tl(fin, layer):
                    if tl.is_function:
                        meths.append(tl.to_dict())
                    else:
                        constructors.append(tl.to_dict())
                what = {'constructors': constructors, 'methods': meths}
                # FIX: explicit utf-8 for locale-independent output.
                with open(fout, 'w', encoding='utf-8') as f:
                    json.dump(what, f, indent=2)

            for fs in zip(TLOBJECT_IN_TLS, json_files):
                gen_json(*fs)

    # Whatever remains in the list was not a recognized target.
    if which:
        print(
            'The following items were not understood:', which,
            '\n Consider using only "tl", "errors" and/or "docs".'
            '\n Using only "clean" will clean them. "all" to act on all.'
            '\n For instance "gen tl errors".')
def generate(which):
    """Generate or clean TLObjects, RPC errors, documentation and JSON.

    ``which`` is a mutable list of targets: 'tl', 'errors', 'docs',
    'json', plus the modifiers 'clean' (remove output instead of
    generating) and 'all' (expand to tl/errors/docs).  Recognized
    entries are consumed from the list; leftovers are reported.  An
    empty list defaults to ('tl', 'errors').
    """
    from telethon_generator.parsers import parse_errors, parse_tl, find_layer
    from telethon_generator.generators import \
        generate_errors, generate_tlobjects, generate_docs, clean_tlobjects

    # Method names that must not be exposed to bots.
    with open(INVALID_BM_IN) as f:
        invalid_bot_methods = set(json.load(f))

    # Parse every input up-front; the targets below reuse them.
    layer = find_layer(TLOBJECT_IN_TL)
    errors = list(parse_errors(ERRORS_IN_JSON, ERRORS_IN_DESC))
    tlobjects = list(itertools.chain(
        parse_tl(TLOBJECT_IN_CORE_TL, layer, invalid_bot_methods),
        parse_tl(TLOBJECT_IN_TL, layer, invalid_bot_methods)))

    if not which:
        which.extend(('tl', 'errors'))

    clean = 'clean' in which
    action = 'Cleaning' if clean else 'Generating'
    if clean:
        which.remove('clean')

    # 'all' expands to every main target not already requested.
    if 'all' in which:
        which.remove('all')
        for x in ('tl', 'errors', 'docs'):
            if x not in which:
                which.append(x)

    if 'tl' in which:
        which.remove('tl')
        print(action, 'TLObjects...')
        if clean:
            clean_tlobjects(TLOBJECT_OUT)
        else:
            generate_tlobjects(tlobjects, layer, IMPORT_DEPTH, TLOBJECT_OUT)

    if 'errors' in which:
        which.remove('errors')
        print(action, 'RPCErrors...')
        if clean:
            if os.path.isfile(ERRORS_OUT):
                os.remove(ERRORS_OUT)
        else:
            with open(ERRORS_OUT, 'w', encoding='utf-8') as file:
                generate_errors(errors, file)

    if 'docs' in which:
        which.remove('docs')
        print(action, 'documentation...')
        if clean:
            if os.path.isdir(DOCS_OUT):
                shutil.rmtree(DOCS_OUT)
        else:
            generate_docs(tlobjects, errors, layer, DOCS_IN_RES, DOCS_OUT)

    if 'json' in which:
        which.remove('json')
        print(action, 'JSON schema...')
        # JSON schemas are written to (or removed from) the CWD.
        mtproto = 'mtproto_api.json'
        telegram = 'telegram_api.json'
        if clean:
            for x in (mtproto, telegram):
                if os.path.isfile(x):
                    os.remove(x)
        else:
            def gen_json(fin, fout):
                # Split the parsed TLObjects into methods vs constructors.
                methods = []
                constructors = []
                for tl in parse_tl(fin, layer):
                    if tl.is_function:
                        methods.append(tl.to_dict())
                    else:
                        constructors.append(tl.to_dict())
                what = {'constructors': constructors, 'methods': methods}
                # FIX: explicit utf-8, consistent with the errors file
                # written above; output no longer depends on the locale.
                with open(fout, 'w', encoding='utf-8') as f:
                    json.dump(what, f, indent=2)

            gen_json(TLOBJECT_IN_CORE_TL, mtproto)
            gen_json(TLOBJECT_IN_TL, telegram)

    # Whatever remains in the list was not a recognized target.
    if which:
        print('The following items were not understood:', which)
        print(' Consider using only "tl", "errors" and/or "docs".')
        print(' Using only "clean" will clean them. "all" to act on all.')
        print(' For instance "gen tl errors".')
"""One-shot code-generator script: parses the .tl schemas and error data
under ``data/`` and regenerates TLObjects, the RPC error list and the
HTML documentation in the sibling ``telethon``/``docs`` trees."""
from telethon_generator.parsers import parse_errors, parse_tl, find_layer
from telethon_generator.generators import \
    generate_errors, generate_tlobjects, generate_docs

import itertools

# Input data files (relative to this script's directory).
ERRORS_INPUT_JSON = 'data/errors.json'
ERRORS_INPUT_DESC = 'data/error_descriptions'
# Generated RPC error classes go straight into the library source.
ERRORS_OUTPUT = '../telethon/errors/rpc_error_list.py'

TLOBJECT_INPUT_CORE_TL = 'data/mtproto_api.tl'
TLOBJECT_INPUT_TL = 'data/telegram_api.tl'
TLOBJECT_OUTPUT = '../telethon/tl'

DOCS_INPUT_RES = 'data/html'
DOCS_OUTPUT = '../docs'

if __name__ == '__main__':
    # Parse both schemas (core MTProto first, then the Telegram API).
    tlobjects = list(
        itertools.chain(parse_tl(TLOBJECT_INPUT_CORE_TL),
                        parse_tl(TLOBJECT_INPUT_TL)))
    errors = list(parse_errors(ERRORS_INPUT_JSON, ERRORS_INPUT_DESC))
    # The layer number is declared in the Telegram schema only.
    layer = find_layer(TLOBJECT_INPUT_TL)

    # Regenerate everything unconditionally: TLObjects, errors, docs.
    generate_tlobjects(tlobjects, layer, TLOBJECT_OUTPUT)

    with open(ERRORS_OUTPUT, 'w', encoding='utf-8') as file:
        generate_errors(errors, file)

    generate_docs(tlobjects, errors, layer, DOCS_INPUT_RES, DOCS_OUTPUT)
def generate(which, action='gen'):
    """Generate or clean the requested code-generator artifacts.

    Parameters:
        which: mutable list of targets ('tl', 'errors', 'docs', 'json',
            or 'all' to expand to tl/errors/docs).  Recognized entries
            are removed from the list; leftovers are reported as not
            understood.  An empty list defaults to ('tl', 'errors').
        action: 'gen' to generate output, 'clean' to remove it.
    """
    from telethon_generator.parsers import \
        parse_errors, parse_methods, parse_tl, find_layer
    from telethon_generator.generators import \
        generate_errors, generate_tlobjects, generate_docs, clean_tlobjects

    # The first input .tl file that declares a layer number wins.
    layer = next(filter(None, map(find_layer, TLOBJECT_IN_TLS)))
    errors = list(parse_errors(ERRORS_IN))
    methods = list(parse_methods(METHODS_IN, {e.str_code: e for e in errors}))
    tlobjects = list(itertools.chain(*(
        parse_tl(file, layer, methods) for file in TLOBJECT_IN_TLS)))

    if not which:
        which.extend(('tl', 'errors'))

    clean = action == 'clean'
    action = 'Cleaning' if clean else 'Generating'

    # 'all' expands to every main target not already requested.
    if 'all' in which:
        which.remove('all')
        for x in ('tl', 'errors', 'docs'):
            if x not in which:
                which.append(x)

    if 'tl' in which:
        which.remove('tl')
        print(action, 'TLObjects...')
        if clean:
            clean_tlobjects(TLOBJECT_OUT)
        else:
            generate_tlobjects(tlobjects, layer, IMPORT_DEPTH, TLOBJECT_OUT)

    if 'errors' in which:
        which.remove('errors')
        print(action, 'RPCErrors...')
        if clean:
            if ERRORS_OUT.is_file():
                ERRORS_OUT.unlink()
        else:
            # FIX: explicit encoding so the generated file does not depend
            # on the locale's default (matches the utf-8 used elsewhere).
            with ERRORS_OUT.open('w', encoding='utf-8') as file:
                generate_errors(errors, file)

    if 'docs' in which:
        which.remove('docs')
        print(action, 'documentation...')
        if clean:
            if DOCS_OUT.is_dir():
                shutil.rmtree(str(DOCS_OUT))
        else:
            generate_docs(tlobjects, methods, layer, DOCS_IN_RES, DOCS_OUT)

    if 'json' in which:
        which.remove('json')
        print(action, 'JSON schema...')
        # One .json file alongside each input .tl file.
        json_files = [x.with_suffix('.json') for x in TLOBJECT_IN_TLS]
        if clean:
            for file in json_files:
                if file.is_file():
                    file.unlink()
        else:
            def gen_json(fin, fout):
                # Split the parsed TLObjects into methods vs constructors.
                meths = []
                constructors = []
                for tl in parse_tl(fin, layer):
                    if tl.is_function:
                        meths.append(tl.to_dict())
                    else:
                        constructors.append(tl.to_dict())
                what = {'constructors': constructors, 'methods': meths}
                # FIX: explicit utf-8 for locale-independent output.
                with open(fout, 'w', encoding='utf-8') as f:
                    json.dump(what, f, indent=2)

            for fs in zip(TLOBJECT_IN_TLS, json_files):
                gen_json(*fs)

    # Whatever remains in the list was not a recognized target.
    if which:
        print('The following items were not understood:', which)
        print(' Consider using only "tl", "errors" and/or "docs".')
        print(' Using only "clean" will clean them. "all" to act on all.')
        print(' For instance "gen tl errors".')