def get_grammar() -> ArithmeticGrammar:
    """Return the grammar object, with resume notices or history tracing
    enabled according to the current configuration values."""
    g = _raw_grammar()
    if get_config_value('resume_notices'):
        resume_notices_on(g)
    elif get_config_value('history_tracking'):
        set_tracer(g, trace_history)
    return g
def get_grammar() -> json_fail_tolerantGrammar:
    """Return the grammar object, with resume notices or history tracing
    enabled according to the current configuration values."""
    g = _raw_grammar()
    if get_config_value('resume_notices'):
        resume_notices_on(g)
    elif get_config_value('history_tracking'):
        set_tracer(g, trace_history)
    return g
def get_grammar() -> LyrikGrammar:
    """Returns a thread/process-exclusive LyrikGrammar-singleton."""
    locals_store = access_thread_locals()
    # Lazily create the per-thread singleton on first access.
    if not hasattr(locals_store, 'Lyrik_00000001_grammar_singleton'):
        singleton = LyrikGrammar()
        # Propagate the generated python source, if it has been attached
        # to this factory function, onto the new grammar instance.
        if hasattr(get_grammar, 'python_src__'):
            singleton.python_src__ = get_grammar.python_src__
        locals_store.Lyrik_00000001_grammar_singleton = singleton
    grammar = locals_store.Lyrik_00000001_grammar_singleton
    if get_config_value('resume_notices'):
        resume_notices_on(grammar)
    elif get_config_value('history_tracking'):
        set_tracer(grammar, trace_history)
    return grammar
def batch_process(file_names: List[str], out_dir: str,
                  *, submit_func: Callable = None,
                  log_func: Callable = None) -> List[str]:
    """Compiles all files listed in `file_names` and writes the results
    and/or error messages to the directory `out_dir`.

    :param file_names: the source files to compile.
    :param out_dir: destination directory for results and error files.
    :param submit_func: optional executor-style submit function; if None,
        a ProcessPoolExecutor is created when parallelization is enabled.
    :param log_func: optional logging callback.
    :return: a list of error-message file names (empty if no errors).
    """
    def gen_dest_name(name: str) -> str:
        # Map a source file name to its result path inside out_dir.
        return os.path.join(out_dir,
                            os.path.splitext(os.path.basename(name))[0]
                            + RESULT_FILE_EXTENSION)

    error_list = []
    if get_config_value('batch_processing_parallelization'):
        def run_batch(submit: Callable):
            # Fan out all compile jobs first, then collect results in order.
            nonlocal error_list
            err_futures = [submit(process_file, name, gen_dest_name(name))
                           for name in file_names]
            for file_name, err_future in zip(file_names, err_futures):
                error_filename = err_future.result()
                if log_func:
                    log_func('Compiling "%s"' % file_name)
                if error_filename:
                    error_list.append(error_filename)

        # BUGFIX: the original condition was `if True or submit_func is None:`,
        # a debug leftover that made the caller-supplied submit_func dead code.
        if submit_func is None:
            import concurrent.futures
            import multiprocessing
            with concurrent.futures.ProcessPoolExecutor(
                    multiprocessing.cpu_count()) as pool:
                run_batch(pool.submit)
        else:
            run_batch(submit_func)
    else:
        # BUGFIX: the original iterated over the undefined name `filenames`
        # (NameError at runtime); the parameter is `file_names`.
        for name in file_names:
            if log_func:
                log_func(name, gen_dest_name(name))
            error_filename = process_file(name, gen_dest_name(name), log_func)
            if error_filename:
                error_list.append(error_filename)
    return error_list