def make_cmp(p, start, end):
    """Build the matched/unmatched value partition for a comparison check.

    `p` holds a query-call spec followed by a comparison operator and a value;
    the return value is ``((query, pdict), [(matched, True), (unmatched, False)])``
    where `matched` / `unmatched` are the sets of query return values for which
    the comparison is true / false.

    Raises LogError via emit_error on invalid enum usage.
    """
    p, op, value = p[:-2], p[-2], p[-1]
    query_name, query, pdict, _ = check_function(p, 'query', start, end)
    num_values = _get_query_num_values(query, start, end)
    if isinstance(value.value, str):
        # Comparing against an enum name: translate it to its numeric index.
        if not query.rv.type.startswith('enum['):
            emit_error(f'Query "{query.name}" does not return an enum', start, end)
            raise LogError()
        # Strip whitespace around names, consistent with make_in/make_switch.
        enum_map = {v.strip(): i for i, v in enumerate(query.rv.type[5:-1].split(','))}
        if value.value not in enum_map:
            # Fix: the old code used list.index(), which raises ValueError and
            # never returns -1, so this diagnostic was unreachable (and, after
            # the assignment, would have printed the index instead of the name).
            emit_error(f'Enum "{value.value}" is not a valid return value of "{query.name}"', start, end)
            raise LogError()
        value.value = enum_map[value.value]
    if op == '==' or op == '!=':
        matched = {value.value} if 0 <= value.value < num_values else set()
        unmatched = {i for i in range(num_values) if i != value.value}
    elif op == '<' or op == '>=':
        # Clamp to [0, num_values] so out-of-range literals (e.g. negative
        # integers) cannot introduce case values the query never returns.
        matched = set(range(min(num_values, max(0, value.value))))
        unmatched = set(range(max(0, value.value), num_values))
    else:  # '<=' or '>'
        matched = set(range(min(num_values, max(0, value.value + 1))))
        unmatched = set(range(max(0, value.value + 1), num_values))
    if op in ('!=', '>=', '>'):
        # These are the negations of '==', '<', '<=' — just swap the partition.
        matched, unmatched = unmatched, matched
    if not matched or not unmatched:
        emit_warning('always true or always false check', start, end)
    return ((query, pdict), [(matched, True), (unmatched, False)])
def make_in(p, start, end):
    """Build the matched/unmatched value partition for an `in`-list check.

    `p` holds a query-call spec followed by a list of candidate values (ints or
    enum names). Returns ``((query, pdict), [(matched, True), (unmatched, False)])``.

    Raises LogError via emit_error on invalid enum usage.
    """
    p, values = p[:-1], p[-1]
    query_name, query, pdict, _ = check_function(p, 'query', start, end)
    num_values = _get_query_num_values(query, start, end)
    matched = set()
    unmatched = set(range(num_values))
    enum_values = {}
    if query.rv.type.startswith('enum['):
        enum_values_list = query.rv.type[5:-1].split(',')
        enum_values = {v.strip(): i for i, v in enumerate(enum_values_list)}
    for value in values:
        if isinstance(value.value, str):
            # Enum name: translate to its numeric index.
            if not enum_values:
                emit_error(f'Query "{query.name}" does not return an enum', start, end)
                raise LogError()
            if value.value not in enum_values:
                emit_error(f'Enum "{value.value}" is not a valid return value of "{query.name}"', start, end)
                raise LogError()
            value.value = enum_values[value.value]
        if not (0 <= value.value < num_values):
            # Fix: this warning was missing its f-prefix and printed the
            # literal '{value.value}' / '{query_name}' placeholders.
            emit_warning(f'{value.value} never returned by {query_name}, ignored', start, end)
            continue
        matched.add(value.value)
        # Fix: discard instead of remove — a duplicate value in the list
        # previously raised KeyError on the second removal.
        unmatched.discard(value.value)
    if not matched or not unmatched:
        emit_warning('always true or always false check', start, end)
    return ((query, pdict), [(matched, True), (unmatched, False)])
def process_file(filename, output_dir, output_name, actor_gen, optimizer_flags, **kwargs):
    """Compile one .evfl source file into a .bfevfl binary.

    Reads `filename`, tokenizes and parses it, runs the enabled optimizer
    passes, validates that every local subflow call has a matching flow or
    entrypoint, and writes the serialized result to `output_dir` (or to
    `output_name` when given). Extra keyword args are forwarded to parse().

    Raises LogError if the input file is missing or a subflow call is unresolved.
    """
    init_logger(filename)
    src_path = Path(filename)
    if output_name:
        out_path = output_dir / output_name
    else:
        out_path = output_dir / src_path.with_suffix('.bfevfl').name
    flow_name = src_path.with_suffix('').name
    if not src_path.exists():
        emit_error('file not found, skipping')
        raise LogError()
    with src_path.open('rt') as f:
        source = f.read()
    setup_logger(source)
    roots, actors = parse(tokenize(source), actor_gen,
                          exported_tco=optimizer_flags['exported_tco'], **kwargs)
    if optimizer_flags['merge_duplicate']:
        optimize_merge_identical(roots)
    renamer = make_compact_renamer if optimizer_flags['short_event_names'] else make_counter_renamer
    optimize_names(roots, renamer)
    seen: Set[Node] = set()
    entrypoints = {r.name for r in roots}
    for root in roots:
        for node in find_postorder(root):
            if node in seen:
                continue
            if isinstance(node, ActionNode):
                node.action.mark_used()
            elif isinstance(node, SwitchNode):
                node.query.mark_used()
            elif isinstance(node, SubflowNode) and node.ns == '':
                # local (un-namespaced) subflow calls must resolve within this file
                if node.called_root_name not in entrypoints:
                    emit_error(f'subflow call for {node.called_root_name} but matching flow/entrypoint not found')
                    raise LogError()
            seen.add(node)
    bfevfl = File(flow_name, actors, list(seen))
    with out_path.open('wb') as f:
        f.write(bfevfl.prepare_bitstream().bytes)
def make_switch(n, start, end):
    """Build a SwitchNode for a switch statement over a query call.

    `n` holds the query-call spec followed by `(cases_before, default, cases_after)`;
    each case is `(values, (entrypoints, node, connector))`. Values may be ints
    or enum names. Unhandled values are routed to the default block (or past the
    switch when there is no default).

    Returns ``(entrypoints, (sw,))``. Raises LogError on invalid enum usage.
    """
    p, branches = n[:-1], n[-1]
    cases = branches[0] + branches[2]
    default = branches[1]
    query_name, query, pdict, _ = check_function(p, 'query', start, end)
    sw = SwitchNode(f'Event{next_id()}', query, pdict)
    entrypoints = []
    enum_values = {}
    if query.rv.type.startswith('enum['):
        enum_values_list = query.rv.type[5:-1].split(',')
        enum_values = {v.strip(): i for i, v in enumerate(enum_values_list)}
    handled = set()  # numeric case values actually wired to a branch
    for values, block in cases:
        eps, node, connector = block
        entrypoints.extend(eps)
        sw.add_out_edge(node)
        connector.add_out_edge(sw.connector)
        for value in values:
            if isinstance(value, str):
                # Enum name: translate to its numeric index.
                if not enum_values:
                    emit_error(f'Query "{query.name}" does not return an enum', start, end)
                    raise LogError()
                if value not in enum_values:
                    emit_error(f'Enum "{value}" is not a valid return value of "{query.name}"', start, end)
                    raise LogError()
                value = enum_values[value]
            sw.add_case(node, value)
            handled.add(value)
    num_values = _get_query_num_values(query, start, end)
    # Fix: use the converted numeric values. The old code re-read the raw case
    # lists, so enum-name cases (still strings there) were never subtracted and
    # their values were wired to the default branch a second time.
    default_values = set(range(num_values)) - handled
    if default_values:
        if default is not None:
            _, default, connector = default
            connector.add_out_edge(sw.connector)
        # No default block: unhandled values fall through past the switch.
        default_branch = default or sw.connector
        sw.add_out_edge(default_branch)
        for value in default_values:
            sw.add_case(default_branch, value)
    elif default:
        emit_warning(f'default branch for {query_name} call is dead code, ignoring', start, end)
    return entrypoints, (sw,)
def make_while(n, start, end):
    """Build the graph fragment for a while-loop.

    `n` is `(table, (entrypoints, body_head, body_tail))` where `table` is
    either a constant TypedValue condition or a switch-style condition table.
    Returns ``(entrypoints, (head, tail))`` for the resulting fragment.

    Raises LogError for entrypoints inside a `while false` body.
    """
    table, body = n
    eps, body_head, body_tail = body
    # Allocated unconditionally so the id counter advances exactly as before,
    # even on branches that never use this connector.
    exit_connector = ConnectorNode(f'Connector{next_id()}')
    if not isinstance(table, TypedValue):
        # General condition: expand the table in front of the body and
        # loop the body's tail back to the condition check.
        loop_head = _expand_table(table, body_head, exit_connector)
        body_tail.add_out_edge(loop_head)
        return eps, (loop_head, exit_connector)
    if table.value:
        # `while true`: the body tail loops straight back to its head.
        body_tail.add_out_edge(body_head)
        return eps, (body_head, body_tail)
    # `while false`: the body is dead code and is dropped entirely.
    if eps:
        emit_error('entrypoints in while-false not supported', start, end)
        raise LogError()
    return [], (body_tail, body_tail)
def check_function(p, type_, start, end):
    """Resolve an action/query call spec to its registered function.

    `p` is either a pre-built dict (with `.actor_name`, `.actor_secondary`,
    `.name`, `.negated` keys plus the remaining parameters) or a tuple
    `(actor_name, actor_secondary, name, params)` whose positional/keyword
    params still need to be mapped through the function signature.

    Returns ``(function_name, function, pdict, negated)``. Unknown functions
    are registered as empty calls with a warning; bad parameter lists raise
    LogError.
    """
    if len(p) == 1:
        p = p[0]
    if isinstance(p, dict):
        # Pre-parsed spec: the metadata keys are popped out and whatever
        # remains in the dict is already the final parameter dict.
        actor = (p.pop('.actor_name'), p.pop('.actor_secondary'))
        name = p.pop('.name')
        negated = p.pop('.negated')
        params = pdict = p
        prepare_params = False
    else:
        actor_name, actor_secondary, name, params = p
        actor = (actor_name, actor_secondary or '')
        negated = False
        prepare_params = True
    function_name = f'EventFlow{type_.capitalize()}{name}'
    if actor not in actors:
        actors[actor] = gen_actor(*actor)
    registry = actors[actor].queries if type_ == 'query' else actors[actor].actions
    if function_name not in registry:
        emit_warning(f'no {type_} with name "{function_name}" found, using empty call', start, end)
        # Register a stub so compilation can continue.
        if type_ == 'action':
            actors[actor].register_action(Action(actor, function_name, []))
        else:
            actors[actor].register_query(Query(actor, function_name, [], IntType, False))
    function = registry[function_name]
    if prepare_params:
        try:
            positional = [arg for kw, arg in params if kw is None]
            pdict = function.prepare_param_dict(positional)
            for kw, arg in params:
                if kw is None:
                    continue
                if kw in pdict:
                    emit_warning(f'keyword argument name {kw} matches positional argument name', start, end)
                pdict[kw] = arg
        except AssertionError as e:
            emit_error(str(e), start, end)
            raise LogError()
    return function_name, function, pdict, negated
def verify_params(name, root, params_list):
    """Type-check variable usage inside flow `name` rooted at `root`.

    `params_list` is a list of `(param, type_, default_value)` declarations.
    Verifies defaults match their declared types, that every Argument used in
    action/query/subflow parameters is declared, and that its declared type
    matches the type it is used as. Raises LogError on any mismatch.
    """
    for param, type_, default in params_list:
        if default is not None and type_ != default.type:
            emit_error(f'variable definition for {param} in {name} is of type {type_} but has default value of type {default.type}')
            raise LogError()
    declared = {param: type_ for param, type_, _ in params_list}
    for node in find_postorder(root):
        if isinstance(node, (ActionNode, SwitchNode)):
            for key, val in node.params.items():
                if not isinstance(val.value, Argument):
                    continue
                if val.value not in declared:
                    emit_error(f'variable {val.value} not defined in flow {name}')
                    raise LogError()
                expected_type = declared[val.value]
                actual_type = val.type
                if key.startswith('EntryVariableKey'):
                    # The suffix after 'EntryVariableKey' encodes the type the
                    # engine will read this parameter as.
                    suffix = key[len('EntryVariableKey'):]
                    for prefix, forced in (('Int_', IntType), ('Bool_', BoolType),
                                           ('Float_', FloatType), ('String_', StringType)):
                        if suffix.startswith(prefix):
                            actual_type = forced
                            break
                    else:
                        val.type = ArgumentType
                if actual_type != AnyType and expected_type != actual_type:
                    emit_error(f'variable {val.value} has the wrong type, defined to be {expected_type} but used as {actual_type}')
                    raise LogError()
        if isinstance(node, SubflowNode):
            for key, val in node.params.items():
                if not isinstance(val.value, Argument):
                    continue
                if val.value not in declared:
                    emit_error(f'variable {val.value} not defined in flow {name}')
                    raise LogError()
                # Propagate the declared type onto the forwarded argument.
                val.type = declared[val.value]
def inner(t, start, end):
    # Closure helper: converts token `t` and wraps the result in a
    # single-entry param dict. `param`, `f`, and `type_` are captured from an
    # enclosing scope not visible in this chunk — presumably the parameter
    # name, a converter callable, and the target type (TODO confirm).
    # Any conversion failure is reported at (start, end) and re-raised as
    # LogError so callers see a uniform error path.
    try:
        return {param: f(type_, t)}
    except Exception as e:
        # NOTE(review): broad catch — assumes `f` only raises conversion-style
        # errors; confirm nothing unexpected is being swallowed here.
        emit_error(str(e), start, end)
        raise LogError()
def __process_local_calls(roots: List[RootNode], local_roots: Dict[str, RootNode], exported_roots: Dict[str, RootNode], exported_tco: bool):
    """Validate and inline/reroute un-namespaced (local) subflow calls.

    Three phases:
      1. For every local SubflowNode, check the callee exists, entrypoints get
         no parameters, and parameter names/types match the callee's vardefs;
         record each call's continuation node in `post_calls`.
      2. Local flows with exactly one continuation (and no defaulted vardefs)
         get that continuation spliced over their TerminalNode; otherwise the
         flow is promoted to an exported root.
      3. Compute reroute targets for calls whose arguments are all pass-through
         Arguments, collapse chained reroutes, then rewrite the edges in place.
    Raises LogError on any parameter mismatch.
    """
    # Map: called local flow name -> set of nodes execution continues at
    # after the call returns.
    post_calls: Dict[str, Set[Node]] = {}
    for root in roots:
        for node in find_postorder(root):
            if isinstance(node, SubflowNode):
                if node.ns == '':
                    if node.called_root_name not in exported_roots and node.called_root_name not in local_roots:
                        # NOTE(review): only a warning is emitted here, but the
                        # lookup just below will raise KeyError for an undefined
                        # callee — confirm whether a `continue` is intended.
                        emit_warning(f'{node.called_root_name} called but not defined')
                    if node.out_edges and node.called_root_name in local_roots:
                        # A subflow call has at most one continuation edge.
                        assert len(node.out_edges) == 1
                        post_calls[node.called_root_name] = post_calls.get(node.called_root_name, set())
                        post_calls[node.called_root_name].add(node.out_edges[0])
                    called_node = local_roots[node.called_root_name] if node.called_root_name in local_roots else exported_roots[node.called_root_name]
                    if called_node.entrypoint:
                        # Entrypoints take no parameters at all.
                        if node.params:
                            emit_error(f'entrypoint "{node.called_root_name}" should not be called with any parameters')
                            raise LogError()
                        continue
                    # Vardefs with an initial value act as defaulted parameters.
                    default_vardefs = {v.name: v for v in called_node.vardefs if v.initial_value is not None}
                    if len(called_node.vardefs) - len(default_vardefs) > len(node.params):
                        emit_error(f'{node.called_root_name} expects at least {len(called_node.vardefs) - len(default_vardefs)} parameters but received {len(node.params)}')
                        raise LogError()
                    for vardef in called_node.vardefs:
                        if vardef.name not in node.params and vardef.name not in default_vardefs:
                            emit_error(f'{node.called_root_name} expects parameter "{vardef.name}" of type {vardef.type}')
                            raise LogError()
                        if vardef.name not in node.params:
                            # Defaulted and not supplied — nothing to check.
                            continue
                        param = node.params[vardef.name]
                        param_type = param.type
                        if param_type == ArgumentType:
                            # Forwarded argument: resolve its type from the
                            # calling root's own vardefs.
                            vcand = [v.type for v in root.vardefs if v.name == param.value]
                            if not vcand:
                                emit_error(f'variable {param.value} not defined')
                                raise LogError()
                            param_type = vcand[0]
                        if param_type != vardef.type:
                            emit_error(f'{node.called_root_name} expects parameter "{vardef.name}" to be of type {vardef.type} but received {param_type} instead')
                            raise LogError()
                    # Mark all Argument-valued params as pass-through so phase 3
                    # can recognize fully-forwarding calls.
                    for param in node.params.values():
                        if isinstance(param.value, Argument):
                            param.type = ArgumentType
    for name, root in list(local_roots.items()):
        if name not in post_calls:
            continue
        if len(post_calls[name]) == 1 and all(v.initial_value is None for v in root.vardefs):
            # Exactly one continuation and no defaulted vardefs: the local
            # flow can be inlined by splicing the continuation over its end.
            continue_ = next(iter(post_calls[name]))
            if continue_ is TerminalNode:
                continue
            __replace_node(root, TerminalNode, continue_)
        else:
            # Multiple continuations (or defaults): keep it callable by export.
            emit_warning(f'flow {name} is local but forcing export')
            exported_roots[name] = local_roots[name]
            del local_roots[name]
    # Map each fully-forwarding SubflowNode to the node its callee starts at.
    reroutes: Dict[SubflowNode, Node] = {}
    for root in roots:
        for node in find_postorder(root):
            if isinstance(node, SubflowNode) and node.ns == '':
                if not all(p.type == ArgumentType for p in node.params.values()):
                    continue
                tail_call = (len(node.out_edges) == 1 and node.out_edges[0] is TerminalNode)
                if node.called_root_name in local_roots:
                    reroutes[node] = local_roots[node.called_root_name].out_edges[0]
                elif tail_call and exported_tco:
                    # TODO fix mutual recursive case when turned off
                    reroutes[node] = exported_roots[node.called_root_name].out_edges[0]
    # Collapse chains: a reroute target that is itself rerouted is followed
    # until a fixed point is reached.
    changed = True
    while changed:
        changed = False
        for from_, to in list(reroutes.items()):
            if to in reroutes:
                assert isinstance(to, SubflowNode)
                reroutes[from_] = reroutes[to]
                changed = True
    # Rewrite edges: depth-first over each root, replacing edges into rerouted
    # SubflowNodes with their final targets.
    for root in roots:
        s: List[Node] = [root]
        added = set()
        while s:
            node = s.pop()
            for child in list(node.out_edges):
                if isinstance(child, SubflowNode) and child in reroutes:
                    node.reroute_out_edge(child, reroutes[child])
            for child in node.out_edges:
                if child not in added:
                    s.append(child)
                    added.add(child)
def main():
    """CLI entry point: parse arguments, load functions.csv, compile inputs.

    Exits with status 1 if any file fails to compile, 2 on a fatal setup error
    (bad -o usage, missing functions.csv, output directory is a file).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--functions', metavar='functions.csv', default='functions.csv',
                        help='functions.csv for EventFlow function type information (default: ./functions.csv)')
    parser.add_argument('-d', metavar='output_directory', help='output directory')
    parser.add_argument('-o', metavar='file',
                        help='file to output to, overrides -d, ' +
                             'cannot be used for multiple input files')
    parser.add_argument('files', metavar='evfl_file', nargs='+', help='.evfl files to compile')
    parser.add_argument('--optimize', action='store_true', help='Enable all optimizer flags')
    optimizer = parser.add_argument_group('Optimizer Flags',
                                          description='Optimizer flags for smaller output')
    optimizer.add_argument('--fexported-tco', action='store_true',
                           help='Perform tail-call-optimization for exported flows')
    optimizer.add_argument('--fmerge-duplicate', action='store_true',
                           help='Merge duplicate segments of code')
    optimizer.add_argument('--fshort-event-names', action='store_true',
                           help='Rename all events to very short names (instead of Event0, Event1, etc.)')
    args = parser.parse_args()
    if args.optimize:
        args.fexported_tco = True
        args.fmerge_duplicate = True
        args.fshort_event_names = True
    # Fix: strip the 'f' prefix only from the actual optimizer flags. The old
    # code renamed every vars(args) key starting with 'f', which also mangled
    # 'functions' -> 'unctions' and 'files' -> 'iles' into junk entries.
    optimizer_flags = {k[1:]: getattr(args, k)
                       for k in ('fexported_tco', 'fmerge_duplicate', 'fshort_event_names')}
    try:
        if len(args.files) > 1 and args.o:
            emit_fatal('-o cannot be used with multiple input files')
            raise LogFatal()
        fcsv = Path(args.functions)
        if not fcsv.exists() or not fcsv.is_file():
            # Fix: was args.f, which does not exist on the namespace and raised
            # AttributeError instead of reporting the missing file.
            emit_fatal(f'cannot open {args.functions}')
            raise LogFatal()
        with fcsv.open('rt') as f:
            actor_gen, action_rules, query_rules, query_op_rules = actor_gen_prepare(csv.reader(f))
        # Token prefix every custom function rule starts with: actor.name(
        function_prefix = [
            ('ID', None),
            ('ACTOR', 'placeholder'),
            ('DOT', None),
            ('ID', None),
            ('LPAREN', None),
        ]
        custom_action_parsers = parse_custom_rules(action_rules, function_prefix)
        custom_query_parsers = parse_custom_rules(query_rules, function_prefix)
        custom_query_op_parser = parse_custom_rules(query_op_rules, [])[1]
        output_dir = Path('.')
        output_name = None
        if args.d and not args.o:
            output_dir = Path(args.d)
            if not output_dir.exists():
                output_dir.mkdir()
            if output_dir.is_file():
                emit_fatal('output directory is a file')
                raise LogFatal()
        if args.o:
            output_name = args.o
        success = True
        for filename in args.files:
            try:
                process_file(
                    filename, output_dir, output_name, actor_gen, optimizer_flags,
                    custom_action_parser_pfx=custom_action_parsers[0],
                    custom_action_parser_reg=custom_action_parsers[1],
                    custom_query_parser_pfx=custom_query_parsers[0],
                    custom_query_parser_reg=custom_query_parsers[1],
                    custom_query_parser_op=custom_query_op_parser,
                )
            except LogError:
                success = False
            except NoParseError as e:
                success = False
                # NoParseError messages look like 'line,col-line,col: message'.
                pos, msg = e.msg.split(':', 1)
                start, end = pos.split('-', 1)
                start = tuple(int(x) for x in start.split(',', 1))
                end = tuple(int(x) for x in end.split(',', 1))
                emit_error(e.msg, start, end)
        if not success:
            sys.exit(1)
    except LogFatal:
        sys.exit(2)