def feed(s, inp):
    """Build a Marpa grammar from the parsed input description, precompute
    it, and notify the requesting node that it is ready."""
    s.g = Grammar()
    # Create inp.num_syms fresh symbols; symbol_new() takes no arguments.
    # (todo: make symbol_new throw exceptions instead of returning codes)
    s.c_syms = [s.g.symbol_new() for _ in range(inp.num_syms)]
    for sym, sym_rank in iteritems(inp.symbol_ranks):
        s.g.symbol_rank_set(sym, sym_rank)
        log(("symbol rank:", sym, sym_rank))
    if args.log_parsing:
        log("s.c_syms:%s", s.c_syms)
    s.g.start_symbol_set(s.c_syms[inp.start])
    s.c_rules = []
    for rule in inp.rules:
        if rule[0]:
            # it's a sequence
            _, _, lhs, rhs, action, sep, min_count, prop = rule
            s.c_rules.append(s.g.sequence_new(lhs, rhs, sep, min_count, prop))
        else:
            _, _, lhs, rhs, _, rule_rank = rule
            new_rule = s.g.rule_new(lhs, rhs)
            if new_rule != -2:
                s.c_rules.append(new_rule)
                if rule_rank != 0:
                    s.g.rule_rank_set(new_rule, rule_rank)
                    log(("rank ", rule, rule_rank))
    if args.graph_grammar:
        graphing_wrapper.generate_png()
        # graphing_wrapper.generate_gv()
    s.g.precompute()
    # check_accessibility()
    s.send(Dotdict(message="precomputed", for_node=inp.for_node))
def set_atts(editor, proxied_atts):
    """Unproxy the given attribute mapping and install it on the editor,
    emitting the change signal afterwards.

    Falsy proxied values map to None; everything else goes through
    unproxy_atts().
    """
    atts = {key: (unproxy_atts(val) if val else None)
            for key, val in iteritems(proxied_atts)}
    log("setting atts under cursor to %s", pp(atts))
    editor.atts = Atts(atts)
    editor.on_atts_change.emit()
def handler_str(mod):
    """Yield "key: handler-name" strings for every handler registered under
    the given modifier set, skipping handlers marked HIDDEN.

    Handlers may be stored either as a bare callable or as a tuple of
    (callable, HIDDEN-or-display-name).
    """
    for k, h in iteritems(global_keys[frozenset(mod)]):
        name = None
        # idiom fix: isinstance instead of type(...) == tuple
        if isinstance(h, tuple):
            if h[1] == HIDDEN:
                continue
            elif isinstance(h[1], str):
                name = h[1]
            h = h[0]
        # idiom fix: identity comparison with None
        if name is None:
            name = h.__name__
        yield key_str(k) + ": " + name
def add_keys(node, sup, handlers):
    """Attach a key->handler map to node, seeded from sup, then merge in
    the given handlers.

    sup semantics: -1 inherits the key map from the node's last base
    class; None starts with an empty odict; anything else copies sup's
    own key map.
    """
    if sup == -1:
        node.keys = node.__bases__[-1].keys.copy()
    elif sup is None:  # idiom fix: identity comparison with None
        node.keys = odict()
    else:
        node.keys = sup.keys.copy()
    for k, v in iteritems(handlers):
        # idiom fix: isinstance instead of type(...) == tuple.
        # A single binding object may cover several keys.
        if isinstance(k.key, tuple):
            for key in k.key:
                node.keys[K(k.mods, key)] = v
        else:
            node.keys[k] = v
def potential_handlers(trip):
    """Yield (keys, (elem, handler)) for every handler on the elements in
    trip and their parents whose checker passes.

    Mods and key are irrelevant here — we aren't dealing with any
    particular event. The nearest element wins: once a key set has been
    yielded, the same keys from an ancestor are suppressed.
    """
    seen = set()  # key sets already yielded (was an odict used as a set)
    elems = [x.get(Att.elem) for x in [trip.middle or {}, trip.left or {}, trip.right or {}]]
    while any(elems):
        for elem, sidedness, atts in [
                (elems[0], None, trip.middle),
                (elems[1], LEFT, trip.left),
                (elems[2], RIGHT, trip.right)]:
            if elem:
                for keys, handler in iteritems(elem.keys):
                    if handler.sidedness in (None, sidedness):
                        if not handler.checker or handler.checker(elem, atts):
                            if keys not in seen:
                                yield keys, (elem, handler)
                                seen.add(keys)
        # walk one level up on all three sides
        for i in range(3):
            if elems[i]:
                elems[i] = elems[i].parent
def click(s, e):
    """Forward a click to the counterpart: select the item whose drawn
    rect contains the click position, then accept it."""
    for item_idx, rect in iteritems(s.drawn_rects):
        if not collidepoint(rect, e.pos):
            continue
        s.counterpart.sel = item_idx
        s.counterpart.accept()
        break
def cache():
    """Snapshot the invert/mono flags from args and rebuild the color
    table by running every default color through modify()."""
    global invert, mono
    invert = args.invert
    mono = args.mono
    for name, base_color in iteritems(default_colors.__dict__):
        colors.__dict__[name] = modify(base_color)
def precompute_grammar(s, inp):
    """Build the Marpa grammar described by inp, optionally render grammar
    graphs in a background thread, precompute the grammar, and notify the
    requesting node."""
    log('precompute_grammar...')
    s.debug_sym_names = inp.debug_sym_names
    s.rules = inp.rules
    s.debug = inp.debug
    graphing_wrapper.clear()
    s.g = Grammar()
    # this calls symbol_new() repeatedly inp.num_syms times, and gathers the
    # results in a list
    # this is too smart. also, todo: make symbol_new throw exceptions
    s.c_syms = list(starmap(s.g.symbol_new, repeat(tuple(), inp.num_syms)))
    for x in s.c_syms:
        # turn on completion and prediction events for every symbol
        marpa_cffi.marpa.lib.marpa_g_symbol_is_completion_event_set(s.g.g, x, True)
        marpa_cffi.marpa.lib.marpa_g_symbol_is_prediction_event_set(s.g.g, x, True)
    for k, v in iteritems(inp.symbol_ranks):
        s.g.symbol_rank_set(k, v)
        log(("symbol rank:", k, v))
    # if args.log_parsing:
    #     log('s.c_syms:%s', s.c_syms)
    s.g.start_symbol_set(s.c_syms[inp.start])
    s.c_rules = []
    for rule in inp.rules:
        log(rule)
        if rule[0]:
            # it's a sequence
            _, _, lhs, rhs, action, sep, min_count, prop = rule
            s.c_rules.append(s.g.sequence_new(lhs, rhs, sep, min_count, prop))
        else:
            _, _, lhs, rhs, _, rank = rule
            cr = s.g.rule_new(lhs, rhs)
            if cr != -2:
                s.c_rules.append(cr)
                if rank != 0:
                    s.g.rule_rank_set(cr, rank)
                    log(("rank ", rule, rank))
    if args.graph_grammar:
        def gen():
            # render the grammar graphs to svg files
            graphing_wrapper.generate_bnf()
            graph = graphing_wrapper.generate('grammar')
            graphing_wrapper.generate_gv_dot(graph)
            import graphviz
            print("generate svg")
            graphviz.render('dot', 'svg', 'grammar.gv')
            graph = graphing_wrapper.generate2('grammar2', s.c_syms[inp.start])
            graphing_wrapper.generate_gv_dot(graph)
            print("generate svg")
            graphviz.render('dot', 'svg', 'grammar2.gv')
        import threading
        thr = threading.Thread(target=gen)
        # bug fix: Thread.run() executes gen() synchronously in the calling
        # thread; start() is what actually spawns the worker thread.
        thr.start()
    if s.g.precompute() == -2:
        s.send(Dotdict(message='precompute error', for_node=inp.for_node))
        # NOTE(review): no early return here — the accessibility checks and
        # the "precomputed" message still run after an error; confirm intended.
    for x in s.c_syms:
        if not s.g.symbol_is_accessible(x):
            log("inaccessible symbol: %s " % s.symbol2debug_name(x))
    s.send(Dotdict(message='precomputed', for_node=inp.for_node))
def click(s, e):
    """Select the on-screen item whose rect contains the click position,
    then accept the selection."""
    for item, rect in iteritems(s.rects):
        if not collidepoint(rect, e.pos):
            continue
        s.sel = s.items_on_screen.index(item)
        s.accept()
        break
def anything_excepts(s):
    """For each "anything except X" symbol, add a rule matching every
    known char other than the excepted one."""
    for excepted, except_sym in iteritems(s.excepts):
        for ch, ch_sym in iteritems(s.known_chars):
            if ch == excepted:
                continue
            s.rule(ch + " is anything except " + excepted, except_sym, [ch_sym])
def worksas_magic(s, scope): #ok here we're gonna walk thru WorkAssess and BindsTighters and do the precedence and associativity magic # http://pages.cs.wisc.edu/~fischer/cs536.s08/course.hold/html/NOTES/3.CFG.html#prec # https://metacpan.org/pod/distribution/Marpa-R2/pod/Scanless/DSL.pod#priority """ so, i think we should walk thru all syntaxeds some appear in other nodes grammar literally, like a TypedParameter in a list_of(TypedParameter). for this reason, i would assign each syntaxed a symbol. Additionally, some are denoted with "*sub works as *sup", to work as, for example, an expression. presumably, there will be only one sup for any sub. """ worksases = collections.defaultdict(list) asoc = collections.defaultdict(lambda: "left") pris = collections.defaultdict(lambda: 1000) worksases2 = collections.defaultdict(lambda: collections.defaultdict(list)) log("magic:") import nodes simple = 0 for n in scope: if n.__class__.__name__ == 'WorksAs': sup = n.ch.sup.parsed sub = n.ch.sub.parsed if not isinstance(sup, nodes.Ref) or not isinstance(sub, nodes.Ref): print("invalid sub or sup in worksas") return sup_target = sup.target sub_target = sub.target #if not isinstance(sub_target, (nodes.SyntaxedNodecl, nodes.CustomNodeDef)): # continue worksases[sup_target].append(sub_target) if simple: rule_lhs = sup_target.symbol(s) rule_rhs = sub_target.symbol(s) if args.log_parsing: log('%s worksas %s\n (%s := %s)'%(sub_target, sup_target, rule_lhs, rule_rhs)) if rule_lhs != None and rule_rhs != None: r = s.rule(str(n), rule_lhs, rule_rhs) else: print('%s or %s is None'%(rule_lhs, rule_rhs)) elif n.__class__ == nodes.CustomNode and n.decl.name == 'haspriority': k = nodes.deref_def(n.ch.node) assert k not in pris pris[k] = n.ch.priority.pyval elif n.__class__.__name__ == 'HasAssociativity': k = deref_def(n.ch.node) assert k not in asoc asoc[k] = n.ch.value.pyval if simple: return for sup,subs in iteritems(worksases): for sub in subs: worksases2[sup][pris[sub]].append(sub) for sup, 
priority_levels in iteritems(worksases2): priority_levels = [v for k, v in sorted(priority_levels.items())] def lhs_symbol(level_index): if level_index == len(priority_levels): level_index = 0 if level_index == 0: return sup.symbol(s) else: return s.named_symbol2(sup.name + str(level_index)) for level_index in range(len(priority_levels)): lhs = lhs_symbol(level_index) next_level_lhs = lhs_symbol(level_index+1) if lhs != next_level_lhs and level_index != len(priority_levels)-1: s.rule("|"+s.symbol2debug_name(lhs) + ":=" + s.symbol2debug_name(next_level_lhs), lhs, next_level_lhs) for sub in priority_levels[level_index]: if not isinstance(sub, (nodes.SyntaxedNodecl, nodes.CustomNodeDef)): s.rule(s.symbol2debug_name(lhs) + ":=" + s.symbol2debug_name(sub.symbol(s)), lhs, sub.symbol(s)) continue for sy in sub.instance_syntaxes: syntax = [] slot_idx = 0 for i in sy.rendering_tags: if isinstance(i, str): a = s.known_string(i) else: slot = nodes.deref_decl(sub.instance_slots[i.name]) if slot == sup: if slot_idx == 0: a = lhs else: a = next_level_lhs else: a = slot.symbol(s) slot_idx+=1 syntax.append(a) s.rule(s.symbol2debug_name(lhs) + ":=" + "".join([s.symbol2debug_name(i) for i in syntax]), lhs, syntax, action=lambda x,decl=sub,syntax=sy: nodes.SyntaxedBase.from_parse(decl, syntax, x, whole_words=True)) """ for sup,levels in iteritems(worksas2): for level in levels: for sub in level: if asoc[node] == "left": rules[sup].append( if asoc[node] == "right": """ """ nodes = DefaultDict(list) asoc = DefaultDict(lambda: "left") pris = DefaultDict(lambda: 1000) for n in scope: if n.__class__.__name__ in ['SyntaxedNodecl']: nodes.append(n) if n.__class__.__name__ == 'HasPriority': k = deref_def(n.ch.node) assert k not in pris pris[k] = n.ch.value.pyval if n.__class__.__name__ == 'HasAssociativity': k = deref_def(n.ch.node) assert k not in asoc asoc[k] = n.ch.value.pyval for sup,subs in iteritems(worksas): for sub in subs: worksas2[k][pris[v]].append(v) for sup,levels in 
iteritems(worksas2): for level in levels: for sub in level: if asoc[node] == "left": rules[sup].append( if asoc[node] == "right": """ """ worksas = DefaultDict(list) asoc = DefaultDict(lambda: "left") pris = DefaultDict(lambda: 1000) for i in scope: if i.__class__.__name__ == 'WorksAs': worksas[i.ch.sup.target].append(i.ch.sub.target) if n.__class__.__name__ == 'HasPriority': k = deref_def(n.ch.node) assert k not in pris pris[k] = n.ch.value.pyval if n.__class__.__name__ == 'HasAssociativity': k = deref_def(n.ch.node) assert k not in asoc asoc[k] = n.ch.value.pyval for sup,subs in iteritems(worksas): for sub in subs: worksas2[k][pris[v]].append(v) for sup,levels in iteritems(worksas2): for level in levels: for sub in level: if asoc[node] == "left": rules[sup].append( if asoc[node] == "right": """ """ levels = {} import collections levels = collections.OrderedDict(sorted(levels.items())) for k,v """ """