Example #1
def str_to_tree_warn(s, paren_after_root=False, max=None):
    """Parse a bracketed parse-tree string and return the tree, or None on failure.
    Relies on the enclosing module's tree helpers and warn()."""
    toks = tree.tokenizer.findall(s)
    # Strip the extra outer parens of a Berkeley-style "( (tree) )" parse.
    if len(toks) > 2 and toks[0] == '(' and toks[1] == '(' and toks[-2] == ')' and toks[-1] == ')':
        toks = toks[1:-1]
    (t, n) = tree.scan_tree(toks, 0, paren_after_root)
    if t is None:
        # Report how far scan_tree got before it failed.
        warn("scan_tree failed",
             ": %s of %s: %s ***HERE*** %s" % (n, len(toks), ' '.join(toks[:n]), ' '.join(toks[n:])),
             max=max)
    return t
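
A minimal usage sketch, not part of the original listing: it assumes the function's enclosing module (with its tree helpers and warn()) is importable, and the bracketed string is just an illustrative Penn-treebank-style parse.

# Hedged usage sketch; the input string and surrounding module are assumptions.
t = str_to_tree_warn("( (S (NP (DT the) (NN cat)) (VP (VBZ sleeps))) )")
if t is not None:
    print(t)  # parsed tree object; None would mean scan_tree failed and a warning was emitted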
Example #2
def parse_sbmt_lhs(s, require_arrow=True):
    """Parse the left-hand side of an sbmt rule string into a tree.
    Relies on the enclosing module's tokenize(), lhs_tokens, rule_arrow_s, and tree.scan_tree."""
    (tokens, spans) = tokenize(lhs_tokens, s, rule_arrow_s)
    if not len(tokens):
        raise Exception("sbmt rule has no LHS tokens: %s" % s)
    (_, p2) = spans[-1]
    # Drop the rule arrow that terminates the LHS, or complain if one is required.
    if tokens[-1] == rule_arrow_s:
        tokens.pop()
    elif require_arrow:
        raise Exception("sbmt rule LHS not terminated in %s: %s" % (rule_arrow_s, tokens))
    (t, endt) = tree.scan_tree(tokens, 0, True)
    # The entire LHS must have been consumed as a single tree.
    if t is None or endt != len(tokens):
        raise Exception("sbmt rule LHS tokens weren't parsed into a tree: %s TREE_ENDS_HERE unparsed = %s"
                        % (tokens[0:endt], tokens[endt:]))
    (_, p1) = spans[endt]
    return (t, p2, p1)
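
A hedged usage sketch, not from the source: the rule syntax below and the assumption that rule_arrow_s is the "->" separator are illustrative only, and the meaning of the two returned offsets is inferred from reading the code.

# Hypothetical call; assumes rule_arrow_s == "->" and a typical sbmt-style LHS.
lhs = "NP(x0:DT x1:NN) -> x0 x1"
t, off_end, off_start = parse_sbmt_lhs(lhs)
# t is the parsed LHS tree; off_start/off_end appear to bracket the arrow token in lhs.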
Example #3
def parse_sbmt_lhs(s, require_arrow=True):
    (tokens, spans) = tokenize(lhs_tokens, s, rule_arrow_s)
    if not len(tokens):
        raise Exception("sbmt rule has no LHS tokens: %s" % s)
    (_, p2) = spans[-1]
    if tokens[-1] == rule_arrow_s:
        tokens.pop()
    elif require_arrow:
        raise Exception("sbmt rule LHS not terminated in %s: %s" %
                        (rule_arrow_s, tokens))
    (t, endt) = tree.scan_tree(tokens, 0, True)
    if t is None or endt != len(tokens):
        raise Exception(
            "sbmt rule LHS tokens weren't parsed into a tree: %s TREE_ENDS_HERE unparsed = %s"
            % (tokens[0:endt], tokens[endt:]))
    (_, p1) = spans[endt]
    return (t, p2, p1)