Example #1
            # fold the two operands on the output stack into one binary node
            rhs = output.pop()
            lhs = output.pop()
            output.append(Term(Tag(nodeName), None, (lhs, rhs), None))
        assert len(output) == 1
        return output[0], err

    def collapseTrailers(self, base, trailers):
        node = base
        for tr in trailers:
            node = tr[0](node, *tr[1:])
        return node


def trace():
    import pdb
    pdb.set_trace()


EParser = loadGrammar(monte, "monte", globals(), EParserBase)


def makeParser(source, origin="<string>"):
    stream = makeTokenStream(source, origin)
    return EParser(stream, stream=True)


def parse(source, origin="<string>", tracefunc=None):
    from parsley import _GrammarWrapper
    p = makeParser(source, origin)
    if tracefunc:
        p._trace = tracefunc
    return _GrammarWrapper(p, source).start()
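
# Usage sketch (illustrative, not from the project source): with the
# excerpt's own imports in scope (monte, EParserBase, makeTokenStream,
# loadGrammar, Term, Tag), parse() turns Monte source text into a term
# tree. The source string below is made up for the example.
tree = parse("1 + 1", origin="<example>")
print(tree)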
Example #2

def makeTerm(t, args=None):
    if args is None:
        return t
    else:
        if isinstance(t, QTerm):
            if t.data:
                if not args:
                    return t
                else:
                    raise ValueError("Literal terms can't have arguments")
    return QTerm(t.asFunctor(), None, args and tuple(args))


QTermParser = loadGrammar(terml, "quasiterm", TermLParser.globals, TermLParser)
QTermParser.globals.update(globals())


def quasiterm(termString):
    """
    Build a quasiterm from a string.
    """
    p = QTermParser(termString)
    result, error = p.apply("term")
    try:
        p.input.head()
    except EOFError:
        pass
    else:
        raise error
    return result
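
# Usage sketch (illustrative, not from the project source): quasiterm()
# parses a quasi-literal term string into a QTerm tree; the term text
# below is made up for the example.
qt = quasiterm('foo(1, "two", baz)')
print(qt)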
Example #3
    def __init__(self, *args, **kwargs):
        self.bindings = self.setupBindings()
        self.grammar = wrapGrammar(
            loadGrammar(parseproto.dns, "grammar", self.bindings))
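        # Note: the third argument to loadGrammar is the namespace the
        # grammar's rules can reference; here it is the dict produced by
        # setupBindings() rather than module globals().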
Example #4
import ometa
from ometa.runtime import OMetaGrammarBase
from ometa.grammar import OMeta
from ometa.grammar import loadGrammar
from terml.nodes import termMaker as t

OMeta1 = loadGrammar(ometa, "pymeta_v1", globals(), OMetaGrammarBase)
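
# Sketch of the same pattern for a hypothetical package of one's own:
# "mypackage" and the grammar name "calc" are stand-ins, not real modules.
# loadGrammar reads the grammar definition shipped alongside the package
# and compiles it into a parser class, with the last argument selecting
# the runtime base class (assumed here to be ometa.runtime.OMetaBase).
import mypackage
from ometa.runtime import OMetaBase

CalcParser = loadGrammar(mypackage, "calc", globals(), OMetaBase)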
Example #5
def Tuple(args):
    return Term(Tag(".tuple."), None, tuple(args))


def Bag(args):
    return Term(Tag(".bag."), None, tuple(args))


def LabelledBag(f, arg):
    return Term(f.asFunctor(), None, (arg, ))


def Attr(k, v):
    return Term(Tag(".attr."), None, (k, v))


TermLParser = loadGrammar(terml, "terml", globals())


def parseTerm(termString):
    """
    Build a TermL term tree from a string.
    """
    p = TermLParser(termString)
    result, error = p.apply("term")
    try:
        p.input.head()
    except EOFError:
        pass
    else:
        raise error
    return result
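
# Usage sketch (illustrative, not from the project source): parseTerm()
# builds a TermL term tree from source text; the term below is made up
# for the example.
term = parseTerm('foo(bar, baz(1, 2), "hi")')
print(term)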
Example #6
import ometa
from ometa.runtime import OMetaGrammarBase
from ometa.grammar import OMeta
from ometa.grammar import loadGrammar
from terml.nodes import termMaker as t

OMeta1 = loadGrammar(ometa, "pymeta_v1",
                     globals(), OMetaGrammarBase)
Example #7
    def __init__(self, *args, **kwargs):
        self.bindings = self.setupBindings()
        self.grammar = wrapGrammar(loadGrammar(parseproto.dns, "grammar",
                                               self.bindings))
Example #8
    return Term(t.asFunctor(), None, args and tuple(args))


def Tuple(args):
    return Term(Tag(".tuple."), None, tuple(args))

def Bag(args):
    return Term(Tag(".bag."), None, tuple(args))

def LabelledBag(f, arg):
    return Term(f.asFunctor(), None, (arg,))

def Attr(k, v):
    return Term(Tag(".attr."), None, (k, v))

TermLParser = loadGrammar(terml, "terml", globals())


def parseTerm(termString):
    """
    Build a TermL term tree from a string.
    """
    p = TermLParser(termString)
    result, error = p.apply("term")
    try:
        p.input.head()
    except EOFError:
        pass
    else:
        raise error
    return result
Example #9
            rhs = output.pop()
            lhs = output.pop()
            output.append(Term(Tag(nodeName), None, (lhs, rhs), None))
        assert len(output) == 1
        return output[0], err

    def collapseTrailers(self, base, trailers):
        node = base
        for tr in trailers:
            node = tr[0](node, *tr[1:])
        return node

def trace():
    import pdb
    pdb.set_trace()


EParser = loadGrammar(monte, "monte", globals(), EParserBase)


def makeParser(source, origin="<string>"):
    stream = makeTokenStream(source, origin)
    return EParser(stream, stream=True)


def parse(source, origin="<string>", tracefunc=None):
    from parsley import _GrammarWrapper
    p = makeParser(source, origin)
    if tracefunc:
        p._trace = tracefunc
    try:
        return _GrammarWrapper(p, source).start()
    except ParseError as e:
        prettyParseErrorPrinter(e, source)
        import sys
        sys.exit(1)