Example #1
    def test_basic_h(self):
        import parsing.grammar
        import parsing.automaton
        parsing.grammar.in_h = True

        class TestGlrParser(parsing.Glr):
            def __init__(self, spec):
                parsing.Glr.__init__(self, spec)

        try:
            from parsing.tests.specs import h

            spec = parsing.Spec(h, logFile='h.log', skinny=False)

            parser = TestGlrParser(spec)
            parser.token_from_class(h.TokenI)
            parser.token_from_class(h.TokenPlus)
            parser.token_from_class(h.TokenI)
            parser.token_from_class(h.TokenStar)
            parser.token_from_class(h.TokenI)
            parser.eoi()
            self.assertEqual(len(parser.start), 1)
            self.assertEqual(repr(parser.start[0]), '(i + (i * i))')
        finally:
            parsing.grammar.in_h = False
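The tests in this listing import their grammars from parsing.tests.specs (a, b, d, h) without showing them. For orientation, here is a minimal sketch of such a spec module in the parsing library's docstring-driven style; the class and precedence names are illustrative, not the actual test grammars:

import parsing

class PAdd(parsing.Precedence):
    "%left pAdd"

class PMul(parsing.Precedence):
    "%left pMul >pAdd"   # multiplication binds tighter than addition

class TokenId(parsing.Token):
    "%token id"

class TokenPlus(parsing.Token):
    "%token plus [pAdd]"

class TokenStar(parsing.Token):
    "%token star [pMul]"

class Expr(parsing.Nonterm):
    "%start"

    def reduceId(self, id_):
        "%reduce id"
        self.val = 'ID'

    def reduceAdd(self, left, plus, right):
        "%reduce Expr plus Expr [pAdd]"
        self.val = '[%s + %s]' % (left.val, right.val)

    def reduceMul(self, left, star, right):
        "%reduce Expr star Expr [pMul]"
        self.val = '[%s * %s]' % (left.val, right.val)

Feeding id star id plus id through an Lr or Glr parser built from a module shaped like this yields bracketed strings of the kind asserted in these tests.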
Example #2
def _compile_parsers(build_lib, inplace=False):
    # pathlib, shutil, and the ROOT_PATH constant are defined at module
    # level in the originating setup.py.
    import parsing

    import edb.edgeql.parser.grammar.single as edgeql_spec
    import edb.edgeql.parser.grammar.block as edgeql_spec2
    import edb.edgeql.parser.grammar.sdldocument as schema_spec

    for spec in (edgeql_spec, edgeql_spec2, schema_spec):
        spec_path = pathlib.Path(spec.__file__).parent
        # Grammar module's directory relative to the repository root.
        subpath = pathlib.Path(str(spec_path)[len(str(ROOT_PATH)) + 1:])
        pickle_name = spec.__name__.rpartition('.')[2] + '.pickle'
        pickle_path = subpath / pickle_name
        cache = build_lib / pickle_path
        cache.parent.mkdir(parents=True, exist_ok=True)
        # Constructing the Spec generates the parser tables and writes them
        # to the pickle cache.
        parsing.Spec(spec, pickleFile=str(cache), verbose=True)
        if inplace:
            shutil.copy2(cache, ROOT_PATH / pickle_path)
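In the originating setup.py this helper is driven from the build steps; a plausible wiring, assuming a standard setuptools build_py subclass (a sketch, not the project's actual command class):

import pathlib
from setuptools.command import build_py

class build_parsers(build_py.build_py):
    def run(self):
        super().run()
        # Precompile the grammar pickles into the build staging directory.
        _compile_parsers(pathlib.Path(self.build_lib))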
Example #3
    def test_basic_h(self):
        class TestGlrParser(parsing.Glr):
            def __init__(self, spec):
                parsing.Glr.__init__(self, spec)

        from parsing.tests.specs import h

        spec = parsing.Spec(h, skinny=False)

        parser = TestGlrParser(spec)
        parser.token(h.TokenI(parser))
        parser.token(h.TokenPlus(parser))
        parser.token(h.TokenI(parser))
        parser.token(h.TokenStar(parser))
        parser.token(h.TokenI(parser))
        parser.eoi()
        self.assertEqual(len(parser.start), 1)
        self.assertEqual(repr(parser.start[0]), '(i + (i * i))')
Example #4
    def _compile_parsers(self):
        # Runs inside a setuptools build command, so self.build_lib points
        # at the build staging directory; os is imported at module level.
        import parsing

        import edb.lang.edgeql.parser.grammar.single as edgeql_spec
        import edb.lang.edgeql.parser.grammar.block as edgeql_spec2
        import edb.server.pgsql.parser.pgsql as pgsql_spec
        import edb.lang.schema.parser.grammar.declarations as schema_spec
        import edb.lang.graphql.parser.grammar.document as graphql_spec

        base_path = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))

        for spec in (edgeql_spec, edgeql_spec2, pgsql_spec, schema_spec,
                     graphql_spec):
            subpath = os.path.dirname(spec.__file__)[len(base_path) + 1:]
            cache_dir = os.path.join(self.build_lib, subpath)
            os.makedirs(cache_dir, exist_ok=True)
            cache = os.path.join(cache_dir,
                                 spec.__name__.rpartition('.')[2] + '.pickle')
            parsing.Spec(spec, pickleFile=cache, verbose=True)
Example #5
File: parsing.py  Project: versada/edgedb
    def get_parser_spec(self):
        cls = self.__class__

        try:
            # Look in this class's own dict so that a spec cached on a base
            # class is not reused for a subclass with a different grammar.
            spec = cls.__dict__['parser_spec']
        except KeyError:
            pass
        else:
            if spec is not None:
                return spec

        mod = self.get_parser_spec_module()
        spec = parsing.Spec(mod,
                            pickleFile=self.localpath(mod, "pickle"),
                            skinny=not self.get_debug(),
                            logFile=self.localpath(mod, "log"),
                            verbose=self.get_debug())

        self.__class__.parser_spec = spec
        return spec
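The method relies on three hooks provided by the same class: get_parser_spec_module(), localpath(), and get_debug(). A minimal sketch of what a concrete subclass might supply; the base class name, grammar module, and path scheme are assumptions for illustration:

import os

class MyLangParser(ParserBase):  # ParserBase: hypothetical base defining get_parser_spec()
    def get_parser_spec_module(self):
        from myproject.grammar import statements  # hypothetical grammar module
        return statements

    def get_debug(self):
        # Production mode: skinny pickle, no log file, quiet Spec build.
        return False

    def localpath(self, mod, type):
        # Keep the generated .pickle/.log next to the grammar module itself.
        return os.path.join(os.path.dirname(mod.__file__),
                            mod.__name__.rpartition('.')[2] + '.' + type)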
Example #6
    def test_basic_d(self):
        class TestParser(parsing.Glr):
            def __init__(self, spec):
                parsing.Glr.__init__(self, spec)

        from parsing.tests.specs import d

        spec = parsing.Spec(d, skinny=False)

        parser = TestParser(spec)
        parser.token_from_class(d.id)
        parser.token_from_class(d.star)
        parser.token_from_class(d.id)
        parser.token_from_class(d.plus)
        parser.token_from_class(d.id)
        parser.token_from_class(d.star)
        parser.token_from_class(d.id)
        parser.eoi()

        self.assertEqual(len(parser.start), 1)
        self.assertEqual(parser.start[0].val, '[[ID * ID] + [ID * ID]]')
Example #7
    def test_basic_pickle(self):
        class TestGlrParser(parsing.Glr):
            def __init__(self, spec):
                parsing.Glr.__init__(self, spec)

        from parsing.tests.specs import b

        spec = parsing.Spec(b, skinny=False)
        import six.moves.cPickle
        specPickle = six.moves.cPickle.dumps(spec)
        spec2 = six.moves.cPickle.loads(specPickle)

        parser = TestGlrParser(spec2)
        parser.token_from_class(b.id)
        parser.token_from_class(b.star)
        parser.token_from_class(b.id)
        parser.token_from_class(b.plus)
        parser.token_from_class(b.id)
        parser.eoi()
        self.assertEqual(len(parser.start), 1)
        self.assertEqual(parser.start[0].val, '[[ID * ID] + ID]')
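six.moves.cPickle resolves to cPickle on Python 2 and to the standard pickle module on Python 3, so the same round trip can be written with the stdlib alone:

import pickle

spec_bytes = pickle.dumps(spec)   # serialize the generated parser tables
spec2 = pickle.loads(spec_bytes)  # reload them without recompiling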
Example #8
def _compile_parsers(build_lib, inplace=False):
    import parsing

    import edb.lang.edgeql.parser.grammar.single as edgeql_spec
    import edb.lang.edgeql.parser.grammar.block as edgeql_spec2
    import edb.server.pgsql.parser.pgsql as pgsql_spec
    import edb.lang.schema.parser.grammar.declarations as schema_spec
    import edb.lang.graphql.parser.grammar.document as graphql_spec

    base_path = pathlib.Path(__file__).parent.resolve()

    for spec in (edgeql_spec, edgeql_spec2, pgsql_spec,
                 schema_spec, graphql_spec):
        spec_path = pathlib.Path(spec.__file__).parent
        subpath = pathlib.Path(str(spec_path)[len(str(base_path)) + 1:])
        pickle_name = spec.__name__.rpartition('.')[2] + '.pickle'
        pickle_path = subpath / pickle_name
        cache = build_lib / pickle_path
        cache.parent.mkdir(parents=True, exist_ok=True)
        parsing.Spec(spec, pickleFile=str(cache), verbose=True)
        if inplace:
            shutil.copy2(cache, base_path / pickle_path)
Example #9
    def test_basic_a(self):
        class TestParser(parsing.Lr):
            def __init__(self, spec):
                parsing.Lr.__init__(self, spec)

        from parsing.tests.specs import a
        spec = parsing.Spec(a)

        parser = TestParser(spec)
        parser.token_from_class(a.TokenId)
        parser.token_from_class(a.TokenStar)
        parser.token_from_class(a.TokenId)
        parser.token_from_class(a.TokenPlus)
        parser.token_from_class(a.TokenId)
        parser.eoi()
        self.assertEqual(len(parser.start), 1)
        self.assertEqual(parser.start[0].val, '[[ID * ID] + ID]')

        parser = TestParser(spec)
        parser.token_from_class(a.TokenId)
        parser.token_from_class(a.TokenPlus)
        parser.token_from_class(a.TokenId)
        parser.token_from_class(a.TokenStar)
        parser.token_from_class(a.TokenId)
        parser.eoi()
        self.assertEqual(len(parser.start), 1)
        self.assertEqual(parser.start[0].val, '[ID + [ID * ID]]')

        parser = TestParser(spec)
        parser.token_from_class(a.TokenId)
        parser.token_from_class(a.TokenStar)
        parser.token_from_class(a.TokenLparen)
        parser.token_from_class(a.TokenId)
        parser.token_from_class(a.TokenPlus)
        parser.token_from_class(a.TokenId)
        parser.token_from_class(a.TokenRparen)
        parser.eoi()
        self.assertEqual(len(parser.start), 1)
        self.assertEqual(parser.start[0].val, '[ID * ([ID + ID])]')
Example #10
    def test_basic_b(self):
        class TestParser(parsing.Glr):
            def __init__(self, spec):
                parsing.Glr.__init__(self, spec)

        from parsing.tests.specs import b
        spec = parsing.Spec(b)

        parser = TestParser(spec)
        parser.token_from_class(b.id)
        parser.token_from_class(b.star)
        parser.token_from_class(b.id)
        parser.token_from_class(b.plus)
        parser.token_from_class(b.id)
        parser.eoi()
        self.assertEqual(len(parser.start), 1)
        self.assertEqual(parser.start[0].val, '[[ID * ID] + ID]')

        parser = TestParser(spec)
        parser.token_from_class(b.id)
        parser.token_from_class(b.plus)
        parser.token_from_class(b.id)
        parser.token_from_class(b.star)
        parser.token_from_class(b.id)
        parser.eoi()
        self.assertEqual(len(parser.start), 1)
        self.assertEqual(parser.start[0].val, '[ID + [ID * ID]]')

        parser = TestParser(spec)
        parser.token_from_class(b.id)
        parser.token_from_class(b.star)
        parser.token_from_class(b.lparen)
        parser.token_from_class(b.id)
        parser.token_from_class(b.plus)
        parser.token_from_class(b.id)
        parser.token_from_class(b.rparen)
        parser.eoi()
        self.assertEqual(len(parser.start), 1)
        self.assertEqual(parser.start[0].val, '[ID * ([ID + ID])]')
Example #11
                except:
                    raise SyntaxError("Unrecognized token: %s" % word)

        # Tell the parser that the end of input has been reached.
        self.eoi()


# ===============================================================================
# Main code.

# Introspect this module to generate a parser.  Enable all the bells and
# whistles.
adapter = ModuleSpecSource(sys.modules[__name__])
spec = Parsing.Spec(adapter,
                    pickleFile="example1.pickle",
                    skinny=False,
                    logFile="example1.log",
                    graphFile="example1.dot",
                    verbose=True)
# example1.log is a human-readable representation of the parser tables.
# Suppose that you are trying to figure out what the parser is doing for a
# particular input, in this case, "2 * * 3".  If you have parsing verbosity
# enabled, you will see something like this:
#
#   STACK: <e>
#          0
#   INPUT: int
#      --> [shift 1]
#   STACK: <e> int
#          0   1
#   INPUT: star
#      --> [reduce Expr ::= int. [none]]
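#
# Reading the trace: each STACK line shows the symbols currently on the
# parse stack with their state numbers beneath them, INPUT names the
# lookahead token, and the --> line is the action taken: either a shift
# into a numbered state or a reduction by the printed production.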
Example #12
    g = tokenize.generate_tokens(f.readline)
    for code, val, spos, epos, line in g:
        name = tokenize.tok_name[code]
        # see tokenize docs to grok the 'NL' token
        if name not in ('COMMENT', 'NL', 'INDENT'):
            yield (name, val)


spec = parsing.Spec(
    sys.modules[__name__],
    pickleFile="meta.pickle",
    #skinny=False,
    #logFile="meta.log",
    #graphFile="meta.dot",
    #verbose=True
)
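# The commented-out keyword arguments above mirror the debugging set from
# Example #11 (skinny=False, logFile, graphFile, verbose); re-enable them
# to regenerate meta.log and meta.dot when changing the grammar.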


def parse_grammar(filename):
    p = parser(spec)
    #p.verbose = True
    g = tokenize_grammar(filename)
    return p.scan(g)


if __name__ == '__main__':
    import sys
    from pprint import pprint as pp