Example #1
from io import StringIO
from sqlparse import lexer

def test_stream_simple():
    stream = StringIO("SELECT 1; SELECT 2;")

    tokens = lexer.tokenize(stream)
    assert len(list(tokens)) == 9

    stream.seek(0)
    tokens = list(lexer.tokenize(stream))
    assert len(tokens) == 9

    stream.seek(0)
    tokens = list(lexer.tokenize(stream))
    assert len(tokens) == 9
Example #2
import types
from sqlparse import lexer
from sqlparse import tokens as T

def test_tokenize_simple():
    s = 'select * from foo;'
    stream = lexer.tokenize(s)
    assert isinstance(stream, types.GeneratorType)
    tokens = list(stream)
    assert len(tokens) == 8
    assert len(tokens[0]) == 2
    assert tokens[0] == (T.Keyword.DML, 'select')
    assert tokens[-1] == (T.Punctuation, ';')
Example #3
from sqlparse import lexer
from sqlparse import tokens as T

def test_tokenize_inline_keywords():
    # issue 7
    s = "create created_foo"
    tokens = list(lexer.tokenize(s))
    assert len(tokens) == 3
    assert tokens[0][0] == T.Keyword.DDL
    assert tokens[2][0] == T.Name
    assert tokens[2][1] == 'created_foo'
    s = "enddate"
    tokens = list(lexer.tokenize(s))
    assert len(tokens) == 1
    assert tokens[0][0] == T.Name
    s = "join_col"
    tokens = list(lexer.tokenize(s))
    assert len(tokens) == 1
    assert tokens[0][0] == T.Name
    s = "left join_col"
    tokens = list(lexer.tokenize(s))
    assert len(tokens) == 3
    assert tokens[2][0] == T.Name
    assert tokens[2][1] == 'join_col'
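To see this boundary handling directly, a quick sketch (upstream sqlparse assumed) comparing a real keyword pair with an identifier that merely starts with one:

from sqlparse import lexer

for s in ("left join foo", "left join_col"):
    print(s, '->', [(str(ttype), value) for ttype, value in lexer.tokenize(s)])
# "left join" lexes as two keywords; "join_col" stays a single Name token.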
Example #4
    # Method of a filter-stack class (cf. sqlparse.engine.FilterStack in
    # upstream sqlparse); relies on the self.preprocess, self.stmtprocess,
    # and self.postprocess lists and the self._grouping flag set up elsewhere.
    def run(self, sql, encoding=None):
        stream = lexer.tokenize(sql, encoding)
        # Process token stream
        for filter_ in self.preprocess:
            stream = filter_.process(stream)

        stream = StatementSplitter().process(stream)

        # Output: Stream processed Statements
        for stmt in stream:
            if self._grouping:
                gc = grouping_class.grouping()
                # stmt = grouping.group(stmt)
                stmt = gc.group(stmt)

            for filter_ in self.stmtprocess:
                filter_.process(stmt)

            for filter_ in self.postprocess:
                stmt = filter_.process(stmt)

            yield stmt
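This run() mirrors the upstream sqlparse.engine.FilterStack.run, except that grouping here goes through a grouping_class wrapper instead of upstream's sqlparse.engine.grouping.group. A minimal sketch of driving the upstream stack, assuming the standard sqlparse package:

from sqlparse.engine import FilterStack

stack = FilterStack()
stack.enable_grouping()  # sets the _grouping flag checked inside run()
for stmt in stack.run("SELECT 1; SELECT 2;"):
    # run() yields sqlparse.sql.Statement objects
    print(type(stmt).__name__, repr(str(stmt)))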
Example #5
from sqlparse import lexer
from sqlparse import tokens as T

def test_tokenize_negative_numbers():
    s = "values(-1)"
    tokens = list(lexer.tokenize(s))
    assert len(tokens) == 4
    assert tokens[2][0] == T.Number.Integer
    assert tokens[2][1] == '-1'
Example #6
import pytest
from sqlparse import lexer

# illustrative linebreak inputs for the parametrized test
@pytest.mark.parametrize('s', ['foo\nbar\n', 'foo\rbar\r', 'foo\r\nbar\r\n'])
def test_tokenize_linebreaks(s):
    # issue1
    tokens = lexer.tokenize(s)
    assert ''.join(str(x[1]) for x in tokens) == s
Example #7
from sqlparse import lexer
from sqlparse import tokens as T

def test_tokenize_backticks():
    s = '`foo`.`bar`'
    tokens = list(lexer.tokenize(s))
    assert len(tokens) == 3
    assert tokens[0] == (T.Name, '`foo`')
Example #8
from io import StringIO
from sqlparse import lexer
from sqlparse import tokens as T

def test_stream_error():
    stream = StringIO("FOOBAR{")

    tokens = list(lexer.tokenize(stream))
    assert len(tokens) == 2
    assert tokens[1][0] == T.Error
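The lexer does not raise on input it cannot match: the unmatched character is emitted as a T.Error token. A small sketch, assuming upstream sqlparse:

from sqlparse import lexer
from sqlparse import tokens as T

tokens = list(lexer.tokenize("FOOBAR{"))
# The '{' that no rule matches comes back as an error token
# instead of raising an exception.
assert tokens[-1] == (T.Error, '{')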