Code Example #1
File: test_tokenizer.py Project: abg/holland_restore
def test_tokenize_blank():
    tokenize_blank = token_rules.tokenize_blank
    scanner = Scanner("\n")
    token = tokenize_blank(scanner.next(), scanner)
    assert_equals(token.symbol, 'BlankLine')
    scanner = Scanner(["foo\n"])
    token = tokenize_blank(scanner.next(), scanner)
    ok_(token is None, "Got token %r, expected None" % token)
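These snippets omit their module-level imports. A plausible preamble for running them, assuming nose-style assertions and Python 2 (the tests use scanner.next() and StringIO); the holland_restore module path below is a guess, not taken from the project:

import textwrap
from StringIO import StringIO  # Python 2 StringIO, matching the .next() style used in the tests

from nose.tools import assert_equals, assert_raises, ok_

# Hypothetical import path -- the actual location of Scanner and token_rules
# inside holland_restore may differ.
from holland_restore.tokenizer import Scanner, token_rules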
Code Example #2
File: test_tokenizer.py Project: abg/holland_restore
def test_tokenize_trigger():
    text = textwrap.dedent("""
    DELIMITER ;;
    /*!50003 CREATE*/ /*!50017 DEFINER=`root`@`localhost`*/ /*!50003 TRIGGER `customer_create_date` BEFORE INSERT ON `customer` FOR EACH ROW SET NEW.create_date = NOW() */;;
    DELIMITER ;
    """).lstrip()

    scanner = Scanner(StringIO(text))
    token = token_rules.tokenize_delimiter(scanner.next(), scanner)
    assert_equals(token.symbol, 'CreateTrigger')
    assert_equals(token.text, text)
Code Example #3
File: test_tokenizer.py Project: abg/holland_restore
def test_make_token():
    stream = textwrap.dedent("""
    -- Host: localhost    Database: sakila
    -- Dumping data for table `actor`
    """).splitlines()
    scanner = Scanner(stream)
    line = scanner.next()
    token = token_rules.make_token('SqlComment', line, scanner)

    assert_equals(token.text, line)
    assert_equals(token.symbol, 'SqlComment')
    assert_equals(token.offset, 0)
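The assertions above imply that make_token bundles a symbol, the matched line, and the scanner's current byte offset into a token object. A minimal sketch of that shape, inferred from the test rather than taken from holland_restore (the real token type also carries fields such as line_range, used in Example #6):

import collections

# Illustrative token shape only; the project's actual token class may differ.
Token = collections.namedtuple('Token', 'symbol text offset')

def make_token(symbol, line, scanner):
    # scanner.position is assumed to report (lines consumed, byte offset),
    # matching the behaviour checked in Example #5 below.
    _, offset = scanner.position
    return Token(symbol=symbol, text=line, offset=offset)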
Code Example #4
File: test_tokenizer.py Project: abg/holland_restore
def test_tokenizer_prefix():
    """Test that we tokenize a line that matches a prefix"""
    stream = textwrap.dedent("""\
    -- Dumping data for table `actor`
    """).splitlines()
    scanner = Scanner(stream)

    tokenize_prefix = token_rules.tokenize_prefix
    make_token = token_rules.make_token
    tokenizer = tokenize_prefix('-- Dumping data', 
                                make_token, 
                                'TableData')
    token = tokenizer(scanner.next(), scanner)
    assert_equals(token.symbol, 'TableData')
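The behaviour exercised here suggests tokenize_prefix is a small factory: given a prefix string, a token constructor, and a symbol name, it returns a tokenizer that emits a token when a line starts with the prefix and returns None otherwise. A hedged sketch along those lines, not the actual holland_restore implementation:

def tokenize_prefix(prefix, token_factory, symbol):
    """Return a tokenizer matching lines that start with `prefix` (sketch only)."""
    def tokenizer(line, scanner):
        if line.startswith(prefix):
            return token_factory(symbol, line, scanner)
        return None  # non-matching lines produce no token, as in Example #1
    return tokenizer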
Code Example #5
File: test_scanner.py Project: abg/holland_restore
def test_scanner_position():
    """Test scanner line counts and byte offsets"""
    lines = [
        "foo\n",
        "bar\n",
        "baz\n",
    ]

    scanner = Scanner(lines)
    i = iter(scanner)
    # initial position
    assert_equals(scanner.position, (0, 0))
    i.next()
    assert_equals(scanner.position, (1, 0))
    token = i.next()
    assert_equals(scanner.position, (2, 4))
    scanner.push_back(token)
    assert_equals(scanner.position, (1, 0))
Code Example #6
File: test_tokenizer.py Project: abg/holland_restore
def test_tokenize_multiline():
    text = textwrap.dedent("""
    CREATE TABLE `actor` (
        `actor_id` smallint(5) unsigned NOT NULL AUTO_INCREMENT,
        `first_name` varchar(45) NOT NULL,
        `last_name` varchar(45) NOT NULL,
        `last_update` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
        PRIMARY KEY (`actor_id`),
        KEY `idx_actor_last_name` (`last_name`)
    ) ENGINE=InnoDB AUTO_INCREMENT=201 DEFAULT CHARSET=utf8;
    """).lstrip()

    tokenize_multi_line = token_rules.tokenize_multi_line
    scanner = Scanner(StringIO(text))
    token = tokenize_multi_line('CreateTable', ';', scanner.next(), scanner)
    assert_equals(token.symbol, 'CreateTable')
    assert_equals(token.line_range, (1, text.count("\n")))
    ok_(token.text.startswith('CREATE TABLE `actor`'))
    ok_(token.text.rstrip().endswith(';'))
Code Example #7
File: test_scanner.py Project: abg/holland_restore
def test_scanner_pushback():
    """Test scanner push_back"""
    lines = [
        "foo\n",
        "bar\n",
        "baz\n",
    ]
    scanner = Scanner(lines)

    i = iter(scanner)
    first_token = i.next()
    assert_equals(first_token, lines[0])
    scanner.push_back(first_token)
    second_token = i.next()
    assert_equals(second_token, first_token)
    third_token = i.next()
    assert_equals(third_token, lines[1])
    fourth_token = i.next()
    assert_equals(fourth_token, lines[2])
    assert_raises(StopIteration, i.next)
    scanner.push_back(fourth_token)
    fifth_token = i.next()
    assert_equals(fourth_token, fifth_token)
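Taken together, the two test_scanner.py examples (#5 and #7) pin down the Scanner interface: iterating yields raw lines, position reports (lines consumed, byte offset at which the most recently consumed line starts), and push_back returns a line to the stream while rolling the position back. A minimal sketch that satisfies those expectations, written against the Python 2 iterator protocol the tests use and not claiming to be the project's implementation:

class Scanner(object):
    """Illustrative line scanner with position tracking and push_back (sketch)."""

    def __init__(self, lines):
        self._lines = iter(lines)
        self._pushed = []            # lines handed back via push_back()
        self._history = [(0, 0)]     # (lines consumed, offset of last consumed line)
        self._next_offset = 0        # byte offset of the next unread line

    @property
    def position(self):
        return self._history[-1]

    def __iter__(self):
        return self

    def next(self):                  # Python 2 iterator protocol, as in the tests
        if self._pushed:
            line = self._pushed.pop()
        else:
            line = self._lines.next()
        count, _ = self._history[-1]
        self._history.append((count + 1, self._next_offset))
        self._next_offset += len(line)
        return line

    def push_back(self, line):
        # Undo the bookkeeping for `line` and queue it to be read again.
        self._pushed.append(line)
        self._history.pop()
        self._next_offset -= len(line)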
Code Example #8
File: test_tokenizer.py Project: abg/holland_restore
def test_tokenize_stored_procedure():
    text = textwrap.dedent("""
    DELIMITER ;;
    /*!50003 CREATE*/ /*!50020 DEFINER=`root`@`localhost`*/ /*!50003 PROCEDURE `film_in_stock`(IN p_film_id INT, IN p_store_id INT, OUT p_film_count INT)
        READS SQL DATA
    BEGIN
        SELECT inventory_id
        FROM inventory
        WHERE film_id = p_film_id
        AND store_id = p_store_id
        AND inventory_in_stock(inventory_id);

        SELECT FOUND_ROWS() INTO p_film_count;
    END */;;
    DELIMITER ;
    """).lstrip()

    scanner = Scanner(StringIO(text))
    tokenize_delimiter = token_rules.tokenize_delimiter
    token = tokenize_delimiter(scanner.next(),
                               scanner)
    assert_equals(token.symbol, 'CreateRoutine')
    assert_equals(token.text, text)