Esempio n. 1
0
 def test_indent_arrow(self):
     """Continuation lines under a rule are marked by a single indent token."""
     data = "q4 0 -> P1 R q3\n   1 -> Px q4\n\n   x -> P0 L q3\n\n"
     # The " " entries below are the indent tokens for the continuation lines.
     expected = ["q4", "0", "->", "P1", "R", "q3", "\n"]
     expected += [" ", "1", "->", "Px", "q4", "\n"]
     expected += [" ", "x", "->", "P0", "L", "q3", "\n"]
     assert list(tokenizer(data)) == expected
 def test_single_mconf_in_first_line(self):
     """A lone m-configuration on the first line merges with the next rule."""
     data = "A \n  b -> Pxx2 R E alpha\n"
     expected = "A b -> Pxx2 R E alpha".split() + ["\n"]
     assert list(tokenizer(data)) == expected
 def test_empty_lines(self):
     """Blank lines are ignored and a deeply indented line continues the rule."""
     data = "\n\nq312 0 -> P1 R\n\n\n              P0 L L q2\n\n"
     expected = "q312 0 -> P1 R P0 L L q2".split() + ["\n"]
     assert list(tokenizer(data)) == expected
 def test_simple_multiline_rule(self):
     """An indented second line is folded into the rule started above it."""
     data = "q3 0 -> P1 R\n        P0 R q2\n"
     expected = "q3 0 -> P1 R P0 R q2".split() + ["\n"]
     assert list(tokenizer(data)) == expected
Esempio n. 5
0
 def test_three_full_line_rules(self):
     """Each complete rule line produces its tokens plus a newline token."""
     rule_lines = ["q1 1 -> P0 R q2", "q2 0 -> P1 q2", "q2 1 -> E L q1"]
     data = "".join(line + "\n" for line in rule_lines)
     expected = []
     for line in rule_lines:
         expected.extend(line.split())
         expected.append("\n")
     assert list(tokenizer(data)) == expected
 def test_indent_arrow(self):
     """Rules continued by an indented arrow get a single-space indent token."""
     data = "q4 0 -> P1 R q3\n   1 -> Px q4\n\n   x -> P0 L q3\n\n"
     expected = (
         "q4 0 -> P1 R q3".split() + ["\n", " "]
         + "1 -> Px q4".split() + ["\n", " "]
         + "x -> P0 L q3".split() + ["\n"]
     )
     assert list(tokenizer(data)) == expected
 def test_three_full_line_rules(self):
     """Three self-contained rules, each terminated by its own newline token."""
     data = "q1 1 -> P0 R q2\nq2 0 -> P1 q2\nq2 1 -> E L q1\n"
     expected = (
         ["q1", "1", "->", "P0", "R", "q2", "\n"]
         + ["q2", "0", "->", "P1", "q2", "\n"]
         + ["q2", "1", "->", "E", "L", "q1", "\n"]
     )
     assert list(tokenizer(data)) == expected
Esempio n. 8
0
 def test_single_mconf_in_first_line(self):
     """The bare "A" on line one belongs to the rule that follows it."""
     data = "A \n  b -> Pxx2 R E alpha\n"
     tokens = list(tokenizer(data))
     assert tokens == ["A", "b", "->", "Pxx2", "R", "E", "alpha", "\n"]
 def test_square_brackets_in_symbols(self):
     """Brackets tokenize separately from the symbols they enclose."""
     data = (
         "q1 \n  [0 4] -> Pinf R aba\n  [1 '2' 3] -> P-1 k\n"
         "  [xTj'c] -> P0\nç  \n\n  - -> +"
     )
     # The " " entries are indent tokens for the continuation lines.
     expected = ["q1", "[", "0", "4", "]", "->", "Pinf", "R", "aba", "\n"]
     expected += [" ", "[", "1", "'2'", "3", "]", "->", "P-1", "k", "\n"]
     expected += [" ", "[", "xTj'c", "]", "->", "P0", "ç", "\n"]
     expected += [" ", "-", "->", "+", "\n"]
     assert list(tokenizer(data)) == expected
Esempio n. 10
0
 def test_square_brackets_in_symbols(self):
     """Bracketed symbol groups split into "[", the inner symbols, and "]"."""
     data = (
         "q1 \n"
         + "  [0 4] -> Pinf R aba\n"
         + "  [1 '2' 3] -> P-1 k\n"
         + "  [xTj'c] -> P0\nç  \n\n"
         + "  - -> +"
     )
     # Each " " below is an indent token marking a continuation line.
     expected = (
         ["q1"]
         + ["[", "0", "4", "]", "->", "Pinf", "R", "aba", "\n"]
         + [" ", "[", "1", "'2'", "3", "]", "->", "P-1", "k", "\n"]
         + [" ", "[", "xTj'c", "]", "->", "P0", "ç", "\n"]
         + [" ", "-", "->", "+", "\n"]
     )
     assert list(tokenizer(data)) == expected
 def test_no_token_at_all(self):
     """Empty or whitespace-only input yields exactly one newline token."""
     for source in ("\n\n\n", "", "\n \n  \n"):
         assert list(tokenizer(source)) == ["\n"]
 def test_simple_full_line_rule(self):
     """A trailing newline token is emitted even when the input has none."""
     data = "q1 1 -> P0 R q2"
     assert list(tokenizer(data)) == ["q1", "1", "->", "P0", "R", "q2", "\n"]
Esempio n. 13
0
 def test_empty_lines(self):
     """Surrounding blank lines vanish; the indented continuation merges in."""
     data = "\n\nq312 0 -> P1 R\n\n\n              P0 L L q2\n\n"
     result = list(tokenizer(data))
     assert result == ["q312", "0", "->", "P1", "R", "P0", "L", "L", "q2", "\n"]
Esempio n. 14
0
 def test_no_token_at_all(self):
     """Every flavor of empty file tokenizes to the single newline token."""
     empty_inputs = ["\n\n\n", "", "\n \n  \n"]
     outputs = [list(tokenizer(src)) for src in empty_inputs]
     assert outputs == [["\n"]] * len(empty_inputs)
Esempio n. 15
0
 def test_simple_multiline_rule(self):
     """A rule split across two lines tokenizes as one continuous rule."""
     data = "q3 0 -> P1 R\n" + "        P0 R q2\n"
     tokens = list(tokenizer(data))
     assert tokens == ["q3", "0", "->", "P1", "R", "P0", "R", "q2", "\n"]
Esempio n. 16
0
 def test_simple_full_line_rule(self):
     """The newline token is always implicit at the end of the stream."""
     expected = "q1 1 -> P0 R q2".split() + ["\n"]
     assert list(tokenizer("q1 1 -> P0 R q2")) == expected