# Tests for parser.tokenize(). assert_equal comes from nose.tools;
# parser is the module under test (assumed importable from the same package).
from nose.tools import assert_equal

import parser


def test__tokenize__articles():
    assert_equal(parser.tokenize("a"), [("article", "a")])
    result = parser.tokenize("an the")
    assert_equal(result, [("article", "an"), ("article", "the")])

def test__tokenize__errors():
    assert_equal(parser.tokenize("ABCDEFG"), [("error", "ABCDEFG")])
    result = parser.tokenize("HIJKLMNO C3P0")
    assert_equal(result, [("error", "HIJKLMNO"), ("error", "C3P0")])


def test__tokenize__prepositions():
    assert_equal(parser.tokenize("to"), [("preposition", "to")])
    result = parser.tokenize("from into")
    assert_equal(result, [("preposition", "from"), ("preposition", "into")])


def test__tokenize__numbers():
    assert_equal(parser.tokenize("1234"), [("number", 1234)])
    result = parser.tokenize("567 789")
    assert_equal(result, [("number", 567), ("number", 789)])


def test__tokenize__nouns():
    assert_equal(parser.tokenize("bear"), [("noun", "bear")])
    result = parser.tokenize("bear princess")
    assert_equal(result, [("noun", "bear"), ("noun", "princess")])


def test__tokenize__verbs():
    assert_equal(parser.tokenize("go"), [("verb", "go")])
    result = parser.tokenize("go kill eat")
    assert_equal(result, [("verb", "go"), ("verb", "kill"), ("verb", "eat")])


def test__tokenize__directions():
    assert_equal(parser.tokenize("north"), [("direction", "north")])
    result = parser.tokenize("north south east")
    assert_equal(result, [("direction", "north"), ("direction", "south"), ("direction", "east")])
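
# A minimal sketch of a tokenize() that would satisfy the tests above.
# The word lists here are assumptions inferred from the expected tuples;
# the real parser module may recognize more words or case-fold its input.

ARTICLES = {"a", "an", "the"}
PREPOSITIONS = {"to", "from", "into"}
NOUNS = {"bear", "princess"}
VERBS = {"go", "kill", "eat"}
DIRECTIONS = {"north", "south", "east", "west"}


def tokenize(sentence):
    """Split a sentence into (category, word) tuples.

    Digit strings become ("number", int(word)); any unrecognized word
    becomes an ("error", word) tuple, matching the tests above.
    """
    tokens = []
    for word in sentence.split():
        if word in ARTICLES:
            tokens.append(("article", word))
        elif word in PREPOSITIONS:
            tokens.append(("preposition", word))
        elif word in NOUNS:
            tokens.append(("noun", word))
        elif word in VERBS:
            tokens.append(("verb", word))
        elif word in DIRECTIONS:
            tokens.append(("direction", word))
        elif word.isdigit():
            tokens.append(("number", int(word)))
        else:
            tokens.append(("error", word))
    return tokens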