def test_match():
    vector = lexicon.scan('princess')
    assert_equal(parser.match(vector, 'noun'), ('noun', 'princess'))
    vector = lexicon.scan('go')
    assert_equal(parser.match(vector, 'verb'), ('verb', 'go'))
    vector = lexicon.scan('the')
    assert_equal(parser.match(vector, 'error'), None)

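All of these variants assume the nose assertion helpers and the lexicon/parser modules are already imported at the top of the test file, roughly like the sketch below. The package names are assumptions; adjust them to wherever your own lexicon and parser live.

from nose.tools import *  # assert_equal, assert_is_none, etc.
from ex48 import lexicon  # assumed module layout, not part of the original snippets
from ex49 import parser
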
def test_match():
    blank = []
    assert_equal(parser.match(blank, None), None)
    assert_equal(parser.match([('verb', 'eat'), ('stop', 'a'), ('noun', 'bagel')], 'verb'),
                 ('verb', 'eat'))
    assert_equal(parser.match([('verb', 'eat'), ('stop', 'a'), ('noun', 'bagel')], 'noun'),
                 None)

def test_match():
    wordlist = {'direction': "east", 'verb': "eat", 'stop': "in",
                'noun': "princess", 'number': 324, 'error': "err"}
    for (wtype, word) in wordlist.items():
        result = lexicon.scan(str(word))
        # the first match consumes the word, so a second match on the emptied list returns None
        assert_equal(parser.match(result, wtype)[1], word)
        assert_equal(parser.match(result, wtype), None)
    result = lexicon.scan("kill")
    assert_equal(parser.match(result, 'noun'), None)

def test_match():
    assert_equal(parser.match([('direction', 'north')], 'direction'),
                 ('direction', 'north'))
    assert_equal(parser.match([('noun', 'princess')], 'direction'), None)
    assert_equal(parser.match([('noun', 'princess'), ('direction', 'north')], 'direction'),
                 None)
    assert_equal(parser.match([], 'direction'), None)

def test_match():
    # test the successful case
    sentence = [('verb', 'go')]
    assert_equal(parser.match(sentence, 'verb'), ('verb', 'go'))
    # test with an incorrect expected type
    sentence = lexicon.scan("north and beyond the wall")
    assert_equal(parser.match(sentence, 'noun'), None)
    # test with an empty list
    assert_equal(parser.match([], 'direction'), None)

def test_match():
    word_list = [('verb', 'kill'), ('stop', 'the'), ('noun', 'bear')]

    # when there is a match
    word = parser.match(word_list[:], 'verb')
    assert_equal('verb', word[0])

    # when there is no match
    word = parser.match(word_list[:], 'stop')
    assert_is_none(word)

    # when word_list is None
    word = parser.match(None, 'stop')
    assert_is_none(word)

def test_match(): word_list = lexicon.scan("bear eat cabinet") assert_equal(("noun", "bear"), parser.match(word_list, "noun")) assert_equal(("verb", "eat"), parser.match(word_list, "verb")) assert_equal(("noun", "cabinet"), parser.match(word_list, "noun")) assert_equal(None, parser.match(word_list, "noun")) assert_equal(None, parser.match(word_list, "asfdasdf")) word_list = lexicon.scan("eat north") assert_equal(None, parser.match(word_list, "safasdfa")) assert_equal(("direction", "north"), parser.match(word_list, "direction"))
def test_match():
    # one word
    line1 = "bear"
    word_tuples1 = lexicon.scan(line1)
    assert_equal("bear", parser.match(word_tuples1, 'noun')[1])
    # here we should get None for any type, since the word_list is now empty
    assert_equal(None, parser.match(word_tuples1, 'noun'))

    # multiple words
    # note that parser.match consumes the tuples one by one
    line2 = "bear eat the princess"
    word_tuples2 = lexicon.scan(line2)
    assert_equal(4, len(word_tuples2))
    assert_equal("bear", parser.match(word_tuples2, "noun")[1])
    assert_equal(3, len(word_tuples2))
    assert_equal("eat", parser.match(word_tuples2, "verb")[1])
    assert_equal(2, len(word_tuples2))
    assert_equal("the", parser.match(word_tuples2, 'stop')[1])
    assert_equal(1, len(word_tuples2))
    assert_equal("princess", parser.match(word_tuples2, 'noun')[1])
    assert_equal(0, len(word_tuples2))

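The shrinking length checks above rely on parser.match consuming the first token of word_list on every call, whether or not it matches. A minimal sketch of the match these tests assume, based on the standard exercise parser; individual authors' code may differ:

def match(word_list, expecting):
    # guard against None and empty lists
    if word_list:
        word = word_list.pop(0)  # consume the first token either way
        if word[0] == expecting:
            return word
        else:
            return None
    else:
        return None
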
def test_match(): assert_equal(parser.match(lexicon.scan("go"), 'verb'), ('verb', 'go')) result = parser.match(lexicon.scan("bear"), 'noun') assert_equal(result, ('noun', 'bear'))
def test_match():
    print(lexicon.sentence)
    assert_equal(parser.match(lexicon.sentence, 'direction'), ('direction', 'north'))
    print(lexicon.sentence)
    assert_equal(parser.match(lexicon.sentence, 'direction'), ('direction', 'south'))
    print(lexicon.sentence)

def test_match():
    assert_equal(parser.match(word_list, 'stop'), ('stop', 'a'))
    # match each phrase against the type of its own first token
    assert_equal([parser.match(s, s[0][0]) for s in PHRASE_LIST],
                 [('verb', 'stop'), ('noun', 'princess'), ('verb', 'open'),
                  ('error', 'banana')])
    force_exception = lexicon.scan('banana banana banana')
    assert_equal(parser.match(force_exception, 'noun'), None)

def test_match(): assert_equal(parser.match(lexicon.scan("kill stop bear"), "verb"), ('verb', 'kill')) assert_equal(parser.match(lexicon.scan("kill stop bear"), "stop"), None)
def test_match():
    word_list = lexicon.scan('princess')
    assert_equal(parser.match(word_list, 'noun'), ('noun', 'princess'))
    assert_equal(parser.match(word_list, 'stop'), None)
    assert_equal(parser.match(None, 'noun'), None)

def test_match():
    word_list = [('noun', 'princess'), ('stop', 'to')]
    assert_equal(parser.match(word_list, 'noun'), ('noun', 'princess'))
    assert_equal(parser.match(word_list, 'noun'), None)
    assert_equal(parser.match(word_list, 'stop'), None)

def test_match():
    assert_equal(parser.match([('verb', 'run')], 'verb'), ('verb', 'run'))

def test_match():
    word_list = []
    assert parser.match(word_list, 'noun') is None
    word_list = lexicon.scan("Princess Kill Bear")
    # the first token ('Princess') is consumed without matching 'verb'
    assert parser.match(word_list, 'verb') is None
    assert parser.match(word_list, 'verb') == ('verb', 'Kill')

def test_match():
    match = parser.match([('verb', 'hunt'), ('noun', 'bear')], 'verb')
    assert_equal(match, ('verb', 'hunt'))

def test_match():
    word_list = [('type_a', 'word_a'), ('type_b', 'word_b')]
    assert_equal(parser.match(word_list, 'type_a'), ('type_a', 'word_a'))
    assert_equal(parser.match(word_list, 'type_b'), ('type_b', 'word_b'))
    assert_equal(parser.match(word_list, 'type_c'), None)

def test_match_with_no_matches():
    wordlist = [('noun', 'bear')]
    result = parser.match(wordlist, 'verb')
    assert_equal(result, None)
    # the non-matching word is still consumed
    assert_equal(wordlist, [])

def test_match_with_none():
    result = parser.match(None, 'verb')
    assert_equal(result, None)

def test_match():
    wordlist = [('verb', 'run')]
    result = parser.match(wordlist, 'verb')
    assert_equal(result, ('verb', 'run'))
    assert_equal(wordlist, [])

def test_match_not_expecting():
    assert_is_none(parser.match([('error', 'SPAM')], 'noun'))

def test_match():
    assert_equal(parser.match([('verb', 'go')], 'verb'), ('verb', 'go'))
    assert_equal(parser.match([('stop', 'the')], 'noun'), None)
    assert_equal(parser.match(None, 'stop'), None)

def test_match():
    # word_list is [('noun', 'bear')]
    test = parser.match([('noun', 'bear')], 'noun')
    assert_equal(test, ('noun', 'bear'))

def test_match():
    word_list2 = lexicon.scan("in the west")
    result = parser.match(word_list2, 'stop')
    assert_equal(result, ('stop', 'in'))

def test_match(): result = parser.match(lexicon.scan("234 445"), 'number') assert_equal(result, ('number', '234')) result = parser.match(lexicon.scan("234 445"), 'noun') assert_equal(result, None)
def test_match():
    assert_equal(parser.match(word_list1, 'direction'), None)
    assert_equal(parser.match(word_list3, 'assss'), ('assss', 'ddddd'))

def test_match():
    assert_equal(parser.match([('direction', 'north')], 'direction'),
                 ('direction', 'north'))
    assert_equal(parser.match([('direction', 'north')], 'verb'), None)
    assert_equal(parser.match([], ''), None)

def test_match():
    assert_equal(parser.match([('obj', 'apple')], 'obj'), ('obj', 'apple'))

def test_match():
    assert_equal(parser.match(word_list, 'stop'), ('stop', 'the'))
    assert_equal(parser.match(word_list, 'verb'), None)

def test_match():
    assert_equal(parser.match([], 'what is that'), None)
    assert_equal(parser.match([('oh', 'run'), ('direction', 'up')], 'oh'), ('oh', 'run'))
    assert_equal(parser.match([('aiyo', 'aiya'), ('direction', 'up')], 'verb'), None)

def test_none_word_list():
    assert_is_none(parser.match(None, 'noun'))

def test_skip():
    word_list = [('stop', 'will'), ('stop', 'the'), ('noun', 'bear')]
    parser.skip(word_list, 'stop')
    match = parser.match(word_list, 'noun')
    assert_equal(match, ('noun', 'bear'))

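For this test to pass, parser.skip must drop every leading token of the given type before match runs. A minimal sketch consistent with the match sketch above; the peek helper is an assumption, not necessarily this author's code:

def peek(word_list):
    # return the type of the next token without consuming it
    if word_list:
        return word_list[0][0]
    else:
        return None

def skip(word_list, word_type):
    # discard tokens from the front as long as they are of word_type
    while peek(word_list) == word_type:
        match(word_list, word_type)
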
def test_match():
    assert_equal(parser.match(word_list_2, 'noun'), ('noun', 'princess'))
    assert_equal(parser.match(word_list_2, 'stop'), None)
    assert_equal(parser.match('', 'noun'), None)

def test_match_empty_word_list():
    empty_word_list = []
    assert_equal(parser.match(empty_word_list, 'noun'), None)

def test_match():
    assert_equal(parser.match([], ''), None)
    assert_equal(parser.match([('noun', 'Boss')], 'something'), None)
    assert_equal(parser.match([('noun', 'Boss')], 'noun'), ('noun', 'Boss'))
    assert_equal(parser.match([('noun', 'Boss'), ('verb', 'kill')], 'noun'),
                 ('noun', 'Boss'))

def test_match():
    assert_equal(parser.match(lexicon.scan(test_sentence), 'error'), ('error', 'this'))

def test_match():
    assert_equal(parser.match([('stop', 'the'), ('direction', 'north')], 'stop'),
                 ('stop', 'the'))
    assert_equal(parser.match([('verb', 'run'), ('direction', 'north')], 'noun'), None)
    match = parser.match([], 'verb')
    assert_equal(match, None)

def test_match():
    word_list = [('verb', 'go'), ('direction', 'north'), ('stop', 'at'), ('noun', 'door')]
    assert_equal(parser.match(word_list, 'verb'), ('verb', 'go'))
    assert_equal(parser.match(word_list, 'stop'), None)
    assert_equal(parser.match(None, 'noun'), None)

def test_match():
    assert_equal(parser.match([['verb', 'go'], ['direction', 'north']], 'verb'),
                 ['verb', 'go'])

def test_match():
    assert_equal(parser.match(word_list, 'noun'), ('noun', 'bear'))

def test_match_not_equal():
    word_list = [('noun', 'princess'), ('verb', 'throws'), ('object', 'coins')]
    match_not_equal = parser.match(word_list, 'foo')
    assert_equal(match_not_equal, None)