Example #1
    def test_pattern_4(self):
        inputs_ = [
            "Your robot needs to move this table.",
        ]
        for i, input_ in enumerate(inputs_):
            # Tokenize, POS-tag and chunk the sentence with the pattern_4 grammar.
            tokenized_sentence = word_tokenize(input_)
            tagged_sentence = pos_tagger.tag(tokenized_sentence)
            chunker = RegexpParser(pattern_4)
            result = chunker.parse(tagged_sentence)

            pattern_id = "pattern_4"

            error_msg = "Sentence {}-th -- '{}' -- does not contain {} \n {}".format(
                i, tagged_sentence, pattern_id, pattern_4)
            self.assertEqual(hasPattern_x(result, pattern_id), True, error_msg)
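
These test methods rely on module-level names (word_tokenize, pos_tagger, RegexpParser, the pattern_* grammars, hasPattern_x and the correct_* helpers) that the snippets do not show. A minimal sketch of the kind of setup they assume, with an illustrative grammar for pattern_4 and a hypothetical hasPattern_x; the real definitions are project-specific and may differ:

# Minimal sketch of the shared setup (illustrative only; the real pattern_*
# grammars, pos_tagger and hasPattern_x come from the project under test).
from nltk import RegexpParser, pos_tag, word_tokenize


class _PosTagger:
    # Hypothetical stand-in for the project's pos_tagger object.
    def tag(self, tokens):
        return pos_tag(tokens)


pos_tagger = _PosTagger()

# Illustrative chunk grammar: a possessive pronoun followed by a noun,
# e.g. "Your robot". The actual pattern_4 may differ.
pattern_4 = r"pattern_4: {<PRP\$><NN>}"


def hasPattern_x(tree, pattern_id):
    # True if the chunked parse tree contains a subtree labelled pattern_id.
    return any(subtree.label() == pattern_id for subtree in tree.subtrees())
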
Example #2
    def test_pattern_2(self):
        inputs_ = [
            "Move the hand eventually slowly.",
        ]
        for i, input_ in enumerate(inputs_):
            # Tokenize, POS-tag and chunk the sentence with the pattern_2 grammar.
            tokenized_sentence = word_tokenize(input_)
            tagged_sentence = pos_tagger.tag(tokenized_sentence)
            chunker = RegexpParser(pattern_2)
            result = chunker.parse(tagged_sentence)

            pattern_id = "pattern_2"

            error_msg = "Sentence {}-th -- '{}' -- does not contain {} \n {}".format(
                i, tagged_sentence, pattern_id, pattern_2)
            self.assertEqual(hasPattern_x(result, pattern_id), True, error_msg)
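
pattern_2 is referenced but not defined in these snippets. Judging from the test sentences (consecutive adverbs such as "eventually slowly"), a hypothetical stand-in could look like the following; the project's actual grammar may differ:

# Hypothetical grammar for pattern_2: two or more consecutive adverbs
# (RB, RBR or RBS), as in "eventually slowly".
pattern_2 = r"pattern_2: {<RB.?><RB.?>+}"
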
Example #3
    def test_correction_pattern_5(self):
        inputs_ = [("You needs to move this table.",
                    "the user needs to move this table .")]
        for i, (input_, expect_) in enumerate(inputs_):
            # Parse the sentence with the pattern_5 grammar, then apply the correction.
            tokenized_sentence = word_tokenize(input_)
            tagged_sentence = pos_tagger.tag(tokenized_sentence)
            chunker = RegexpParser(pattern_5)
            result = chunker.parse(tagged_sentence)

            correction = correct_5(result)
            correction = " ".join(correction)

            error_msg = "Sentence {}-th -- '{}' -- expect result: '{}' / while correction return: '{}'".format(
                i, input_, expect_, correction)
            self.assertEqual(expect_, correction, error_msg)
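
correct_5 is not shown either. Based on the expected output above ("You needs ..." becomes "the user needs ..."), a hypothetical sketch that rewrites a second-person subject as "the user" could look like this:

# Hypothetical sketch of a correct_5-style rewrite: replace the pronoun "you"
# with "the user" and return the corrected token list.
def correct_5(tree):
    tokens = []
    for word, tag in tree.leaves():
        if tag == "PRP" and word.lower() == "you":
            tokens.extend(["the", "user"])
        else:
            tokens.append(word)
    return tokens
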
Example #4
    def test_pattern_1(self):
        inputs_ = [
            "There is a school bus.",
            "Move that apple juice.",
            "Move that apple pie.",
        ]
        for i, input_ in enumerate(inputs_):
            # Tokenize, POS-tag and chunk the sentence with the pattern_1 grammar.
            tokenized_sentence = word_tokenize(input_)
            tagged_sentence = pos_tagger.tag(tokenized_sentence)
            chunker = RegexpParser(pattern_1)
            result = chunker.parse(tagged_sentence)

            pattern_id = "pattern_1"

            error_msg = "Sentence {}-th -- '{}' -- does not contain {} \n {}".format(
                i, tagged_sentence, pattern_id, pattern_1)
            self.assertEqual(hasPattern_x(result, pattern_id), True, error_msg)
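
As with the other grammars, pattern_1 is defined elsewhere in the project. The test sentences all contain noun-noun compounds ("school bus", "apple juice", "apple pie"), so a hypothetical stand-in might be:

# Hypothetical grammar for pattern_1: a noun-noun compound such as "school bus".
pattern_1 = r"pattern_1: {<NN.?><NN.?>+}"
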
Example #5
    def test_correction_pattern_2(self):
        inputs_ = [("Move the hand eventually slowly.",
                    "Move the hand eventually and slowly ."),
                   ("Move the hand eventually slowly gradually.",
                    "Move the hand eventually and slowly and gradually .")]
        for i, (input_, expect_) in enumerate(inputs_):
            # Parse the sentence with the pattern_2 grammar, then apply the correction.
            tokenized_sentence = word_tokenize(input_)
            tagged_sentence = pos_tagger.tag(tokenized_sentence)
            chunker = RegexpParser(pattern_2)
            result = chunker.parse(tagged_sentence)

            correction = correct_2(result)
            correction = " ".join(correction)

            error_msg = "Sentence {}-th -- '{}' -- expect result: '{}' / while correction return: '{}'".format(
                i, input_, expect_, correction)
            self.assertEqual(expect_, correction, error_msg)
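
The correct_2 helper is likewise not shown. The expected outputs suggest it inserts "and" between consecutive adverbs; a hypothetical sketch consistent with those outputs:

# Hypothetical sketch of a correct_2-style rewrite: insert "and" between
# consecutive adverbs, e.g. "eventually slowly" -> "eventually and slowly".
def correct_2(tree):
    tokens = []
    prev_tag = None
    for word, tag in tree.leaves():
        if prev_tag is not None and prev_tag.startswith("RB") and tag.startswith("RB"):
            tokens.append("and")
        tokens.append(word)
        prev_tag = tag
    return tokens
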
Example #6
    def test_correct_pattern_1(self):
        inputs_ = [
            ("There is a school bus.", "There is a bus ."),
            ("Move that apple juice.", "Move that juice ."),
            ("Move that apple pie.", "Move that pie ."),
        ]
        for i, (input_, expect_) in enumerate(inputs_):
            # Parse the sentence with the pattern_1 grammar, then apply the correction.
            tokenized_sentence = word_tokenize(input_)
            tagged_sentence = pos_tagger.tag(tokenized_sentence)
            chunker = RegexpParser(pattern_1)
            result = chunker.parse(tagged_sentence)

            correction = correct_1(result)
            correction = " ".join(correction)

            error_msg = "Sentence {}-th -- '{}' -- expect result: '{}' / while correction return: '{}'".format(
                i, input_, expect_, correction)
            self.assertEqual(expect_, correction, error_msg)
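
Finally, correct_1 appears to drop the modifier noun of a compound and keep the head ("school bus" becomes "bus"). A hypothetical sketch consistent with the expected outputs above:

# Hypothetical sketch of a correct_1-style rewrite: keep only the head noun
# of a noun-noun compound ("school bus" -> "bus", "apple juice" -> "juice").
def correct_1(tree):
    leaves = tree.leaves()
    tokens = []
    for idx, (word, tag) in enumerate(leaves):
        next_tag = leaves[idx + 1][1] if idx + 1 < len(leaves) else None
        if tag.startswith("NN") and next_tag is not None and next_tag.startswith("NN"):
            continue  # skip the modifier noun; the head noun follows
        tokens.append(word)
    return tokens
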