def test_crf_json_from_BILOU(spacy_nlp):
    """BILOU-tagged marginals should merge multi-token spans into single entities."""
    from rasa_nlu.extractors.crf_entity_extractor import CRFEntityExtractor

    extractor = CRFEntityExtractor()
    extractor.BILOU_flag = True
    sentence = u"I need a home cleaning close-by"
    doc = {"spacy_doc": spacy_nlp(sentence)}
    # Per-token marginal probabilities in BILOU scheme; "home cleaning" spans
    # B-/L- tags and "close-by" spans B-/I-/L- tags.
    results = extractor._from_crf_to_json(
        Message(sentence, doc),
        [{'O': 1.0}, {'O': 1.0}, {'O': 1.0},
         {'B-what': 1.0}, {'L-what': 1.0},
         {'B-where': 1.0}, {'I-where': 1.0}, {'L-where': 1.0}])

    assert len(results) == 2, "There should be two entities"

    expected = [
        {'start': 9, 'end': 22, 'value': 'home cleaning', 'entity': 'what'},
        {'start': 23, 'end': 31, 'value': 'close-by', 'entity': 'where'},
    ]
    for found, wanted in zip(results, expected):
        # confidence should exist
        assert found["confidence"]
        del found["confidence"]
        assert found == wanted
def test_crf_json_from_non_BILOU(spacy_nlp):
    """Without BILOU tagging, each tagged token becomes its own entity."""
    from rasa_nlu.extractors.crf_entity_extractor import CRFEntityExtractor

    extractor = CRFEntityExtractor()
    extractor.BILOU_flag = False
    sentence = u"I need a home cleaning close-by"
    doc = {"spacy_doc": spacy_nlp(sentence)}
    results = extractor._from_crf_to_json(
        Message(sentence, doc),
        [{'O': 1.0}, {'O': 1.0}, {'O': 1.0},
         {'what': 1.0}, {'what': 1.0},
         {'where': 1.0}, {'where': 1.0}, {'where': 1.0}])

    # non BILOU will split multi-word entities - hence 5
    assert len(results) == 5, "There should be five entities"

    expected = [
        {'start': 9, 'end': 13, 'value': 'home', 'entity': 'what'},
        {'start': 14, 'end': 22, 'value': 'cleaning', 'entity': 'what'},
        {'start': 23, 'end': 28, 'value': 'close', 'entity': 'where'},
        {'start': 28, 'end': 29, 'value': '-', 'entity': 'where'},
        {'start': 29, 'end': 31, 'value': 'by', 'entity': 'where'},
    ]
    for found, wanted in zip(results, expected):
        # confidence should exist
        assert found['confidence']
        del found['confidence']
        assert found == wanted
def test_crf_json_from_non_BILOU(spacy_nlp):
    """Non-BILOU extractor (configured via component_config) yields one entity per token."""
    from rasa_nlu.extractors.crf_entity_extractor import CRFEntityExtractor

    extractor = CRFEntityExtractor(component_config={"BILOU_flag": False})
    sentence = u"I need a home cleaning close-by"
    doc = {"spacy_doc": spacy_nlp(sentence)}
    results = extractor._from_crf_to_json(
        Message(sentence, doc),
        [{'O': 1.0}, {'O': 1.0}, {'O': 1.0},
         {'what': 1.0}, {'what': 1.0},
         {'where': 1.0}, {'where': 1.0}, {'where': 1.0}])

    # non BILOU will split multi-word entities - hence 5
    assert len(results) == 5, "There should be five entities"

    expected = [
        {'start': 9, 'end': 13, 'value': 'home', 'entity': 'what'},
        {'start': 14, 'end': 22, 'value': 'cleaning', 'entity': 'what'},
        {'start': 23, 'end': 28, 'value': 'close', 'entity': 'where'},
        {'start': 28, 'end': 29, 'value': '-', 'entity': 'where'},
        {'start': 29, 'end': 31, 'value': 'by', 'entity': 'where'},
    ]
    for found, wanted in zip(results, expected):
        # confidence should exist
        assert found['confidence']
        del found['confidence']
        assert found == wanted
def test_crf_json_from_BILOU(spacy_nlp):
    """Older API: BILOU label strings over a raw spaCy doc merge into two entities."""
    from rasa_nlu.extractors.crf_entity_extractor import CRFEntityExtractor

    extractor = CRFEntityExtractor()
    extractor.BILOU_flag = True
    sentence = u"I need a home cleaning close-by"
    entities = extractor._from_crf_to_json(
        spacy_nlp(sentence),
        ['O', 'O', 'O', 'B-what', 'L-what', 'B-where', 'I-where', 'L-where'])

    assert len(entities) == 2, "There should be two entities"
    assert entities[0] == {u'start': 9, u'end': 22,
                           u'value': u'home cleaning', u'entity': u'what'}
    assert entities[1] == {u'start': 23, u'end': 31,
                           u'value': u'close-by', u'entity': u'where'}
def test_crf_json_from_BILOU(spacy_nlp):
    """BILOU label strings passed with a Message wrapper merge into two entities."""
    from rasa_nlu.extractors.crf_entity_extractor import CRFEntityExtractor

    extractor = CRFEntityExtractor()
    extractor.BILOU_flag = True
    sentence = u"I need a home cleaning close-by"
    message = Message(sentence, {"spacy_doc": spacy_nlp(sentence)})
    entities = extractor._from_crf_to_json(
        message,
        ['O', 'O', 'O', 'B-what', 'L-what', 'B-where', 'I-where', 'L-where'])

    assert len(entities) == 2, "There should be two entities"
    assert entities[0] == {u'start': 9, u'end': 22,
                           u'value': u'home cleaning', u'entity': u'what'}
    assert entities[1] == {u'start': 23, u'end': 31,
                           u'value': u'close-by', u'entity': u'where'}
def test_crf_json_from_non_BILOU(spacy_nlp):
    """Older API: plain label strings without BILOU produce one entity per token."""
    from rasa_nlu.extractors.crf_entity_extractor import CRFEntityExtractor

    extractor = CRFEntityExtractor()
    extractor.BILOU_flag = False
    sentence = u"I need a home cleaning close-by"
    entities = extractor._from_crf_to_json(
        spacy_nlp(sentence),
        ['O', 'O', 'O', 'what', 'what', 'where', 'where', 'where'])

    # non BILOU will split multi-word entities - hence 5
    assert len(entities) == 5, "There should be five entities"
    assert entities[0] == {u'start': 9, u'end': 13,
                           u'value': u'home', u'entity': u'what'}
    assert entities[1] == {u'start': 14, u'end': 22,
                           u'value': u'cleaning', u'entity': u'what'}
    assert entities[2] == {u'start': 23, u'end': 28,
                           u'value': u'close', u'entity': u'where'}
    assert entities[3] == {u'start': 28, u'end': 29,
                           u'value': u'-', u'entity': u'where'}
    assert entities[4] == {u'start': 29, u'end': 31,
                           u'value': u'by', u'entity': u'where'}
def test_crf_json_from_non_BILOU(spacy_nlp):
    """Plain label strings with a Message wrapper produce one entity per token."""
    from rasa_nlu.extractors.crf_entity_extractor import CRFEntityExtractor

    extractor = CRFEntityExtractor()
    extractor.BILOU_flag = False
    sentence = u"I need a home cleaning close-by"
    message = Message(sentence, {"spacy_doc": spacy_nlp(sentence)})
    entities = extractor._from_crf_to_json(
        message,
        ['O', 'O', 'O', 'what', 'what', 'where', 'where', 'where'])

    # non BILOU will split multi-word entities - hence 5
    assert len(entities) == 5, "There should be five entities"
    assert entities[0] == {u'start': 9, u'end': 13,
                           u'value': u'home', u'entity': u'what'}
    assert entities[1] == {u'start': 14, u'end': 22,
                           u'value': u'cleaning', u'entity': u'what'}
    assert entities[2] == {u'start': 23, u'end': 28,
                           u'value': u'close', u'entity': u'where'}
    assert entities[3] == {u'start': 28, u'end': 29,
                           u'value': u'-', u'entity': u'where'}
    assert entities[4] == {u'start': 29, u'end': 31,
                           u'value': u'by', u'entity': u'where'}
import sklearn_crfsuite

# NOTE(review): this fragment relies on names defined elsewhere
# (`crf`, `dataset`, `filtered_data`) — it is a script/debug snippet,
# not a self-contained unit.

# Convert each training sentence into CRF feature dicts and label sequences.
X_train = [crf._sentence_to_features(sent) for sent in dataset]
print("X_Train...", X_train[-1])
y_train = [crf._sentence_to_labels(sent) for sent in dataset]
print("Y_Train.......", y_train[-1])

# Fit an L-BFGS CRF tagger with the component's regularization settings.
crf.ent_tagger = sklearn_crfsuite.CRF(
    algorithm='lbfgs',
    # coefficient for L1 penalty
    c1=crf.component_config["L1_c"],
    # coefficient for L2 penalty
    c2=crf.component_config["L2_c"],
    # stop earlier
    max_iterations=crf.component_config["max_iterations"],
    # include transitions that are possible, but not observed
    all_possible_transitions=True)
crf.ent_tagger.fit(X_train, y_train)

# Strip gold annotations from one example and run it through the
# feature pipeline + tagger to inspect the predicted entities.
test_example = filtered_data[24]
test_example.data.pop("intent")
test_example.data.pop("entities")
print(test_example.as_dict())

text_data = crf._from_text_to_crf(test_example)
features = crf._sentence_to_features(text_data)
print(text_data)
print(features)

ents = crf.ent_tagger.predict_marginals_single(features)
print(ents)
print(crf._from_crf_to_json(test_example, ents))