# Example 1
def load_model(model_dir):
    """Load the spaCy 'en' pipeline and a trained EntityRecognizer from *model_dir*.

    Expects a spaCy 1.x model layout (vocab/strings.json, vocab/lexemes.bin)
    and returns a ``(nlp, ner)`` tuple.
    """
    path = pathlib.Path(model_dir)
    nlp = spacy.load('en', parser=False, entity=False, add_vectors=False)
    # Overlay the string store and lexeme data saved next to the NER model.
    strings_file = path / 'vocab' / 'strings.json'
    with strings_file.open('r', encoding='utf8') as file_:
        nlp.vocab.strings.load(file_)
    nlp.vocab.load_lexemes(path / 'vocab' / 'lexemes.bin')
    ner = EntityRecognizer.load(path, nlp.vocab, require=True)
    return (nlp, ner)
# Example 2
def load_model(model_dir):
    """Restore a (nlp, ner) pair from a spaCy 1.x model directory.

    Loads the bare 'en' pipeline, then layers the persisted vocab strings
    and lexemes on top before loading the EntityRecognizer.
    """
    model_dir = pathlib.Path(model_dir)
    nlp = spacy.load('en', parser=False, entity=False, add_vectors=False)
    vocab_dir = model_dir / 'vocab'
    with (vocab_dir / 'strings.json').open('r', encoding='utf8') as file_:
        nlp.vocab.strings.load(file_)
    nlp.vocab.load_lexemes(vocab_dir / 'lexemes.bin')
    # require=True makes the load fail loudly if no model is present.
    return (nlp, EntityRecognizer.load(model_dir, nlp.vocab, require=True))
# Example 3
def load_model(model_dir):
    """Load en_core_web_sm, overlay vocab data from *model_dir*, return (nlp, ner)."""
    model_path = pathlib.Path(model_dir)
    nlp = en_core_web_sm.load()
    vocab_path = model_path / 'vocab'
    # Replace the packaged string store / lexemes with the ones saved
    # alongside the custom NER model.
    with (vocab_path / 'strings.json').open('r', encoding='utf8') as file_:
        nlp.vocab.strings.load(file_)
    nlp.vocab.load_lexemes(vocab_path / 'lexemes.bin')
    ner = EntityRecognizer.load(model_path, nlp.vocab, require=True)
    return nlp, ner
    def load(cls, model_dir, entity_extractor_spacy, fine_tune_spacy_ner, spacy_nlp):
        # type: (Text, Text, bool, Language) -> SpacyEntityExtractor
        """Restore a SpacyEntityExtractor from *model_dir*.

        Falls back to an extractor without a custom NER model when either
        the model directory or the extractor sub-path is missing.
        """
        from spacy.pipeline import EntityRecognizer

        if not (model_dir and entity_extractor_spacy):
            # Nothing persisted — return an extractor with no loaded NER.
            return SpacyEntityExtractor(fine_tune_spacy_ner)
        ner_dir = os.path.join(model_dir, entity_extractor_spacy)
        recognizer = EntityRecognizer.load(pathlib.Path(ner_dir), spacy_nlp.vocab)
        return SpacyEntityExtractor(fine_tune_spacy_ner, recognizer)
# Example 5
    def __init__(self,
                 nlp=None,
                 extractor_file=None,
                 should_fine_tune_spacy_ner=False):
        """Optionally load a persisted EntityRecognizer from *extractor_file*.

        NOTE(review): when extractor_file is given, nlp must not be None,
        since its vocab is dereferenced — confirm callers guarantee this.
        """
        self.nlp = nlp
        self.ner = (EntityRecognizer.load(pathlib.Path(extractor_file),
                                          nlp.vocab)
                    if extractor_file else None)
        self.should_fine_tune_spacy_ner = should_fine_tune_spacy_ner
# Example 6
def predictEnt(query):
    """Return the text of the first PRODUCT-tagged token in *query*, else None."""
    nlp = spacy.load('en', parser=False)
    doc = nlp.make_doc(query)
    # Overlay the vocab data saved next to the custom NER model in ./ner.
    vocab_dir = pathlib.Path('ner/vocab')
    with (vocab_dir / 'strings.json').open('r', encoding='utf8') as file_:
        nlp.vocab.strings.load(file_)
    nlp.vocab.load_lexemes(vocab_dir / 'lexemes.bin')
    ner = EntityRecognizer.load(pathlib.Path("ner"), nlp.vocab, require=True)
    nlp.tagger(doc)
    ner(doc)
    # First PRODUCT token wins; implicit None when there is none.
    return next((token.text for token in doc
                 if token.ent_type_ == 'PRODUCT'), None)
def predict(query):
    """Run the saved NER model over *query* and print token/entity diagnostics."""
    # Base 'en' pipeline without parser/entity/vectors; vocab data is then
    # overlaid from the directory the custom NER model was saved to.
    nlp = spacy.load('en', parser=False, entity=False, add_vectors=False)
    vocab_dir = pathlib.Path('ner/vocab')
    with (vocab_dir / 'strings.json').open('r', encoding='utf8') as file_:
        nlp.vocab.strings.load(file_)
    nlp.vocab.load_lexemes(vocab_dir / 'lexemes.bin')

    ner = EntityRecognizer.load(pathlib.Path("ner"), nlp.vocab, require=False)
    doc = nlp.make_doc(query)
    # NOTE(review): tagging is skipped here (unlike predictEnt) — confirm intended.
    ner(doc)

    # Pass 1: dump every token's surface form, hash values and entity type.
    for word in doc:
        print(word.text, word.orth, word.lower, word.ent_type_)

    # Pass 2: report only the tokens that received an entity label.
    for word in doc:
        if word.ent_type_:
            print('word -> {} and entity-> {}'.format(word.text, word.ent_type_))
 def __init__(self, nlp=None, extractor_file=None):
     """Load a persisted EntityRecognizer when *extractor_file* is given."""
     if not extractor_file:
         self.ner = None
         return
     # nlp.vocab is only touched on this path, so a missing nlp is fine
     # for the "no extractor" case above.
     self.ner = EntityRecognizer.load(pathlib.Path(extractor_file), nlp.vocab)
# Example 9
def load_ner_model(vocab, path):
    """Thin wrapper: load an EntityRecognizer for *vocab* from *path*."""
    recognizer = EntityRecognizer.load(path, vocab)
    return recognizer
# Example 10
def load_ner_model(vocab, path):
    """Load and return the EntityRecognizer stored at *path*, bound to *vocab*."""
    return EntityRecognizer.load(path, vocab)