def load(**overrides):
    """Load this model package and attach the neuralcoref pipe.

    The coref component is excluded from spaCy's automatic pipeline
    restoration and instead rebuilt here from its on-disk weights,
    then appended to the pipeline.
    """
    # Keep spaCy from restoring 'neuralcoref' on its own; we do it below.
    skipped = overrides.get('disable', [])
    overrides['disable'] = skipped + ['neuralcoref']
    nlp = load_model_from_init_py(__file__, **overrides)
    # Rebuild the coref component from the serialized data shipped with
    # the model package and add it back to the pipeline.
    coref = NeuralCoref(nlp.vocab)
    coref.from_disk(nlp.path / 'neuralcoref')
    nlp.add_pipe(coref, name='neuralcoref')
    return nlp
def load(**overrides):
    """Load this model package via spaCy's standard init-py loader.

    Any keyword overrides are forwarded unchanged to the loader.
    """
    nlp = load_model_from_init_py(__file__, **overrides)
    return nlp
def load(**overrides):
    """Load this model package via spaCy's standard init-py loader.

    Any keyword overrides are forwarded unchanged to the loader.
    """
    return load_model_from_init_py(__file__, **overrides)
def load(**overrides):
    """Register the 'entity_matcher' pipeline factory, then load the model.

    The factory must be in place before loading so the model's pipeline
    config can resolve the component by name.
    """
    def _make_entity_matcher(nlp, **cfg):
        # Factory: build an EntityMatcher bound to this nlp object.
        return EntityMatcher(nlp, **cfg)

    Language.factories['entity_matcher'] = _make_entity_matcher
    return load_model_from_init_py(__file__, **overrides)
def load(**overrides):
    """Register the 'en_narrow' and 'person_narrow' factories, then load.

    Both factories must be registered before loading so the model's
    pipeline config can resolve the components by name.
    """
    def _make_en_narrow(nlp, **cfg):
        # Factory: build an EnNarrow component bound to this nlp object.
        return EnNarrow(nlp, **cfg)

    def _make_person_narrow(nlp, **cfg):
        # Factory: build a PersonNarrow component bound to this nlp object.
        return PersonNarrow(nlp, **cfg)

    Language.factories['en_narrow'] = _make_en_narrow
    Language.factories['person_narrow'] = _make_person_narrow
    return load_model_from_init_py(__file__, **overrides)
def load(**overrides):
    """Load the model and install the custom Polish lemmatizer.

    After loading, the tagger's default lemmatizer is replaced with the
    lookup-based PolishLemmatizer shipped with this package.
    """
    nlp = load_model_from_init_py(__file__, **overrides)
    # Swap in our custom, lookup-based lemmatizer.
    nlp.tagger.vocab.morphology.lemmatizer = PolishLemmatizer()
    return nlp
def load(**overrides):
    """Register the combined-rule sentence segmenter factory, then load.

    The factory deliberately ignores ``nlp`` and any config and hands
    back the segmenter function itself, which spaCy then calls as a
    pipeline component.
    """
    def _segmenter_factory(nlp, **cfg):
        return combined_rule_sentence_segmenter

    Language.factories['combined_rule_sentence_segmenter'] = _segmenter_factory
    return load_model_from_init_py(__file__, **overrides)