Example #1
from spacy.util import load_model_from_init_py
from neuralcoref import NeuralCoref

def load(**overrides):
    # Load the base pipeline with the 'neuralcoref' pipe disabled;
    # the component is restored manually below.
    disable = overrides.get('disable', [])
    overrides['disable'] = disable + ['neuralcoref']
    nlp = load_model_from_init_py(__file__, **overrides)
    # Rebuild the coreference component from the model directory and add it back.
    coref = NeuralCoref(nlp.vocab)
    coref.from_disk(nlp.path / 'neuralcoref')
    nlp.add_pipe(coref, name='neuralcoref')
    return nlp
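A minimal usage sketch for the entry point above. The package name en_coref_md is an assumption (any coreference model package built this way behaves the same); the ._.has_coref and ._.coref_clusters extension attributes are set by neuralcoref on each processed Doc.

# Assumed package name; load() above is its entry point.
import en_coref_md

nlp = en_coref_md.load()
doc = nlp(u"My sister has a dog. She loves him.")
print(doc._.has_coref)       # True if any coreference cluster was found
print(doc._.coref_clusters)  # the resolved mention clusters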
Example #2
from spacy.util import load_model_from_init_py

def load(**overrides):
    return load_model_from_init_py(__file__, **overrides)
Example #3
from spacy.util import load_model_from_init_py

def load(**overrides):
    nlp = load_model_from_init_py(__file__, **overrides)
    return nlp
Example #4
from spacy.language import Language
from spacy.util import load_model_from_init_py

def load(**overrides):
    # Register the custom EntityMatcher factory (class defined elsewhere in the package).
    Language.factories['entity_matcher'] = lambda nlp, **cfg: EntityMatcher(
        nlp, **cfg)
    return load_model_from_init_py(__file__, **overrides)
Example #5
from spacy.language import Language
from spacy.util import load_model_from_init_py

def load(**overrides):
    # Register the package's custom component factories (EnNarrow and
    # PersonNarrow are defined elsewhere in the package).
    Language.factories['en_narrow'] = lambda nlp, **cfg: EnNarrow(nlp, **cfg)
    Language.factories['person_narrow'] = lambda nlp, **cfg: PersonNarrow(
        nlp, **cfg)
    return load_model_from_init_py(__file__, **overrides)
Example #6
from spacy.util import load_model_from_init_py

def load(**overrides):
    model = load_model_from_init_py(__file__, **overrides)
    # Swap in the package's custom, lookup-based lemmatizer
    # (PolishLemmatizer is defined elsewhere in the package).
    lemmatizer = PolishLemmatizer()
    model.tagger.vocab.morphology.lemmatizer = lemmatizer
    return model
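A short usage sketch for the lemmatizer swap above: once the tagger has run, token.lemma_ reflects the lookup-based lemmatizer installed by load(). The package name pl_model is hypothetical.

# Hypothetical package name; load() is the entry point defined above.
import pl_model

nlp = pl_model.load()
doc = nlp("Koty lubią mleko.")
for token in doc:
    print(token.text, token.lemma_)  # lemmas come from the swapped-in lemmatizer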
Example #7
from spacy.language import Language
from spacy.util import load_model_from_init_py

def load(**overrides):
    # Register the rule-based sentence segmenter (a plain function imported
    # elsewhere in the package) as a pipeline factory, then load the model.
    Language.factories['combined_rule_sentence_segmenter'] = (
        lambda nlp, **cfg: combined_rule_sentence_segmenter)
    nlp = load_model_from_init_py(__file__, **overrides)
    return nlp
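Finally, a sketch of how the registered segmenter is typically exercised, assuming the model's meta.json lists combined_rule_sentence_segmenter in its pipeline; the package name en_core_sci_sm is an assumption.

# Assumed package name; sentence boundaries set by the registered segmenter
# are exposed through doc.sents.
import en_core_sci_sm

nlp = en_core_sci_sm.load()
doc = nlp("Dr. Smith measured 5 mg/kg. The dose was repeated the next day.")
for sent in doc.sents:
    print(sent.text)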