Example 1
def _core_parsers(klearner, unique_real_root=True):
    """Our basic parser configurations
    """
    # joint
    if ((not klearner.attach.payload.can_predict_proba or
         not klearner.label.payload.can_predict_proba)):
        joint = []
    else:
        joint = [
            mk_joint(klearner, d) for d in [
                # decoder_last(),
                # DECODER_LOCAL,
                # decoder_mst(),
                Keyed('eisner',
                      EisnerDecoder(unique_real_root=unique_real_root,
                                    use_prob=True)),
            ]
        ]

    # postlabeling
    use_prob = klearner.attach.payload.can_predict_proba
    post = [
        mk_post(klearner, d) for d in [
            # decoder_last(),
            # DECODER_LOCAL,
            # decoder_mst(),
            Keyed('eisner',
                  EisnerDecoder(unique_real_root=unique_real_root,
                                use_prob=use_prob)),
        ]
    ]

    return joint + post
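
Every decoder, learner and parser in these examples is wrapped in a Keyed pair so that configurations can be assembled and named by key (note how _core_parsers builds Keyed('eisner', EisnerDecoder(...)) values and hands them to mk_joint and mk_post). The snippet below is a minimal, self-contained sketch of that pattern; the Keyed namedtuple and the dash-joining combined_key helper are assumptions about how the real harness utilities behave, written here only for illustration.

from collections import namedtuple

# Sketch only: the real Keyed / combined_key live in the harness utilities.
# Here we assume Keyed is a (key, payload) pair and that combined_key joins
# the keys of its arguments (Keyed values or bare strings) with dashes.
Keyed = namedtuple('Keyed', 'key payload')


def combined_key(*parts):
    "join the keys of Keyed values (or bare strings) into one config key"
    return '-'.join(p if isinstance(p, str) else p.key for p in parts)


# naming a learner/decoder combination the way mk_joint and mk_post do
klearner = Keyed('maxent', object())     # stand-in payloads
kdecoder = Keyed('eisner', object())
assert combined_key(klearner, kdecoder) == 'maxent-eisner'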
Example 2
def attach_learner_dp_struct_perc(decoder):
    "structured perceptron learning"
    learner = StructuredPerceptron(decoder,
                                   n_iter=STRUC_N_ITER,
                                   verbose=VERBOSE,
                                   cost=STRUC_COST,
                                   average=STRUC_AVG,
                                   use_prob=STRUC_USE_PROB)
    return Keyed('dp-struct-perc', learner)
Example 3
def label_learner_dp_perc():
    "return a keyed instance of perceptron learner"
    return Keyed(
        'dp-perc',
        SklearnLabelClassifier(
            Perceptron(n_iter=LOCAL_N_ITER,
                       verbose=VERBOSE,
                       average=LOCAL_AVG,
                       use_prob=LOCAL_USE_PROB)))
Example 4
def attach_learner_dp_pa():
    "return a keyed instance of passive aggressive learner"
    return Keyed(
        'dp-pa',
        SklearnAttachClassifier(
            PassiveAggressive(C=LOCAL_C,
                              n_iter=LOCAL_N_ITER,
                              verbose=VERBOSE,
                              average=LOCAL_AVG,
                              use_prob=LOCAL_USE_PROB)))
Example 5
def attach_learner_dp_struct_pa(decoder):
    "structured passive-aggressive learning"
    learner = StructuredPassiveAggressive(decoder,
                                          C=STRUC_C,
                                          n_iter=STRUC_N_ITER,
                                          verbose=VERBOSE,
                                          loss=STRUC_LOSS,
                                          cost=STRUC_COST,
                                          average=STRUC_AVG,
                                          use_prob=STRUC_USE_PROB)
    return Keyed('dp-struct-pa', learner)
Example 6
def _mk_last_intras(klearner, kconf):
    """Intra/inter parsers based on a single core parser
    and the last baseline
    """
    kconf = Keyed(key=combined_key('last', kconf), payload=kconf.payload)
    econf_last = mk_post(klearner, decoder_last())
    return [
        combine_intra(IntraInterPair(intra=econf_last, inter=p),
                      kconf,
                      primary='inter') for p in _core_parsers(klearner)
    ]
Example 7
def mk_joint(klearner, kdecoder):
    "return a joint decoding parser config"
    settings = _core_settings('AD.L-jnt', klearner)
    parser_key = combined_key(settings, kdecoder)
    key = combined_key(klearner, parser_key)
    parser = JointPipeline(learner_attach=klearner.attach.payload,
                           learner_label=klearner.label.payload,
                           decoder=kdecoder.payload)
    return EvaluationConfig(key=key,
                            settings=settings,
                            learner=klearner,
                            parser=Keyed(parser_key, parser))
Example 8
def mk_post(klearner, kdecoder):
    "return a post label parser"
    settings = _core_settings('AD.L-pst', klearner)
    parser_key = combined_key(settings, kdecoder)
    key = combined_key(klearner, parser_key)
    parser = PostlabelPipeline(learner_attach=klearner.attach.payload,
                               learner_label=klearner.label.payload,
                               decoder=kdecoder.payload)
    return EvaluationConfig(key=key,
                            settings=settings,
                            learner=klearner,
                            parser=Keyed(parser_key, parser))
Example 9
def _mk_last_intras(klearner, kconf):
    """Parsers using "last" for intra and a core decoder for inter.
    """
    if ((not klearner.attach.payload.can_predict_proba or
         not klearner.label.payload.can_predict_proba)):
        return []

    kconf = Keyed(key=combined_key('last', kconf),
                  payload=kconf.payload)
    econf_last = mk_joint(klearner, decoder_last())
    parsers = [IntraInterPair(intra=econf_last, inter=y) for y in
               _core_parsers(klearner)]
    return [combine_intra(p, kconf, primary='inter') for p in parsers]
Example 10
def mk_bypass(klearner, kdecoder):
    """ Return a bypass decoder config

    Used if the decoder itself also labels the pairs """
    settings = _core_settings('AD.L-byp', klearner)
    parser_key = combined_key(settings, kdecoder)
    key = combined_key(klearner, parser_key)
    steps = [
        ('attach weights', AttachClassifierWrapper(klearner.attach.payload)),
        ('label weights', LabelClassifierWrapper(klearner.label.payload)),
        ('decode', kdecoder.payload),
    ]
    parser = Pipeline(steps=steps)
    return EvaluationConfig(key=key,
                            settings=settings,
                            learner=klearner,
                            parser=Keyed(parser_key, parser))
Example 11
def combine_intra(econfs, kconf, primary='intra', verbose=False):
    """Combine a pair of EvaluationConfig into a single IntraInterParser

    Parameters
    ----------
    econfs : IntraInterPair of EvaluationConfig
        Evaluation configs for the intra and inter parsers.

    kconf : Keyed of (parser constructor, sel_inter)
        Keyed constructor pair used to build the combined parser.

    primary : str, one of {'intra', 'inter'}
        Treat the intra or inter config as the primary one for the key.
    verbose : boolean, optional
        Verbosity of the intra/inter parser.

    Returns
    -------
    econf : EvaluationConfig
        Evaluation configuration for the IntraInterParser.
    """
    if primary == 'intra':
        econf = econfs.intra
    elif primary == 'inter':
        econf = econfs.inter
    else:
        raise ValueError("'primary' should be one of intra/inter: " + primary)

    parsers = econfs.fmap(lambda e: e.parser.payload)
    subsettings = econfs.fmap(lambda e: e.settings)
    learners = econfs.fmap(lambda e: e.learner)
    settings = Settings(key=combined_key(kconf, econf.settings),
                        intra=True,
                        oracle=econf.settings.oracle,
                        children=subsettings)
    iiparser_type, sel_inter = kconf.payload
    kparser = Keyed(
        combined_key(kconf, econf.parser),
        iiparser_type(parsers, sel_inter=sel_inter, verbose=verbose))
    if learners.intra.key == learners.inter.key:
        learner_key = learners.intra.key
    else:
        learner_key = '{}S_D{}'.format(learners.intra.key, learners.inter.key)
    return EvaluationConfig(key=combined_key(learner_key, kparser),
                            settings=settings,
                            learner=learners,
                            parser=kparser)
Example 12
def _evaluations():
    "the evaluations we want to run"
    # non-prob mst decoder (dp learners don't do probs)
    nonprob_mst = Keyed('', MstDecoder(MstRootStrategy.fake_root, False))
    nonprob_mst = tc_decoder(nonprob_mst)
    nonprob_mst = nonprob_mst.payload
    #
    learners = []
    learners.extend(_LOCAL_LEARNERS)
    learners.extend(l(nonprob_mst) for l in _STRUCTURED_LEARNERS)
    ipairs = list(itr.product(learners, _INTRA_INTER_CONFIGS))
    res = concat_l([
        concat_l(_core_parsers(l) for l in learners),
        concat_l(_mk_basic_intras(l, x) for l, x in ipairs),
        concat_l(_mk_sorc_intras(l, x) for l, x in ipairs),
        concat_l(_mk_dorc_intras(l, x) for l, x in ipairs),
        concat_l(_mk_last_intras(l, x) for l, x in ipairs),
    ])
    return [x for x in res if not _is_junk(x)]
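
_evaluations above flattens several generator expressions with concat_l. A minimal sketch of a flatten helper with the behaviour that usage suggests (the real one comes from the attelo utilities, so this is only an assumed stand-in) is:

from itertools import chain


def concat_l(iterables):
    "flatten an iterable of iterables into a single list (assumed behaviour)"
    return list(chain.from_iterable(iterables))


assert concat_l([[1, 2], [3], []]) == [1, 2, 3]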
Example 13
def tc_learner(klearner):
    "turn constrained version of a learner"
    return Keyed(key='tc-' + klearner.key,
                 payload=TC_LearnerWrapper(klearner.payload))
Example 14
def attach_learner_rndforest():
    "return a keyed instance of random forest learner"
    return Keyed('rndforest',
                 SklearnAttachClassifier(RandomForestClassifier()))
Example 15
def label_learner_rndforest():
    "return a keyed instance of decision tree learner"
    return Keyed('rndforest', SklearnLabelClassifier(RandomForestClassifier()))
Example 16
def attach_learner_dectree():
    "return a keyed instance of decision tree learner"
    return Keyed('dectree', SklearnAttachClassifier(DecisionTreeClassifier()))
Example 17
def label_learner_dectree():
    "return a keyed instance of decision tree learner"
    return Keyed('dectree', SklearnLabelClassifier(DecisionTreeClassifier()))
Example 18
def attach_learner_maxent():
    "return a keyed instance of maxent learner"
    return Keyed('maxent', SklearnAttachClassifier(LogisticRegression()))
Example 19
def label_learner_maxent():
    "return a keyed instance of maxent learner"
    return Keyed('maxent', SklearnLabelClassifier(LogisticRegression()))
Example 20
def label_learner_perc():
    "return a keyed instance of perceptron learner"
    learner = sk.Perceptron(n_iter=LOCAL_N_ITER,
                            class_weight=LOCAL_CLASS_WEIGHT)
    return Keyed('perc', SklearnLabelClassifier(learner))
Example 21
def attach_learner_dp_struct_perc(decoder):
    "structured perceptron learning"
    learner = StructuredPerceptron(decoder, STRUCT_PERC_ARGS)
    return Keyed('dp-struct-perc', learner)
Example 22
def label_learner_dp_perc():
    "return a keyed instance of perceptron learner"
    return Keyed('dp-perc',
                 SklearnLabelClassifier(Perceptron(LOCAL_PERC_ARGS)))
Example 23
def label_learner_perc():
    "return a keyed instance of perceptron learner"
    learner = sk.Perceptron(n_iter=LOCAL_PERC_ARGS.iterations)
    return Keyed('perc', SklearnLabelClassifier(learner))
Example 24
def label_learner_pa():
    "return a keyed instance of passive aggressive learner"
    learner = sk.PassiveAggressiveClassifier(C=LOCAL_C,
                                             n_iter=LOCAL_N_ITER,
                                             class_weight=LOCAL_CLASS_WEIGHT)
    return Keyed('pa', SklearnLabelClassifier(learner))
Example 25
def label_learner_pa():
    "return a keyed instance of passive aggressive learner"
    learner = sk.PassiveAggressiveClassifier(n_iter=LOCAL_PA_ARGS.iterations)
    return Keyed('pa', SklearnLabelClassifier(learner))
Example 26
            mk_bypass(klearner, decoder_ilp()),
            mk_bypass(klearner, tc_decoder(decoder_ilp())),
        ]
    else:
        # you need to install SCIP and provide the path to its
        # binaries in SCIP_BIN_DIR in ilp.py
        bypass = []

    if klearner.attach.payload.can_predict_proba:
        return joint + post + bypass
    else:
        return post


_INTRA_INTER_CONFIGS = [
    Keyed('iheads', HeadToHeadParser),
    # Keyed('ionly', SentOnlyParser),
    Keyed('isoft', SoftParser),
]

# -------------------------------------------------------------------------------
# maybe less to edit below but still worth having a glance
# -------------------------------------------------------------------------------

HARNESS_NAME = 'irit-stac'


def _mk_basic_intras(klearner, kconf):
    """Intra/inter parser based on a single core parser
    """
    # (body assumed by analogy with the _mk_last_intras examples above:
    #  pair each core parser with itself for the intra/inter combination)
    return [
        combine_intra(IntraInterPair(intra=p, inter=p), kconf)
        for p in _core_parsers(klearner)
    ]
Example 27
def label_learner_dp_pa():
    "return a keyed instance of passive aggressive learner"
    return Keyed('dp-pa',
                 SklearnLabelClassifier(PassiveAggressive(LOCAL_PA_ARGS)))
Example 28
def decoder_mst():
    "our instantiation of the mst decoder"
    return Keyed('mst', MstDecoder(MstRootStrategy.fake_root, True))
Example 29
def attach_learner_dp_struct_pa(decoder):
    "structured passive-aggressive learning"
    learner = StructuredPassiveAggressive(decoder, STRUCT_PA_ARGS)
    return Keyed('dp-struct-pa', learner)
Example 30
def decoder_ilp():
    "our instantiation of the ILP decoder"
    return Keyed('ilp', ILPDecoder())