Example #1
def __init__(self):
    is_pair = True
    class_labels = ['not_entailment', 'entailment']
    metric = CompositeEvalMetric()
    metric.add(Accuracy())
    super(AXgTask, self).__init__(class_labels,
                                  metric, is_pair, output_format="jsonl")
Example #2
def __init__(self):
    is_pair = True
    class_labels = ['0', '1']
    metric = CompositeEvalMetric()
    metric.add(F1())
    metric.add(Accuracy())
    super(QQPTask, self).__init__(class_labels, metric, is_pair)
Example #3
def __init__(self):
    is_pair = True
    class_labels = ['0', '1']
    metric = CompositeEvalMetric()
    metric.add(F1(average='micro'))
    super(MultiRCTask, self).__init__(class_labels,
                                      metric, is_pair, output_format="jsonl")
Example #4
def train():
    if pretrain_weight_path is not None:
        net_args, net_auxs = load_params_from_file(pretrain_weight_path)
    else:
        net_args, net_auxs = None, None
    to_model = osp.join(model_save_dir, '{}_ep'.format('FCN'))
    mod = get_module()
    opt = get_optimizer()

    dataiter = CustomIter(data_lst=data_list,
                          dataset='pascal',
                          data_root=data_root,
                          batch_size=batch_size,
                          crop_h=input_h,
                          crop_w=input_w,
                          label_stride=8,
                          sampler='random')
    custom_eval = CompositeEvalMetric()
    custom_eval.add(Custom_Accuracy())
    dataiter.reset()
    mod.fit(
        dataiter,
        eval_metric=custom_eval,
        batch_end_callback=mx.callback.Speedometer(batch_size, 1),
        epoch_end_callback=mx.callback.do_checkpoint(to_model),
        kvstore='local',
        begin_epoch=from_epoch,
        num_epoch=num_epochs,
        optimizer=opt,
        initializer=mx.init.Xavier(),
        arg_params=net_args,
        aux_params=net_auxs,
        allow_missing=True,
    )
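
The Custom_Accuracy added above is not shown in this example. Anything passed to CompositeEvalMetric.add() only needs the standard EvalMetric interface, so a custom metric is usually written as a subclass of mx.metric.EvalMetric that accumulates into sum_metric and num_inst. A minimal sketch under that assumption (the class name, the ignore_label handling and the tensor shapes are illustrative, not the example's actual implementation):

import mxnet as mx
import numpy as np

class PixelAccuracy(mx.metric.EvalMetric):
    """Illustrative stand-in for Custom_Accuracy: pixel accuracy for segmentation."""

    def __init__(self, ignore_label=255, name='pixel_acc'):
        super(PixelAccuracy, self).__init__(name)
        self.ignore_label = ignore_label

    def update(self, labels, preds):
        for label, pred in zip(labels, preds):
            # pred: (N, C, H, W) class scores -> (N, H, W) hard predictions
            pred_cls = np.argmax(pred.asnumpy(), axis=1)
            label = label.asnumpy().astype('int64')
            mask = label != self.ignore_label
            self.sum_metric += float((pred_cls == label)[mask].sum())
            self.num_inst += int(mask.sum())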
Example #5
def __init__(self):
    is_pair = True
    class_labels = ['0', '1']
    metric = CompositeEvalMetric()
    metric.add(F1())
    metric.add(Accuracy())
    super(ReCoRDTask, self).__init__(class_labels,
                                     metric, is_pair, output_format="jsonl")
Example #6
def __init__(self, *args, **kwargs):  # passthrough arguments to TSVDataset
    # (filename, field_separator=nlp.data.Splitter(','), num_discard_samples=1, field_indices=[2,1])
    self.args = args
    self.kwargs = kwargs
    is_pair = False
    class_labels = ['0', '1']
    metric = CompositeEvalMetric()
    metric.add(F1())
    metric.add(Accuracy())
    super(TSVClassificationTask, self).__init__(class_labels, metric, is_pair)
    dataset = nlp.data.TSVDataset(*self.args, **self.kwargs)
    # do the split
    train_sampler, val_sampler = get_split_samplers(dataset, split_ratio=0.8)
    self.trainset = SampledDataset(dataset, train_sampler)
    self.valset = SampledDataset(dataset, val_sampler)
Example #7
def get_metric():
    """Get metrics Accuracy and F1"""
    metric = CompositeEvalMetric()
    for child_metric in [Accuracy(), F1()]:
        metric.add(child_metric)
    return metric
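
For context, a composite metric built this way is driven by feeding it labels and predictions and then reading all child results at once. A minimal self-contained sketch of that flow (the toy arrays below are invented for illustration; the import path assumes the classic mxnet.metric module these snippets use):

import mxnet as mx
from mxnet.metric import Accuracy, CompositeEvalMetric, F1

metric = CompositeEvalMetric()
for child_metric in [Accuracy(), F1()]:
    metric.add(child_metric)

# Toy binary-classification batch: one row of class probabilities per sample.
labels = [mx.nd.array([0, 1, 1])]
preds = [mx.nd.array([[0.9, 0.1],
                      [0.3, 0.7],
                      [0.4, 0.6]])]
metric.update(labels, preds)

names, values = metric.get()
print(dict(zip(names, values)))  # e.g. {'accuracy': 1.0, 'f1': 1.0}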
Example #8
        t_modules.append(t_module)

    s_module = KTModule(symbol=s_symbol,
                        context=devices,
                        logger=logger,
                        data_names=data_names,
                        data_shapes=data_shapes,
                        label_names=label_names,
                        label_shapes=label_shapes,
                        is_transfer=False,
                        teacher_module=t_modules)

    # eval_metric = MultiMetric(loss_types=args.eval_metric)
    eval_metric = CompositeEvalMetric()
    eval_metric.add(
        Accuracy(output_names=["softmax_output"],
                 label_names=["softmax_label"]))
    eval_metric.add(Loss(output_names=["lmnn_output"], label_names=[]))

    s_module.fit(train_data=train,
                 eval_metric=eval_metric,
                 kvstore=kv,
                 initializer=init,
                 optimizer=sgd,
                 num_epoch=args.num_epochs,
                 arg_params=s_arg_params,
                 aux_params=s_aux_params,
                 epoch_end_callback=epoch_end_callback,
                 batch_end_callback=batch_end_callback,
                 allow_missing=True)
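
Note how this example scopes each child metric to a specific head of the network: Accuracy reads softmax_output against softmax_label, while Loss reads lmnn_output with no label at all (label_names=[]), so it simply records the mean value of that loss output. Binding child metrics through output_names like this is what lets one CompositeEvalMetric report separate results for a multi-output module during fit().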
Example #9
def get_metric(cls):
    """Get metrics Accuracy and F1"""
    metric = CompositeEvalMetric()
    for child_metric in [Accuracy(), F1(average='micro')]:
        metric.add(child_metric)
    return metric