exit(1208)
    print_info('{}init pram{}'.format('*' * 15, '*' * 15))

# The model computes its loss inside forward() and returns it in the output
# dict; LossInForward just picks it up, so no external criterion is needed.
loss = LossInForward()

# Shared field names: every metric reads the model's 'pred' against the
# dataset's 'target', masked by 'seq_len'.
encoding_type = 'bmeso'
_field_kw = dict(pred='pred', target='target', seq_len='seq_len')

# Span-level F1 over BMESO-encoded tag sequences (the main NER metric).
f1_metric = SpanFPreRecMetric(vocabs['label'],
                              encoding_type=encoding_type,
                              **_field_kw)

# Token-level tag accuracy, reported under the name 'label_acc'.
acc_metric = AccuracyMetric(**_field_kw)
acc_metric.set_metric_name('label_acc')

metrics = [f1_metric, acc_metric]

# When self-supervised char prediction is enabled, also track its accuracy.
if args.self_supervised:
    chars_acc_metric = AccuracyMetric(pred='chars_pred',
                                      target='chars_target',
                                      seq_len='seq_len')
    chars_acc_metric.set_metric_name('chars_acc')
    metrics.append(chars_acc_metric)

# Inspection mode: dump every model parameter's name and shape, then stop.
# Under --debug we fall through instead of exiting, so a debugger session
# can continue past the dump.
if args.see_param:
    for param_name, param in model.named_parameters():
        print_info('{}:{}'.format(param_name, param.size()))
    print_info('see_param mode: finish')
    if not args.debug:
        exit(1208)
datasets['train'].apply
# --- scraped-example separator; original stray text: "Exemplo n.º 2" / "0" ---
                elif args.init == 'norm':
                    logging.info('xavier norm init : {}'.format(n))
                    nn.init.xavier_normal_(p)
            except:
                logging.info(n)
                exit(1208)
    logging.info('{}init pram{}\n'.format('*' * 15, '*' * 15))

# Loss is produced by the model's forward pass; metrics evaluate span-level
# F1 over BIO-encoded tags plus token-level label accuracy, all reading the
# standard 'pred'/'target' fields masked by 'seq_len'.
loss = LossInForward()
f1_metric = SpanFPreRecMetric(vocabs['label'],
                              pred='pred',
                              target='target',
                              seq_len='seq_len',
                              encoding_type='bio')
acc_metric = AccuracyMetric(pred='pred', target='target', seq_len='seq_len')
acc_metric.set_metric_name('label_acc')
metrics = [f1_metric, acc_metric]

# Split parameters into embedding vs. non-embedding groups, presumably so the
# optimizer can give embeddings their own hyper-parameters (the param-group
# construction continues below this block) — confirm against the optimizer setup.
embedding_param = list(model.bigram_embed.parameters()) + list(
    model.lattice_embed.parameters())
# Use a set of object ids: membership tests below become O(1) instead of the
# original list's O(n) scan per parameter (O(n*m) overall).
embedding_param_ids = set(map(id, embedding_param))
# Identity-based filtering (via id) rather than equality: tensors compare
# element-wise with ==, so `p in embedding_param` would be wrong here.
non_embedding_param = [
    p for p in model.parameters() if id(p) not in embedding_param_ids
]

param_ = [{
    'params': non_embedding_param