Example #1
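# Assumes `torch` (imported below) plus fastNLP-style helpers (Trainer, Tester,
# ParserEvaluator, ModelSaver) and module-level objects (train_args, optim_args,
# test_args, ignore_label, model, embed, word_v, pos_v, train_data, dev_data)
# defined elsewhere in the original script.
import torch
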
def train(path):
    # Trainer
    trainer = Trainer(**train_args.data)

    def _define_optim(obj):
        lr = optim_args.data['lr']
        embed_params = set(obj._model.word_embedding.parameters())
        decay_params = set(obj._model.arc_predictor.parameters()) | set(
            obj._model.label_predictor.parameters())
        params = [
            p for p in obj._model.parameters()
            if p not in decay_params and p not in embed_params
        ]
        # Three parameter groups: embeddings at 0.1x the base lr, the arc/label
        # predictors with their own optimizer settings, and everything else at
        # the defaults given below.
        obj._optimizer = torch.optim.Adam(
            [
                {'params': list(embed_params), 'lr': lr * 0.1},
                {'params': list(decay_params), **optim_args.data},
                {'params': params},
            ],
            lr=lr,
            betas=(0.9, 0.9),
        )
        # Exponential lr decay, floored at 5% of each group's base lr.
        obj._scheduler = torch.optim.lr_scheduler.LambdaLR(
            obj._optimizer, lambda ep: max(0.75 ** (ep / 5e4), 0.05))

    def _update(obj):
        # torch.nn.utils.clip_grad_norm_(obj._model.parameters(), 5.0)
        obj._optimizer.step()
        # Step the scheduler after the optimizer (required since PyTorch 1.1).
        obj._scheduler.step()

    # Monkey-patch the Trainer's optimizer setup, update step, and validator.
    trainer.define_optimizer = lambda: _define_optim(trainer)
    trainer.update = lambda: _update(trainer)
    trainer.set_validator(
        Tester(**test_args.data, evaluator=ParserEvaluator(ignore_label)))

    # Swap in the pretrained embeddings and zero the padding rows so padded
    # positions contribute nothing.
    model.word_embedding = torch.nn.Embedding.from_pretrained(embed,
                                                              freeze=False)
    model.word_embedding.padding_idx = word_v.padding_idx
    model.word_embedding.weight.data[word_v.padding_idx].fill_(0)
    model.pos_embedding.padding_idx = pos_v.padding_idx
    model.pos_embedding.weight.data[pos_v.padding_idx].fill_(0)

    # try:
    #     ModelLoader.load_pytorch(model, "./save/saved_model.pkl")
    #     print('model parameter loaded!')
    # except Exception as _:
    #     print("No saved model. Continue.")
    #     pass

    # Start training
    trainer.train(model, train_data, dev_data)
    print("Training finished!")

    # Saver
    saver = ModelSaver("./save/saved_model.pkl")
    saver.save_pytorch(model)
    print("Model saved!")
Example #2
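# Assumes the imports below plus fastNLP-style helpers (Trainer, MyTester,
# BiaffineParser, EmbedLoader, ModelLoader, ModelSaver) and module-level
# objects (train_args, optim_args, test_args, model_args, word_v, pos_v,
# emb_file_name, processed_datadir, train_data, dev_data) defined elsewhere.
import os

import torch
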
def train():
    # Trainer
    trainer = Trainer(**train_args.data)

    def _define_optim(obj):
        obj._optimizer = torch.optim.Adam(obj._model.parameters(),
                                          **optim_args.data)
        obj._scheduler = torch.optim.lr_scheduler.LambdaLR(
            obj._optimizer, lambda ep: .75**(ep / 5e4))

    def _update(obj):
        obj._optimizer.step()
        # Step the scheduler after the optimizer (required since PyTorch 1.1).
        obj._scheduler.step()

    trainer.define_optimizer = lambda: _define_optim(trainer)
    trainer.update = lambda: _update(trainer)
    trainer.get_loss = lambda predict, truth: trainer._loss_func(
        **predict, **truth)
    trainer._create_validator = lambda x: MyTester(**test_args.data)

    # Model
    model = BiaffineParser(**model_args.data)

    # use pretrained word embeddings
    embed, _ = EmbedLoader.load_embedding(
        model_args['word_emb_dim'], emb_file_name, 'glove', word_v,
        os.path.join(processed_datadir, 'word_emb.pkl'))
    model.word_embedding = torch.nn.Embedding.from_pretrained(embed,
                                                              freeze=False)
    model.word_embedding.padding_idx = word_v.padding_idx
    model.word_embedding.weight.data[word_v.padding_idx].fill_(0)
    model.pos_embedding.padding_idx = pos_v.padding_idx
    model.pos_embedding.weight.data[pos_v.padding_idx].fill_(0)

    try:
        ModelLoader.load_pytorch(model, "./save/saved_model.pkl")
        print('model parameter loaded!')
    except Exception:
        print("No saved model. Continue.")

    # Start training
    trainer.train(model, train_data, dev_data)
    print("Training finished!")

    # Saver
    saver = ModelSaver("./save/saved_model.pkl")
    saver.save_pytorch(model)
    print("Model saved!")