Code example #1
File: weibo_iarnet_main.py  Project: serryuer/IARNet
            # Group parameters by whether their name matches an entry in no_decay
            # (bias and LayerNorm weights are conventionally exempt from weight decay).
            # Note that both groups are given weight_decay=0.0 here, so the split has
            # no effect as written.
            no_decay = ['bias', 'LayerNorm.weight']
            optimizer_grouped_parameters = [
                {'params': [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)],
                 'weight_decay': 0.0},
                {'params': [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)],
                 'weight_decay': 0.0}
            ]

            optimizer = AdamW(optimizer_grouped_parameters, lr=2e-5, eps=1e-8)
            crit = torch.nn.CrossEntropyLoss()

            trainer = Train(model_name=model_name,
                            train_loader=train_loader,
                            val_loader=val_loader,
                            test_loader=test_loader,
                            model=model,
                            optimizer=optimizer,
                            loss_fn=crit,
                            epochs=12,
                            print_step=1,
                            early_stop_patience=3,
                            # save_model_path=f"./save_model/{params['model_name']}",
                            save_model_path=f"/sdd/yujunshuai/save_model/{model_name}",
                            save_model_every_epoch=False,
                            metric=accuracy_score,
                            num_class=2,
                            # tensorboard_path='./tensorboard_log')
                            tensorboard_path='/sdd/yujunshuai/tensorboard_log')
            print(trainer.train())
            print(trainer.test())
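
Both parameter groups in the example above use weight_decay=0.0, so the no_decay split has no practical effect. The usual intent of this pattern is to apply weight decay only to parameters outside no_decay. A minimal sketch of that variant, reusing the same model and AdamW import as above (the 0.01 value is an assumed, commonly used default, not taken from the project):

    no_decay = ['bias', 'LayerNorm.weight']
    optimizer_grouped_parameters = [
        # decayed group: every parameter except biases and LayerNorm weights
        {'params': [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)],
         'weight_decay': 0.01},  # 0.01 is an assumed, typical value
        # non-decayed group: biases and LayerNorm weights
        {'params': [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)],
         'weight_decay': 0.0},
    ]
    optimizer = AdamW(optimizer_grouped_parameters, lr=2e-5, eps=1e-8)
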
Code example #2
File: hgcn_main.py  Project: serryuer/qa_4_gaokao
    # (this excerpt begins partway through the optimizer_grouped_parameters list)
        'weight_decay': 0.0
    }]

    optimizer = AdamW(optimizer_grouped_parameters, lr=args.lr, eps=1e-8)

    trainer = Train(
        model_name=args.model_name,
        train_loader=train_loader,
        test_loader=test_loader,
        device=args.device,
        model=model,
        optimizer=optimizer,
        epochs=args.epochs,
        print_step=1,
        early_stop_patience=args.early_stop_patience,
        save_model_path=f"./experiments/save_model/{args.model_name}",
        save_model_every_epoch=False,
        metric=accuracy_score,
        num_class=2,
        tensorboard_path=f'./experiments/tensorboard_log/{args.model_name}')

    if args.test and args.best_model_path:
        print(
            trainer.test(test_loader,
                         './data/processed_modified_test_data.json',
                         './data/processed_modified_test_data_result.json',
                         args.best_model_path))
    else:
        print(trainer.train())
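
The snippet above reads several fields from an args object (args.model_name, args.lr, args.device, args.epochs, args.early_stop_patience, args.test, args.best_model_path). The project's actual argument parser is not shown here; a hedged argparse sketch that would supply those fields (all defaults below are assumptions, not values from the project) could look like:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--model_name', type=str, default='hgcn')       # assumed default
    parser.add_argument('--lr', type=float, default=2e-5)               # assumed default
    parser.add_argument('--device', type=str, default='cuda:0')         # assumed default
    parser.add_argument('--epochs', type=int, default=10)               # assumed default
    parser.add_argument('--early_stop_patience', type=int, default=3)   # assumed default
    parser.add_argument('--test', action='store_true')
    parser.add_argument('--best_model_path', type=str, default='')
    args = parser.parse_args()
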
Code example #3
    optimizer_grouped_parameters = [
        {'params': [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)],
         'weight_decay': 0.0},
        {'params': [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)],
         'weight_decay': 0.0}
    ]

    optimizer = AdamW(optimizer_grouped_parameters, lr=2e-5, eps=1e-8)
    crit = torch.nn.CrossEntropyLoss()

    trainer = Train(model_name='weibo_bert_cat',
                    train_loader=train_loader,
                    val_loader=val_loader,
                    test_loader=test_loader,
                    model=model,
                    optimizer=optimizer,
                    loss_fn=crit,
                    epochs=10,
                    print_step=1,
                    early_stop_patience=3,
                    # save_model_path=f"./save_model/{params['model_name']}",
                    save_model_path=f"/sdd/yujunshuai/save_model/weibo_bert_cat",
                    save_model_every_epoch=False,
                    metric=accuracy_score,
                    num_class=2,
                    # tensorboard_path='./tensorboard_log')
                    tensorboard_path='/sdd/yujunshuai/tensorboard_log')
    trainer.train()
    trainer.test()
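
In examples #1 and #3, torch.nn.CrossEntropyLoss is passed to the trainer as loss_fn. It expects raw logits of shape (batch, num_class) and integer class labels, which matches num_class=2 above. A small standalone check of that interface:

    import torch

    crit = torch.nn.CrossEntropyLoss()
    logits = torch.randn(4, 2)           # raw, unnormalized scores for 2 classes
    labels = torch.tensor([0, 1, 1, 0])  # integer class indices, not one-hot
    print(crit(logits, labels).item())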