Example 1
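    # Seed the random number generators with the configured seed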
    set_random_seed(_env['seed'])

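    # Project and run names derived from the root path, model and key hyperparameters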
    project_name = _root.split("/")[-1]
    run_name = (f"{_model['name']}_{_model['size']}-"
                f"lr_{_training['lr']}-bsz_{_training['batch_size']}-"
                f"seed_{_env['seed']}")
    now = datetime.now().strftime('%Y-%m-%d_%Hh%Mm%Ss')

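    # Tokenizer matching the chosen model name and size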
    tokenizer = get_tokenizer(_model['name'], _model['size'])

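    # Train and dev splits built from the project root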
    train_dataset = CustomDataset(_root, 'train', tokenizer, _training["max_len"])
    dev_dataset = CustomDataset(_root, 'dev', tokenizer, _training["max_len"])

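    # Resolve the model, optimizer and loss-function classes named in the config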
    Model = get_model_class(_model['name'])
    Opt = get_optim_class(_model['opt'])
    Loss_fn = get_loss_fn_class(_model['loss'])
    model = Model(n_outputs=train_dataset.n_outputs, size=_model['size'],
                  pretrained_model_path=str2bool(_model['pretrained_model_path']))

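    # Metrics reported during training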
    metric_dic = {
        "acc": Accuracy(),
        "precision": Precision()
    }
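    # Checkpoint callback that monitors dev loss (lower is better)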
    callbacks = [
        ModelCheckpoint(f"{_save_model_root}/{run_name}.pth", monitor='dev_loss', mode="min")
    ]

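    # Assemble the trainer and run the training loop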
    trainer = Trainer(model=model, loss_fn_class=Loss_fn, optimizer_class=Opt, metrics=metric_dic)
    trainer.fit(train_dataset, dev_dataset, lr=_training['lr'], epochs=_training['epochs'],
                batch_size=_training['batch_size'], callbacks=callbacks)
Example 2
    """ load model """
    model = prepare_model(args)
    model.cuda()

    """ define loss """
    criterion = nn.CrossEntropyLoss()

    """ setup metrics """
    metric = MeanIOUScore(9)  # 9 = number of classes scored

    """ setup optimizer """
    optimizer = torch.optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.weight_decay)

    """ setup tensorboard """
    writer = SummaryWriter(os.path.join(args.save_dir, "train_info"))

    """ setup trainer """
    trainer = Trainer(
        model,
        optimizer,
        criterion,
        args.accumulate_gradient,
        train_loader,
        val_loader,
        writer,
        metric,
        args.save_dir,
    )

    trainer.fit(args.epochs)