Example #1
                                          eval_model_fn,
                                          loss_fn,
                                          optimizer,
                                          dataloader,
                                          batch_preprocessing_fn,
                                          log_interval=1)
    # Evaluate on the held-out test split for this epoch. process_epoch is
    # defined elsewhere in this file; it presumably runs one full pass over
    # dataloader_test and returns the epoch's aggregate loss -- TODO confirm
    # against its definition.
    test_loss[epoch - 1] = process_epoch('test',
                                         epoch,
                                         eval_model_fn,
                                         loss_fn,
                                         optimizer,
                                         dataloader_test,
                                         batch_preprocessing_fn,
                                         log_interval=1)

    # remember best loss
    # NOTE: is_best must be computed *before* best_test_loss is updated below;
    # otherwise the strict '<' comparison could never flag the current epoch.
    is_best = test_loss[epoch - 1] < best_test_loss
    best_test_loss = min(test_loss[epoch - 1], best_test_loss)
    # Persist a full training snapshot every epoch (model + optimizer state,
    # loss history, and the CLI args for reproducibility). save_checkpoint is
    # defined elsewhere; presumably it also copies the checkpoint aside when
    # is_best is True -- verify against its definition.
    save_checkpoint(
        {
            'epoch': epoch,
            'args': args,
            'state_dict': model.state_dict(),
            'best_test_loss': best_test_loss,
            'optimizer': optimizer.state_dict(),
            'train_loss': train_loss,
            'test_loss': test_loss,
        }, is_best, checkpoint_name)

print('Done!')
Example #2
File: train.py — Project: yang620/ncnet
    )
    # Evaluate on the held-out test split for this epoch. process_epoch is
    # defined elsewhere in this file; it presumably runs one full pass over
    # dataloader_test and returns the epoch's aggregate loss -- TODO confirm
    # against its definition. batch_preprocessing_fn=None suggests batches
    # are consumed as-is in this variant.
    test_loss[epoch - 1] = process_epoch(
        "test",
        epoch,
        model,
        loss_fn,
        optimizer,
        dataloader_test,
        batch_preprocessing_fn=None,
        log_interval=1,
    )

    # remember best loss
    # NOTE: is_best must be computed *before* best_test_loss is updated below;
    # otherwise the strict '<' comparison could never flag the current epoch.
    is_best = test_loss[epoch - 1] < best_test_loss
    best_test_loss = min(test_loss[epoch - 1], best_test_loss)
    # Persist a full training snapshot every epoch (model + optimizer state,
    # loss history, and the CLI args for reproducibility). save_checkpoint is
    # defined elsewhere; presumably it also copies the checkpoint aside when
    # is_best is True -- verify against its definition.
    save_checkpoint(
        {
            "epoch": epoch,
            "args": args,
            "state_dict": model.state_dict(),
            "best_test_loss": best_test_loss,
            "optimizer": optimizer.state_dict(),
            "train_loss": train_loss,
            "test_loss": test_loss,
        },
        is_best,
        checkpoint_name,
    )

print("Done!")