# Example #1 (pasted-snippet header; stray "0" vote-count line was an extraction artifact)
            # Evaluate on the held-out split. `test` is defined out of view;
            # presumably it returns (mean loss, accuracy) for this epoch —
            # TODO confirm against its definition.
            test_loss_, test_acc_ = test(language_model, test_loader,
                                         criterion)
            test_losses.append(test_loss_)
            test_accs.append(test_acc_)

            cfg.logger.debug(
                "[Epoch %d/%d] ----------------> [Test Loss: %f] [Test Acc: %f]"
                % (epoch, cfg.num_epochs, test_losses[-1], test_accs[-1]))
        else:
            # No test split configured this epoch: log a separator line only.
            cfg.logger.debug("-" * 74)

        # Step the LR scheduler with the latest training loss (metric-driven,
        # ReduceLROnPlateau-style step(metric) — presumably; verify scheduler
        # type where it is constructed). NOTE(review): "schedular" is a typo
        # for "scheduler"; it is defined above, outside this view, so it is
        # left unrenamed here.
        schedular.step(train_losses[-1])

    # Save language model, losses and training accuracies
    cfg.logger.info('Saving language model.')
    # Persist only the weights (state_dict), not the whole module object.
    torch.save(language_model.state_dict(), cfg.save_lm_dir)

    # Per-epoch training losses as a one-column CSV (no index column).
    cfg.logger.info('Saving training losses.')
    saving_train_losses = pd.DataFrame({'Training Loss': train_losses})
    saving_train_losses.to_csv(cfg.save_tr_l_dir, index=False)

    cfg.logger.info('Saving training accuracies.')
    saving_train_accs = pd.DataFrame({'Training Accuracy': train_accs})
    saving_train_accs.to_csv(cfg.save_tr_a_dir, index=False)

    # Test metrics are only collected when a test split exists
    # (cfg.test_ratio > 0), so guard the save the same way.
    if cfg.test_ratio > 0:
        cfg.logger.info('Saving testing losses.')
        saving_test_losses = pd.DataFrame({'Testing Loss': test_losses})
        saving_test_losses.to_csv(cfg.save_tst_l_dir, index=False)

        cfg.logger.info('Saving testing accuracies.')