Example #1
    # Assumes module-level imports: torch, torch.nn as nn, torch.optim as optim.
    if cfg.pretrained_lm_dir:
        cfg.logger.debug('Pre-trained language model: {}'.format(
            cfg.pretrained_lm_dir))
    else:
        cfg.logger.debug('Pre-trained language model: initial training')

    # Training
    language_model = LanguageModel(wv_dict, cfg.hidden_dim).to(cfg.device)
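    # LanguageModel and wv_dict come from earlier in the script; wv_dict is
    # presumably the word-vector vocabulary used to size the embedding layer.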
    criterion = nn.NLLLoss()
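    # nn.NLLLoss expects log-probabilities, so the model's output layer is
    # presumably followed by log_softmax (not shown in this excerpt).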
    optimizer = optim.Adam(language_model.parameters(), lr=cfg.lr)
    scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer,
                                                     mode='min',
                                                     factor=cfg.sch_factor,
                                                     patience=cfg.sch_patience,
                                                     verbose=cfg.sch_verbose)
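    # ReduceLROnPlateau scales the learning rate by sch_factor once the
    # monitored loss stops improving for sch_patience epochs; the scheduler
    # is presumably stepped with a loss value later in the training loop.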
    if cfg.pretrained_lm_dir:
        lm_loading_res = language_model.load_state_dict(
            torch.load(cfg.pretrained_lm_dir))
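        # load_state_dict returns a NamedTuple of missing_keys and
        # unexpected_keys; logging it surfaces any state-dict mismatch.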
        cfg.logger.debug('Loading language model: {}'.format(lm_loading_res))

    train_losses, train_accs = [], []  # losses & accuracies to save
    if cfg.test_ratio > 0:
        test_losses, test_accs = [], []

    cfg.logger.info('Training.')
    for epoch in range(1, cfg.num_epochs + 1):
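        # train_dis_epoch (defined elsewhere) appears to return per-batch
        # losses and accuracies, which are accumulated across epochs below.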
        train_losses_, train_accs_ = train_dis_epoch(epoch, language_model,
                                                     train_loader, criterion,
                                                     optimizer)
        train_losses += train_losses_
        train_accs += train_accs_

        if cfg.test_ratio > 0: