# Example 1
                                               params['trainer']['patience']):
        # Early stopping: presumably no val-metric improvement for
        # `patience` epochs — condition head is outside this fragment.
        print("Stopping at epoch {:2d}".format(epoch_num))
        break
    # Persist model/optimizer state every epoch; mark this checkpoint as
    # best when the most recent epoch holds the max validation metric
    # (argmax of the history equals the last index).
    save_checkpoint(
        model,
        optimizer,
        args.folder,
        epoch_num,
        val_metric_per_epoch,
        is_best=int(
            np.argmax(val_metric_per_epoch)) == (len(val_metric_per_epoch) -
                                                 1))

print("STOPPING. now running the best model on the validation set", flush=True)
# Load best
# Restore the checkpoint that training flagged as best, then score it once
# over the full validation set with gradients disabled.
restore_best_checkpoint(model, args.folder)
model.eval()
val_probs = []
val_labels = []
# `enumerate` index and per-batch timing were unused — iterate directly.
for _time_per_batch, batch in time_batch(val_loader):
    with torch.no_grad():  # inference only; no autograd bookkeeping
        batch = _to_gpu(batch)
        output_dict = model(**batch)
        val_probs.append(output_dict['label_probs'].detach().cpu().numpy())
        val_labels.append(batch['label'].detach().cpu().numpy())
val_labels = np.concatenate(val_labels, 0)
val_probs = np.concatenate(val_probs, 0)
# Top-1 accuracy: predicted class is the argmax over label probabilities.
acc = float(np.mean(val_labels == val_probs.argmax(1)))
print("Final val accuracy is {:.3f}".format(acc))
# Fix: the path was an f-string with no placeholders (ruff F541).
np.save(os.path.join(args.folder, 'valpreds.npy'), val_probs)
# Example 2
        # Tail of a save_checkpoint(...) call started above this fragment:
        # the checkpoint is "best" when the latest epoch holds the max
        # validation metric, and attention tensors are saved alongside it.
        is_best=int(
            np.argmax(val_metric_per_epoch)) == (len(val_metric_per_epoch) -
                                                 1),
        q_att1=q_att1,
        a_att1=a_att1,
        q_att2=q_att2,
        a_att2=a_att2)
    # Log per-epoch validation loss/accuracy to TensorBoard.
    writer.add_scalar('val_loss', val_loss_avg, global_step=epoch_num)
    writer.add_scalar('val_accuracy',
                      val_metric_per_epoch[-1],
                      global_step=epoch_num)

print("STOPPING. now running the best model on the validation set", flush=True)
logger.write("STOPPING. now running the best model on the validation set")
# Load best
# Restore the checkpoint that training flagged as best, then score it once
# over the full validation set with gradients disabled.
restore_best_checkpoint(model, folder)
model.eval()
val_probs = []
val_labels = []
# `enumerate` index and per-batch timing were unused — iterate directly.
for _time_per_batch, batch in time_batch(val_loader):
    with torch.no_grad():  # inference only; no autograd bookkeeping
        batch = _to_gpu(batch)
        output_dict = model(**batch)
        val_probs.append(output_dict['label_probs'].detach().cpu().numpy())
        val_labels.append(batch['label'].detach().cpu().numpy())
val_labels = np.concatenate(val_labels, 0)
val_probs = np.concatenate(val_probs, 0)
# Top-1 accuracy: predicted class is the argmax over label probabilities.
acc = float(np.mean(val_labels == val_probs.argmax(1)))
print("Final val accuracy is {:.4f}".format(acc))
logger.write("Final val accuracy is {:.4f}".format(acc))
# Fix: the path was an f-string with no placeholders (ruff F541).
np.save(os.path.join(folder, 'valpreds.npy'), val_probs)