Example #1
def load_model(config):
    # Run on GPU and record how many devices are available for DataParallel.
    device = torch.device("cuda")
    n_gpu = torch.cuda.device_count()

    # The InferSent sentence encoder is loaded separately from the task model.
    infersent = load_infersent()
    infersent.to(device)

    # SST-2 is a single-sentence classification task; all other datasets use the NLI head.
    if config.data_sign != 'SST-2':
        model = NLINet(config)
    else:
        model = ClassificationNet(config)
    model.to(device)

    # In 'score' and 'attack' modes, restore previously trained weights from disk.
    if config.mode == 'score' or config.mode == 'attack':
        model_dict_path = os.path.join(config.output_dir,
                                       "{}_{}.bin".format(config.data_sign, config.target_model))
        model.load_state_dict(torch.load(model_dict_path))
    optimizer = optim.Adam(model.parameters(), lr=config.learning_rate)

    # Wrap in DataParallel only when more than one GPU is present.
    if n_gpu > 1:
        model = torch.nn.DataParallel(model)
    return infersent, model, optimizer, device, n_gpu
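
A minimal usage sketch for the helper above. The config object and its values here are hypothetical placeholders; only the attribute names that load_model actually reads are taken from the snippet.

from types import SimpleNamespace

# Hypothetical config; only the attribute names mirror what load_model reads.
config = SimpleNamespace(
    data_sign='SST-2',          # 'SST-2' selects ClassificationNet, anything else NLINet
    mode='train',               # 'score' or 'attack' would also load saved weights
    output_dir='output',        # directory holding "{data_sign}_{target_model}.bin"
    target_model='infersent',   # only used to build the checkpoint filename
    learning_rate=1e-3,
)

infersent, model, optimizer, device, n_gpu = load_model(config)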
Example #2
print("================== Classifier =====================")
print(nli_net)
print("================== Encoder =====================")
print(nli_net.encoder)

# loss: uniform class weights and a summed (rather than mean) reduction,
# replacing the deprecated post-hoc size_average=False assignment
weight = torch.FloatTensor(params.n_classes).fill_(1)
loss_fn = nn.CrossEntropyLoss(weight=weight, reduction='sum')

# optimizer
optim_fn, optim_params = get_optimizer(params.optimizer)
optimizer = optim_fn(nli_net.parameters(), **optim_params)

# cuda by default
nli_net.to(device)
loss_fn.to(device)


"""
TRAIN
"""
val_acc_best = -1e10
adam_stop = False
stop_training = False
times_not_improve = 0
lr = optim_params['lr'] if 'sgd' in params.optimizer else None


def trainepoch(epoch):
    print('\nTRAINING : Epoch ' + str(epoch))
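
The get_optimizer helper used above is defined elsewhere in the codebase; these snippets only show that it turns a string such as params.optimizer into an optimizer constructor plus a keyword dict (with 'lr' present for SGD). A rough sketch of that calling convention, as an illustrative assumption rather than the actual implementation, might look like this:

import torch.optim as optim

def get_optimizer_sketch(spec):
    # Hypothetical parser: "sgd,lr=0.1" -> (optim.SGD, {'lr': 0.1}).
    # Only the (constructor, kwargs) return shape is implied by the code above.
    parts = spec.split(',')
    name, kwargs = parts[0], {}
    for item in parts[1:]:
        key, value = item.split('=')
        kwargs[key] = float(value)
    known = {'sgd': optim.SGD, 'adam': optim.Adam, 'rmsprop': optim.RMSprop}
    return known[name], kwargs

optim_fn, optim_params = get_optimizer_sketch('sgd,lr=0.1')
optimizer = optim_fn(nli_net.parameters(), **optim_params)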
Example #3
assert params.encoder_type in encoder_types, "encoder_type must be in " + \
                                             str(encoder_types)
nli_net = NLINet(config_nli_model)
print('\nmodel:\n', nli_net, '\n')

# loss: uniform class weights and a summed (rather than mean) reduction,
# replacing the deprecated post-hoc size_average=False assignment
weight = torch.FloatTensor(params.n_classes).fill_(1)
loss_fn = nn.CrossEntropyLoss(weight=weight, reduction='sum')

# optimizer
optim_fn, optim_params = get_optimizer(params.optimizer)
optimizer = optim_fn(nli_net.parameters(), **optim_params)

# cuda, if available
nli_net.to(DEVICE)
loss_fn.to(DEVICE)
"""
TRAIN
"""
val_acc_best = -1e10
adam_stop = False
stop_training = False
lr = optim_params['lr'] if 'sgd' in params.optimizer else None


def train_epoch(epoch, log):
    run_info.update_stats({'epoch': epoch})

    print('\nTRAINING : Epoch ' + str(epoch))
    nli_net.train()
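
The example is cut off right after nli_net.train(). For orientation, an epoch body in this style typically runs a forward pass per premise/hypothesis batch, computes the cross-entropy loss, and takes an optimizer step. The continuation below is purely illustrative: the batch iterator and the NLINet forward signature are assumptions, not the author's code.

    # Illustrative continuation (not the original body).
    for (s1, s1_len), (s2, s2_len), target in batches:   # hypothetical batch iterator
        output = nli_net((s1.to(DEVICE), s1_len), (s2.to(DEVICE), s2_len))
        loss = loss_fn(output, target.to(DEVICE))
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()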