Example #1
def nao_valid(queue, model):
    # Accumulate inputs, targets, predictions and decoded archs over the whole
    # queue, then compute the metrics once on the flattened lists.
    inputs = []
    targets = []
    predictions = []
    archs = []
    with torch.no_grad():
        model.eval()
        for step, sample in enumerate(queue):
            encoder_input = sample['encoder_input']
            encoder_target = sample['encoder_target']
            decoder_target = sample['decoder_target']

            encoder_input = encoder_input.cuda()
            encoder_target = encoder_target.cuda()
            decoder_target = decoder_target.cuda()

            # predict_value: predicted performance, arch: decoded architecture tokens
            predict_value, logits, arch = model(encoder_input)
            n = encoder_input.size(0)  # batch size (unused in this variant)
            inputs += encoder_input.data.squeeze().tolist()
            targets += encoder_target.data.squeeze().tolist()
            predictions += predict_value.data.squeeze().tolist()
            archs += arch.data.squeeze().tolist()
    # Ranking agreement between true and predicted performance, and token-level
    # distance between the input and reconstructed architecture sequences.
    pa = utils.pairwise_accuracy(targets, predictions)
    hd = utils.hamming_distance(inputs, archs)
    return pa, hd
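The helpers utils.pairwise_accuracy and utils.hamming_distance come from the project's utils module and are not shown on this page. As a rough guide to what they measure, here is a minimal sketch of compatible implementations (assumed behaviour, not the exact upstream code): pairwise accuracy is the fraction of index pairs whose relative ordering agrees between the true and predicted values, and the Hamming distance is the average number of token positions at which paired architecture sequences differ.

# Minimal sketch of the two metrics (assumed behaviour, not the upstream code).
def pairwise_accuracy(targets, predictions):
    # Fraction of (i, j) pairs ranked in the same order by both lists.
    n = len(targets)
    assert n == len(predictions)
    total = 0
    agree = 0
    for i in range(n):
        for j in range(i + 1, n):
            total += 1
            if (targets[i] > targets[j]) == (predictions[i] > predictions[j]):
                agree += 1
    return agree / total if total else 0.0

def hamming_distance(seqs_a, seqs_b):
    # Average number of positions at which paired token sequences differ.
    assert len(seqs_a) == len(seqs_b)
    total = 0
    for a, b in zip(seqs_a, seqs_b):
        total += sum(1 for x, y in zip(a, b) if x != y)
    return total / len(seqs_a) if seqs_a else 0.0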
Example #2
def nao_valid(queue, model):
    # Running averages of the two metrics, weighted by batch size.
    pa = utils.AvgrageMeter()
    hs = utils.AvgrageMeter()
    with torch.no_grad():
        model.eval()
        for step, sample in enumerate(queue):
            encoder_input = sample['encoder_input']
            encoder_target = sample['encoder_target']
            decoder_target = sample['decoder_target']

            encoder_input = encoder_input.cuda()
            encoder_target = encoder_target.cuda()
            decoder_target = decoder_target.cuda()

            # predict_value: predicted performance, arch: decoded architecture tokens
            predict_value, logits, arch = model(encoder_input)
            n = encoder_input.size(0)
            pairwise_acc = utils.pairwise_accuracy(
                encoder_target.data.squeeze().tolist(),
                predict_value.data.squeeze().tolist())
            hamming_dis = utils.hamming_distance(
                decoder_target.data.squeeze().tolist(),
                arch.data.squeeze().tolist())
            pa.update(pairwise_acc, n)
            hs.update(hamming_dis, n)
    return pa.avg, hs.avg
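This variant keeps running averages with utils.AvgrageMeter (spelled that way in the source project). A minimal sketch of such a meter, assuming the update(val, n) / .avg interface used above:

class AvgrageMeter(object):
    # Running average over (value, count) updates, matching the
    # update(val, n) / .avg usage in Example #2.
    def __init__(self):
        self.reset()

    def reset(self):
        self.avg = 0.0
        self.sum = 0.0
        self.cnt = 0

    def update(self, val, n=1):
        self.sum += val * n
        self.cnt += n
        self.avg = self.sum / self.cnt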
Example #3
def nao_valid(queue, model):
    pa = utils.AvgMeter()
    hs = utils.AvgMeter()
    mse = utils.AvgMeter()

    for step, sample in enumerate(queue):
        encoder_input = sample['encoder_input']
        encoder_target = sample['encoder_target']
        decoder_target = sample['decoder_target']

        predict_value, logits, arch = model(encoder_input)
        n = encoder_input.shape[0]
        # Same two metrics as above, plus the mean squared error between the
        # true and predicted performance values (computed with keras.losses.MSE).
        pairwise_acc = utils.pairwise_accuracy(encoder_target.data.squeeze().tolist(),
                                               predict_value.data.squeeze().tolist())
        hamming_dis = utils.hamming_distance(decoder_target.data.squeeze().tolist(),
                                             arch.data.squeeze().tolist())
        mse.update(keras.losses.MSE(encoder_target.data.squeeze(), predict_value.data.squeeze()), n)
        pa.update(pairwise_acc, n)
        hs.update(hamming_dis, n)

    return mse.avg, pa.avg, hs.avg
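All three variants expect queue to yield dicts with 'encoder_input', 'encoder_target' and 'decoder_target' tensors, and model to return a (predict_value, logits, arch) triple for a batch of encoded architectures. A hedged usage sketch for the Example #2 signature, with a hypothetical dataset wrapper (NAOValidDataset and the tensor names are illustrative, not taken from the examples above):

import torch
from torch.utils.data import DataLoader, Dataset

class NAOValidDataset(Dataset):
    # Hypothetical wrapper producing the dict layout the examples index into.
    def __init__(self, encoder_input, encoder_target, decoder_target):
        self.encoder_input = encoder_input
        self.encoder_target = encoder_target
        self.decoder_target = decoder_target

    def __len__(self):
        return len(self.encoder_input)

    def __getitem__(self, idx):
        return {
            'encoder_input': self.encoder_input[idx],
            'encoder_target': self.encoder_target[idx],
            'decoder_target': self.decoder_target[idx],
        }

# valid_queue = DataLoader(NAOValidDataset(enc_in, enc_tgt, dec_tgt), batch_size=100)
# pa, hd = nao_valid(valid_queue, controller)  # controller: an NAO-style encoder-predictor-decoder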