Example #1
    '3_6', '3_7', '4_1', '4_2', '4_3', '4_4', '4_5', '4_6', '4_7', '5_1',
    '5_2', '5_3', '5_4', '5_5', '5_6', '5_7', '6_1', '6_2', '6_3', '6_4',
    '6_5', '6_6', '6_7', '7_1', '7_2', '7_3', '7_4', '7_5', '7_6', '7_7'
]

transform_test = T.Compose([
    T.Resize([224, 224]),
    T.ToTensor(),
    T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])  # ImageNet mean/std
])

model_name = 'AlexNet'
model_path = './models/1_AlexNet_0.18444_14.pth'

model = Baseline(model='test', model_name=model_name)
model.load_param(model_path)
model = model.cuda()
model = model.eval()

records = open('./faces_224/anns/val_ld.txt').read().strip().split('\n')

result_file = open("predictions.txt", 'w')
with torch.no_grad():
    for rec in records:
        # Each record: image path followed by the landmark coordinates.
        rec = rec.split()
        img_path = rec[0]

        landmark = np.array(list(map(float, rec[1:])), dtype=np.float32)
        landmark = torch.tensor(landmark, dtype=torch.float32).unsqueeze(0)
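
        # --- The original example is cut off here. The lines below are a
        # --- hedged sketch, NOT part of the source: they assume
        # --- `from PIL import Image` is available and that the model takes
        # --- (image, landmarks) and returns (logits, features) as in Example #2.
        img = Image.open(img_path).convert('RGB')
        img = transform_test(img).unsqueeze(0).cuda()
        landmark = landmark.cuda()

        prds, _ = model(img, landmark)
        pred_cls = int(prds.argmax(dim=1).item())
        result_file.write('{} {}\n'.format(img_path, pred_cls))
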
Example #2
    '6_AlexNet_0.23467_18.pth', '7_AlexNet_0.25146_19.pth',
    '8_AlexNet_0.18022_7.pth', '9_AlexNet_0.19684_34.pth'
]
model = Baseline(model='test', model_name=model_name)

test_data = TestDataset('./faces_224/anns/test_ld.txt')
test_loader = DataLoader(dataset=test_data,
                         batch_size=48,
                         shuffle=False,
                         num_workers=2,
                         collate_fn=test_collate)
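
# `TestDataset` and `test_collate` are project helpers that are not shown in
# this listing. Purely as an illustration of the shape the loop below expects
# (batched images and batched landmarks), a collate of this kind might look
# like the sketch that follows; this is a guess, not the repository's helper.
def test_collate_sketch(batch):
    # batch: list of (image_tensor, landmark_tensor) pairs from the dataset
    imgs, lms = zip(*batch)
    return torch.stack(imgs, dim=0), torch.stack(lms, dim=0)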

for train_model in model_paths:
    print('-------------------model name: {}-------------------'.format(
        train_model))
    model.load_param('models/' + train_model)
    model = model.cuda()
    model = model.eval()

    result = defaultdict(list)  # predictions collected for this checkpoint
    with torch.no_grad():
        for img, lms in test_loader:
            img = img.cuda()
            lms = lms.cuda()

            prds, _ = model(img, lms)
            prds = F.softmax(prds, dim=1)  # explicit dim avoids the deprecation warning
            prds = prds.cpu().numpy()

            for pred in prds:
                pred = list(pred)
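
# --- The listing is truncated above. As a purely illustrative sketch (not the
# --- source's code), per-model probability rows such as those collected in
# --- `result` can later be ensembled by averaging across the checkpoints and
# --- taking the argmax per image; the names below are assumptions.
import numpy as np

def ensemble_average(per_model_probs):
    # per_model_probs: list of (num_images, num_classes) arrays, one per model.
    avg = np.mean(np.stack(per_model_probs, axis=0), axis=0)
    return avg.argmax(axis=1)  # final class index for each image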