# Example #1
# 0
    # NOTE(review): this chunk begins mid-statement — the opening
    # `transforms_test = transforms.Compose([` line lies before the visible
    # region, so the lines below are only the tail of that pipeline.
    transforms.Resize((360, 480)),
    transforms.ToTensor(),
    # Per-channel normalization constants — presumably statistics computed
    # over the training set; TODO confirm against the training pipeline.
    transforms.Normalize(mean=[0.4372, 0.4372, 0.4373],
                         std=[0.2479, 0.2475, 0.2485])
])

# Evaluation dataset/loader. KeyPointDatasets and its collect_fn are
# project-defined (not visible in this chunk).
datasets_test = KeyPointDatasets(root_dir="./data", transforms=transforms_test)

dataloader_test = DataLoader(datasets_test,
                             batch_size=4,
                             shuffle=True,
                             collate_fn=datasets_test.collect_fn)

# Build the model and restore trained weights from a fixed checkpoint path.
model = KeyPointModel()

model.load_state_dict(torch.load("weights/epoch_290_0.232.pt"))

# Collect every .jpg under ./data/images for inference.
img_list = glob.glob(os.path.join("./data/images", "*.jpg"))

save_path = "./output"  # output directory — used past the visible region, presumably

img_tensor_list = []
img_name_list = []

for i in range(len(img_list)):
    img_path = img_list[i]
    img_name = os.path.basename(img_path)
    img_name_list.append(img_name)

    # cv2.imread returns an HxWx3 BGR uint8 ndarray, or None on failure
    # (not checked here). NOTE(review): this pipeline has no ToPILImage,
    # so Resize receives a raw ndarray — confirm this is supported by the
    # torchvision version in use.
    img = cv2.imread(img_path)
    img_tensor = transforms_test(img)
# Example #2
# 0
        # NOTE(review): this chunk begins mid-statement inside an enclosing
        # function (the indentation and `args.model` below imply one) — the
        # `... = transforms.Compose([` opener is above the visible region.
        transforms.ToPILImage(),            # ndarray/tensor -> PIL, so Resize gets a PIL image
        transforms.Resize((360, 480)),
        transforms.ToTensor(),
        # Same normalization constants as the test pipeline elsewhere in this
        # file — presumably dataset statistics; TODO confirm.
        transforms.Normalize(mean=[0.4372, 0.4372, 0.4373],
                             std=[0.2479, 0.2475, 0.2485])
    ])

    # Project-defined dataset; collect_fn batches samples for the loader.
    dataset = KeyPointDatasets(root_dir="./data", transforms=transforms_all)

    dataloader = DataLoader(dataset,
                            shuffle=True,
                            batch_size=1,
                            collate_fn=dataset.collect_fn)

    # Build the model and load weights from the command-line-supplied path.
    model = KeyPointModel()
    model.load_state_dict(torch.load(args.model))

    # NOTE(review): `iter` shadows the builtin — flagged, left unchanged here.
    for iter, (image, label) in enumerate(dataloader):
        # print(image.shape)
        bs = image.shape[0]
        hm = model(image)  # forward pass; `hm` is presumably a heatmap — verify

        # _nms / _topk are project helpers not visible here; the names suggest
        # CenterNet-style peak suppression + top-K peak extraction — TODO confirm.
        hm = _nms(hm)

        scores, inds, clses, ys, xs = _topk(hm, K=1)

        print(scores, '\n', inds, '\n', clses, '\n', ys, '\n', xs)

        # Detach from the autograd graph and move to NumPy for post-processing.
        hm = hm.detach().numpy()

        # Per-sample loop; its body continues past the visible region.
        for i in range(bs):