import os

import numpy as np
import torch
from torchvision import transforms

# Project-local helpers used below (module paths are assumptions; adjust to
# the repository layout): HardCodeVariable, ShanghaiTechDataPath,
# get_train_val_list, get_dataloader, ListDataset, save_img, save_density_map,
# save_density_map_with_colorrange, img_name_to_int.


def visualize_shanghaitech_keepfull():
    HARD_CODE = HardCodeVariable()
    shanghaitech_data = ShanghaiTechDataPath(root=HARD_CODE.SHANGHAITECH_PATH)
    shanghaitech_data_part_a_train = shanghaitech_data.get_a().get_train().get()
    saved_folder = "visualize/test_dataloader_shanghaitech"
    os.makedirs(saved_folder, exist_ok=True)
    train_list, val_list = get_train_val_list(shanghaitech_data_part_a_train,
                                              test_size=0.2)
    test_list = None
    train_loader, val_loader, test_loader = get_dataloader(
        train_list,
        val_list,
        test_list,
        dataset_name="shanghaitech_keepfull",
        visualize_mode=True)

    # dump the first ten training samples and their density maps
    train_loader_iter = iter(train_loader)
    for i in range(10):
        img, label = next(train_loader_iter)
        save_img(img, os.path.join(saved_folder,
                                   "train_img" + str(i) + ".png"))
        save_density_map(
            label.numpy()[0][0],
            os.path.join(saved_folder, "train_label" + str(i) + ".png"))


def visualize_shanghaitech_nonoverlap_downsample():
    HARD_CODE = HardCodeVariable()
    shanghaitech_data = ShanghaiTechDataPath(root=HARD_CODE.SHANGHAITECH_PATH)
    shanghaitech_data_part_a_train = shanghaitech_data.get_a().get_train().get()
    saved_folder = "visualize/test_dataloader"
    os.makedirs(saved_folder, exist_ok=True)
    DATA_PATH = HARD_CODE.SHANGHAITECH_PATH_PART_B  # unused below; the loader is built from the part A train list
    train_list, val_list = get_train_val_list(shanghaitech_data_part_a_train,
                                              test_size=0.2)
    test_list = None

    # create data loader
    train_loader, val_loader, test_loader = get_dataloader(
        train_list,
        val_list,
        test_list,
        dataset_name="shanghaitech_non_overlap_downsample",
        visualize_mode=True)

    img, label = next(iter(train_loader))

    print(img.shape)
    save_img(img, os.path.join(saved_folder,
                               "overlap_downsample_loader_1.png"))
    save_density_map(
        label[0].numpy()[0],
        os.path.join(saved_folder,
                     "overlap_downsample_loader_with_p_density1.png"))

    print("count1 ", label.numpy()[0].sum())
    print("count2 ", label.numpy()[0].sum())
    print("count3 ", label.numpy()[0].sum())

    print("s1 ", label.shape)


def visualize_shanghaitech_pacnn_with_perspective():
    HARD_CODE = HardCodeVariable()
    shanghaitech_data = ShanghaiTechDataPath(root=HARD_CODE.SHANGHAITECH_PATH)
    shanghaitech_data_part_a_train = shanghaitech_data.get_a().get_train().get()
    saved_folder = "visualize/test_dataloader"
    os.makedirs(saved_folder, exist_ok=True)
    DATA_PATH = HARD_CODE.SHANGHAITECH_PATH_PART_A  # unused below; the loader is built from the part A train list
    train_list, val_list = get_train_val_list(shanghaitech_data_part_a_train,
                                              test_size=0.2)
    test_list = None

    # create data loader
    train_loader, val_loader, test_loader = get_dataloader(
        train_list, val_list, test_list, dataset_name="ucf_cc_50")
    train_loader_pacnn = torch.utils.data.DataLoader(
        ListDataset(train_list,
                    shuffle=True,
                    transform=transforms.Compose([transforms.ToTensor()]),
                    train=True,
                    batch_size=1,
                    num_workers=4,
                    dataset_name="shanghaitech_pacnn_with_perspective",
                    debug=True),
        batch_size=1,
        num_workers=4)

    img, label = next(iter(train_loader_pacnn))

    print(img.shape)
    save_img(img, os.path.join(saved_folder, "pacnn_loader_img.png"))
    save_density_map(
        label[0].numpy()[0],
        os.path.join(saved_folder, "pacnn_loader_with_p_density1.png"))
    save_density_map(
        label[1].numpy()[0],
        os.path.join(saved_folder, "pacnn_loader_with_p_density2.png"))
    save_density_map(
        label[2].numpy()[0],
        os.path.join(saved_folder, "pacnn_loader_with_p_density3.png"))
    save_density_map(label[3].numpy()[0],
                     os.path.join(saved_folder, "pacnn_loader_p_s_4.png"))
    save_density_map(label[4].numpy()[0],
                     os.path.join(saved_folder, "pacnn_loader_p_5.png"))
    print("count1 ", label[0].numpy()[0].sum())
    print("count2 ", label[1].numpy()[0].sum())
    print("count3 ", label[2].numpy()[0].sum())
    print("count4 ", label[3].numpy()[0].sum())
    print("count5 ", label[4].numpy()[0].sum())

    print("s1 ", label[0].shape)
    print("s2 ", label[1].shape)
    print("s3 ", label[2].shape)
    print("s4 ", label[3].shape)
    print("s5 ", label[4].shape)


def visualize_evaluation_shanghaitech_keepfull(
        path=None,
        dataset="shanghaitech_keepfull_r50",
        output="visualize/verify_dataloader_shanghaitech",
        meta_data="data_info.txt"):
    HARD_CODE = HardCodeVariable()
    if path is None:
        shanghaitech_data = ShanghaiTechDataPath(
            root=HARD_CODE.SHANGHAITECH_PATH)
        shanghaitech_data_part_a_train = shanghaitech_data.get_a().get_train().get()
        path = shanghaitech_data_part_a_train
    saved_folder = output
    os.makedirs(saved_folder, exist_ok=True)
    train_list, val_list = get_train_val_list(path, test_size=0.2)
    test_list = None
    train_loader, val_loader, test_loader = get_dataloader(
        train_list,
        val_list,
        test_list,
        dataset_name=dataset,
        visualize_mode=True,
        debug=True)

    # walk the whole train loader, logging head count vs. density-map sum per image
    train_loader_iter = iter(train_loader)
    f = open(meta_data, "w")
    total = len(train_loader)
    for i in range(len(train_loader)):
        img, label, debug_data = next(train_loader_iter)
        p_count = debug_data["p_count"]
        name = debug_data["name"][0]
        item_number = img_name_to_int(name)
        density_map_count = label.sum()
        log_str = str(item_number) + " " + str(
            density_map_count.item()) + " " + str(p_count.item())
        print(log_str)
        f.write(log_str + "\n")
        save_img(
            img,
            os.path.join(saved_folder,
                         "train_img_" + str(item_number) + ".png"))
        save_path = os.path.join(saved_folder,
                                 "train_label_" + str(item_number) + ".png")
        save_density_map(label.numpy()[0][0], save_path)
        print(str(i) + "/" + str(total))
    f.close()
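
# img_name_to_int (used above) is a project helper; a minimal sketch of the
# assumed behaviour -- extracting the numeric id from a file name such as
# "IMG_42.jpg" -- would be (an assumption, not the project code):
#
#     import re
#
#     def img_name_to_int(name):
#         return int(re.search(r"\d+", os.path.basename(name)).group())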


def visualize_ucf_cc_50_pacnn():
    HARD_CODE = HardCodeVariable()
    saved_folder = "visualize/test_dataloader"
    os.makedirs(saved_folder, exist_ok=True)
    DATA_PATH = HARD_CODE.UCF_CC_50_PATH
    train_list, val_list = get_train_val_list(DATA_PATH, test_size=0.2)
    test_list = None

    # create data loader
    train_loader, val_loader, test_loader = get_dataloader(
        train_list, val_list, test_list, dataset_name="ucf_cc_50")
    train_loader_pacnn = torch.utils.data.DataLoader(
        ListDataset(train_list,
                    shuffle=True,
                    transform=transforms.Compose([transforms.ToTensor()]),
                    train=True,
                    batch_size=1,
                    num_workers=4,
                    dataset_name="shanghaitech_pacnn",
                    debug=True),
        batch_size=1,
        num_workers=4)

    img, label = next(iter(train_loader_pacnn))

    print(img.shape)
    save_img(img, os.path.join(saved_folder, "pacnn_loader_img.png"))
    save_density_map(label[0].numpy()[0],
                     os.path.join(saved_folder, "pacnn_loader_density1.png"))
    save_density_map(label[1].numpy()[0],
                     os.path.join(saved_folder, "pacnn_loader_density2.png"))
    save_density_map(label[2].numpy()[0],
                     os.path.join(saved_folder, "pacnn_loader_density3.png"))
    print("count1 ", label[0].numpy()[0].sum())
    print("count2 ", label[1].numpy()[0].sum())
    print("count3 ", label[2].numpy()[0].sum())
    print("done ", i, "pred ", pred_sum, " gt ", np.sum(groundtruth))

    max_people_per_pix = 0
    if density_1.max() > max_people_per_pix:
        max_people_per_pix = density_1.max()
    if density_2.max() > max_people_per_pix:
        max_people_per_pix = density_2.max()
    if density_3.max() > max_people_per_pix:
        max_people_per_pix = density_3.max()
    if density_4.max() > max_people_per_pix:
        max_people_per_pix = density_4.max()

    ## print out visual
    if IS_VISUAL:
        name_prefix = os.path.join(saved_folder, "sample_" + str(i))
        save_img(img_original_1, name_prefix + "_img1.png")
        save_img(img_original_2, name_prefix + "_img2.png")
        save_img(img_original_3, name_prefix + "_img3.png")
        save_img(img_original_4, name_prefix + "_img4.png")

        save_density_map_with_colorrange(density_1.squeeze(),
                                         name_prefix + "_pred1.png", 0, 0.18)
        save_density_map_with_colorrange(density_2.squeeze(),
                                         name_prefix + "_pred2.png", 0, 0.18)
        save_density_map_with_colorrange(density_3.squeeze(),
                                         name_prefix + "_pred3.png", 0, 0.18)
        save_density_map_with_colorrange(density_4.squeeze(),
                                         name_prefix + "_pred4.png", 0, 0.18)
    ##

# print(len(pred))  # tail of the truncated snippet; "pred" is not defined at module level
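

# A minimal entry point (an assumption; the original listing may be driven
# from elsewhere) that exercises the complete visualisation helpers above:
if __name__ == "__main__":
    visualize_shanghaitech_keepfull()
    visualize_shanghaitech_nonoverlap_downsample()
    visualize_shanghaitech_pacnn_with_perspective()
    visualize_evaluation_shanghaitech_keepfull()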