Example #1
import math
import os

import cv2
import numpy as np
import torch
from torchvision import transforms

# Helpers such as HardCodeVariable, ShanghaiTechDataPath, get_train_val_list,
# get_dataloader, ListDataset, save_img, save_density_map and img_name_to_int
# are assumed to come from the surrounding repository.


def visualize_shanghaitech_keepfull():
    HARD_CODE = HardCodeVariable()
    shanghaitech_data = ShanghaiTechDataPath(root=HARD_CODE.SHANGHAITECH_PATH)
    shanghaitech_data_part_a_train = shanghaitech_data.get_a().get_train().get()
    saved_folder = "visualize/test_dataloader_shanghaitech"
    os.makedirs(saved_folder, exist_ok=True)
    train_list, val_list = get_train_val_list(shanghaitech_data_part_a_train,
                                              test_size=0.2)
    test_list = None
    train_loader, val_loader, test_loader = get_dataloader(
        train_list,
        val_list,
        test_list,
        dataset_name="shanghaitech_keepfull",
        visualize_mode=True)

    # do with train loader
    train_loader_iter = iter(train_loader)
    for i in range(10):
        img, label = next(train_loader_iter)
        save_img(img, os.path.join(saved_folder,
                                   "train_img" + str(i) + ".png"))
        save_density_map(
            label.numpy()[0][0],
            os.path.join(saved_folder, "train_label" + str(i) + ".png"))

Example #2
def visualize_shanghaitech_nonoverlap_downsample():
    HARD_CODE = HardCodeVariable()
    shanghaitech_data = ShanghaiTechDataPath(root=HARD_CODE.SHANGHAITECH_PATH)
    shanghaitech_data_part_a_train = shanghaitech_data.get_a().get_train().get()
    saved_folder = "visualize/test_dataloader"
    os.makedirs(saved_folder, exist_ok=True)
    DATA_PATH = HARD_CODE.SHANGHAITECH_PATH_PART_B
    train_list, val_list = get_train_val_list(shanghaitech_data_part_a_train,
                                              test_size=0.2)
    test_list = None

    # create data loader
    train_loader, val_loader, test_loader = get_dataloader(
        train_list,
        val_list,
        test_list,
        dataset_name="shanghaitech_non_overlap_downsample",
        visualize_mode=True)

    img, label = next(iter(train_loader))

    print(img.shape)
    save_img(img, os.path.join(saved_folder,
                               "overlap_downsample_loader_1.png"))
    save_density_map(
        label[0].numpy()[0],
        os.path.join(saved_folder,
                     "overlap_downsample_loader_with_p_density1.png"))

    print("count1 ", label.numpy()[0].sum())
    print("count2 ", label.numpy()[0].sum())
    print("count3 ", label.numpy()[0].sum())

    print("s1 ", label.shape)

Example #3
def visualize_shanghaitech_pacnn_with_perspective():
    HARD_CODE = HardCodeVariable()
    shanghaitech_data = ShanghaiTechDataPath(root=HARD_CODE.SHANGHAITECH_PATH)
    shanghaitech_data_part_a_train = shanghaitech_data.get_a().get_train().get()
    saved_folder = "visualize/test_dataloader"
    os.makedirs(saved_folder, exist_ok=True)
    DATA_PATH = HARD_CODE.SHANGHAITECH_PATH_PART_A
    train_list, val_list = get_train_val_list(shanghaitech_data_part_a_train,
                                              test_size=0.2)
    test_list = None

    # create data loader
    train_loader, val_loader, test_loader = get_dataloader(
        train_list, val_list, test_list, dataset_name="ucf_cc_50")
    train_loader_pacnn = torch.utils.data.DataLoader(
        ListDataset(train_list,
                    shuffle=True,
                    transform=transforms.Compose([transforms.ToTensor()]),
                    train=True,
                    batch_size=1,
                    num_workers=4,
                    dataset_name="shanghaitech_pacnn_with_perspective",
                    debug=True),
        batch_size=1,
        num_workers=4)

    img, label = next(iter(train_loader_pacnn))
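    # Hedged reading, inferred from the save filenames and prints below:
    # label[0..2] look like density maps at three scales, and label[3..4]
    # look like the perspective maps ("p_s" and "p").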

    print(img.shape)
    save_img(img, os.path.join(saved_folder, "pacnn_loader_img.png"))
    save_density_map(
        label[0].numpy()[0],
        os.path.join(saved_folder, "pacnn_loader_with_p_density1.png"))
    save_density_map(
        label[1].numpy()[0],
        os.path.join(saved_folder, "pacnn_loader_with_p_density2.png"))
    save_density_map(
        label[2].numpy()[0],
        os.path.join(saved_folder, "pacnn_loader_with_p_density3.png"))
    save_density_map(label[3].numpy()[0],
                     os.path.join(saved_folder, "pacnn_loader_p_s_4.png"))
    save_density_map(label[4].numpy()[0],
                     os.path.join(saved_folder, "pacnn_loader_p_5.png"))
    print("count1 ", label[0].numpy()[0].sum())
    print("count2 ", label[1].numpy()[0].sum())
    print("count3 ", label[2].numpy()[0].sum())
    print("count4 ", label[3].numpy()[0].sum())
    print("count5 ", label[4].numpy()[0].sum())

    print("s1 ", label[0].shape)
    print("s2 ", label[1].shape)
    print("s3 ", label[2].shape)
    print("s4 ", label[3].shape)
    print("s5 ", label[4].shape)
Example #4
def visualize_evaluation_shanghaitech_keepfull(
        path=None,
        dataset="shanghaitech_keepfull_r50",
        output="visualize/verify_dataloader_shanghaitech",
        meta_data="data_info.txt"):
    HARD_CODE = HardCodeVariable()
    if path is None:
        shanghaitech_data = ShanghaiTechDataPath(
            root=HARD_CODE.SHANGHAITECH_PATH)
        shanghaitech_data_part_a_train = shanghaitech_data.get_a().get_train().get()
        path = shanghaitech_data_part_a_train
    saved_folder = output
    os.makedirs(saved_folder, exist_ok=True)
    train_list, val_list = get_train_val_list(path, test_size=0.2)
    test_list = None
    train_loader, val_loader, test_loader = get_dataloader(
        train_list,
        val_list,
        test_list,
        dataset_name=dataset,
        visualize_mode=True,
        debug=True)

    # do with train loader
    train_loader_iter = iter(train_loader)
    f = open(meta_data, "w")
    total = len(train_loader)
    for i in range(total):
        img, label, debug_data = next(train_loader_iter)
        p_count = debug_data["p_count"]
        name = debug_data["name"][0]
        item_number = img_name_to_int(name)
        density_map_count = label.sum()
        log_str = str(item_number) + " " + str(
            density_map_count.item()) + " " + str(p_count.item())
        print(log_str)
        f.write(log_str + "\n")
        save_img(
            img,
            os.path.join(saved_folder,
                         "train_img_" + str(item_number) + ".png"))
        save_path = os.path.join(saved_folder,
                                 "train_label_" + str(item_number) + ".png")
        save_density_map(label.numpy()[0][0], save_path)
        print(str(i) + "/" + str(total))
    f.close()
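
The meta file written above holds one "<image id> <density map count> <point count>" line per sample, so a quick consistency check can be run over it afterwards. A minimal reader sketch (the file name matches the function's meta_data default):

def check_meta_data(meta_data="data_info.txt"):
    # Compare the integrated density map against the annotated point count.
    with open(meta_data) as f:
        for line in f:
            item, density_count, p_count = line.split()
            drift = abs(float(density_count) - float(p_count))
            print(item, drift)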
Example #5
def visualize_evaluation_shanghaitech_keepfull(model):
    model = model.cuda()
    model.eval()
    HARD_CODE = HardCodeVariable()
    shanghaitech_data = ShanghaiTechDataPath(root=HARD_CODE.SHANGHAITECH_PATH)
    shanghaitech_data_part_a_train = shanghaitech_data.get_a().get_train().get()
    saved_folder = "visualize/evaluation_dataloader_shanghaitech"
    os.makedirs(saved_folder, exist_ok=True)
    train_list, val_list = get_train_val_list(shanghaitech_data_part_a_train,
                                              test_size=0.2)
    test_list = None
    train_loader, val_loader, test_loader = get_dataloader(
        train_list,
        val_list,
        test_list,
        dataset_name="shanghaitech_keepfull",
        visualize_mode=False,
        debug=True)

    # do with train loader
    train_loader_iter = iter(train_loader)
    for i in range(10):
        img, label, count = next(train_loader_iter)
        # save_img(img, os.path.join(saved_folder, "train_img_" + str(i) +".png"))
        save_path = os.path.join(saved_folder,
                                 "train_label_" + str(i) + ".png")
        save_pred_path = os.path.join(saved_folder,
                                      "train_pred_" + str(i) + ".png")
        save_density_map(label.numpy()[0][0], save_path)
        pred = model(img.cuda())
        predicted_density_map = pred.detach().cpu().clone().numpy()
        predicted_density_map_enlarge = cv2.resize(
            np.squeeze(predicted_density_map[0][0]),
            (int(predicted_density_map.shape[3] * 8),
             int(predicted_density_map.shape[2] * 8)),
            interpolation=cv2.INTER_CUBIC) / 64
        save_density_map(predicted_density_map_enlarge, save_pred_path)
        print("pred " + save_pred_path + " value " +
              str(predicted_density_map.sum()))
        print("cont compare " + str(predicted_density_map.sum()) + " " +
              str(predicted_density_map_enlarge.sum()))
        print("shape compare " + str(predicted_density_map.shape) + " " +
              str(predicted_density_map_enlarge.shape))
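
The resize-then-divide step above keeps the predicted count stable: bicubic upsampling by a factor of 8 multiplies the number of pixels by 64, so dividing by 64 compensates and the density map's integral is roughly preserved. A minimal standalone check of that identity (toy data, no model required):

def check_count_preserving_resize():
    # Toy density map whose sum stands in for a predicted count.
    density = np.random.rand(16, 16).astype(np.float32)
    scale = 8
    enlarged = cv2.resize(
        density, (density.shape[1] * scale, density.shape[0] * scale),
        interpolation=cv2.INTER_CUBIC) / (scale * scale)
    # The two sums should agree up to interpolation error.
    print(density.sum(), enlarged.sum())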

Example #6
def visualize_ucf_cc_50_pacnn():
    HARD_CODE = HardCodeVariable()
    saved_folder = "visualize/test_dataloader"
    os.makedirs(saved_folder, exist_ok=True)
    DATA_PATH = HARD_CODE.UCF_CC_50_PATH
    train_list, val_list = get_train_val_list(DATA_PATH, test_size=0.2)
    test_list = None

    # create data loader
    train_loader, val_loader, test_loader = get_dataloader(
        train_list, val_list, test_list, dataset_name="ucf_cc_50")
    train_loader_pacnn = torch.utils.data.DataLoader(
        ListDataset(train_list,
                    shuffle=True,
                    transform=transforms.Compose([transforms.ToTensor()]),
                    train=True,
                    batch_size=1,
                    num_workers=4,
                    dataset_name="shanghaitech_pacnn",
                    debug=True),
        batch_size=1,
        num_workers=4)

    img, label = next(iter(train_loader_pacnn))

    print(img.shape)
    save_img(img, os.path.join(saved_folder, "pacnn_loader_img.png"))
    save_density_map(label[0].numpy()[0],
                     os.path.join(saved_folder, "pacnn_loader_density1.png"))
    save_density_map(label[1].numpy()[0],
                     os.path.join(saved_folder, "pacnn_loader_density2.png"))
    save_density_map(label[2].numpy()[0],
                     os.path.join(saved_folder, "pacnn_loader_density3.png"))
    print("count1 ", label[0].numpy()[0].sum())
    print("count2 ", label[1].numpy()[0].sum())
    print("count3 ", label[2].numpy()[0].sum())

Example #7
from ignite.metrics import Loss, MeanAbsoluteError, MeanSquaredError
from crowd_counting_error_metrics import CrowdCountingMeanAbsoluteError, CrowdCountingMeanSquaredError
import torch
from torch import nn
from models import CSRNet
import os

if __name__ == "__main__":
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    print(device)
    args = real_args_parse()
    print(args)
    DATA_PATH = args.input

    # create list
    train_list, val_list = get_train_val_list(DATA_PATH, test_size=0.2)
    test_list = None

    # create data loader
    train_loader, val_loader, test_loader = get_dataloader(
        train_list, val_list, test_list, dataset_name="ucf_cc_50")

    # model
    model = CSRNet()
    model = model.to(device)

    # loss function
    loss_fn = nn.MSELoss(reduction="sum").to(device)  # size_average=False is deprecated

    optimizer = torch.optim.SGD(model.parameters(),
                                args.lr,
                                momentum=args.momentum)
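
The Ignite metric classes imported at the top of this example are never used in the snippet itself. A minimal sketch of how they would typically be attached, assuming CrowdCountingMeanAbsoluteError and CrowdCountingMeanSquaredError follow Ignite's standard Metric interface:

from ignite.engine import create_supervised_evaluator

def evaluate_with_ignite(model, val_loader, loss_fn, device):
    evaluator = create_supervised_evaluator(
        model,
        metrics={
            "mae": CrowdCountingMeanAbsoluteError(),
            "mse": CrowdCountingMeanSquaredError(),
            "loss": Loss(loss_fn),
        },
        device=device)
    evaluator.run(val_loader)
    print(evaluator.state.metrics)  # e.g. {"mae": ..., "mse": ..., "loss": ...}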

Example #8
from torch import nn
from models import CSRNet
import os

if __name__ == "__main__":
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    print(device)
    args = real_args_parse()
    print(args)
    DATA_PATH = args.input
    TRAIN_PATH = os.path.join(DATA_PATH, "train_data")
    TEST_PATH = os.path.join(DATA_PATH, "test_data")


    # create list
    train_list, val_list = get_train_val_list(TRAIN_PATH)
    test_list = create_training_image_list(TEST_PATH)

    # create data loader
    train_loader, val_loader, test_loader = get_dataloader(train_list, val_list, test_list)


    # model
    model = CSRNet()
    model = model.to(device)

    # loss function
    loss_fn = nn.MSELoss(reduction="sum").to(device)  # size_average=False is deprecated

    optimizer = torch.optim.SGD(model.parameters(), args.lr,
                                momentum=args.momentum)
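
Both __main__ snippets stop right after building the optimizer. For context, a minimal sketch of the training step they build toward, assuming the dataloader yields (image, density map) pairs at the model's output resolution:

def train_one_epoch(model, train_loader, loss_fn, optimizer, device):
    model.train()
    for img, target in train_loader:
        img = img.to(device)
        target = target.to(device)
        pred = model(img)
        # Sum-reduced MSE between predicted and ground-truth density maps.
        loss = loss_fn(pred, target)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()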
Example #9
def visualize_evaluation_shanghaitech_keepfull(model, args):
    """

    :param model: model with param, if not model then do not output pred
    :param args:
    :return:
    """
    if model is not None:
        model = model.cuda()
        model.eval()
    saved_folder = args.output
    os.makedirs(saved_folder, exist_ok=True)
    train_list, val_list = get_train_val_list(args.input, test_size=0.2)
    test_list = create_image_list(args.input)
    train_loader, val_loader, test_loader = get_dataloader(
        train_list,
        val_list,
        test_list,
        dataset_name="shanghaitech_keepfull_r50",
        visualize_mode=False,
        debug=True)

    log_f = open(args.meta_data, "w")
    mae_s = 0
    mse_s = 0
    n = 0
    with torch.no_grad():
        for item in test_loader:
            img, gt_density, debug_info = item
            gt_count = debug_info["p_count"]
            gt_count_num = gt_count.item()  # needed before the error computation below
            file_name = debug_info["name"]
            file_name_only = file_name[0].split(".")[0]
            print(file_name_only)
            save_path = os.path.join(saved_folder,
                                     "label_" + file_name_only + ".png")
            save_pred_path = os.path.join(saved_folder,
                                          "pred_" + file_name_only + ".png")
            save_density_map(gt_density.numpy()[0], save_path)
            if model is not None:
                pred = model(img.cuda())
                predicted_density_map = pred.detach().cpu().clone().numpy()
                predicted_density_map_enlarge = cv2.resize(
                    np.squeeze(predicted_density_map[0][0]),
                    (int(predicted_density_map.shape[3] * 8),
                     int(predicted_density_map.shape[2] * 8)),
                    interpolation=cv2.INTER_CUBIC) / 64
                save_density_map(predicted_density_map_enlarge, save_pred_path)
                print("pred " + save_pred_path + " value " +
                      str(predicted_density_map.sum()))

                print("cont compare " + str(predicted_density_map.sum()) +
                      " " + str(predicted_density_map_enlarge.sum()))
                print("shape compare " + str(predicted_density_map.shape) +
                      " " + str(predicted_density_map_enlarge.shape))

                pred_count = pred.detach().cpu().sum()
                pred_count_num = pred_count.item()

                error = abs(pred_count_num - gt_count_num)
            else:
                error = 0
                pred_count = 0

            mae_s += error
            mse_s += error * error
            n += 1  # count samples so the final averages are well-defined
            density_map_count = gt_density.detach().sum()
            density_map_count_num = density_map_count.item()
            if model is not None:
                log_str = str(file_name_only) + " " + str(
                    density_map_count_num) + " " + str(
                        gt_count.item()) + " " + str(pred_count.item())
            else:
                log_str = str(file_name_only) + " " + str(
                    density_map_count_num) + " " + str(gt_count.item())
            print(log_str)
            log_f.write(log_str + "\n")
    log_f.close()
    mae = mae_s / n
    mse = math.sqrt(mse_s / n)
    print("mae ", mae)
    print("mse", mse)