def real_args_parse():
    """
    this is not dummy
    if you are going to make all-in-one notebook, ignore this
    :return:
    """
    parser = argparse.ArgumentParser(description='CrowdCounting')
    parser.add_argument("--task_id", action="store", default="dev")
    parser.add_argument('-a', action="store_true", default=False)

    parser.add_argument('--input',
                        action="store",
                        type=str,
                        default=HardCodeVariable().SHANGHAITECH_PATH_PART_A)
    parser.add_argument('--output',
                        action="store",
                        type=str,
                        default="saved_model")
    parser.add_argument('--model', action="store", default="pacnn")

    # args with default value
    parser.add_argument('--load_model', action="store", default="", type=str)
    parser.add_argument('--lr', action="store", default=1e-8, type=float)
    parser.add_argument('--momentum', action="store", default=0.9, type=float)
    parser.add_argument('--decay',
                        action="store",
                        default=5 * 1e-3,
                        type=float)
    parser.add_argument('--epochs', action="store", default=1, type=int)
    parser.add_argument('--test', action="store_true", default=False)

    # pacnn setting only
    parser.add_argument('--PACNN_PERSPECTIVE_AWARE_MODEL',
                        action="store",
                        default=0,
                        type=int)
    parser.add_argument(
        '--PACNN_MUTILPLE_SCALE_LOSS',
        action="store",
        default=1,
        type=int,
        help="1: compare each density map/perspective map scale with the "
        "ground truth for the loss. "
        "0: only compare the final density map and final perspective map")

    # args.original_lr = 1e-7
    # args.lr = 1e-7
    # args.batch_size = 1
    # args.momentum = 0.95
    # args.decay = 5 * 1e-4
    # args.start_epoch = 0
    # args.epochs = 120
    # args.steps = [-1, 1, 100, 150]
    # args.scales = [1, 1, 1, 1]
    # args.workers = 4
    # args.seed = time.time()
    # args.print_freq = 30

    arg = parser.parse_args()
    return arg
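# A minimal sketch (an assumption, not from the original source) of how the
# --PACNN_MUTILPLE_SCALE_LOSS flag above might be consumed. `density_maps`
# and `gt_density_maps` are hypothetical lists of per-scale predictions and
# targets whose last element is the final (fused) map; `criterion` is any
# loss such as torch.nn.MSELoss().
def pacnn_multi_scale_loss_sketch(density_maps, gt_density_maps, criterion,
                                  multiple_scale_loss=1):
    if multiple_scale_loss == 1:
        # compare every density-map scale with its ground truth
        return sum(
            criterion(pred, gt)
            for pred, gt in zip(density_maps, gt_density_maps))
    # compare only the final density map with the final ground truth
    return criterion(density_maps[-1], gt_density_maps[-1])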
def visualize_shanghaitech_keepfull():
    HARD_CODE = HardCodeVariable()
    shanghaitech_data = ShanghaiTechDataPath(root=HARD_CODE.SHANGHAITECH_PATH)
    shanghaitech_data_part_a_train = shanghaitech_data.get_a().get_train().get()
    saved_folder = "visualize/test_dataloader_shanghaitech"
    os.makedirs(saved_folder, exist_ok=True)
    train_list, val_list = get_train_val_list(shanghaitech_data_part_a_train,
                                              test_size=0.2)
    test_list = None
    train_loader, val_loader, test_loader = get_dataloader(
        train_list,
        val_list,
        test_list,
        dataset_name="shanghaitech_keepfull",
        visualize_mode=True)

    # do with train loader
    train_loader_iter = iter(train_loader)
    for i in range(10):
        img, label = next(train_loader_iter)
        save_img(img, os.path.join(saved_folder,
                                   "train_img" + str(i) + ".png"))
        save_density_map(
            label.numpy()[0][0],
            os.path.join(saved_folder, "train_label" + str(i) + ".png"))
def visualize_shanghaitech_nonoverlap_downsample():
    HARD_CODE = HardCodeVariable()
    shanghaitech_data = ShanghaiTechDataPath(root=HARD_CODE.SHANGHAITECH_PATH)
    shanghaitech_data_part_a_train = shanghaitech_data.get_a().get_train().get()
    saved_folder = "visualize/test_dataloader"
    os.makedirs(saved_folder, exist_ok=True)
    train_list, val_list = get_train_val_list(shanghaitech_data_part_a_train,
                                              test_size=0.2)
    test_list = None

    # create data loader
    train_loader, val_loader, test_loader = get_dataloader(
        train_list,
        val_list,
        test_list,
        dataset_name="shanghaitech_non_overlap_downsample",
        visualize_mode=True)

    img, label = next(iter(train_loader))

    print(img.shape)
    save_img(img, os.path.join(saved_folder,
                               "overlap_downsample_loader_1.png"))
    save_density_map(
        label[0].numpy()[0],
        os.path.join(saved_folder,
                     "overlap_downsample_loader_with_p_density1.png"))

    print("count1 ", label.numpy()[0].sum())
    print("count2 ", label.numpy()[0].sum())
    print("count3 ", label.numpy()[0].sum())

    print("s1 ", label.shape)
def train_test_split_parse():
    parser = argparse.ArgumentParser(description='Dataloader')
    parser.add_argument('--input',
                        action="store",
                        type=str,
                        default=HardCodeVariable().SHANGHAITECH_PATH_PART_A)
    arg = parser.parse_args()
    return arg
def visualize_shanghaitech_pacnn_with_perspective():
    HARD_CODE = HardCodeVariable()
    shanghaitech_data = ShanghaiTechDataPath(root=HARD_CODE.SHANGHAITECH_PATH)
    shanghaitech_data_part_a_train = shanghaitech_data.get_a().get_train().get()
    saved_folder = "visualize/test_dataloader"
    os.makedirs(saved_folder, exist_ok=True)
    train_list, val_list = get_train_val_list(shanghaitech_data_part_a_train,
                                              test_size=0.2)
    test_list = None

    # create data loader
    train_loader, val_loader, test_loader = get_dataloader(
        train_list, val_list, test_list, dataset_name="ucf_cc_50")
    train_loader_pacnn = torch.utils.data.DataLoader(
        ListDataset(train_list,
                    shuffle=True,
                    transform=transforms.Compose([transforms.ToTensor()]),
                    train=True,
                    batch_size=1,
                    num_workers=4,
                    dataset_name="shanghaitech_pacnn_with_perspective",
                    debug=True),
        batch_size=1,
        num_workers=4)

    img, label = next(iter(train_loader_pacnn))

    print(img.shape)
    save_img(img, os.path.join(saved_folder, "pacnn_loader_img.png"))
    save_density_map(
        label[0].numpy()[0],
        os.path.join(saved_folder, "pacnn_loader_with_p_density1.png"))
    save_density_map(
        label[1].numpy()[0],
        os.path.join(saved_folder, "pacnn_loader_with_p_density2.png"))
    save_density_map(
        label[2].numpy()[0],
        os.path.join(saved_folder, "pacnn_loader_with_p_density3.png"))
    save_density_map(label[3].numpy()[0],
                     os.path.join(saved_folder, "pacnn_loader_p_s_4.png"))
    save_density_map(label[4].numpy()[0],
                     os.path.join(saved_folder, "pacnn_loader_p_5.png"))
    print("count1 ", label[0].numpy()[0].sum())
    print("count2 ", label[1].numpy()[0].sum())
    print("count3 ", label[2].numpy()[0].sum())
    print("count4 ", label[3].numpy()[0].sum())
    print("count5 ", label[4].numpy()[0].sum())

    print("s1 ", label[0].shape)
    print("s2 ", label[1].shape)
    print("s3 ", label[2].shape)
    print("s4 ", label[3].shape)
    print("s5 ", label[4].shape)
def sanity_check_dataloader_parse():
    parser = argparse.ArgumentParser(description='Dataloader')
    parser.add_argument('--input',
                        action="store",
                        type=str,
                        default=HardCodeVariable().SHANGHAITECH_PATH_PART_A)
    parser.add_argument('--datasetname',
                        action="store",
                        default="shanghaitech_keepfull")
    arg = parser.parse_args()
    return arg
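# A minimal usage sketch for the parser above, assuming the same helpers used
# elsewhere in this file (get_train_val_list, get_dataloader): pull a single
# batch and print its shapes as a quick dataloader sanity check.
def sanity_check_dataloader_sketch():
    args = sanity_check_dataloader_parse()
    train_list, val_list = get_train_val_list(args.input, test_size=0.2)
    train_loader, _, _ = get_dataloader(train_list,
                                        val_list,
                                        None,
                                        dataset_name=args.datasetname)
    img, label = next(iter(train_loader))
    print("img shape", img.shape, "| label shape", label.shape)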
Example #7
def visualize_evaluation_shanghaitech_keepfull(
        path=None,
        dataset="shanghaitech_keepfull_r50",
        output="visualize/verify_dataloader_shanghaitech",
        meta_data="data_info.txt"):
    HARD_CODE = HardCodeVariable()
    if path is None:
        shanghaitech_data = ShanghaiTechDataPath(
            root=HARD_CODE.SHANGHAITECH_PATH)
        shanghaitech_data_part_a_train = shanghaitech_data.get_a().get_train().get()
        path = shanghaitech_data_part_a_train
    saved_folder = output
    os.makedirs(saved_folder, exist_ok=True)
    train_list, val_list = get_train_val_list(path, test_size=0.2)
    test_list = None
    train_loader, val_loader, test_loader = get_dataloader(
        train_list,
        val_list,
        test_list,
        dataset_name=dataset,
        visualize_mode=True,
        debug=True)

    # do with train loader
    train_loader_iter = iter(train_loader)
    f = open(meta_data, "w")
    total = len(train_loader)
    for i in range(len(train_loader)):
        img, label, debug_data = next(train_loader_iter)
        p_count = debug_data["p_count"]
        name = debug_data["name"][0]
        item_number = img_name_to_int(name)
        density_map_count = label.sum()
        log_str = str(item_number) + " " + str(
            density_map_count.item()) + " " + str(p_count.item())
        print(log_str)
        f.write(log_str + "\n")
        save_img(
            img,
            os.path.join(saved_folder,
                         "train_img_" + str(item_number) + ".png"))
        save_path = os.path.join(saved_folder,
                                 "train_label_" + str(item_number) + ".png")
        save_density_map(label.numpy()[0][0], save_path)
        print(str(i) + "/" + str(total))
    f.close()
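# A hedged follow-up sketch (not in the original source) that reads the
# data_info.txt written above. Each line has the format
# "<image_number> <density_map_sum> <p_count>", so the mean absolute gap
# between density-map sum and head-point count is a quick quality check on
# the generated density maps.
def summarize_meta_data(meta_data="data_info.txt"):
    gaps = []
    with open(meta_data) as f:
        for line in f:
            _, density_map_sum, p_count = line.split()
            gaps.append(abs(float(density_map_sum) - float(p_count)))
    print("mean |density_map_sum - p_count| =", sum(gaps) / len(gaps))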
Example #8
def visualize_evaluation_shanghaitech_keepfull(model):
    model = model.cuda()
    model.eval()
    HARD_CODE = HardCodeVariable()
    shanghaitech_data = ShanghaiTechDataPath(root=HARD_CODE.SHANGHAITECH_PATH)
    shanghaitech_data_part_a_train = shanghaitech_data.get_a().get_train().get()
    saved_folder = "visualize/evaluation_dataloader_shanghaitech"
    os.makedirs(saved_folder, exist_ok=True)
    train_list, val_list = get_train_val_list(shanghaitech_data_part_a_train,
                                              test_size=0.2)
    test_list = None
    train_loader, val_loader, test_loader = get_dataloader(
        train_list,
        val_list,
        test_list,
        dataset_name="shanghaitech_keepfull",
        visualize_mode=False,
        debug=True)

    # do with train loader
    train_loader_iter = iter(train_loader)
    for i in range(10):
        img, label, count = next(train_loader_iter)
        # save_img(img, os.path.join(saved_folder, "train_img_" + str(i) +".png"))
        save_path = os.path.join(saved_folder,
                                 "train_label_" + str(i) + ".png")
        save_pred_path = os.path.join(saved_folder,
                                      "train_pred_" + str(i) + ".png")
        save_density_map(label.numpy()[0][0], save_path)
        pred = model(img.cuda())
        predicted_density_map = pred.detach().cpu().clone().numpy()
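        # the model output is 1/8 the input resolution; enlarging 8x in each
        # dimension multiplies the integral by about 64 (= 8 * 8), so divide
        # by 64 to keep the density sum (the predicted count) unchanged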
        predicted_density_map_enlarge = cv2.resize(
            np.squeeze(predicted_density_map[0][0]),
            (int(predicted_density_map.shape[3] * 8),
             int(predicted_density_map.shape[2] * 8)),
            interpolation=cv2.INTER_CUBIC) / 64
        save_density_map(predicted_density_map_enlarge, save_pred_path)
        print("pred " + save_pred_path + " value " +
              str(predicted_density_map.sum()))
        print("cont compare " + str(predicted_density_map.sum()) + " " +
              str(predicted_density_map_enlarge.sum()))
        print("shape compare " + str(predicted_density_map.shape) + " " +
              str(predicted_density_map_enlarge.shape))
Example #9
def _parse():
    parser = argparse.ArgumentParser(description='verify_sha')
    parser.add_argument('--input',
                        action="store",
                        type=str,
                        default=HardCodeVariable().SHANGHAITECH_PATH_PART_A)
    parser.add_argument('--output',
                        action="store",
                        type=str,
                        default="visualize/verify_dataloader_shanghaitech")
    parser.add_argument('--meta_data',
                        action="store",
                        type=str,
                        default="data_info.txt")
    parser.add_argument('--datasetname',
                        action="store",
                        default="shanghaitech_keepfull_r50")
    arg = parser.parse_args()
    return arg
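# A plausible entry point (an assumption, not from the source) connecting
# _parse() to visualize_evaluation_shanghaitech_keepfull() from Example #7;
# the argument names and defaults line up one-to-one.
if __name__ == "__main__":
    args = _parse()
    visualize_evaluation_shanghaitech_keepfull(path=args.input,
                                               dataset=args.datasetname,
                                               output=args.output,
                                               meta_data=args.meta_data)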
Example #10
 def testIntegration(self):
     hard_code = HardCodeVariable()
     self.test_root = hard_code.SHANGHAITECH_PATH
     self.data = ShanghaiTechDataPath(root=self.test_root)
     if os.path.exists(self.data.get()):
         print("exist " + self.data.get())
         print("let see if we have train, test folder")
         train_path_a = self.data.get_a().get_train().get()
         train_path_b = self.data.get_b().get_train().get()
         test_path_a = self.data.get_a().get_test().get()
         test_path_b = self.data.get_b().get_test().get()
         if os.path.exists(train_path_a):
             print("exist " + train_path_a)
         if os.path.exists(train_path_b):
             print("exist " + train_path_b)
         if os.path.exists(test_path_a):
             print("exist " + test_path_a)
         if os.path.exists(test_path_b):
             print("exist " + test_path_b)
         print("count number of image")
         image_folder_list = [
             train_path_a, train_path_b, test_path_a, test_path_b
         ]
         for image_root_path in image_folder_list:
             image_path_list = glob.glob(
                 os.path.join(image_root_path, "images", "*.jpg"))
             density_path_list = glob.glob(
                 os.path.join(image_root_path, "ground-truth-h5", "*.h5"))
             count_img = len(image_path_list)
             count_density_map = len(density_path_list)
             first_img = image_path_list[0]
             first_density_map = density_path_list[0]
             print("in folder " + image_root_path)
             print("--- total image" + str(count_img))
             print('--- first img ' + first_img)
             print("--- total density map " + str(count_density_map))
             print("--- first density map " + str(first_density_map))
             if count_img == count_density_map:
                 print("--- number of density map = number of image")
             else:
                 print("xxxxx number of density map !!!!!= number of image")
             assert count_img == count_density_map
def my_args_parse():
    parser = argparse.ArgumentParser(
        description='CrowdCounting Context Aware Network')
    parser.add_argument("--task_id", action="store", default="dev")
    parser.add_argument('--note', action="store", default="write anything")

    parser.add_argument('--input',
                        action="store",
                        type=str,
                        default=HardCodeVariable().SHANGHAITECH_PATH_PART_A)
    parser.add_argument('--datasetname',
                        action="store",
                        default="shanghaitech_keepfull")

    # args with default value
    parser.add_argument('--load_model', action="store", default="", type=str)
    parser.add_argument('--lr', action="store", default=1e-8, type=float)
    parser.add_argument('--momentum', action="store", default=0.9, type=float)
    parser.add_argument('--decay',
                        action="store",
                        default=5 * 1e-3,
                        type=float)
    parser.add_argument('--epochs', action="store", default=1, type=int)
    parser.add_argument(
        '--batch_size',
        action="store",
        default=1,
        type=int,
        help="only set batch_size > 0 for dataset with image size equal")
    parser.add_argument('--test', action="store_true", default=False)
    parser.add_argument(
        '--no_norm',
        action="store_true",
        default=False,
        help="if true, does not use transforms.Normalize in dataloader")
    arg = parser.parse_args()
    return arg
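# A minimal sketch, assuming the data helpers above, of feeding my_args_parse()
# into get_dataloader(). Note the caveat from the --batch_size help string:
# a batch size above 1 only works when every image in the dataset has the
# same size, because variable-size images cannot be stacked into one batch.
def train_entry_sketch():
    args = my_args_parse()
    train_list, val_list = get_train_val_list(args.input, test_size=0.2)
    train_loader, val_loader, _ = get_dataloader(train_list,
                                                 val_list,
                                                 None,
                                                 dataset_name=args.datasetname,
                                                 batch_size=args.batch_size)
    return train_loader, val_loader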
def visualize_ucf_cc_50_pacnn():
    HARD_CODE = HardCodeVariable()
    saved_folder = "visualize/test_dataloader"
    os.makedirs(saved_folder, exist_ok=True)
    DATA_PATH = HARD_CODE.UCF_CC_50_PATH
    train_list, val_list = get_train_val_list(DATA_PATH, test_size=0.2)
    test_list = None

    # create data loader
    train_loader, val_loader, test_loader = get_dataloader(
        train_list, val_list, test_list, dataset_name="ucf_cc_50")
    train_loader_pacnn = torch.utils.data.DataLoader(
        ListDataset(train_list,
                    shuffle=True,
                    transform=transforms.Compose([transforms.ToTensor()]),
                    train=True,
                    batch_size=1,
                    num_workers=4,
                    dataset_name="shanghaitech_pacnn",
                    debug=True),
        batch_size=1,
        num_workers=4)

    img, label = next(iter(train_loader_pacnn))

    print(img.shape)
    save_img(img, os.path.join(saved_folder, "pacnn_loader_img.png"))
    save_density_map(label[0].numpy()[0],
                     os.path.join(saved_folder, "pacnn_loader_density1.png"))
    save_density_map(label[1].numpy()[0],
                     os.path.join(saved_folder, "pacnn_loader_density2.png"))
    save_density_map(label[2].numpy()[0],
                     os.path.join(saved_folder, "pacnn_loader_density3.png"))
    print("count1 ", label[0].numpy()[0].sum())
    print("count2 ", label[1].numpy()[0].sum())
    print("count3 ", label[2].numpy()[0].sum())
def context_aware_network_args_parse():
    """
    this is not dummy
    if you are going to make all-in-one notebook, ignore this
    :return:
    """
    parser = argparse.ArgumentParser(
        description='CrowdCounting Context Aware Network')
    parser.add_argument("--task_id", action="store", default="dev")
    parser.add_argument('-a', action="store_true", default=False)

    parser.add_argument('--input',
                        action="store",
                        type=str,
                        default=HardCodeVariable().SHANGHAITECH_PATH_PART_A)
    parser.add_argument('--output',
                        action="store",
                        type=str,
                        default="saved_model/context_aware_network")
    parser.add_argument('--datasetname',
                        action="store",
                        default="shanghaitech_keepfull")

    # args with default value
    parser.add_argument('--load_model', action="store", default="", type=str)
    parser.add_argument('--lr', action="store", default=1e-8, type=float)
    parser.add_argument('--momentum', action="store", default=0.9, type=float)
    parser.add_argument('--decay',
                        action="store",
                        default=5 * 1e-3,
                        type=float)
    parser.add_argument('--epochs', action="store", default=1, type=int)
    parser.add_argument('--test', action="store_true", default=False)

    arg = parser.parse_args()
    return arg
Example #14
from data_flow import get_dataloader, create_image_list
from hard_code_variable import HardCodeVariable
import os

hard_code = HardCodeVariable()

TRAIN_PATH = os.path.join(hard_code.SHANGHAITECH_PATH_PART_B,
                          hard_code.SHANGHAITECH_PATH_TRAIN_POSTFIX)
TEST_PATH = os.path.join(hard_code.SHANGHAITECH_PATH_PART_B,
                         hard_code.SHANGHAITECH_PATH_TEST_POSTFIX)

train_list = create_image_list(TRAIN_PATH)
test_list = create_image_list(TEST_PATH)

train, valid, test = get_dataloader(train_list,
                                    None,
                                    test_list,
                                    dataset_name="shanghaitech",
                                    batch_size=5)

for img, label in train:
    print("img shape: " + str(img.shape) + " -- label shape: " +
          str(label.shape))
Example #15
 def test_get_b_hc(self):
     hc = HardCodeVariable()
     self.test_root = hc.SHANGHAITECH_PATH
     self.data = ShanghaiTechDataPath(root=self.test_root)
     print(self.data.get_b())
def meow_parse():
    parser = argparse.ArgumentParser(
        description='CrowdCounting Context Aware Network')
    parser.add_argument("--task_id", action="store", default="dev")
    parser.add_argument("--model", action="store", default="dev")
    parser.add_argument('--note', action="store", default="write anything")

    parser.add_argument('--input',
                        action="store",
                        type=str,
                        default=HardCodeVariable().SHANGHAITECH_PATH_PART_A)
    parser.add_argument('--output',
                        action="store",
                        type=str,
                        default="/data/meow")
    parser.add_argument('--datasetname',
                        action="store",
                        default="shanghaitech_keepfull")

    # args with default value
    parser.add_argument('--load_model', action="store", default="", type=str)
    parser.add_argument('--lr', action="store", default=1e-8, type=float)
    parser.add_argument('--momentum', action="store", default=0.9, type=float)
    parser.add_argument('--decay',
                        action="store",
                        default=5 * 1e-3,
                        type=float)
    parser.add_argument('--epochs', action="store", default=1, type=int)
    parser.add_argument(
        '--batch_size',
        action="store",
        default=1,
        type=int,
        help="only set batch_size > 0 for dataset with image size equal")
    parser.add_argument('--test', action="store_true", default=False)
    parser.add_argument(
        '--no_norm',
        action="store_true",
        default=False,
        help="if true, does not use transforms.Normalize in dataloader")
    parser.add_argument(
        '--cache',
        action="store_true",
        default=False,
        help="cache dataloader items; recommended when the data "
        "does not change every epoch")
    parser.add_argument('--pin_memory',
                        action="store_true",
                        default=False,
                        help="don't know what is it")
    parser.add_argument(
        '--skip_train_eval',
        action="store_true",
        default=False,
        help="if true, do not run eval on training set to save time")
    parser.add_argument(
        '--lr_scheduler',
        action="store_true",
        default=False,
        help="use lr scheduler, should config step_list and lr_list")
    parser.add_argument('--step_list',
                        action="store",
                        default="10,20,30",
                        type=str)
    parser.add_argument('--lr_list',
                        action="store",
                        default="1e-1,1e-2,1e-3",
                        type=str)
    # parser.add_argument('--use_ssim', action="store_true", default=False,
    #                     help="if true, use mse and negative ssim as loss function")
    parser.add_argument('--loss_fn', action="store", default="MSE", type=str)
    parser.add_argument('--optim', action="store", default="adam", type=str)
    parser.add_argument('--eval_only',
                        action="store_true",
                        default=False,
                        help="only evaluate no train")
    parser.add_argument('--eval_density',
                        action="store_true",
                        default=False,
                        help="only evaluate no train")
    arg = parser.parse_args()
    return arg
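# A hedged sketch (not part of the source) of how the comma-separated
# --step_list and --lr_list strings might drive a manual learning-rate
# schedule: once the current epoch reaches step_list[i], switch the
# optimizer to lr_list[i].
def apply_lr_schedule(optimizer, epoch, step_list="10,20,30",
                      lr_list="1e-1,1e-2,1e-3"):
    steps = [int(s) for s in step_list.split(",")]
    lrs = [float(v) for v in lr_list.split(",")]
    for step, lr in zip(steps, lrs):
        if epoch >= step:
            for param_group in optimizer.param_groups:
                param_group["lr"] = lr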