def dump():
    """Extract deep and color features for every image and persist them.

    Features from the attribute-prediction dataset (and, when enabled, the
    in-shop dataset) are accumulated into parallel lists and written under
    ``cfg.DATASET_BASE`` as ``all_feat.npy``, ``all_color_feat.npy`` and
    ``all_feat.list``.

    NOTE(review): this variant passes 4 arguments to ``dump_dataset`` while
    other call sites in this file pass 5 (a leading ``classes`` list) —
    confirm which ``dump_dataset`` signature is in scope here.
    """
    feats, colors, names = [], [], []

    main_loader = torch.utils.data.DataLoader(
        Fashion_attr_prediction(type="all", transform=data_transform_test),
        batch_size=cfg.EXTRACT_BATCH_SIZE,
        num_workers=cfg.NUM_WORKERS,
        pin_memory=True,
    )
    dump_dataset(main_loader, feats, colors, names)

    if cfg.ENABLE_INSHOP_DATASET:
        inshop_loader = torch.utils.data.DataLoader(
            Fashion_inshop(type="all", transform=data_transform_test),
            batch_size=cfg.EXTRACT_BATCH_SIZE,
            num_workers=cfg.NUM_WORKERS,
            pin_memory=True,
        )
        dump_dataset(inshop_loader, feats, colors, names)

    base = cfg.DATASET_BASE
    list_path = os.path.join(base, 'all_feat.list')
    with open(list_path, "w") as fw:
        fw.write("\n".join(names))
    np.save(os.path.join(base, 'all_feat.npy'), np.vstack(feats))
    np.save(os.path.join(base, 'all_color_feat.npy'), np.vstack(colors))
    print("Dumped to all_feat.npy, all_color_feat.npy and all_feat.list.")
def get_inshop_test_db():
    """Extract feature databases for the in-shop test split.

    Returns a pair ``(query_feat_dict, gallery_db)`` where
    ``query_feat_dict`` maps each query label to its ``(deep, color)``
    feature pair and ``gallery_db`` is ``(deep_feats, color_feats, labels)``
    for the gallery images.
    """
    print("Extracting features of in-shop test images.")
    db_dict = {}  # dataset_type -> (deep_feats, color_feats, labels)
    for split in ("test_gallery", "test_query"):
        dataset = Fashion_inshop(type=split, transform=data_transform_test)
        loader = torch.utils.data.DataLoader(
            dataset,
            batch_size=EXTRACT_BATCH_SIZE,
            num_workers=NUM_WORKERS,
            pin_memory=True,
        )
        classes, deep, color, lab = [], [], [], []
        dump_dataset(loader, classes, deep, color, lab)
        db_dict[split] = (deep, color, lab)

    def _tail(entry, n):
        # Keep only the trailing n rows of each parallel list.
        d, c, l = entry
        return d[-n:], c[-n:], l[-n:]

    # NOTE: `dataset` is the last one built in the loop ("test_query"),
    # mirroring the original code's use of it for both length attributes.
    q_deep, q_color, q_labels = _tail(db_dict['test_query'],
                                      dataset.test_query_len)
    query_feat_dict = {
        q_labels[i]: (q_deep[i], q_color[i]) for i in range(len(q_labels))
    }
    gallery_db = _tail(db_dict['test_gallery'], dataset.test_gallery_len)
    return query_feat_dict, gallery_db
def eval(retrieval_top_n=2):
    """Evaluate top-n retrieval accuracy on the in-shop test split.

    For each test item one random view serves as the query; a "hit" is any
    retrieved key that belongs to the same item's image set (the query image
    itself always matches, hence the ``- 1`` adjustments below).

    Returns:
        (include_times, should_include_times, include_once, include_zero)
    """
    dataset = Fashion_inshop()
    tail = dataset.test_len
    deep_feats, color_feats, labels = load_feat_db()
    # Only the trailing test-split rows of the feature DB are relevant.
    deep_feats = deep_feats[-tail:]
    color_feats = color_feats[-tail:]
    labels = labels[-tail:]
    feat_dict = {
        lab: (deep_feats[i], color_feats[i]) for i, lab in enumerate(labels)
    }

    include_once = include_zero = include_times = should_include_times = 0
    for iter_id, item_id in enumerate(dataset.test_list):
        item_imgs = dataset.test_dict[item_id]
        query_img = random.choice(item_imgs)
        result = get_deep_color_top_n(
            feat_dict[query_img], deep_feats, color_feats, labels,
            retrieval_top_n)
        # Count how many retrieved keys belong to this item's image set.
        hits = sum(1 for entry in result if entry[0] in item_imgs)
        should_include_times += len(item_imgs) - 1
        if hits >= 2:
            include_once += 1
        else:
            include_zero += 1
        include_times += hits - 1
        if iter_id % 10 == 0:
            print("{}/{}, is included: {}/{}, included times: {}/{}".format(
                iter_id, len(dataset.test_list), include_once,
                include_once + include_zero, include_times,
                should_include_times))
    return include_times, should_include_times, include_once, include_zero
# --- Training/evaluation setup: data loaders, model, optimizer, resume ---

# Loader over the held-out test split for periodic evaluation.
test_loader = torch.utils.data.DataLoader(
    Fashion_attr_prediction(type="test", transform=data_transform_test),
    batch_size=TEST_BATCH_SIZE,
    num_workers=NUM_WORKERS,
    pin_memory=True,
)
# Loader yielding triplets for metric-learning training.
triplet_loader = torch.utils.data.DataLoader(
    Fashion_attr_prediction(type="triplet", transform=data_transform_train),
    batch_size=TRIPLET_BATCH_SIZE,
    num_workers=NUM_WORKERS,
    pin_memory=True,
)
if ENABLE_INSHOP_DATASET:
    # Additional triplet source from the in-shop training split.
    triplet_in_shop_loader = torch.utils.data.DataLoader(
        Fashion_inshop(type="train", transform=data_transform_train),
        batch_size=TRIPLET_BATCH_SIZE,
        num_workers=NUM_WORKERS,
        pin_memory=True,
    )

model = f_model(freeze_param=FREEZE_PARAM, model_path=DUMPED_MODEL).cuda(GPU_ID)
# Optimize only parameters left unfrozen by FREEZE_PARAM.
optimizer = optim.SGD(
    filter(lambda p: p.requires_grad, model.parameters()),
    lr=LR,
    momentum=MOMENTUM,
)

if DUMPED_MODEL:
    # Checkpoint filenames begin with the epoch number (e.g. "3_model.pth");
    # resume from the following epoch. os.path.basename handles both path
    # separators, unlike the previous split('/')-only parsing.
    start_epoch = int(os.path.basename(DUMPED_MODEL).split('_')[0]) + 1
else:
    start_epoch = 1
def dump(custom=False):
    """Extract deep/color features for every image and persist them to disk.

    Args:
        custom: when False, dump features of the standard dataset (plus the
            in-shop dataset if ``ENABLE_INSHOP_DATASET``); when True, dump
            features of the custom cropped dataset restricted to the classes
            that actually contain items.

    Side effects: writes ``.npy`` feature matrices and a ``.list`` label file
    under ``DATASET_BASE`` (plus ``custom_all_class.npy`` in custom mode).
    """
    print(f'dump function called with custom: {custom}')
    if custom:
        _dump_custom()
    else:
        _dump_standard()


def _dump_standard():
    # Dump features of the standard dataset (and in-shop set when enabled).
    loader = torch.utils.data.DataLoader(
        Fashion_attr_prediction(type="all", transform=data_transform_test),
        batch_size=EXTRACT_BATCH_SIZE,
        num_workers=NUM_WORKERS,
        pin_memory=True,
    )
    classes, deep_feats, color_feats, labels = [], [], [], []
    dump_dataset(loader, classes, deep_feats, color_feats, labels)

    if ENABLE_INSHOP_DATASET:
        inshop_loader = torch.utils.data.DataLoader(
            Fashion_inshop(type="all", transform=data_transform_test),
            batch_size=EXTRACT_BATCH_SIZE,
            num_workers=NUM_WORKERS,
            pin_memory=True,
        )
        dump_dataset(inshop_loader, classes, deep_feats, color_feats, labels)

    _save_features(labels, deep_feats, color_feats,
                   feat_name='all_feat.npy',
                   color_name='all_color_feat.npy',
                   list_name='all_feat.list')
    print("Dumped to all_feat.npy, all_color_feat.npy and all_feat.list.")


def _dump_custom():
    # Dump features of the custom cropped dataset.
    loader = torch.utils.data.DataLoader(
        Fashion_attr_prediction(
            type="all", transform=data_transform_test, custom=True, crop=True),
        batch_size=EXTRACT_BATCH_SIZE,
        num_workers=NUM_WORKERS,
        pin_memory=True,
    )
    classes, deep_feats, color_feats, labels = [], [], [], []
    # Classes with items in the dataset.
    allowed_inds = np.array(
        [4, 9, 10, 15, 16, 17, 18, 21, 25, 28, 31, 33, 37])
    dump_dataset(loader, classes, deep_feats, color_feats, labels,
                 allowed_inds)

    np.save(os.path.join(DATASET_BASE, 'custom_all_class.npy'),
            np.vstack(classes))
    _save_features(labels, deep_feats, color_feats,
                   feat_name='custom_all_feat.npy',
                   color_name='custom_all_color_feat.npy',
                   list_name='custom_all_feat.list')
    print(
        "Dumped to custom_all_class.npy, custom_all_feat.npy, custom_all_color_feat.npy and custom_all_feat.list."
    )


def _save_features(labels, deep_feats, color_feats, *, feat_name, color_name,
                   list_name):
    # Write the label list and the stacked feature matrices under DATASET_BASE.
    with open(os.path.join(DATASET_BASE, list_name), "w") as fw:
        fw.write("\n".join(labels))
    np.save(os.path.join(DATASET_BASE, feat_name), np.vstack(deep_feats))
    np.save(os.path.join(DATASET_BASE, color_name), np.vstack(color_feats))