# Log the hyper-parameter configuration for this VGE-NV-ImageOnly run.
# FIX: 'lr:{LR}' was missing the space that every other field in this
# log line has ('bs: ', 'margin: ', ...); normalized to 'lr: {LR}'.
print(
    f'VGE-NV-ImageOnly training: image limit: {IMAGE_LIMIT} bs: {BATCH_SIZE} lr gamma: {LR_GAMMA} embed-dim: {EMBED_DIM} shuffle: {SHUFFLE} margin: {MARGIN} lr: {LR}'
)

# Input preprocessing: PIL image -> float tensor, normalized with the
# standard ImageNet statistics (mean/std per RGB channel).
transform = transforms.Compose([
    # transforms.Resize((950,1000)),  # disabled: keep native resolution
    transforms.ToTensor(),
    transforms.Normalize(
        mean=[0.485, 0.456, 0.406],
        std=[0.229, 0.224, 0.225],
    ),
])

# Training split with view-objects, scene-graphs and graph-data enabled.
data_set = Semantic3dDataset(
    'data/pointcloud_images_o3d_merged', 'train',
    transform=transform,
    image_limit=IMAGE_LIMIT,
    load_viewObjects=True,
    load_sceneGraphs=True,
    return_graph_data=True,
)
# Option: shuffle; pin_memory crashes on my system.
# CARE: shuffle matters for PairWiseRankingLoss(!)
data_loader = DataLoader(
    data_set,
    batch_size=BATCH_SIZE,
    num_workers=2,
    pin_memory=False,
    shuffle=SHUFFLE,
)

# Training bookkeeping: per-configuration loss history, best loss seen so
# far (initialized to +inf so any real loss beats it), and the best weights.
loss_dict = {}
best_loss, best_model = np.inf, None

#for lr in (5e-4,1e-4,5e-5,1e-5):
# NOTE(review): the indented comments below are leftovers from a commented-out
# top-K retrieval evaluation routine (pos/ori error and scene-hit scoring);
# they belong to a function that is not present in this chunk. Kept verbatim
# for reference.
    #         #Append the average pos&ori. errors *for the cases that the scene was hit*
    #         pos_results[k].append( np.mean( topk_pos_dists[scene_correct==True]) if np.sum(scene_correct)>0 else None )
    #         ori_results[k].append( np.mean( topk_ori_dists[scene_correct==True]) if np.sum(scene_correct)>0 else None )
    #         scene_results[k].append( np.mean(scene_correct) ) #Always append the scene-scores
    
    # assert len(pos_results[k])==len(ori_results[k])==len(scene_results[k])==len(test_indices)

    # print('Saving retrieval results...')
    # pickle.dump(retrieval_dict, open('retrievals_netvlad_plus_sceneGraph.pkl','wb'))

    # return evaluate_topK(pos_results, ori_results, scene_results)    

# Entry point: build train/test datasets and run the scene-graph ->
# view-objects gathering step, selected via command-line flags.
if __name__ == "__main__":
    IMAGE_LIMIT = 3000
    dataset_train = Semantic3dDataset('data/pointcloud_images_o3d_merged', 'train',
                                      transform=None, image_limit=IMAGE_LIMIT,
                                      load_viewObjects=True, load_sceneGraphs=True)
    dataset_test = Semantic3dDataset('data/pointcloud_images_o3d_merged', 'test',
                                     transform=None, image_limit=IMAGE_LIMIT,
                                     load_viewObjects=True, load_sceneGraphs=True)

    # Gather SG->VO retrievals on the standard renderings.
    if 'gather' in sys.argv:
        gather_sceneGraph2viewObjects(dataset_train, dataset_test, ablation=None)
        # Ablation variants — enable as needed:
        # gather_sceneGraph2viewObjects(dataset_train, dataset_test, ablation='colors')
        # gather_sceneGraph2viewObjects(dataset_train, dataset_test, ablation='relationships')
        # gather_sceneGraph2sceneGraph(dataset_train, dataset_test)

    # Same gathering step on the occlusion-augmented renderings.
    if 'gather-occ' in sys.argv:
        IMAGE_LIMIT = 3000  # re-set to the same value (kept from the original)
        dataset_train = Semantic3dDataset('data/pointcloud_images_o3d_merged_occ', 'train',
                                          transform=None, image_limit=IMAGE_LIMIT,
                                          load_viewObjects=True, load_sceneGraphs=True)
        dataset_test = Semantic3dDataset('data/pointcloud_images_o3d_merged_occ', 'test',
                                         transform=None, image_limit=IMAGE_LIMIT,
                                         load_viewObjects=True, load_sceneGraphs=True)
        gather_sceneGraph2viewObjects(dataset_train, dataset_test, ablation=None)

# NOTE(review): the lines below are an orphaned fragment — an "Example #3"
# separator plus indented visualization code with no enclosing scope, most
# likely a concatenation/paste artifact. As written they are a SyntaxError
# that prevents the whole file from importing, so they are commented out
# here; the original text is preserved verbatim for reference.
# Example #3
# 0
#         cv2.imwrite(f'cases_hit_{i}.png', img_hit)
#         cv2.imwrite(f'cases_miss_{i}.png', img_miss)
#         cv2.waitKey()


#Results sanity-checked (NetVLAD) ✓
#CARE: retrievals in main or retrieval dir, re-create if in doubt!
# Entry point: load previously pickled NetVLAD and pure-SceneGraph retrieval
# results and report per-scene top-3 hit/miss statistics for both.
if __name__ == "__main__":
    IMAGE_LIMIT = 3000  # cap on images loaded per split
    BATCH_SIZE = 6
    NUM_CLUSTERS = 8    # NOTE(review): unused below — presumably a NetVLAD setting; confirm
    TEST_SPLIT = 4      # NOTE(review): also unused in this block
    ALPHA = 10.0

    # Datasets are built without transforms: only metadata/poses are needed
    # for the hit/miss comparison, not image tensors.
    dataset_train = Semantic3dDataset('data/pointcloud_images_o3d_merged',
                                      'train',
                                      transform=None,
                                      image_limit=IMAGE_LIMIT)
    dataset_test = Semantic3dDataset('data/pointcloud_images_o3d_merged',
                                     'test',
                                     transform=None,
                                     image_limit=IMAGE_LIMIT)

    # Pre-computed retrieval results (see CARE note above about their location).
    retrievals_netvlad = pickle.load(open('retrievals_NV-S3D.pkl', 'rb'))
    retrievals_sg_scoring = pickle.load(open('retrievals_PureSG.pkl', 'rb'))
    # NOTE(review): debug early-exit — everything below this line is dead
    # code while quit() remains; remove it to actually run the comparison.
    quit()
    results_hit, results_miss = get_top3_all_scene_hit_miss(
        retrievals_sg_scoring, retrievals_netvlad, dataset_train, dataset_test)

    print(len(results_hit), len(results_miss))
    for key in results_hit.keys():
        print('hit', results_hit[key], 'miss', results_miss[key])